Catch sqlite OperationalError for websocket messages
- Hopefully fixes #580
This commit is contained in:
parent
25f7c21018
commit
1ab9ff7790
1 changed file with 21 additions and 5 deletions
|
@ -127,11 +127,27 @@ def process_new_item_message(message):
|
||||||
LOG.error('Could not download metadata for %s', message['plex_id'])
|
LOG.error('Could not download metadata for %s', message['plex_id'])
|
||||||
return False, False, False
|
return False, False, False
|
||||||
LOG.debug("Processing new/updated PMS item: %s", message['plex_id'])
|
LOG.debug("Processing new/updated PMS item: %s", message['plex_id'])
|
||||||
with itemtypes.ITEMTYPE_FROM_PLEXTYPE[plex_type](timing.unix_timestamp()) as typus:
|
attempts = 3
|
||||||
typus.add_update(xml[0],
|
while True:
|
||||||
section_name=xml.get('librarySectionTitle'),
|
try:
|
||||||
section_id=xml.get('librarySectionID'))
|
with itemtypes.ITEMTYPE_FROM_PLEXTYPE[plex_type](timing.unix_timestamp()) as typus:
|
||||||
cache_artwork(message['plex_id'], plex_type)
|
typus.add_update(xml[0],
|
||||||
|
section_name=xml.get('librarySectionTitle'),
|
||||||
|
section_id=xml.get('librarySectionID'))
|
||||||
|
cache_artwork(message['plex_id'], plex_type)
|
||||||
|
except utils.OperationalError:
|
||||||
|
# Since parallel caching of artwork might invalidate the current
|
||||||
|
# WAL snapshot of the db, sqlite immediately throws
|
||||||
|
# OperationalError, NOT after waiting for the duration of the timeout
|
||||||
|
# See https://github.com/mattn/go-sqlite3/issues/274#issuecomment-211759641
|
||||||
|
LOG.debug('sqlite OperationalError encountered, trying again')
|
||||||
|
attempts -= 1
|
||||||
|
if attempts == 0:
|
||||||
|
LOG.error('Repeatedly could not process message %s', message)
|
||||||
|
return False, False, False
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
break
|
||||||
return True, plex_type in v.PLEX_VIDEOTYPES, plex_type in v.PLEX_AUDIOTYPES
|
return True, plex_type in v.PLEX_VIDEOTYPES, plex_type in v.PLEX_AUDIOTYPES
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Reference in a new issue