From 3000bfcd7d25688ce07ff1a1a015fdeb867f7849 Mon Sep 17 00:00:00 2001 From: croneter Date: Tue, 10 Dec 2019 17:26:00 +0100 Subject: [PATCH 01/12] Always use sqlite WAL mode (did not switch back to normal journal mode automatically anyway) --- resources/lib/db.py | 17 ++++++----------- resources/lib/kodi_db/__init__.py | 6 +++--- resources/lib/kodi_db/common.py | 9 +++------ 3 files changed, 12 insertions(+), 20 deletions(-) diff --git a/resources/lib/db.py b/resources/lib/db.py index 1ea7e3fe..2c701040 100644 --- a/resources/lib/db.py +++ b/resources/lib/db.py @@ -52,26 +52,21 @@ def catch_operationalerrors(method): return wrapper -def _initial_db_connection_setup(conn, wal_mode): +def _initial_db_connection_setup(conn): """ Set-up DB e.g. for WAL journal mode, if that hasn't already been done before. Also start a transaction """ - if wal_mode: - pass - # conn.execute('PRAGMA journal_mode=WAL;') - # conn.execute('PRAGMA cache_size = -8000;') - # conn.execute('PRAGMA synchronous=NORMAL;') + conn.execute('PRAGMA journal_mode = WAL;') + conn.execute('PRAGMA cache_size = -8000;') + conn.execute('PRAGMA synchronous = NORMAL;') conn.execute('BEGIN') -def connect(media_type=None, wal_mode=True): +def connect(media_type=None): """ Open a connection to the Kodi database. media_type: 'video' (standard if not passed), 'plex', 'music', 'texture' - Pass wal_mode=False if you want the standard (and slower) sqlite - journal_mode, e.g. when wiping entire tables. 
Useful if you do NOT want - concurrent access to DB for both PKC and Kodi """ if media_type == "plex": db_path = v.DB_PLEX_PATH @@ -87,7 +82,7 @@ def connect(media_type=None, wal_mode=True): attempts = DB_WRITE_ATTEMPTS while True: try: - _initial_db_connection_setup(conn, wal_mode) + _initial_db_connection_setup(conn) except sqlite3.OperationalError as err: if 'database is locked' not in err: # Not an error we want to catch, so reraise it diff --git a/resources/lib/kodi_db/__init__.py b/resources/lib/kodi_db/__init__.py index 200d83ed..a7946736 100644 --- a/resources/lib/kodi_db/__init__.py +++ b/resources/lib/kodi_db/__init__.py @@ -62,7 +62,7 @@ def setup_kodi_default_entries(): def reset_cached_images(): LOG.info('Resetting cached artwork') LOG.debug('Resetting the Kodi texture DB') - with KodiTextureDB(wal_mode=False) as kodidb: + with KodiTextureDB() as kodidb: kodidb.wipe() LOG.debug('Deleting all cached image files') path = path_ops.translate_path('special://thumbnails/') @@ -91,11 +91,11 @@ def wipe_dbs(music=True): """ LOG.warn('Wiping Kodi databases!') LOG.info('Wiping Kodi video database') - with KodiVideoDB(wal_mode=False) as kodidb: + with KodiVideoDB() as kodidb: kodidb.wipe() if music: LOG.info('Wiping Kodi music database') - with KodiMusicDB(wal_mode=False) as kodidb: + with KodiMusicDB() as kodidb: kodidb.wipe() reset_cached_images() setup_kodi_default_entries() diff --git a/resources/lib/kodi_db/common.py b/resources/lib/kodi_db/common.py index 2a331290..e362ca6c 100644 --- a/resources/lib/kodi_db/common.py +++ b/resources/lib/kodi_db/common.py @@ -15,11 +15,9 @@ class KodiDBBase(object): Kodi database methods used for all types of items """ def __init__(self, texture_db=False, kodiconn=None, artconn=None, - lock=True, wal_mode=True): + lock=True): """ Allows direct use with a cursor instead of context mgr - Pass wal_mode=False if you want the standard sqlite journal_mode, e.g. 
- when wiping entire tables """ self._texture_db = texture_db self.lock = lock @@ -27,14 +25,13 @@ class KodiDBBase(object): self.cursor = self.kodiconn.cursor() if self.kodiconn else None self.artconn = artconn self.artcursor = self.artconn.cursor() if self.artconn else None - self.wal_mode = wal_mode def __enter__(self): if self.lock: KODIDB_LOCK.acquire() - self.kodiconn = db.connect(self.db_kind, self.wal_mode) + self.kodiconn = db.connect(self.db_kind) self.cursor = self.kodiconn.cursor() - self.artconn = db.connect('texture', self.wal_mode) if self._texture_db \ + self.artconn = db.connect('texture') if self._texture_db \ else None self.artcursor = self.artconn.cursor() if self._texture_db else None return self From 70b7a4451493aef47127b5286e63c24304faeb65 Mon Sep 17 00:00:00 2001 From: croneter Date: Thu, 12 Dec 2019 17:29:46 +0100 Subject: [PATCH 02/12] Avoid duplicate code --- resources/lib/library_sync/common.py | 15 +++++++++ .../lib/library_sync/fill_metadata_queue.py | 31 +++++++------------ resources/lib/library_sync/full_sync.py | 14 +-------- resources/lib/library_sync/get_metadata.py | 10 ------ .../lib/library_sync/process_metadata.py | 12 ------- 5 files changed, 27 insertions(+), 55 deletions(-) diff --git a/resources/lib/library_sync/common.py b/resources/lib/library_sync/common.py index d6aae66f..a0f43285 100644 --- a/resources/lib/library_sync/common.py +++ b/resources/lib/library_sync/common.py @@ -1,10 +1,13 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals +from logging import getLogger import xbmc from .. 
import utils, app, variables as v +LOG = getLogger('PLEX.sync') + PLAYLIST_SYNC_ENABLED = (v.DEVICE != 'Microsoft UWP' and utils.settings('enablePlaylistSync') == 'true') @@ -22,6 +25,18 @@ class LibrarySyncMixin(object): """ return self.should_cancel() + def run(self): + app.APP.register_thread(self) + LOG.debug('##===--- Starting %s ---===##', self.__class__.__name__) + try: + self._run() + except Exception as err: + LOG.error('Exception encountered: %s', err) + utils.ERROR(notify=True) + finally: + app.APP.deregister_thread(self) + LOG.debug('##===--- %s Stopped ---===##', self.__class__.__name__) + def update_kodi_library(video=True, music=True): """ diff --git a/resources/lib/library_sync/fill_metadata_queue.py b/resources/lib/library_sync/fill_metadata_queue.py index 7ed361b0..ceb05f06 100644 --- a/resources/lib/library_sync/fill_metadata_queue.py +++ b/resources/lib/library_sync/fill_metadata_queue.py @@ -4,13 +4,13 @@ from logging import getLogger from . import common from ..plex_db import PlexDB -from .. import backgroundthread, app +from .. import backgroundthread LOG = getLogger('PLEX.sync.fill_metadata_queue') class FillMetadataQueue(common.LibrarySyncMixin, - backgroundthread.KillableThread, ): + backgroundthread.KillableThread): """ Threaded download of Plex XML metadata for a certain library item. Fills the queue with the downloaded etree XML objects. Will use a COPIED @@ -47,21 +47,12 @@ class FillMetadataQueue(common.LibrarySyncMixin, # Ensures that our queues finish section.number_of_items = count - def run(self): - LOG.debug('Starting %s thread', self.__class__.__name__) - app.APP.register_thread(self) - try: - while not self.should_cancel(): - section = self.section_queue.get() - self.section_queue.task_done() - if section is None: - break - self._process_section(section) - except Exception: - from .. 
import utils - utils.ERROR(notify=True) - finally: - # Signal the download metadata threads to stop with a sentinel - self.get_metadata_queue.put(None) - app.APP.deregister_thread(self) - LOG.debug('##===---- %s Stopped ----===##', self.__class__.__name__) + def _run(self): + while not self.should_cancel(): + section = self.section_queue.get() + self.section_queue.task_done() + if section is None: + break + self._process_section(section) + # Signal the download metadata threads to stop with a sentinel + self.get_metadata_queue.put(None) diff --git a/resources/lib/library_sync/full_sync.py b/resources/lib/library_sync/full_sync.py index 0312ec21..692f620c 100644 --- a/resources/lib/library_sync/full_sync.py +++ b/resources/lib/library_sync/full_sync.py @@ -272,20 +272,8 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): break LOG.debug('Done looking for items to delete') - def run(self): - app.APP.register_thread(self) - LOG.info('Running library sync with repair=%s', self.repair) - try: - self.run_full_library_sync() - except Exception: - utils.ERROR(notify=True) - self.successful = False - finally: - app.APP.deregister_thread(self) - LOG.info('Library sync done. successful: %s', self.successful) - @utils.log_time - def run_full_library_sync(self): + def _run(self): try: # Get latest Plex libraries and build playlist and video node files if self.should_cancel() or not sections.sync_from_pms(self): diff --git a/resources/lib/library_sync/get_metadata.py b/resources/lib/library_sync/get_metadata.py index 972e222c..e21d2c27 100644 --- a/resources/lib/library_sync/get_metadata.py +++ b/resources/lib/library_sync/get_metadata.py @@ -5,7 +5,6 @@ from logging import getLogger from . import common from ..plex_api import API from .. import backgroundthread, plex_functions as PF, utils, variables as v -from .. 
import app LOG = getLogger('PLEX.sync.get_metadata') LOCK = backgroundthread.threading.Lock() @@ -69,15 +68,6 @@ class GetMetadataThread(common.LibrarySyncMixin, # Add a "dummy" item so we're not skipping a beat self.processing_queue.put((count, {'section': section, 'xml': None})) - def run(self): - LOG.debug('Starting %s thread', self.__class__.__name__) - app.APP.register_thread(self) - try: - self._run() - finally: - app.APP.deregister_thread(self) - LOG.debug('##===---- %s Stopped ----===##', self.__class__.__name__) - def _run(self): while True: item = self.get_metadata_queue.get() diff --git a/resources/lib/library_sync/process_metadata.py b/resources/lib/library_sync/process_metadata.py index abc70fdd..cddcd20f 100644 --- a/resources/lib/library_sync/process_metadata.py +++ b/resources/lib/library_sync/process_metadata.py @@ -57,18 +57,6 @@ class ProcessMetadataThread(common.LibrarySyncMixin, self.processing_queue.task_done() return item - def run(self): - LOG.debug('Starting %s thread', self.__class__.__name__) - app.APP.register_thread(self) - try: - self._run() - except Exception: - from .. 
import utils - utils.ERROR(notify=True) - finally: - app.APP.deregister_thread(self) - LOG.debug('##===---- %s Stopped ----===##', self.__class__.__name__) - def _run(self): # There are 2 sentinels: None for aborting/ending this thread, the dict # {'section': section, 'xml': None} for skipped/invalid items From 6d39adbd8cdf5ebc443b5c332aaed43bffeceff7 Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 07:45:38 +0100 Subject: [PATCH 03/12] Use sqlite isolation_level=None in order to use autocommit mode and thus avoid sqlite auto-committing --- resources/lib/db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/resources/lib/db.py b/resources/lib/db.py index 2c701040..ca998941 100644 --- a/resources/lib/db.py +++ b/resources/lib/db.py @@ -78,7 +78,7 @@ def connect(media_type=None): db_path = v.DB_TEXTURE_PATH else: db_path = v.DB_VIDEO_PATH - conn = sqlite3.connect(db_path, timeout=30.0) + conn = sqlite3.connect(db_path, timeout=30.0, isolation_level=None) attempts = DB_WRITE_ATTEMPTS while True: try: From b4e132af85db5c1693e5abfdb44648b5b1129a3b Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 12:42:22 +0100 Subject: [PATCH 04/12] Optimize code --- resources/lib/kodi_db/video.py | 47 ++++++++++++---------------------- 1 file changed, 16 insertions(+), 31 deletions(-) diff --git a/resources/lib/kodi_db/video.py b/resources/lib/kodi_db/video.py index 415ebcb7..2466e5e3 100644 --- a/resources/lib/kodi_db/video.py +++ b/resources/lib/kodi_db/video.py @@ -38,37 +38,22 @@ class KodiVideoDB(common.KodiDBBase): For some reason, Kodi ignores this if done via itemtypes while e.g. adding or updating items. (addPath method does NOT work) """ - path_id = self.get_path(MOVIE_PATH) - if path_id is None: - query = ''' - INSERT INTO path(strPath, - strContent, - strScraper, - noUpdate, - exclude) - VALUES (?, ?, ?, ?, ?) 
- ''' - self.cursor.execute(query, (MOVIE_PATH, - 'movies', - 'metadata.local', - 1, - 0)) - # And TV shows - path_id = self.get_path(SHOW_PATH) - if path_id is None: - query = ''' - INSERT INTO path(strPath, - strContent, - strScraper, - noUpdate, - exclude) - VALUES (?, ?, ?, ?, ?) - ''' - self.cursor.execute(query, (SHOW_PATH, - 'tvshows', - 'metadata.local', - 1, - 0)) + for path, kind in ((MOVIE_PATH, 'movies'), (SHOW_PATH, 'tvshows')): + path_id = self.get_path(path) + if path_id is None: + query = ''' + INSERT INTO path(strPath, + strContent, + strScraper, + noUpdate, + exclude) + VALUES (?, ?, ?, ?, ?) + ''' + self.cursor.execute(query, (path, + kind, + 'metadata.local', + 1, + 0)) @db.catch_operationalerrors def parent_path_id(self, path): From a715b3a473007de42bb97f6db81b390537110348 Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 12:46:26 +0100 Subject: [PATCH 05/12] raise exception instead of returning None if PKC needs to exit and we're trying to connect to a DB --- resources/lib/db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/resources/lib/db.py b/resources/lib/db.py index ca998941..53db56c7 100644 --- a/resources/lib/db.py +++ b/resources/lib/db.py @@ -93,7 +93,7 @@ def connect(media_type=None): raise LockedDatabase('Database is locked') if app.APP.monitor.waitForAbort(0.05): # PKC needs to quit - return + raise LockedDatabase('Database was locked and we need to exit') else: break return conn From 0d537f108efaae325c4f8904b3f971376da13117 Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 12:47:56 +0100 Subject: [PATCH 06/12] Lower timeout for a DB connection from 30s to 10s --- resources/lib/db.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/resources/lib/db.py b/resources/lib/db.py index 53db56c7..28ffc7c7 100644 --- a/resources/lib/db.py +++ b/resources/lib/db.py @@ -6,6 +6,7 @@ from functools import wraps from . 
import variables as v, app DB_WRITE_ATTEMPTS = 100 +DB_CONNECTION_TIMEOUT = 10 class LockedDatabase(Exception): @@ -78,7 +79,9 @@ def connect(media_type=None): db_path = v.DB_TEXTURE_PATH else: db_path = v.DB_VIDEO_PATH - conn = sqlite3.connect(db_path, timeout=30.0, isolation_level=None) + conn = sqlite3.connect(db_path, + timeout=DB_CONNECTION_TIMEOUT, + isolation_level=None) attempts = DB_WRITE_ATTEMPTS while True: try: From 654748218e49eb1f37e908883f6c925f69b73be9 Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 13:00:35 +0100 Subject: [PATCH 07/12] Get section iterators in a dedicated thread to gain speed --- resources/lib/library_sync/full_sync.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/resources/lib/library_sync/full_sync.py b/resources/lib/library_sync/full_sync.py index 692f620c..4e2571d7 100644 --- a/resources/lib/library_sync/full_sync.py +++ b/resources/lib/library_sync/full_sync.py @@ -147,11 +147,11 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): LOG.error('Could not entirely process section %s', section) self.successful = False - def get_generators(self, kinds, queue, all_items): + def threaded_get_generators(self, kinds, queue, all_items): """ - Getting iterators is costly, so let's do it asynchronously + Getting iterators is costly, so let's do it in a dedicated thread """ - LOG.debug('Start get_generators') + LOG.debug('Start threaded_get_generators') try: for kind in kinds: for section in (x for x in app.SYNC.sections @@ -189,7 +189,7 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): utils.ERROR(notify=True) finally: queue.put(None) - LOG.debug('Exiting get_generators') + LOG.debug('Exiting threaded_get_generators') def full_library_sync(self): kinds = [ @@ -205,7 +205,10 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): ]) # ADD NEW ITEMS # We need to enforce syncing e.g. 
show before season before episode - self.get_generators(kinds, self.section_queue, False) + thread = backgroundthread.KillableThread( + target=self.threaded_get_generators, + args=(kinds, self.section_queue, False)) + thread.start() # Do the heavy lifting self.processing_loop_new_and_changed_items() common.update_kodi_library(video=True, music=True) @@ -237,7 +240,10 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): # Close the progress indicator dialog self.dialog.close() self.dialog = None - self.get_generators(kinds, self.section_queue, True) + thread = backgroundthread.KillableThread( + target=self.threaded_get_generators, + args=(kinds, self.section_queue, True)) + thread.start() self.processing_loop_playstates() if self.should_cancel() or not self.successful: return From 136af95351d26f028a25e58dc3bc4d3efb9206e3 Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 13:09:57 +0100 Subject: [PATCH 08/12] Speed up and simplify sync of playstates --- resources/lib/library_sync/full_sync.py | 35 +++++++++++-------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/resources/lib/library_sync/full_sync.py b/resources/lib/library_sync/full_sync.py index 4e2571d7..652b99e3 100644 --- a/resources/lib/library_sync/full_sync.py +++ b/resources/lib/library_sync/full_sync.py @@ -20,6 +20,7 @@ if common.PLAYLIST_SYNC_ENABLED: LOG = getLogger('PLEX.sync.full_sync') # How many items will be put through the processing chain at once? 
BATCH_SIZE = 250 +PLAYSTATE_BATCH_SIZE = 5000 # Size of queue for xmls to be downloaded from PMS for/and before processing QUEUE_BUFFER = 50 # Max number of xmls held in memory @@ -123,26 +124,20 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): LOG.debug('Processing %s playstates for library section %s', section.number_of_items, section) try: - iterator = section.iterator - iterator = common.tag_last(iterator) - last = True - while not self.should_cancel(): - with section.context(self.current_time) as itemtype: - for last, xml_item in iterator: - section.count += 1 - if not itemtype.update_userdata(xml_item, section.plex_type): - # Somehow did not sync this item yet - itemtype.add_update(xml_item, - section_name=section.name, - section_id=section.section_id) - itemtype.plexdb.update_last_sync(int(xml_item.attrib['ratingKey']), - section.plex_type, - self.current_time) - self.update_progressbar(section, '', section.count) - if section.count % (10 * BATCH_SIZE) == 0: - break - if last: - break + with section.context(self.current_time) as context: + for xml in section.iterator: + section.count += 1 + if not context.update_userdata(xml, section.plex_type): + # Somehow did not sync this item yet + context.add_update(xml, + section_name=section.name, + section_id=section.section_id) + context.plexdb.update_last_sync(int(xml.attrib['ratingKey']), + section.plex_type, + self.current_time) + self.update_progressbar(section, '', section.count) + if section.count % PLAYSTATE_BATCH_SIZE == 0: + context.commit() except RuntimeError: LOG.error('Could not entirely process section %s', section) self.successful = False From b55b22efb0e69b3dc1c0607260a63fae8428fd59 Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 13:12:16 +0100 Subject: [PATCH 09/12] Clarify variables --- resources/lib/library_sync/full_sync.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/resources/lib/library_sync/full_sync.py 
b/resources/lib/library_sync/full_sync.py index 652b99e3..82f53f34 100644 --- a/resources/lib/library_sync/full_sync.py +++ b/resources/lib/library_sync/full_sync.py @@ -18,13 +18,13 @@ if common.PLAYLIST_SYNC_ENABLED: LOG = getLogger('PLEX.sync.full_sync') -# How many items will be put through the processing chain at once? -BATCH_SIZE = 250 +DELETION_BATCH_SIZE = 250 PLAYSTATE_BATCH_SIZE = 5000 -# Size of queue for xmls to be downloaded from PMS for/and before processing -QUEUE_BUFFER = 50 + +# Max. number of plex_ids held in memory for later processing +BACKLOG_QUEUE_SIZE = 10000 # Max number of xmls held in memory -MAX_QUEUE_SIZE = 500 +XML_QUEUE_SIZE = 500 # Safety margin to filter PMS items - how many seconds to look into the past? UPDATED_AT_SAFETY = 60 * 5 LAST_VIEWED_AT_SAFETY = 60 * 5 @@ -47,8 +47,8 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): self.dialog = None self.section_queue = Queue.Queue() - self.get_metadata_queue = Queue.Queue(maxsize=5000) - self.processing_queue = backgroundthread.ProcessingQueue(maxsize=500) + self.get_metadata_queue = Queue.Queue(maxsize=BACKLOG_QUEUE_SIZE) + self.processing_queue = backgroundthread.ProcessingQueue(maxsize=XML_QUEUE_SIZE) self.current_time = timing.plex_now() self.last_section = sections.Section() @@ -264,12 +264,12 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): plex_ids = list( ctx.plexdb.plex_id_by_last_sync(plex_type, self.current_time, - BATCH_SIZE)) + DELETION_BATCH_SIZE)) for plex_id in plex_ids: if self.should_cancel(): return ctx.remove(plex_id, plex_type) - if len(plex_ids) < BATCH_SIZE: + if len(plex_ids) < DELETION_BATCH_SIZE: break LOG.debug('Done looking for items to delete') From 6510d5e399dfe9f3c503a373e6c4a78726b54f1f Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 13:19:15 +0100 Subject: [PATCH 10/12] Fix display of item numbers during playstate sync --- resources/lib/library_sync/full_sync.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/resources/lib/library_sync/full_sync.py b/resources/lib/library_sync/full_sync.py index 82f53f34..d885d8d2 100644 --- a/resources/lib/library_sync/full_sync.py +++ b/resources/lib/library_sync/full_sync.py @@ -135,7 +135,7 @@ class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread): context.plexdb.update_last_sync(int(xml.attrib['ratingKey']), section.plex_type, self.current_time) - self.update_progressbar(section, '', section.count) + self.update_progressbar(section, '', section.count - 1) if section.count % PLAYSTATE_BATCH_SIZE == 0: context.commit() except RuntimeError: From 58a86d34f1781df4baca3fa5c11b8d3c5b8d9058 Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 13:45:34 +0100 Subject: [PATCH 11/12] Clarify class description --- resources/lib/library_sync/fill_metadata_queue.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/resources/lib/library_sync/fill_metadata_queue.py b/resources/lib/library_sync/fill_metadata_queue.py index ceb05f06..64665611 100644 --- a/resources/lib/library_sync/fill_metadata_queue.py +++ b/resources/lib/library_sync/fill_metadata_queue.py @@ -12,10 +12,9 @@ LOG = getLogger('PLEX.sync.fill_metadata_queue') class FillMetadataQueue(common.LibrarySyncMixin, backgroundthread.KillableThread): """ - Threaded download of Plex XML metadata for a certain library item. - Fills the queue with the downloaded etree XML objects. Will use a COPIED - plex.db file (plex-copy.db) in order to read much faster without the - writing thread stalling + Determines which plex_ids we need to sync and puts these ids in a separate + queue. 
Will use a COPIED plex.db file (plex-copy.db) in order to read much + faster without the writing thread stalling """ def __init__(self, repair, section_queue, get_metadata_queue): self.repair = repair From b611a66ff51b5fef33d963a6c2c7e41a813c7e7f Mon Sep 17 00:00:00 2001 From: croneter Date: Fri, 13 Dec 2019 13:55:56 +0100 Subject: [PATCH 12/12] Fix sync getting stuck --- resources/lib/library_sync/fill_metadata_queue.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/resources/lib/library_sync/fill_metadata_queue.py b/resources/lib/library_sync/fill_metadata_queue.py index 64665611..ca6c2d4e 100644 --- a/resources/lib/library_sync/fill_metadata_queue.py +++ b/resources/lib/library_sync/fill_metadata_queue.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals from logging import getLogger +from Queue import Full from . import common from ..plex_db import PlexDB @@ -8,6 +9,8 @@ from .. import backgroundthread LOG = getLogger('PLEX.sync.fill_metadata_queue') +QUEUE_TIMEOUT = 10 # seconds + class FillMetadataQueue(common.LibrarySyncMixin, backgroundthread.KillableThread): @@ -40,7 +43,14 @@ class FillMetadataQueue(common.LibrarySyncMixin, if (not self.repair and plexdb.checksum(plex_id, section.plex_type) == checksum): continue - self.get_metadata_queue.put((count, plex_id, section)) + try: + self.get_metadata_queue.put((count, plex_id, section), + timeout=QUEUE_TIMEOUT) + except Full: + LOG.error('Putting %s in get_metadata_queue timed out - ' + 'aborting sync now', plex_id) + section.sync_successful = False + break count += 1 # We might have received LESS items from the PMS than anticipated. # Ensures that our queues finish