diff --git a/addon.xml b/addon.xml index 83542d49..2073e524 100644 --- a/addon.xml +++ b/addon.xml @@ -1,5 +1,5 @@ - + @@ -88,7 +88,20 @@ Plex를 Kodi에 기본 통합 Kodi를 Plex Media Server에 연결합니다. 이 플러그인은 Plex로 모든 비디오를 관리하고 Kodi로는 관리하지 않는다고 가정합니다. Kodi 비디오 및 음악 데이터베이스에 이미 저장된 데이터가 손실 될 수 있습니다 (이 플러그인이 직접 변경하므로). 자신의 책임하에 사용하십시오! 자신의 책임하에 사용 - version 2.13.0: + version 2.14.0: +- Fix PlexKodiConnect changing or removing subtitles for every video on the PMS +- version 2.13.1-2.13.2 for everyone + +version 2.13.2 (beta only): +- Fix a race condition that could lead to the sync getting stuck +- Fix RecursionError: maximum recursion depth exceeded +- Websocket Fix AttributeError: 'NoneType' object has no attribute 'is_ssl' + +version 2.13.1 (beta only): +- Fix a race condition that could lead to the sync process getting stuck +- Fix likelihood of `database is locked` error occurring + +version 2.13.0: - Support for the Plex HAMA agent to let Kodi identify animes (using Kodi's uniqueID 'anidb') - Support forced HAMA IDs when using tvdb uniqueID - version 2.12.26 for everyone diff --git a/changelog.txt b/changelog.txt index e14ad9e7..05a3a244 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,3 +1,16 @@ +version 2.14.0: +- Fix PlexKodiConnect changing or removing subtitles for every video on the PMS +- version 2.13.1-2.13.2 for everyone + +version 2.13.2 (beta only): +- Fix a race condition that could lead to the sync getting stuck +- Fix RecursionError: maximum recursion depth exceeded +- Websocket Fix AttributeError: 'NoneType' object has no attribute 'is_ssl' + +version 2.13.1 (beta only): +- Fix a race condition that could lead to the sync process getting stuck +- Fix likelihood of `database is locked` error occurring + version 2.13.0: - Support for the Plex HAMA agent to let Kodi identify animes (using Kodi's uniqueID 'anidb') - Support forced HAMA IDs when using tvdb uniqueID diff --git a/resources/lib/backgroundthread.py b/resources/lib/backgroundthread.py index 
5c095720..af360d9b 100644 --- a/resources/lib/backgroundthread.py +++ b/resources/lib/backgroundthread.py @@ -135,43 +135,6 @@ class ProcessingQueue(Queue.Queue, object): def _qsize(self): return self._current_queue._qsize() if self._current_queue else 0 - def _total_qsize(self): - """ - This method is BROKEN as it can lead to a deadlock when a single item - from the current section takes longer to download then any new items - coming in - """ - return sum(q._qsize() for q in self._queues) if self._queues else 0 - - def put(self, item, block=True, timeout=None): - """ - PKC customization of Queue.put. item needs to be the tuple - (count [int], {'section': [Section], 'xml': [etree xml]}) - """ - self.not_full.acquire() - try: - if self.maxsize > 0: - if not block: - if self._qsize() == self.maxsize: - raise Queue.Full - elif timeout is None: - while self._qsize() == self.maxsize: - self.not_full.wait() - elif timeout < 0: - raise ValueError("'timeout' must be a non-negative number") - else: - endtime = _time() + timeout - while self._qsize() == self.maxsize: - remaining = endtime - _time() - if remaining <= 0.0: - raise Queue.Full - self.not_full.wait(remaining) - self._put(item) - self.unfinished_tasks += 1 - self.not_empty.notify() - finally: - self.not_full.release() - def _put(self, item): for i, section in enumerate(self._sections): if item[1]['section'] == section: @@ -188,16 +151,13 @@ class ProcessingQueue(Queue.Queue, object): Once the get()-method returns None, you've received the sentinel and you've thus exhausted the queue """ - self.not_full.acquire() - try: + with self.not_full: section.number_of_items = 1 self._add_section(section) # Add the actual sentinel to the queue we just added self._queues[-1]._put((None, None)) self.unfinished_tasks += 1 self.not_empty.notify() - finally: - self.not_full.release() def add_section(self, section): """ @@ -207,11 +167,26 @@ class ProcessingQueue(Queue.Queue, object): Be sure to set section.number_of_items 
correctly as it will signal when processing is completely done for a specific section! """ - self.mutex.acquire() - try: + with self.mutex: self._add_section(section) - finally: - self.mutex.release() + + def change_section_number_of_items(self, section, number_of_items): + """ + Hit this method if you've reset section.number_of_items to make + sure we're not blocking + """ + with self.mutex: + self._change_section_number_of_items(section, number_of_items) + + def _change_section_number_of_items(self, section, number_of_items): + section.number_of_items = number_of_items + if (self._current_section == section + and self._counter == number_of_items): + # We were actually waiting for more items to come in - but there + # aren't any! + self._init_next_section() + if self._qsize() > 0: + self.not_empty.notify() def _add_section(self, section): self._sections.append(section) diff --git a/resources/lib/db.py b/resources/lib/db.py index 28ffc7c7..d6b1bfaf 100644 --- a/resources/lib/db.py +++ b/resources/lib/db.py @@ -6,6 +6,7 @@ from functools import wraps from . 
import variables as v, app DB_WRITE_ATTEMPTS = 100 +DB_WRITE_ATTEMPTS_TIMEOUT = 1 # in seconds DB_CONNECTION_TIMEOUT = 10 @@ -43,7 +44,7 @@ def catch_operationalerrors(method): self.kodiconn.commit() if self.artconn: self.artconn.commit() - if app.APP.monitor.waitForAbort(0.1): + if app.APP.monitor.waitForAbort(DB_WRITE_ATTEMPTS_TIMEOUT): # PKC needs to quit return # Start new transactions diff --git a/resources/lib/library_sync/fill_metadata_queue.py b/resources/lib/library_sync/fill_metadata_queue.py index 6e7f717d..b3bb5102 100644 --- a/resources/lib/library_sync/fill_metadata_queue.py +++ b/resources/lib/library_sync/fill_metadata_queue.py @@ -46,6 +46,10 @@ class FillMetadataQueue(common.LibrarySyncMixin, if (not self.repair and plexdb.checksum(plex_id, section.plex_type) == checksum): continue + if not do_process_section: + do_process_section = True + self.processing_queue.add_section(section) + LOG.debug('Put section in processing queue: %s', section) try: self.get_metadata_queue.put((count, plex_id, section), timeout=QUEUE_TIMEOUT) @@ -54,16 +58,14 @@ class FillMetadataQueue(common.LibrarySyncMixin, 'aborting sync now', plex_id) section.sync_successful = False break - count += 1 - if not do_process_section: - do_process_section = True - self.processing_queue.add_section(section) - LOG.debug('Put section in queue with %s items: %s', - section.number_of_items, section) + else: + count += 1 # We might have received LESS items from the PMS than anticipated. 
# Ensures that our queues finish - LOG.debug('%s items to process for section %s', count, section) - section.number_of_items = count + self.processing_queue.change_section_number_of_items(section, + count) + LOG.debug('%s items to process for section %s', + section.number_of_items, section) def _run(self): while not self.should_cancel(): diff --git a/resources/lib/library_sync/sections.py b/resources/lib/library_sync/sections.py index 481beae5..391aeb31 100644 --- a/resources/lib/library_sync/sections.py +++ b/resources/lib/library_sync/sections.py @@ -93,6 +93,7 @@ class Section(object): "'name': '{self.name}', " "'section_id': {self.section_id}, " "'section_type': '{self.section_type}', " + "'plex_type': '{self.plex_type}', " "'sync_to_kodi': {self.sync_to_kodi}, " "'last_sync': {self.last_sync}" "}}").format(self=self).encode('utf-8') @@ -108,6 +109,8 @@ class Section(object): Sections compare equal if their section_id, name and plex_type (first prio) OR section_type (if there is no plex_type is set) compare equal """ + if not isinstance(section, Section): + return False return (self.section_id == section.section_id and self.name == section.name and (self.plex_type == section.plex_type if self.plex_type else diff --git a/resources/lib/playback_decision.py b/resources/lib/playback_decision.py index 33d606f0..a45eae14 100644 --- a/resources/lib/playback_decision.py +++ b/resources/lib/playback_decision.py @@ -342,8 +342,7 @@ def audio_subtitle_prefs(api, item): if item.playmethod != v.PLAYBACK_METHOD_TRANSCODE: LOG.debug('Telling PMS we are not burning in any subtitles') args = { - 'subtitleStreamID': 0, - 'allParts': 1 + 'subtitleStreamID': 0 } DU().downloadUrl('{server}/library/parts/%s' % part_id, action_type='PUT', @@ -458,8 +457,7 @@ def setup_transcoding_audio_subtitle_prefs(mediastreams, part_id): select_subs_index = subtitle_streams_list[resp - 1] # Now prep the PMS for our choice args = { - 'subtitleStreamID': select_subs_index, - 'allParts': 1 + 
'subtitleStreamID': select_subs_index } DU().downloadUrl('{server}/library/parts/%s' % part_id, action_type='PUT', diff --git a/resources/lib/timing.py b/resources/lib/timing.py index 5f0f1909..885502fa 100644 --- a/resources/lib/timing.py +++ b/resources/lib/timing.py @@ -34,10 +34,10 @@ def unix_date_to_kodi(unix_kodi_time): """ try: return strftime('%Y-%m-%d %H:%M:%S', localtime(float(unix_kodi_time))) - except Exception: - LOG.exception('Received an illegal timestamp from Plex: %s. ' - 'Using 1970-01-01 12:00:00', - unix_kodi_time) + except: + LOG.error('Received an illegal timestamp from Plex: %s. ' + 'Using 1970-01-01 12:00:00', + unix_kodi_time) return '1970-01-01 12:00:00' diff --git a/resources/lib/websocket/_core.py b/resources/lib/websocket/_core.py index 1ff80f05..7934cc15 100644 --- a/resources/lib/websocket/_core.py +++ b/resources/lib/websocket/_core.py @@ -197,7 +197,10 @@ class WebSocket(object): return None def is_ssl(self): - return isinstance(self.sock, ssl.SSLSocket) + try: + return isinstance(self.sock, ssl.SSLSocket) + except: + return False headers = property(getheaders)