# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
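# python-future's install_aliases() makes Python 3 module names (e.g. queue,
# imported below) available under Python 2 as well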
from future import standard_library
standard_library.install_aliases()
from logging import getLogger
from queue import Full
from . import common, sections
from ..plex_db import PlexDB
from .. import backgroundthread
LOG = getLogger('PLEX.sync.fill_metadata_queue')
QUEUE_TIMEOUT = 60  # seconds


class FillMetadataQueue(common.LibrarySyncMixin,
                        backgroundthread.KillableThread):
    """
    Determines which plex_ids we need to sync and puts these ids into a
    separate queue. Uses a COPIED plex.db file (plex-copy.db) in order to
    read much faster, without stalling the writing thread.
    """
    def __init__(self, repair, section_queue, get_metadata_queue,
                 processing_queue):
        self.repair = repair
        self.section_queue = section_queue
        self.get_metadata_queue = get_metadata_queue
        self.processing_queue = processing_queue
        super(FillMetadataQueue, self).__init__()

    def _process_section(self, section):
        # Initialize count only once to avoid losing its last value when we
        # break out of the for loop
        LOG.debug('Process section %s with %s items',
                  section, section.number_of_items)
        count = 0
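        # Only register the section with processing_queue once we know that
        # at least one item actually needs to be synced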
        do_process_section = False
        with PlexDB(lock=False, copy=True) as plexdb:
            for xml in section.iterator:
                if self.should_cancel():
                    break
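                # Checksum: the plex_id concatenated with the digits of
                # updatedAt (or addedAt as fallback), so any change on the
                # PMS yields a new value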
                plex_id = int(xml.get('ratingKey'))
                checksum = int('{}{}'.format(
                    plex_id,
                    xml.get('updatedAt',
                            xml.get('addedAt', '1541572987')).replace('-', '')))
                if (not self.repair and
                        plexdb.checksum(plex_id, section.plex_type) == checksum):
                    continue
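                # Block for up to QUEUE_TIMEOUT seconds if the download
                # threads cannot keep up; a Full exception aborts the sync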
                try:
                    self.get_metadata_queue.put((count, plex_id, section),
                                                timeout=QUEUE_TIMEOUT)
                except Full:
                    LOG.error('Putting %s in get_metadata_queue timed out - '
                              'aborting sync now', plex_id)
                    section.sync_successful = False
                    break
                count += 1
                if not do_process_section:
                    do_process_section = True
                    self.processing_queue.add_section(section)
                    LOG.debug('Put section in queue with %s items: %s',
                              section.number_of_items, section)
        # We might have received LESS items from the PMS than anticipated;
        # make sure our queues finish by recording the actual count
        LOG.debug('%s items to process for section %s', count, section)
        section.number_of_items = count

    def _run(self):
        while not self.should_cancel():
            section = self.section_queue.get()
            self.section_queue.task_done()
            if section is None:
                break
            self._process_section(section)
        # Signal the download metadata threads to stop with a sentinel
        self.get_metadata_queue.put(None)
        # Sentinel for the process_thread once we've added everything else
        self.processing_queue.add_sentinel(sections.Section())