#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
import Queue

import xbmcgui

from .get_metadata import GetMetadataThread
from .fill_metadata_queue import FillMetadataQueue
from .process_metadata import ProcessMetadataThread
from . import common, sections
from .. import utils, timing, backgroundthread, variables as v, app
from .. import plex_functions as PF, itemtypes, path_ops

if common.PLAYLIST_SYNC_ENABLED:
    from .. import playlists


LOG = getLogger('PLEX.sync.full_sync')

# How many items will be put through the processing chain at once?
BATCH_SIZE = 250
# Size of queue for xmls to be downloaded from PMS for/and before processing
QUEUE_BUFFER = 50
# Max number of xmls held in memory
MAX_QUEUE_SIZE = 500
# Safety margin to filter PMS items - how many seconds to look into the past?
UPDATED_AT_SAFETY = 60 * 5
LAST_VIEWED_AT_SAFETY = 60 * 5


class FullSync(common.LibrarySyncMixin, backgroundthread.KillableThread):
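    """
    Performs a full library sync with the PMS: adds and updates items,
    optionally syncs playlists, syncs playstates/userdata and finally
    deletes Kodi items that are no longer on the PMS.
    """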
    def __init__(self, repair, callback, show_dialog):
        """
        repair=True: force sync EVERY item
        """
        self.repair = repair
        self.callback = callback
        # For progress dialog
        self.show_dialog = show_dialog
        self.show_dialog_userdata = utils.settings('playstate_sync_indicator') == 'true'
        if self.show_dialog:
            self.dialog = xbmcgui.DialogProgressBG()
            self.dialog.create(utils.lang(39714))
        else:
            self.dialog = None

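        # Queues that connect the stages of the sync pipeline; the maxsize
        # values limit how much is held in memory at any one time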
        self.section_queue = Queue.Queue()
        self.get_metadata_queue = Queue.Queue(maxsize=5000)
        self.processing_queue = backgroundthread.ProcessingQueue(maxsize=500)
        self.current_time = timing.plex_now()
        self.last_section = sections.Section()

        self.successful = True
        self.install_sync_done = utils.settings('SyncInstallRunDone') == 'true'
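        # Worker threads that download metadata from the PMS; their number is
        # governed by the addon setting 'syncThreadNumber'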
        self.threads = [
            GetMetadataThread(self.get_metadata_queue, self.processing_queue)
            for _ in range(int(utils.settings('syncThreadNumber')))
        ]
        for t in self.threads:
            t.start()
        super(FullSync, self).__init__()

    def update_progressbar(self, section, title, current):
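        """
        Advances the background progress dialog for the given section.
        Closes the dialog if a video is currently playing.
        """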
        if not self.dialog:
            return
        current += 1
        try:
            progress = int(float(current) / float(section.number_of_items) * 100.0)
        except ZeroDivisionError:
            progress = 0
        self.dialog.update(progress,
                           '%s (%s)' % (section.name, section.section_type_text),
                           '%s %s/%s'
                           % (title, current, section.number_of_items))
        if app.APP.is_playing_video:
            self.dialog.close()
            self.dialog = None

    @staticmethod
    def copy_plex_db():
        """
        Takes the current plex.db file and copies it to plex-copy.db
        This will allow us to have "concurrent" connections during adding/
        updating items, increasing sync speed tremendously.
        Using the same DB with e.g. WAL mode did not really work out...
        """
        path_ops.copyfile(v.DB_PLEX_PATH, v.DB_PLEX_COPY_PATH)

    @utils.log_time
    def processing_loop_new_and_changed_items(self):
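        """
        Syncs new and changed items: a FillMetadataQueue thread scans the
        sections, the GetMetadataThread workers download the metadata and a
        ProcessMetadataThread processes the downloaded results.
        """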
        LOG.debug('Start working')
        scanner_thread = FillMetadataQueue(self.repair,
                                           self.section_queue,
                                           self.get_metadata_queue)
        scanner_thread.start()
        process_thread = ProcessMetadataThread(self.current_time,
                                               self.processing_queue,
                                               self.update_progressbar)
        process_thread.start()
        LOG.debug('Waiting for scanner thread to finish up')
        scanner_thread.join()
        LOG.debug('Waiting for metadata download threads to finish up')
        for t in self.threads:
            t.join()
        LOG.debug('Download metadata threads finished')
        # Sentinel for the process_thread once we added everything else
        self.processing_queue.put_sentinel(sections.Section())
        process_thread.join()
        self.successful = process_thread.successful
        LOG.debug('threads finished work. successful: %s', self.successful)

    @utils.log_time
    def processing_loop_playstates(self):
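        """
        Syncs playstates/userdata for every section put into the
        section_queue. A None entry in the queue ends the loop.
        """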
        while not self.should_cancel():
            section = self.section_queue.get()
            self.section_queue.task_done()
            if section is None:
                break
            self.playstate_per_section(section)

    def playstate_per_section(self, section):
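        """
        Updates the userdata for all items of a single section and marks each
        item as synced by setting its last_sync timestamp.
        """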
        LOG.debug('Processing %s playstates for library section %s',
                  section.number_of_items, section)
        try:
            iterator = section.iterator
            iterator = common.tag_last(iterator)
            last = True
            while not self.should_cancel():
                with section.context(self.current_time) as itemtype:
                    for last, xml_item in iterator:
                        section.count += 1
                        if not itemtype.update_userdata(xml_item, section.plex_type):
                            # Somehow did not sync this item yet
                            itemtype.add_update(xml_item,
                                                section_name=section.name,
                                                section_id=section.section_id)
                        itemtype.plexdb.update_last_sync(int(xml_item.attrib['ratingKey']),
                                                         section.plex_type,
                                                         self.current_time)
                        self.update_progressbar(section, '', section.count)
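                        # Leave the with-block from time to time (we re-enter
                        # it through the enclosing while-loop) so it is not
                        # held open for an entire, potentially huge section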
                        if section.count % (10 * BATCH_SIZE) == 0:
                            break
                if last:
                    break
        except RuntimeError:
            LOG.error('Could not entirely process section %s', section)
            self.successful = False

    def get_generators(self, kinds, queue, all_items):
        """
        Getting iterators is costly, so let's do it asynchronously
        """
        LOG.debug('Start get_generators')
        try:
            for kind in kinds:
                for section in (x for x in app.SYNC.sections
                                if x.section_type == kind[1]):
                    if self.should_cancel():
                        LOG.debug('Need to exit now')
                        return
                    if not section.sync_to_kodi:
                        LOG.info('User chose to not sync section %s', section)
                        continue
                    section = sections.get_sync_section(section,
                                                        plex_type=kind[0])
                    if self.repair or all_items:
                        updated_at = None
                    else:
                        updated_at = section.last_sync - UPDATED_AT_SAFETY \
                            if section.last_sync else None
                    try:
                        section.iterator = PF.get_section_iterator(
                            section.section_id,
                            plex_type=section.plex_type,
                            updated_at=updated_at,
                            last_viewed_at=None)
                    except RuntimeError:
                        LOG.error('Sync at least partially unsuccessful!')
                        LOG.error('Error getting section iterator %s', section)
                    else:
                        section.number_of_items = section.iterator.total
                        if section.number_of_items > 0:
                            self.processing_queue.add_section(section)
                            queue.put(section)
                            LOG.debug('Put section in queue with %s items: %s',
                                      section.number_of_items, section)
        except Exception:
            utils.ERROR(notify=True)
        finally:
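            # Always signal the consumer that no more sections are coming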
            queue.put(None)
            LOG.debug('Exiting get_generators')

    def full_library_sync(self):
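        """
        Runs the full sync: add/update new and changed items, sync playlists
        (if enabled), sync playstates for all items and finally delete Kodi
        items that are no longer on the PMS.
        """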
        kinds = [
            (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_MOVIE),
            (v.PLEX_TYPE_SHOW, v.PLEX_TYPE_SHOW),
            (v.PLEX_TYPE_SEASON, v.PLEX_TYPE_SHOW),
            (v.PLEX_TYPE_EPISODE, v.PLEX_TYPE_SHOW)
        ]
        if app.SYNC.enable_music:
            kinds.extend([
                (v.PLEX_TYPE_ARTIST, v.PLEX_TYPE_ARTIST),
                (v.PLEX_TYPE_ALBUM, v.PLEX_TYPE_ARTIST),
            ])
        # ADD NEW ITEMS
        # We need to enforce syncing e.g. show before season before episode
        self.get_generators(kinds, self.section_queue, False)
        # Do the heavy lifting
        self.processing_loop_new_and_changed_items()
        common.update_kodi_library(video=True, music=True)
        if self.should_cancel() or not self.successful:
            return

        # Sync Plex playlists to Kodi and vice-versa
        if common.PLAYLIST_SYNC_ENABLED:
            if self.show_dialog:
                if self.dialog:
                    self.dialog.close()
                self.dialog = xbmcgui.DialogProgressBG()
                # "Synching playlists"
                self.dialog.create(utils.lang(39715))
            if not playlists.full_sync() or self.should_cancel():
                return

        # SYNC PLAYSTATE of ALL items (otherwise we won't pick up on items that
        # were set to unwatched). Also mark all items on the PMS to be able
        # to delete the ones still in Kodi
        LOG.debug('Start synching playstate and userdata for every item')
        if app.SYNC.enable_music:
            # In order to not delete all your songs again
            kinds.extend([
                (v.PLEX_TYPE_SONG, v.PLEX_TYPE_ARTIST),
            ])
        # Make sure we're not showing an item's title in the sync dialog
        if not self.show_dialog_userdata and self.dialog:
            # Close the progress indicator dialog
            self.dialog.close()
            self.dialog = None
        self.get_generators(kinds, self.section_queue, True)
        self.processing_loop_playstates()
        if self.should_cancel() or not self.successful:
            return

        # Delete items that are no longer on Plex
        LOG.debug('Looking for items to delete')
        kinds = [
            (v.PLEX_TYPE_MOVIE, itemtypes.Movie),
            (v.PLEX_TYPE_SHOW, itemtypes.Show),
            (v.PLEX_TYPE_SEASON, itemtypes.Season),
            (v.PLEX_TYPE_EPISODE, itemtypes.Episode)
        ]
        if app.SYNC.enable_music:
            kinds.extend([
                (v.PLEX_TYPE_ARTIST, itemtypes.Artist),
                (v.PLEX_TYPE_ALBUM, itemtypes.Album),
                (v.PLEX_TYPE_SONG, itemtypes.Song)
            ])
        for plex_type, context in kinds:
            # Delete all items of this plex_type that are not on Plex anymore
            while True:
                with context(self.current_time) as ctx:
                    plex_ids = list(
                        ctx.plexdb.plex_id_by_last_sync(plex_type,
                                                        self.current_time,
                                                        BATCH_SIZE))
                    for plex_id in plex_ids:
                        if self.should_cancel():
                            return
                        ctx.remove(plex_id, plex_type)
                if len(plex_ids) < BATCH_SIZE:
                    break
        LOG.debug('Done looking for items to delete')

    def run(self):
        app.APP.register_thread(self)
        LOG.info('Running library sync with repair=%s', self.repair)
        try:
            self.run_full_library_sync()
        except Exception:
            utils.ERROR(notify=True)
            self.successful = False
        finally:
            app.APP.deregister_thread(self)
            LOG.info('Library sync done. successful: %s', self.successful)

    @utils.log_time
    def run_full_library_sync(self):
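        """
        Entry point for the actual sync: refreshes the Plex library sections,
        copies plex.db and then runs full_library_sync(). Always triggers a
        Kodi library update, closes the dialog and calls the callback at the
        end.
        """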
        try:
            # Get latest Plex libraries and build playlist and video node files
            if self.should_cancel() or not sections.sync_from_pms(self):
                return
            self.copy_plex_db()
            self.full_library_sync()
        finally:
            common.update_kodi_library(video=True, music=True)
            if self.dialog:
                self.dialog.close()
            if not self.successful and not self.should_cancel():
                # "ERROR in library sync"
                utils.dialog('notification',
                             heading='{plex}',
                             message=utils.lang(39410),
                             icon='{error}')
            self.callback(self.successful)


def start(show_dialog, repair=False, callback=None):
    # Call run() and NOT start in order to not spawn another thread
    FullSync(repair, callback, show_dialog).run()