# -*- coding: utf-8 -*-
###############################################################################
from logging import getLogger
from threading import Thread
import Queue
from random import shuffle
import copy

import xbmc
from xbmcvfs import exists

import utils
from utils import window, settings, dialog, language as lang, try_decode, \
    try_encode
from downloadutils import DownloadUtils as DU
import itemtypes
import plexdb_functions as plexdb
import kodidb_functions as kodidb
import artwork
import videonodes
import variables as v

import PlexFunctions as PF
import PlexAPI
from library_sync.get_metadata import ThreadedGetMetadata
from library_sync.process_metadata import ThreadedProcessMetadata
import library_sync.sync_info as sync_info
from library_sync.fanart import ThreadedProcessFanart
import music
import playlists
import state

###############################################################################

LOG = getLogger("PLEX." + __name__)

###############################################################################


@utils.thread_methods(add_suspends=['SUSPEND_LIBRARY_THREAD', 'STOP_SYNC'])
class LibrarySync(Thread):
    """
    The one and only library sync thread. Spawn only 1!
    """

    def __init__(self):
        self.items_to_process = []
        self.views = []
        self.session_keys = {}
        self.fanartqueue = Queue.Queue()
        self.fanartthread = ThreadedProcessFanart(self.fanartqueue)
        # How long should we wait at least to process new/changed PMS items?
        self.vnodes = videonodes.VideoNodes()
        self.install_sync_done = settings('SyncInstallRunDone') == 'true'
        # Show sync dialog even if user deactivated?
        self.force_dialog = True
        # Need to be set accordingly later
        self.compare = None
        self.new_items_only = None
        self.update_kodi_video_library = None
        self.update_kodi_music_library = None
        self.nodes = {}
        self.playlists = {}
        self.sorted_views = []
        self.old_views = []
        self.updatelist = []
        self.all_plex_ids = {}
        self.all_kodi_ids = {}
        Thread.__init__(self)

    def suspend_item_sync(self):
        """
        Returns True if we should not sync new items or artwork to Kodi or
        even abort a sync currently running.

        Returns False otherwise.
        """
        if self.suspended() or self.stopped():
            return True
        elif state.SUSPEND_SYNC:
            return True
        return False

    def show_kodi_note(self, message, icon="plex"):
        """
        Shows a Kodi popup, if user selected to do so. Pass message in unicode
        or string

        icon:   "plex": shows Plex icon
                "error": shows Kodi error icon
        """
        if state.SYNC_DIALOG is not True and self.force_dialog is not True:
            return
        if icon == "plex":
            dialog('notification',
                   heading='{plex}',
                   message=message,
                   icon='{plex}',
                   sound=False)
        elif icon == "error":
            dialog('notification',
                   heading='{plex}',
                   message=message,
                   icon='{error}')

    @staticmethod
    def sync_pms_time():
        """
        PMS does not provide a means to get a server timestamp. This is a
        work-around.

        In general, everything saved to Kodi shall be in Kodi time.

        Any info with a PMS timestamp is in Plex time, naturally
        """
        LOG.info('Syncing time with PMS server')
        # Find a PMS item where we can toggle the view state to enforce a
        # change in lastViewedAt

        # Get all Plex libraries
        sections = PF.get_plex_sections()
        try:
            sections.attrib
        except AttributeError:
            LOG.error("Error downloading PMS views, aborting sync_pms_time")
            return False

        plex_id = None
        for mediatype in (v.PLEX_TYPE_MOVIE,
                          v.PLEX_TYPE_SHOW,
                          v.PLEX_TYPE_ARTIST):
            if plex_id is not None:
                break
            for view in sections:
                if plex_id is not None:
                    break
                if not view.attrib['type'] == mediatype:
                    continue
                library_id = view.attrib['key']
                items = PF.GetAllPlexLeaves(library_id)
                if items in (None, 401):
                    LOG.error("Could not download section %s",
                              view.attrib['key'])
                    continue
                for item in items:
                    if item.attrib.get('viewCount') is not None:
                        # Don't want to mess with items that have playcount>0
                        continue
                    if item.attrib.get('viewOffset') is not None:
                        # Don't mess with items with a resume point
                        continue
                    plex_id = item.attrib.get('ratingKey')
                    LOG.info('Found an item to sync with: %s', plex_id)
                    break

        if plex_id is None:
            LOG.error("Could not find an item to sync time with")
            LOG.error("Aborting PMS-Kodi time sync")
            return False

        # Get the Plex item's metadata
        xml = PF.GetPlexMetadata(plex_id)
        if xml in (None, 401):
            LOG.error("Could not download metadata, aborting time sync")
            return False

        timestamp = xml[0].attrib.get('lastViewedAt')
        if timestamp is None:
            timestamp = xml[0].attrib.get('updatedAt')
            LOG.debug('Using items updatedAt=%s', timestamp)
        if timestamp is None:
            timestamp = xml[0].attrib.get('addedAt')
            LOG.debug('Using items addedAt=%s', timestamp)
        if timestamp is None:
            timestamp = 0
            LOG.debug('No timestamp; using 0')

        # Set the timer
        koditime = utils.unix_timestamp()
        # Toggle watched state
        PF.scrobble(plex_id, 'watched')
        # Let the PMS process this first!
        xbmc.sleep(1000)
        # Get PMS items to find the item we just changed
        items = PF.GetAllPlexLeaves(library_id, lastViewedAt=timestamp)
        # Toggle watched state back
        PF.scrobble(plex_id, 'unwatched')
        if items in (None, 401):
            LOG.error("Could not download metadata, aborting time sync")
            return False

        plextime = None
        for item in items:
            if item.attrib['ratingKey'] == plex_id:
                plextime = item.attrib.get('lastViewedAt')
                break

        if plextime is None:
            LOG.error('Could not get lastViewedAt - aborting')
            return False

        # Calculate time offset Kodi-PMS
        state.KODI_PLEX_TIME_OFFSET = float(koditime) - float(plextime)
        settings('kodiplextimeoffset', value=str(state.KODI_PLEX_TIME_OFFSET))
        LOG.info("Time offset Koditime - Plextime in seconds: %s",
                 str(state.KODI_PLEX_TIME_OFFSET))
        return True
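
    # Note on the offset computed above (derived from this method, how other
    # modules consume it is an assumption): since KODI_PLEX_TIME_OFFSET is
    # defined as koditime - plextime, a Plex timestamp can be converted with
    #     kodi_time = plex_time + state.KODI_PLEX_TIME_OFFSET
    # and the reverse conversion subtracts the offset.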

    @staticmethod
    def initialize_plex_db():
        """
        Run once during startup to verify that plex db exists.
        """
        with plexdb.Get_Plex_DB() as plex_db:
            # Create the tables for the plex database
            plex_db.plexcursor.execute('''
                CREATE TABLE IF NOT EXISTS plex(
                    plex_id TEXT UNIQUE,
                    view_id TEXT,
                    plex_type TEXT,
                    kodi_type TEXT,
                    kodi_id INTEGER,
                    kodi_fileid INTEGER,
                    kodi_pathid INTEGER,
                    parent_id INTEGER,
                    checksum INTEGER,
                    fanart_synced INTEGER)
            ''')
            plex_db.plexcursor.execute('''
                CREATE TABLE IF NOT EXISTS view(
                    view_id TEXT UNIQUE,
                    view_name TEXT,
                    kodi_type TEXT,
                    kodi_tagid INTEGER,
                    sync_to_kodi INTEGER)
            ''')
            plex_db.plexcursor.execute('''
                CREATE TABLE IF NOT EXISTS version(idVersion TEXT)
            ''')
            plex_db.plexcursor.execute('''
                CREATE TABLE IF NOT EXISTS playlists(
                    plex_id TEXT UNIQUE,
                    plex_name TEXT,
                    plex_updatedat TEXT,
                    kodi_path TEXT,
                    kodi_type TEXT,
                    kodi_hash TEXT)
            ''')
        # Create an index for actors to speed up sync
        utils.create_actor_db_index()
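
    # The 'plex' table above maps each Plex ratingKey (plex_id) to its Kodi
    # database ids and stores a checksum ("K<plex_id><updatedAt>", see
    # get_updatelist below) that drives delta syncs: an item is re-synced
    # whenever its checksum changes.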

    @utils.log_time
    def full_sync(self, repair=False):
        """
        repair=True: force sync EVERY item
        """
        # Reset our keys
        self.session_keys = {}
        # self.compare == False: we're syncing EVERY item
        # True: we're syncing only the delta, e.g. different checksum
        self.compare = not repair

        self.new_items_only = True
        # This will also update playstates and userratings!
        LOG.info('Running fullsync for NEW PMS items with repair=%s', repair)
        if self._full_sync() is False:
            return False
        self.new_items_only = False
        # This will NOT update playstates and userratings!
        LOG.info('Running fullsync for CHANGED PMS items with repair=%s',
                 repair)
        if self._full_sync() is False:
            return False
        playlists.full_sync()
        return True

    def _full_sync(self):
        process = [self.plex_movies, self.plex_tv_show]
        if state.ENABLE_MUSIC:
            process.append(self.plex_music)

        # Do the processing
        for kind in process:
            if self.suspend_item_sync() or not kind():
                return False

        # Let Kodi update the views in any case, since we're doing a full sync
        xbmc.executebuiltin('UpdateLibrary(video)')
        if state.ENABLE_MUSIC:
            xbmc.executebuiltin('UpdateLibrary(music)')

        if window('plex_scancrashed') == 'true':
            # Show warning if itemtypes.py crashed at some point
            dialog('ok', heading='{plex}', line1=lang(39408))
            window('plex_scancrashed', clear=True)
        elif window('plex_scancrashed') == '401':
            window('plex_scancrashed', clear=True)
            if state.PMS_STATUS not in ('401', 'Auth'):
                # Plex server had too much and returned ERROR
                dialog('ok', heading='{plex}', line1=lang(39409))
        return True

    def _process_view(self, folder_item, kodi_db, plex_db, totalnodes):
        vnodes = self.vnodes
        folder = folder_item.attrib
        mediatype = folder['type']
        # Only process supported formats
        if mediatype not in (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW,
                             v.PLEX_TYPE_ARTIST, v.PLEX_TYPE_PHOTO):
            return totalnodes

        # Prevent duplicate for nodes of the same type
        nodes = self.nodes[mediatype]
        # Prevent duplicate for playlists of the same type
        lists = self.playlists[mediatype]
        sorted_views = self.sorted_views

        folderid = folder['key']
        foldername = folder['title']
        viewtype = folder['type']

        # Get current media folders from plex database
        view = plex_db.getView_byId(folderid)
        try:
            current_viewname = view[0]
            current_viewtype = view[1]
            current_tagid = view[2]
        except TypeError:
            LOG.info('Creating viewid: %s in Plex database.', folderid)
            tagid = kodi_db.createTag(foldername)
            # Create playlist for the video library
            if (foldername not in lists and
                    mediatype in (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW)):
                utils.playlist_xsp(mediatype, foldername, folderid, viewtype)
                lists.append(foldername)
            # Create the video node
            if (foldername not in nodes and
                    mediatype != v.PLEX_TYPE_ARTIST):
                vnodes.viewNode(sorted_views.index(foldername),
                                foldername,
                                mediatype,
                                viewtype,
                                folderid)
                nodes.append(foldername)
                totalnodes += 1
            # Add view to plex database
            plex_db.addView(folderid, foldername, viewtype, tagid)
        else:
            LOG.info(' '.join((
                'Found viewid: %s' % folderid,
                'viewname: %s' % current_viewname,
                'viewtype: %s' % current_viewtype,
                'tagid: %s' % current_tagid)))

            # Remove views that are still valid to delete rest later
            try:
                self.old_views.remove(folderid)
            except ValueError:
                # View was just created, nothing to remove
                pass

            # View was modified, update with latest info
            if current_viewname != foldername:
                LOG.info('viewid: %s new viewname: %s', folderid, foldername)
                tagid = kodi_db.createTag(foldername)

                # Update view with new info
                plex_db.updateView(foldername, tagid, folderid)

                if mediatype != "artist":
                    if plex_db.getView_byName(current_viewname) is None:
                        # The tag could be a combined view. Ensure there's
                        # no other tags with the same name before deleting
                        # playlist.
                        utils.playlist_xsp(mediatype,
                                           current_viewname,
                                           folderid,
                                           current_viewtype,
                                           True)
                    # Delete video node
                    if mediatype != "musicvideos":
                        vnodes.viewNode(
                            indexnumber=sorted_views.index(foldername),
                            tagname=current_viewname,
                            mediatype=mediatype,
                            viewtype=current_viewtype,
                            viewid=folderid,
                            delete=True)
                # Added new playlist
                if (foldername not in lists and mediatype in
                        (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW)):
                    utils.playlist_xsp(mediatype,
                                       foldername,
                                       folderid,
                                       viewtype)
                    lists.append(foldername)
                # Add new video node
                if foldername not in nodes and mediatype != "musicvideos":
                    vnodes.viewNode(sorted_views.index(foldername),
                                    foldername,
                                    mediatype,
                                    viewtype,
                                    folderid)
                    nodes.append(foldername)
                    totalnodes += 1

                # Update items with new tag
                items = plex_db.getItem_byView(folderid)
                for item in items:
                    # Remove the "s" from viewtype for tags
                    kodi_db.updateTag(
                        current_tagid, tagid, item[0], current_viewtype[:-1])
            else:
                # Validate the playlist exists or recreate it
                if mediatype != v.PLEX_TYPE_ARTIST:
                    if (foldername not in lists and mediatype in
                            (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW)):
                        utils.playlist_xsp(mediatype,
                                           foldername,
                                           folderid,
                                           viewtype)
                        lists.append(foldername)
                    # Create the video node if not already exists
                    if foldername not in nodes and mediatype != "musicvideos":
                        vnodes.viewNode(sorted_views.index(foldername),
                                        foldername,
                                        mediatype,
                                        viewtype,
                                        folderid)
                        nodes.append(foldername)
                        totalnodes += 1
        return totalnodes

    def maintain_views(self):
        """
        Compare the views to Plex
        """
        # Get views
        sections = PF.get_plex_sections()
        try:
            sections.attrib
        except AttributeError:
            LOG.error("Error downloading PMS views, aborting maintain_views")
            return False
        if state.DIRECT_PATHS is True and state.ENABLE_MUSIC is True:
            # Will reboot Kodi if a new library is detected
            music.excludefromscan_music_folders(xml=sections)
        self.views = []
        vnodes = self.vnodes

        self.nodes = {
            v.PLEX_TYPE_MOVIE: [],
            v.PLEX_TYPE_SHOW: [],
            v.PLEX_TYPE_ARTIST: [],
            v.PLEX_TYPE_PHOTO: []
        }
        self.playlists = copy.deepcopy(self.nodes)
        self.sorted_views = []

        for view in sections:
            if (view.attrib['type'] in
                    (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW, v.PLEX_TYPE_PHOTO)):
                self.sorted_views.append(view.attrib['title'])
        LOG.debug('Sorted views: %s', self.sorted_views)

        # Total nodes for window properties
        vnodes.clearProperties()
        totalnodes = len(self.sorted_views)

        with plexdb.Get_Plex_DB() as plex_db:
            # Backup old views to delete them later, if needed (at the end
            # of this method, only unused views will be left in old_views)
            self.old_views = plex_db.getViews()
            with kodidb.GetKodiDB('video') as kodi_db:
                for folder_item in sections:
                    totalnodes = self._process_view(folder_item,
                                                    kodi_db,
                                                    plex_db,
                                                    totalnodes)
                # Add video nodes listings
                # Plex: there seem to be no favorites/favorites tag
                # vnodes.singleNode(totalnodes,
                #                   "Favorite movies",
                #                   "movies",
                #                   "favourites")
                # totalnodes += 1
                # vnodes.singleNode(totalnodes,
                #                   "Favorite tvshows",
                #                   "tvshows",
                #                   "favourites")
                # totalnodes += 1
                # vnodes.singleNode(totalnodes,
                #                   "channels",
                #                   "movies",
                #                   "channels")
                # totalnodes += 1

        # Save total
        window('Plex.nodes.total', str(totalnodes))

        # Get rid of old items (view has been deleted on Plex side)
        if self.old_views:
            self.delete_views()
        # Update views for all:
        with plexdb.Get_Plex_DB() as plex_db:
            self.views = plex_db.getAllViewInfo()
        LOG.info("Finished processing views. Views saved: %s", self.views)
        return True

    def delete_views(self):
        LOG.info("Removing views: %s", self.old_views)
        delete_items = []
        with plexdb.Get_Plex_DB() as plex_db:
            for view in self.old_views:
                plex_db.removeView(view)
                delete_items.extend(plex_db.get_items_by_viewid(view))
        delete_movies = []
        delete_tv = []
        delete_music = []
        for item in delete_items:
            if item['kodi_type'] == v.KODI_TYPE_MOVIE:
                delete_movies.append(item)
            elif item['kodi_type'] in v.KODI_VIDEOTYPES:
                delete_tv.append(item)
            elif item['kodi_type'] in v.KODI_AUDIOTYPES:
                delete_music.append(item)

        dialog('notification',
               heading='{plex}',
               message=lang(30052),
               icon='{plex}',
               sound=False)
        for item in delete_movies:
            with itemtypes.Movies() as movie_db:
                movie_db.remove(item['plex_id'])
        for item in delete_tv:
            with itemtypes.TVShows() as tv_db:
                tv_db.remove(item['plex_id'])
        # And for the music DB:
        for item in delete_music:
            with itemtypes.Music() as music_db:
                music_db.remove(item['plex_id'])

    def get_updatelist(self, xml, item_class, method, view_name, view_id,
                       get_children=False):
        """
        THIS METHOD NEEDS TO BE FAST! => e.g. no API calls

        Adds items to self.updatelist as well as to the self.all_plex_ids dict

        Input:
            xml:            PMS answer for section items
            item_class:     'Movies', 'TVShows', ... see itemtypes.py
            method:         Method name to be called with this itemtype,
                            see itemtypes.py
            view_name:      Name of the Plex view (e.g. 'My TV shows')
            view_id:        Id/Key of Plex library (e.g. '1')
            get_children:   will get Plex children of the item if True,
                            e.g. for music albums

        Output: self.updatelist, self.all_plex_ids
            self.updatelist     APPENDED(!!) list of items (Plex keys as
                                received from API.plex_id()).
                                One item in this list is of the form:
                'plex_id': xxx,
                'item_class': 'Movies', 'TVShows', ...
                'method': 'add_update', 'add_updateSeason', ...
                'view_name': xxx,
                'view_id': xxx,
                'title': xxx,
                'plex_type': xxx, e.g. 'movie', 'episode'

            self.all_plex_ids   APPENDED(!!) dict
                                = {plex_id: checksum}
        """
        if self.new_items_only is True:
            # Only process Plex items that Kodi does not already have in lib
            for item in xml:
                plex_id = item.get('ratingKey')
                if not plex_id:
                    # Skipping items 'title=All episodes' without a 'ratingKey'
                    continue
                self.all_plex_ids[plex_id] = "K%s%s" % \
                    (plex_id, item.get('updatedAt', ''))
                if plex_id not in self.all_kodi_ids:
                    self.updatelist.append({
                        'plex_id': plex_id,
                        'item_class': item_class,
                        'method': method,
                        'view_name': view_name,
                        'view_id': view_id,
                        'title': item.get('title', 'Missing Title'),
                        'plex_type': item.get('type'),
                        'get_children': get_children
                    })
        elif self.compare:
            # Only process the delta - new or changed items
            for item in xml:
                plex_id = item.get('ratingKey')
                if not plex_id:
                    # Skipping items 'title=All episodes' without a 'ratingKey'
                    continue
                plex_checksum = ("K%s%s"
                                 % (plex_id, item.get('updatedAt', '')))
                self.all_plex_ids[plex_id] = plex_checksum
                kodi_checksum = self.all_kodi_ids.get(plex_id)
                # Only update if the item is not in Kodi or the checksum
                # differs
                if kodi_checksum != plex_checksum:
                    self.updatelist.append({
                        'plex_id': plex_id,
                        'item_class': item_class,
                        'method': method,
                        'view_name': view_name,
                        'view_id': view_id,
                        'title': item.get('title', 'Missing Title'),
                        'plex_type': item.get('type'),
                        'get_children': get_children
                    })
        else:
            # Initial or repair sync: process ALL Plex items
            for item in xml:
                plex_id = item.get('ratingKey')
                if not plex_id:
                    # Skipping items 'title=All episodes' without a 'ratingKey'
                    continue
                self.all_plex_ids[plex_id] = "K%s%s" \
                    % (plex_id, item.get('updatedAt', ''))
                self.updatelist.append({
                    'plex_id': plex_id,
                    'item_class': item_class,
                    'method': method,
                    'view_name': view_name,
                    'view_id': view_id,
                    'title': item.get('title', 'Missing Title'),
                    'plex_type': item.get('type'),
                    'get_children': get_children
                })

    def process_updatelist(self, item_class):
        """
        Downloads all XMLs for item_class (e.g. Movies, TVShows). Processes
        them by then calling itemtypes.<item_class>()

        Input:
            item_class:     'Movies', 'TVShows', ...
            self.updatelist
        """
        # Some logging, just in case.
        item_number = len(self.updatelist)
        if item_number == 0:
            return

        # Run through self.updatelist, get XML metadata per item
        # Initiate threads
        LOG.debug("Starting sync threads")
        download_queue = Queue.Queue()
        process_queue = Queue.Queue(maxsize=100)
        # To keep track
        sync_info.GET_METADATA_COUNT = 0
        sync_info.PROCESS_METADATA_COUNT = 0
        sync_info.PROCESSING_VIEW_NAME = ''
        # Populate queue: GetMetadata
        for item in self.updatelist:
            download_queue.put(item)
        # Spawn GetMetadata threads for downloading
        threads = []
        for _ in range(min(state.SYNC_THREAD_NUMBER, item_number)):
            thread = ThreadedGetMetadata(download_queue, process_queue)
            thread.setDaemon(True)
            thread.start()
            threads.append(thread)
        LOG.debug("%s download threads spawned", len(threads))
        # Spawn one more thread to process Metadata, once downloaded
        thread = ThreadedProcessMetadata(process_queue, item_class)
        thread.setDaemon(True)
        thread.start()
        threads.append(thread)
        # Start one thread to show sync progress ONLY for new PMS items
        if self.new_items_only is True and (state.SYNC_DIALOG is True or
                                            self.force_dialog is True):
            thread = sync_info.ThreadedShowSyncInfo(item_number, item_class)
            thread.setDaemon(True)
            thread.start()
            threads.append(thread)

        # Wait until finished
        download_queue.join()
        process_queue.join()
        # Kill threads
        LOG.debug("Waiting to kill threads")
        for thread in threads:
            # Threads might already have quit by themselves (e.g. Kodi exit)
            try:
                thread.stop()
            except AttributeError:
                pass
        LOG.debug("Stop sent to all threads")
        # Wait till threads are indeed dead
        for thread in threads:
            try:
                thread.join(1.0)
            except:
                pass
        LOG.debug("Sync threads finished")
        if (settings('FanartTV') == 'true' and
                item_class in ('Movies', 'TVShows')):
            for item in self.updatelist:
                if item['plex_type'] in (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW):
                    self.fanartqueue.put({
                        'plex_id': item['plex_id'],
                        'plex_type': item['plex_type'],
                        'refresh': False
                    })
        self.updatelist = []
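
    # Pipeline sketch for process_updatelist() above: self.updatelist feeds a
    # download_queue consumed by several ThreadedGetMetadata workers, whose
    # downloaded XMLs go into process_queue for a single
    # ThreadedProcessMetadata thread that applies them via itemtypes; an
    # optional ThreadedShowSyncInfo thread only reports progress.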

    @utils.log_time
    def plex_movies(self):
        # Initialize
        self.all_plex_ids = {}

        item_class = 'Movies'

        views = [x for x in self.views if x['itemtype'] == v.KODI_TYPE_MOVIE]
        LOG.info("Processing Plex %s. Libraries: %s", item_class, views)

        self.all_kodi_ids = {}
        if self.compare:
            with plexdb.Get_Plex_DB() as plex_db:
                # Get movies from Plex server
                # Pull the list of movies and boxsets in Kodi
                try:
                    self.all_kodi_ids = dict(
                        plex_db.checksum(v.PLEX_TYPE_MOVIE))
                except ValueError:
                    self.all_kodi_ids = {}

        # PROCESS MOVIES #####
        self.updatelist = []
        for view in views:
            if not self.install_sync_done:
                state.PATH_VERIFIED = False
            if self.suspend_item_sync():
                return False
            # Get items per view
            all_plexmovies = PF.GetPlexSectionResults(view['id'], args=None)
            if all_plexmovies is None:
                LOG.info("Couldn't get section items, aborting for view.")
                continue
            elif all_plexmovies == 401:
                return False
            # Populate self.updatelist and self.all_plex_ids
            self.get_updatelist(all_plexmovies,
                                item_class,
                                'add_update',
                                view['name'],
                                view['id'])
        self.process_updatelist(item_class)
        # Update viewstate for EVERY item
        for view in views:
            if self.suspend_item_sync():
                return False
            self.plex_update_watched(view['id'], item_class)

        # PROCESS DELETES #####
        if self.compare:
            # Manual sync, process deletes
            with itemtypes.Movies() as movie_db:
                for kodimovie in self.all_kodi_ids:
                    if kodimovie not in self.all_plex_ids:
                        movie_db.remove(kodimovie)
        LOG.info("%s sync is finished.", item_class)
        return True

    def plex_update_watched(self, viewId, item_class, lastViewedAt=None,
                            updatedAt=None):
        """
        Updates Plex elements' view status ('watched' or 'unwatched') and
        also updates resume times.
        This is done by downloading one XML for ALL elements with viewId
        """
        if self.new_items_only is False:
            # Only do this once for fullsync: the first run where new items
            # are added to Kodi
            return
        xml = PF.GetAllPlexLeaves(viewId,
                                  lastViewedAt=lastViewedAt,
                                  updatedAt=updatedAt)
        # Return if there are no items in PMS reply - it's faster
        try:
            xml[0].attrib
        except (TypeError, AttributeError, IndexError):
            LOG.error('Error updating watch status. Could not get viewId: '
                      '%s of item_class %s with lastViewedAt: %s, updatedAt: '
                      '%s', viewId, item_class, lastViewedAt, updatedAt)
            return

        if item_class in ('Movies', 'TVShows'):
            self.update_kodi_video_library = True
        elif item_class == 'Music':
            self.update_kodi_music_library = True
        with getattr(itemtypes, item_class)() as itemtype:
            itemtype.updateUserdata(xml)

    @utils.log_time
    def plex_tv_show(self):
        # Initialize
        self.all_plex_ids = {}
        item_class = 'TVShows'

        views = [x for x in self.views if x['itemtype'] == 'show']
        LOG.info("Media folders for %s: %s", item_class, views)

        self.all_kodi_ids = {}
        if self.compare:
            with plexdb.Get_Plex_DB() as plex:
                # Pull the list of TV shows already in Kodi
                for kind in (v.PLEX_TYPE_SHOW,
                             v.PLEX_TYPE_SEASON,
                             v.PLEX_TYPE_EPISODE):
                    try:
                        elements = dict(plex.checksum(kind))
                        self.all_kodi_ids.update(elements)
                    # Yet empty/not yet synched
                    except ValueError:
                        pass

        # PROCESS TV Shows #####
        self.updatelist = []
        for view in views:
            if not self.install_sync_done:
                state.PATH_VERIFIED = False
            if self.suspend_item_sync():
                return False
            # Get items per view
            view_id = view['id']
            view_name = view['name']
            all_plex_tv_shows = PF.GetPlexSectionResults(view_id)
            if all_plex_tv_shows is None:
                LOG.error("Error downloading show xml for view %s", view_id)
                continue
            elif all_plex_tv_shows == 401:
                return False
            # Populate self.updatelist and self.all_plex_ids
            self.get_updatelist(all_plex_tv_shows,
                                item_class,
                                'add_update',
                                view_name,
                                view_id)
            LOG.debug("Analyzed view %s with ID %s", view_name, view_id)

        # COPY for later use
        all_plex_tv_show_ids = self.all_plex_ids.copy()

        # Process self.updatelist
        self.process_updatelist(item_class)
        LOG.debug("process_updatelist completed for tv shows")

        # PROCESS TV Seasons #####
        # Cycle through tv shows
        for show_id in all_plex_tv_show_ids:
            if self.suspend_item_sync():
                return False
            # Grab all seasons of the tvshow from the PMS
            seasons = PF.GetAllPlexChildren(show_id)
            if seasons is None:
                LOG.error("Error downloading season xml for show %s", show_id)
                continue
            elif seasons == 401:
                return False
            # Populate self.updatelist and self.all_plex_ids
            self.get_updatelist(seasons,
                                item_class,
                                'add_updateSeason',
                                view_name,
                                view_id)
            LOG.debug("Analyzed all seasons of TV show with Plex Id %s",
                      show_id)

        # Process self.updatelist
        self.process_updatelist(item_class)
        LOG.debug("process_updatelist completed for seasons")

        # PROCESS TV Episodes #####
        # Cycle through the views
        for view in views:
            if self.suspend_item_sync():
                return False
            # Grab all episodes of the view from the PMS
            episodes = PF.GetAllPlexLeaves(view['id'])
            if episodes is None:
                LOG.error("Error downloading episode xml for view %s",
                          view.get('name'))
                continue
            elif episodes == 401:
                return False
            # Populate self.updatelist and self.all_plex_ids
            self.get_updatelist(episodes,
                                item_class,
                                'add_updateEpisode',
                                view_name,
                                view_id)
            LOG.debug("Analyzed all episodes of TV show with Plex Id %s",
                      view['id'])

        # Process self.updatelist
        self.process_updatelist(item_class)
        LOG.debug("process_updatelist completed for episodes")
        # Refresh season info
        # Cycle through tv shows
        with itemtypes.TVShows() as tvshow_db:
            for show_id in all_plex_tv_show_ids:
                xml_show = PF.GetPlexMetadata(show_id)
                if xml_show is None or xml_show == 401:
                    LOG.error('Could not download xml_show')
                    continue
                tvshow_db.refreshSeasonEntry(xml_show, show_id)
        LOG.debug("Season info refreshed")

        # Update viewstate:
        for view in views:
            if self.suspend_item_sync():
                return False
            self.plex_update_watched(view['id'], item_class)

        if self.compare:
            # Manual sync, process deletes
            with itemtypes.TVShows() as tvshow_db:
                for item in self.all_kodi_ids:
                    if item not in self.all_plex_ids:
                        tvshow_db.remove(item)
        LOG.info("%s sync is finished.", item_class)
        return True

    @utils.log_time
    def plex_music(self):
        item_class = 'Music'

        views = [x for x in self.views if x['itemtype'] == v.PLEX_TYPE_ARTIST]
        LOG.info("Media folders for %s: %s", item_class, views)

        methods = {
            v.PLEX_TYPE_ARTIST: 'add_updateArtist',
            v.PLEX_TYPE_ALBUM: 'add_updateAlbum',
            v.PLEX_TYPE_SONG: 'add_updateSong'
        }
        urlArgs = {
            v.PLEX_TYPE_ARTIST: {'type': 8},
            v.PLEX_TYPE_ALBUM: {'type': 9},
            v.PLEX_TYPE_SONG: {'type': 10}
        }

        # Process artists, then albums, and tracks last to minimize overhead
        # Each album needs to be processed directly with its songs
        # Remaining songs without an album will be processed last
        for kind in (v.PLEX_TYPE_ARTIST,
                     v.PLEX_TYPE_ALBUM,
                     v.PLEX_TYPE_SONG):
            if self.suspend_item_sync():
                return False
            LOG.debug("Start processing music %s", kind)
            self.all_kodi_ids = {}
            self.all_plex_ids = {}
            self.updatelist = []
            if not self.process_music(views,
                                      kind,
                                      urlArgs[kind],
                                      methods[kind]):
                return False
            LOG.debug("Processing of music %s done", kind)
            self.process_updatelist(item_class)
            LOG.debug("process_updatelist for music %s completed", kind)

        # Update viewstate for EVERY item
        for view in views:
            if self.suspend_item_sync():
                return False
            self.plex_update_watched(view['id'], item_class)

        # Reset stuff
        self.all_kodi_ids = {}
        self.all_plex_ids = {}
        self.updatelist = []
        LOG.info("%s sync is finished.", item_class)
        return True

    def process_music(self, views, kind, urlArgs, method):
        # For albums, we need to look at the album's songs simultaneously
        get_children = True if kind == v.PLEX_TYPE_ALBUM else False
        # Get a list of items already existing in Kodi db
        if self.compare:
            with plexdb.Get_Plex_DB() as plex_db:
                # Pull the list of items already in Kodi
                try:
                    elements = dict(plex_db.checksum(kind))
                    self.all_kodi_ids.update(elements)
                # Yet empty/nothing yet synched
                except ValueError:
                    pass
        for view in views:
            if not self.install_sync_done:
                state.PATH_VERIFIED = False
            if self.suspend_item_sync():
                return False
            # Get items per view
            items_xml = PF.GetPlexSectionResults(view['id'], args=urlArgs)
            if items_xml is None:
                LOG.error("Error downloading xml for view %s", view['id'])
                continue
            elif items_xml == 401:
                return False
            # Populate self.updatelist and self.all_plex_ids
            self.get_updatelist(items_xml,
                                'Music',
                                method,
                                view['name'],
                                view['id'],
                                get_children=get_children)
        if self.compare:
            # Manual sync, process deletes
            with itemtypes.Music() as music_db:
                for itemid in self.all_kodi_ids:
                    if itemid not in self.all_plex_ids:
                        music_db.remove(itemid)
        return True

    def process_message(self, message):
        """
        Processes json.loads() messages from the websocket. Triages what we
        need to do with the "process_" methods
        """
        if message['type'] == 'playing':
            try:
                self.process_playing(message['PlaySessionStateNotification'])
            except KeyError:
                LOG.error('Received invalid PMS message for playstate: %s',
                          message)
        elif message['type'] == 'timeline':
            try:
                self.process_timeline(message['TimelineEntry'])
            except (KeyError, ValueError):
                LOG.error('Received invalid PMS message for timeline: %s',
                          message)
        elif message['type'] == 'activity':
            try:
                self.process_activity(message['ActivityNotification'])
            except KeyError:
                LOG.error('Received invalid PMS message for activity: %s',
                          message)

    def multi_delete(self, liste, delete_list):
        """
        Deletes the list items of liste at the positions in delete_list
        (which can be in any arbitrary order)
        """
        indexes = sorted(delete_list, reverse=True)
        for index in indexes:
            del liste[index]
        return liste
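
    # For example, multi_delete(['a', 'b', 'c', 'd'], [0, 2]) deletes the
    # entries at positions 2 and 0 (highest index first, so earlier indexes
    # stay valid) and returns ['b', 'd'].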

    def process_items(self):
        """
        Periodically called to process new/updated PMS items

        PMS needs a while to download info from the internet AFTER an item
        showed up under 'timeline' websocket messages

        data['type']:
            1:  movie
            2:  tv show??
            3:  season??
            4:  episode
            8:  artist (band)
            9:  album
            10: track (song)
            12: trailer, extras?

        data['state']:
            0: 'created',
            2: 'matching',
            3: 'downloading',
            4: 'loading',
            5: 'finished',
            6: 'analyzing',
            9: 'deleted'
        """
        self.update_kodi_video_library = False
        self.update_kodi_music_library = False
        now = utils.unix_timestamp()
        delete_list = []
        for i, item in enumerate(self.items_to_process):
            if self.stopped() or self.suspended():
                # Chances are that Kodi gets shut down
                break
            if item['state'] == 9:
                successful = self.process_deleteditems(item)
            elif now - item['timestamp'] < state.BACKGROUNDSYNC_SAFTYMARGIN:
                # We haven't waited long enough for the PMS to finish
                # processing the item. Do it later (excepting deletions)
                continue
            else:
                successful = self.process_newitems(item)
                if successful and settings('FanartTV') == 'true':
                    if item['type'] in (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW):
                        self.fanartqueue.put({
                            'plex_id': item['ratingKey'],
                            'plex_type': item['type'],
                            'refresh': False
                        })
            if successful is True:
                delete_list.append(i)
            else:
                # Safety net if we can't process an item
                item['attempt'] += 1
                if item['attempt'] > 3:
                    LOG.error('Repeatedly could not process item %s, abort',
                              item)
                    delete_list.append(i)

        # Get rid of the items we just processed
        if delete_list:
            self.items_to_process = self.multi_delete(self.items_to_process,
                                                      delete_list)
        # Let Kodi know of the change
        if self.update_kodi_video_library is True:
            LOG.info("Doing Kodi Video Lib update")
            xbmc.executebuiltin('UpdateLibrary(video)')
        if self.update_kodi_music_library is True:
            LOG.info("Doing Kodi Music Lib update")
            xbmc.executebuiltin('UpdateLibrary(music)')
|
|
|
|
|
|
|
|
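# Minimal standalone sketch of the deferral rule used in process_items()
# above (the margin value is an assumption; the real one is
# state.BACKGROUNDSYNC_SAFTYMARGIN): deletions are handled at once, everything
# else only after the PMS has had time to finish fetching metadata.
import time

SAFETY_MARGIN_SKETCH = 30  # seconds, illustrative only

def ready_for_processing_sketch(item, now=None):
    now = int(time.time()) if now is None else now
    if item['state'] == 9:
        # Deletions are always processed immediately
        return True
    return now - item['timestamp'] >= SAFETY_MARGIN_SKETCH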
def process_newitems(self, item):
|
2018-04-18 04:18:25 +10:00
|
|
|
xml = PF.GetPlexMetadata(item['ratingKey'])
|
2016-10-12 03:35:11 +11:00
|
|
|
try:
|
|
|
|
mediatype = xml[0].attrib['type']
|
2016-10-12 03:37:47 +11:00
|
|
|
except (IndexError, KeyError, TypeError):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.error('Could not download metadata for %s', item['ratingKey'])
|
2017-02-02 22:27:21 +11:00
|
|
|
return False
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug("Processing new/updated PMS item: %s", item['ratingKey'])
|
2016-03-28 04:06:36 +11:00
|
|
|
viewtag = xml.attrib.get('librarySectionTitle')
|
|
|
|
viewid = xml.attrib.get('librarySectionID')
|
2017-02-02 22:27:21 +11:00
|
|
|
if mediatype == v.PLEX_TYPE_MOVIE:
|
2018-04-18 04:18:25 +10:00
|
|
|
self.update_kodi_video_library = True
|
2016-03-28 04:06:36 +11:00
|
|
|
with itemtypes.Movies() as movie:
|
|
|
|
movie.add_update(xml[0],
|
|
|
|
viewtag=viewtag,
|
|
|
|
viewid=viewid)
|
2017-02-02 22:27:21 +11:00
|
|
|
elif mediatype == v.PLEX_TYPE_EPISODE:
|
2018-04-18 04:18:25 +10:00
|
|
|
self.update_kodi_video_library = True
|
2016-03-28 04:06:36 +11:00
|
|
|
with itemtypes.TVShows() as show:
|
|
|
|
show.add_updateEpisode(xml[0],
|
|
|
|
viewtag=viewtag,
|
|
|
|
viewid=viewid)
|
2017-02-02 22:27:21 +11:00
|
|
|
elif mediatype == v.PLEX_TYPE_SONG:
|
2018-04-18 04:18:25 +10:00
|
|
|
self.update_kodi_music_library = True
|
|
|
|
with itemtypes.Music() as music_db:
|
|
|
|
music_db.add_updateSong(xml[0], viewtag=viewtag, viewid=viewid)
|
2017-02-02 22:27:21 +11:00
|
|
|
return True
|
2016-03-28 04:06:36 +11:00
|
|
|
|
|
|
|
def process_deleteditems(self, item):
|
2017-09-08 20:06:31 +10:00
|
|
|
if item['type'] == v.PLEX_TYPE_MOVIE:
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug("Removing movie %s", item['ratingKey'])
|
2018-04-18 04:18:25 +10:00
|
|
|
self.update_kodi_video_library = True
|
2016-03-28 04:06:36 +11:00
|
|
|
with itemtypes.Movies() as movie:
|
2017-09-08 20:06:31 +10:00
|
|
|
movie.remove(item['ratingKey'])
|
|
|
|
elif item['type'] in (v.PLEX_TYPE_SHOW,
|
|
|
|
v.PLEX_TYPE_SEASON,
|
|
|
|
v.PLEX_TYPE_EPISODE):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug("Removing episode/season/show with plex id %s",
|
2018-03-12 01:23:32 +11:00
|
|
|
item['ratingKey'])
|
2018-04-18 04:18:25 +10:00
|
|
|
self.update_kodi_video_library = True
|
2016-03-28 04:06:36 +11:00
|
|
|
with itemtypes.TVShows() as show:
|
2017-09-08 20:06:31 +10:00
|
|
|
show.remove(item['ratingKey'])
|
|
|
|
elif item['type'] in (v.PLEX_TYPE_ARTIST,
|
|
|
|
v.PLEX_TYPE_ALBUM,
|
|
|
|
v.PLEX_TYPE_SONG):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug("Removing song/album/artist %s", item['ratingKey'])
|
2018-04-18 04:18:25 +10:00
|
|
|
self.update_kodi_music_library = True
|
|
|
|
with itemtypes.Music() as music_db:
|
|
|
|
music_db.remove(item['ratingKey'])
|
2016-03-28 04:06:36 +11:00
|
|
|
return True
|
2016-03-25 04:52:02 +11:00
|
|
|
|
|
|
|
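# Hedged summary of process_deleteditems() above as a plex-type -> handler
# table (the plex type strings used as keys are assumptions standing in for
# the v.PLEX_TYPE_* constants; the handler classes are the ones used above):
DELETION_HANDLERS_SKETCH = {
    'movie': itemtypes.Movies,
    'show': itemtypes.TVShows,
    'season': itemtypes.TVShows,
    'episode': itemtypes.TVShows,
    'artist': itemtypes.Music,
    'album': itemtypes.Music,
    'track': itemtypes.Music,
}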
def process_timeline(self, data):
|
|
|
|
"""
|
2016-03-28 04:06:36 +11:00
|
|
|
PMS is messing with the library items, e.g. new or changed. Put in our
|
2016-04-08 17:11:03 +10:00
|
|
|
"processing queue" for later
|
2016-03-25 04:52:02 +11:00
|
|
|
"""
|
|
|
|
for item in data:
|
2016-10-12 03:52:59 +11:00
|
|
|
if 'tv.plex' in item.get('identifier', ''):
|
|
|
|
# Omit Plex DVR messages - the Plex IDs do not correspond
|
|
|
|
# (DVR ratingKeys are not unique and might correspond to a
|
|
|
|
# movie or episode)
|
|
|
|
continue
|
2017-09-08 20:06:31 +10:00
|
|
|
typus = v.PLEX_TYPE_FROM_WEBSOCKET[int(item['type'])]
|
2017-09-08 20:34:13 +10:00
|
|
|
if typus == v.PLEX_TYPE_CLIP:
|
|
|
|
# No need to process extras or trailers
|
|
|
|
continue
|
2017-09-08 20:06:31 +10:00
|
|
|
status = int(item['state'])
|
2018-05-03 01:27:35 +10:00
|
|
|
if typus == 'playlist':
|
2018-05-03 16:20:55 +10:00
|
|
|
if not state.SYNC_PLAYLISTS:
|
|
|
|
continue
|
2018-05-03 01:27:35 +10:00
|
|
|
playlists.process_websocket(plex_id=str(item['itemID']),
|
|
|
|
updated_at=str(item['updatedAt']),
|
|
|
|
state=status)
|
|
|
|
elif status == 9:
|
2017-09-08 20:34:13 +10:00
|
|
|
# Immediately and always process deletions (as the PMS will
|
|
|
|
# send additional messages with other codes)
|
2018-04-18 04:18:25 +10:00
|
|
|
self.items_to_process.append({
|
2017-09-08 20:34:13 +10:00
|
|
|
'state': status,
|
|
|
|
'type': typus,
|
|
|
|
'ratingKey': str(item['itemID']),
|
2018-04-18 04:18:25 +10:00
|
|
|
'timestamp': utils.unix_timestamp(),
|
2017-09-08 20:34:13 +10:00
|
|
|
'attempt': 0
|
|
|
|
})
|
|
|
|
elif typus in (v.PLEX_TYPE_MOVIE,
|
|
|
|
v.PLEX_TYPE_EPISODE,
|
|
|
|
v.PLEX_TYPE_SONG) and status == 5:
|
2017-09-08 20:06:31 +10:00
|
|
|
plex_id = str(item['itemID'])
|
2017-09-08 20:34:13 +10:00
|
|
|
# Have we already added this element for processing?
|
2018-04-18 04:18:25 +10:00
|
|
|
for existing_item in self.items_to_process:
|
|
|
|
if existing_item['ratingKey'] == plex_id:
|
2016-09-11 18:43:16 +10:00
|
|
|
break
|
2016-05-13 05:46:50 +10:00
|
|
|
else:
|
|
|
|
# Haven't added this element to the queue yet
|
2018-04-18 04:18:25 +10:00
|
|
|
self.items_to_process.append({
|
2017-05-17 18:09:50 +10:00
|
|
|
'state': status,
|
2016-05-13 05:46:50 +10:00
|
|
|
'type': typus,
|
2017-02-02 19:32:00 +11:00
|
|
|
'ratingKey': plex_id,
|
2018-04-18 04:18:25 +10:00
|
|
|
'timestamp': utils.unix_timestamp(),
|
2016-06-01 03:30:12 +10:00
|
|
|
'attempt': 0
|
2016-05-13 05:46:50 +10:00
|
|
|
})
|
2016-03-25 04:52:02 +11:00
|
|
|
|
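# Hedged example (all values illustrative) of a single 'timeline' websocket
# entry and of the dict process_timeline() above queues for it in
# items_to_process (the identifier string is an assumption):
example_timeline_entry = {
    'identifier': 'com.plexapp.plugins.library',
    'type': 1,        # movie, per the table in process_items()
    'state': 5,       # finished
    'itemID': 12345,
}
example_queued_item = {
    'state': 5,
    'type': v.PLEX_TYPE_MOVIE,
    'ratingKey': '12345',
    'timestamp': utils.unix_timestamp(),
    'attempt': 0,
}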
2017-09-08 20:06:31 +10:00
|
|
|
def process_activity(self, data):
|
|
|
|
"""
|
|
|
|
PMS is re-scanning an item, e.g. after having changed a movie poster.
|
|
|
|
WATCH OUT for this if it's triggered by our PKC library scan!
|
|
|
|
"""
|
|
|
|
for item in data:
|
|
|
|
if item['event'] != 'ended':
|
|
|
|
# Scan still going on, so skip for now
|
|
|
|
continue
|
2017-09-08 20:12:29 +10:00
|
|
|
elif item['Activity'].get('Context') is None:
|
|
|
|
# Not related to a specific Plex element but to the entire library
|
|
|
|
continue
|
2017-09-08 20:06:31 +10:00
|
|
|
elif item['Activity']['type'] != 'library.refresh.items':
|
|
|
|
# Not the type of message relevant for us
|
|
|
|
continue
|
2018-04-18 04:18:25 +10:00
|
|
|
plex_id = PF.GetPlexKeyNumber(item['Activity']['Context']['key'])[1]
|
2017-09-08 20:06:31 +10:00
|
|
|
if plex_id == '':
|
2017-09-08 20:36:26 +10:00
|
|
|
# Likely a Plex id like /library/metadata/3/children
|
|
|
|
continue
|
2017-09-08 20:06:31 +10:00
|
|
|
# We're only looking at existing elements - have we synced yet?
|
|
|
|
with plexdb.Get_Plex_DB() as plex_db:
|
|
|
|
kodi_info = plex_db.getItem_byId(plex_id)
|
|
|
|
if kodi_info is None:
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug('Plex id %s not synced yet - skipping', plex_id)
|
2017-09-08 20:06:31 +10:00
|
|
|
continue
|
|
|
|
# Have we already added this element?
|
2018-04-18 04:18:25 +10:00
|
|
|
for existing_item in self.items_to_process:
|
|
|
|
if existing_item['ratingKey'] == plex_id:
|
2017-09-08 20:06:31 +10:00
|
|
|
break
|
|
|
|
else:
|
|
|
|
# Haven't added this element to the queue yet
|
2018-04-18 04:18:25 +10:00
|
|
|
self.items_to_process.append({
|
2017-09-08 20:06:31 +10:00
|
|
|
'state': None, # Don't need a state here
|
|
|
|
'type': kodi_info[5],
|
|
|
|
'ratingKey': plex_id,
|
2018-04-18 04:18:25 +10:00
|
|
|
'timestamp': utils.unix_timestamp(),
|
2017-09-08 20:06:31 +10:00
|
|
|
'attempt': 0
|
|
|
|
})
|
|
|
|
|
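# Illustrative stand-in (regex-based, not the addon's implementation) for the
# PF.GetPlexKeyNumber() step in process_activity() above: a context key such
# as '/library/metadata/12345' yields '12345', while keys like
# '/library/metadata/3/children' yield '' and are skipped.
import re

def plex_id_from_context_key_sketch(key):
    match = re.match(r'^/library/metadata/(\d+)$', key)
    return match.group(1) if match else ''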
2016-03-25 04:52:02 +11:00
|
|
|
def process_playing(self, data):
|
2016-03-28 01:57:35 +11:00
|
|
|
"""
|
|
|
|
Someone (not necessarily the signed-in user) is playing something
|
|
|
|
somewhere
|
|
|
|
"""
|
2017-09-08 20:06:31 +10:00
|
|
|
for item in data:
|
|
|
|
status = item['state']
|
|
|
|
if status == 'buffering':
|
2018-02-06 03:48:50 +11:00
|
|
|
# Drop buffering messages immediately
|
2017-09-08 20:06:31 +10:00
|
|
|
continue
|
2018-02-16 03:19:12 +11:00
|
|
|
plex_id = item['ratingKey']
|
|
|
|
skip = False
|
2018-02-04 22:22:10 +11:00
|
|
|
for pid in (0, 1, 2):
|
2018-02-06 03:48:50 +11:00
|
|
|
if plex_id == state.PLAYER_STATES[pid]['plex_id']:
|
2018-01-28 23:55:00 +11:00
|
|
|
# Kodi is playing this item - no need to set the playstate
|
2018-02-16 03:19:12 +11:00
|
|
|
skip = True
|
|
|
|
if skip:
|
|
|
|
continue
|
2018-04-18 04:18:25 +10:00
|
|
|
session_key = item['sessionKey']
|
2017-09-08 20:06:31 +10:00
|
|
|
# Do we already have a sessionKey stored?
|
2018-04-18 04:18:25 +10:00
|
|
|
if session_key not in self.session_keys:
|
2018-02-06 03:48:50 +11:00
|
|
|
with plexdb.Get_Plex_DB() as plex_db:
|
|
|
|
kodi_info = plex_db.getItem_byId(plex_id)
|
|
|
|
if kodi_info is None:
|
|
|
|
# Item not (yet) in Kodi library
|
|
|
|
continue
|
2017-09-08 20:06:31 +10:00
|
|
|
if settings('plex_serverowned') == 'false':
|
2018-02-06 03:48:50 +11:00
|
|
|
# Not our PMS, we are not authorized to get the sessions
|
2017-09-08 20:06:31 +10:00
|
|
|
# On the bright side, it must be us playing :-)
|
2018-04-18 04:18:25 +10:00
|
|
|
self.session_keys[session_key] = {}
|
2017-09-08 20:06:31 +10:00
|
|
|
else:
|
|
|
|
# PMS is ours - get all current sessions
|
2018-04-18 04:18:25 +10:00
|
|
|
self.session_keys.update(PF.GetPMSStatus(state.PLEX_TOKEN))
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug('Updated current sessions. They are: %s',
|
2018-04-18 04:18:25 +10:00
|
|
|
self.session_keys)
|
|
|
|
if session_key not in self.session_keys:
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info('Session key %s still unknown! Skip '
|
2018-04-18 04:18:25 +10:00
|
|
|
'playstate update', session_key)
|
2016-04-13 23:27:02 +10:00
|
|
|
continue
|
2018-02-06 03:48:50 +11:00
|
|
|
# Attach Kodi info to the session
|
2018-04-18 04:18:25 +10:00
|
|
|
self.session_keys[session_key]['kodi_id'] = kodi_info[0]
|
|
|
|
self.session_keys[session_key]['file_id'] = kodi_info[1]
|
|
|
|
self.session_keys[session_key]['kodi_type'] = kodi_info[4]
|
|
|
|
session = self.session_keys[session_key]
|
2017-09-08 20:06:31 +10:00
|
|
|
if settings('plex_serverowned') != 'false':
|
|
|
|
# Identify the user - same one as signed on with PKC? Skip
|
|
|
|
# update if neither session's username nor userid match
|
|
|
|
# (The owner sometimes returns id '1', but not always)
|
2018-02-06 03:48:50 +11:00
|
|
|
if not state.PLEX_TOKEN and session['userId'] == '1':
|
2017-09-08 20:06:31 +10:00
|
|
|
# PKC not signed in to plex.tv. Plus owner of PMS is
|
|
|
|
# playing (the '1').
|
|
|
|
# Hence must be us (since several users require plex.tv
|
|
|
|
# token for PKC)
|
|
|
|
pass
|
2018-02-06 03:48:50 +11:00
|
|
|
elif not (session['userId'] == state.PLEX_USER_ID or
|
|
|
|
session['username'] == state.PLEX_USERNAME):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug('Our username %s, userid %s did not match '
|
2018-02-06 03:48:50 +11:00
|
|
|
'the session username %s with userid %s',
|
|
|
|
state.PLEX_USERNAME,
|
|
|
|
state.PLEX_USER_ID,
|
|
|
|
session['username'],
|
|
|
|
session['userId'])
|
2017-09-08 20:06:31 +10:00
|
|
|
continue
|
2018-02-06 03:48:50 +11:00
|
|
|
# Get an up-to-date XML from the PMS because PMS will NOT directly
|
|
|
|
# tell us the item's duration or its viewCount
|
|
|
|
if session.get('duration') is None:
|
2018-04-18 04:18:25 +10:00
|
|
|
xml = PF.GetPlexMetadata(plex_id)
|
2017-09-08 20:06:31 +10:00
|
|
|
if xml in (None, 401):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.error('Could not get up-to-date xml for item %s',
|
2018-02-06 03:48:50 +11:00
|
|
|
plex_id)
|
2017-09-08 20:06:31 +10:00
|
|
|
continue
|
2018-02-06 03:48:50 +11:00
|
|
|
api = PlexAPI.API(xml[0])
|
2018-02-12 00:42:49 +11:00
|
|
|
userdata = api.userdata()
|
2018-02-06 03:48:50 +11:00
|
|
|
session['duration'] = userdata['Runtime']
|
|
|
|
session['viewCount'] = userdata['PlayCount']
|
2017-09-08 20:06:31 +10:00
|
|
|
# Sometimes, Plex tells us resume points in milliseconds and
|
|
|
|
# not in seconds - thank you very much!
|
2018-02-06 03:48:50 +11:00
|
|
|
if item['viewOffset'] > session['duration']:
|
|
|
|
resume = item['viewOffset'] / 1000
|
2017-09-08 20:06:31 +10:00
|
|
|
else:
|
2018-02-06 03:48:50 +11:00
|
|
|
resume = item['viewOffset']
|
|
|
|
if resume < v.IGNORE_SECONDS_AT_START:
|
2017-09-24 02:49:59 +10:00
|
|
|
continue
|
2018-02-06 03:48:50 +11:00
|
|
|
try:
|
|
|
|
completed = float(resume) / float(session['duration'])
|
|
|
|
except (ZeroDivisionError, TypeError):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.error('Could not mark playstate for %s and session %s',
|
2018-02-06 03:48:50 +11:00
|
|
|
data, session)
|
|
|
|
continue
|
|
|
|
if completed >= v.MARK_PLAYED_AT:
|
|
|
|
# Only mark completely watched ONCE
|
|
|
|
if session.get('marked_played') is None:
|
|
|
|
session['marked_played'] = True
|
|
|
|
mark_played = True
|
|
|
|
else:
|
|
|
|
# Don't mark it as completely watched again
|
|
|
|
continue
|
|
|
|
else:
|
|
|
|
mark_played = False
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.debug('Update playstate for user %s with id %s for plex id %s',
|
2018-02-06 03:48:50 +11:00
|
|
|
state.PLEX_USERNAME, state.PLEX_USER_ID, plex_id)
|
|
|
|
item_fkt = getattr(itemtypes,
|
|
|
|
v.ITEMTYPE_FROM_KODITYPE[session['kodi_type']])
|
|
|
|
with item_fkt() as fkt:
|
2018-05-27 02:54:20 +10:00
|
|
|
plex_type = v.PLEX_TYPE_FROM_KODI_TYPE[session['kodi_type']]
|
2018-02-06 03:48:50 +11:00
|
|
|
fkt.updatePlaystate(mark_played,
|
|
|
|
session['viewCount'],
|
|
|
|
resume,
|
|
|
|
session['duration'],
|
|
|
|
session['file_id'],
|
2018-05-27 02:54:20 +10:00
|
|
|
utils.unix_date_to_kodi(utils.unix_timestamp()),
|
|
|
|
plex_type)
|
2016-03-25 04:52:02 +11:00
|
|
|
|
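# The playstate arithmetic of process_playing() above as a standalone sketch
# (thresholds are assumptions; the real ones are v.IGNORE_SECONDS_AT_START
# and v.MARK_PLAYED_AT): Plex occasionally reports viewOffset in milliseconds,
# which is detected by it exceeding the item's duration.
def completed_fraction_sketch(view_offset, duration):
    resume = view_offset / 1000 if view_offset > duration else view_offset
    return float(resume) / float(duration)

# e.g. completed_fraction_sketch(5400, 7200) == 0.75, so with a mark-played
# threshold of, say, 0.9 this playback would not yet be flagged as watched.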
2018-04-18 04:18:25 +10:00
|
|
|
def sync_fanart(self, missing_only=True, refresh=False):
|
2016-09-11 03:49:03 +10:00
|
|
|
"""
|
2018-04-18 04:18:25 +10:00
|
|
|
Throw items to the fanart queue in order to download missing (or all)
|
|
|
|
additional fanart.
|
2016-09-11 03:49:03 +10:00
|
|
|
|
2018-04-18 04:18:25 +10:00
|
|
|
missing_only=True    set to False to look up fanart for EVERY item
|
|
|
|
refresh=False        set to True to force-refresh all external fanart
|
2016-09-11 03:49:03 +10:00
|
|
|
"""
|
2018-05-13 22:42:58 +10:00
|
|
|
if settings('FanartTV') == 'false':
|
|
|
|
return
|
2017-01-05 06:57:16 +11:00
|
|
|
with plexdb.Get_Plex_DB() as plex_db:
|
2018-04-18 04:18:25 +10:00
|
|
|
if missing_only:
|
|
|
|
items = plex_db.get_missing_fanart()
|
|
|
|
LOG.info('Trying to get %s additional fanart', len(items))
|
|
|
|
else:
|
|
|
|
items = []
|
|
|
|
for plex_type in (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW):
|
|
|
|
items.extend(plex_db.itemsByType(plex_type))
|
|
|
|
LOG.info('Trying to get ALL additional fanart for %s items',
|
|
|
|
len(items))
|
2018-05-13 23:22:03 +10:00
|
|
|
if not items:
|
|
|
|
return
|
2016-09-11 03:49:03 +10:00
|
|
|
# Shuffle the list to not always start out identically
|
2016-09-11 19:29:51 +10:00
|
|
|
shuffle(items)
|
2018-05-13 23:22:03 +10:00
|
|
|
# Checking FanartTV for %s items
|
2018-05-16 04:46:16 +10:00
|
|
|
self.fanartqueue.put(artwork.ArtworkSyncMessage(lang(30018) % len(items)))
|
2018-05-13 23:22:03 +10:00
|
|
|
for i, item in enumerate(items):
|
2016-09-11 03:49:03 +10:00
|
|
|
self.fanartqueue.put({
|
2017-02-02 22:27:21 +11:00
|
|
|
'plex_id': item['plex_id'],
|
|
|
|
'plex_type': item['plex_type'],
|
2016-09-11 03:49:03 +10:00
|
|
|
'refresh': refresh
|
|
|
|
})
|
2018-05-13 23:22:03 +10:00
|
|
|
# FanartTV lookup completed
|
2018-05-16 04:46:16 +10:00
|
|
|
self.fanartqueue.put(artwork.ArtworkSyncMessage(lang(30019)))
|
2016-09-11 03:49:03 +10:00
|
|
|
|
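# Two kinds of objects end up on fanartqueue in sync_fanart() above: plain
# dicts describing one item to look up, and artwork.ArtworkSyncMessage
# wrappers carrying user-facing progress text. Illustrative producer calls
# (id and text are made up):
fanart_example_queue = Queue.Queue()
fanart_example_queue.put({'plex_id': '12345',
                          'plex_type': v.PLEX_TYPE_MOVIE,
                          'refresh': False})
fanart_example_queue.put(artwork.ArtworkSyncMessage('example progress note'))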
2017-08-22 02:53:38 +10:00
|
|
|
def triage_lib_scans(self):
|
|
|
|
"""
|
2017-08-22 03:38:41 +10:00
|
|
|
Decides what to do if state.RUN_LIB_SCAN has been set. E.g. manually
|
|
|
|
triggered full or repair syncs
|
2017-08-22 02:53:38 +10:00
|
|
|
"""
|
|
|
|
if state.RUN_LIB_SCAN in ("full", "repair"):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info('Full library scan requested, starting')
|
2017-08-22 02:53:38 +10:00
|
|
|
window('plex_dbScan', value="true")
|
|
|
|
state.DB_SCAN = True
|
2018-04-18 04:18:25 +10:00
|
|
|
success = self.maintain_views()
|
|
|
|
if success and state.RUN_LIB_SCAN == "full":
|
|
|
|
success = self.full_sync()
|
|
|
|
elif success:
|
|
|
|
success = self.full_sync(repair=True)
|
2017-08-22 02:53:38 +10:00
|
|
|
window('plex_dbScan', clear=True)
|
|
|
|
state.DB_SCAN = False
|
2018-04-18 04:18:25 +10:00
|
|
|
if success:
|
|
|
|
# Full library sync finished
|
|
|
|
self.show_kodi_note(lang(39407))
|
|
|
|
elif not self.suspend_item_sync():
|
|
|
|
self.force_dialog = True
|
|
|
|
# ERROR in library sync
|
|
|
|
self.show_kodi_note(lang(39410), icon='error')
|
|
|
|
self.force_dialog = False
|
2017-08-22 02:53:38 +10:00
|
|
|
# Reset views was requested from somewhere else
|
|
|
|
elif state.RUN_LIB_SCAN == "views":
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info('Refresh playlist and nodes requested, starting')
|
2017-08-22 02:53:38 +10:00
|
|
|
window('plex_dbScan', value="true")
|
|
|
|
state.DB_SCAN = True
|
|
|
|
# First remove playlists
|
2018-04-18 04:18:25 +10:00
|
|
|
utils.delete_playlists()
|
2017-08-22 02:53:38 +10:00
|
|
|
# Remove video nodes
|
2018-04-18 04:18:25 +10:00
|
|
|
utils.delete_nodes()
|
2017-08-22 02:53:38 +10:00
|
|
|
# Kick off refresh
|
2018-04-18 04:18:25 +10:00
|
|
|
if self.maintain_views() is True:
|
2017-08-22 02:53:38 +10:00
|
|
|
# Ran successfully
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info("Refresh playlists/nodes completed")
|
2017-08-22 02:53:38 +10:00
|
|
|
# "Plex playlists/nodes refreshed"
|
2018-04-18 04:18:25 +10:00
|
|
|
self.show_kodi_note(lang(39405))
|
2017-08-22 02:53:38 +10:00
|
|
|
else:
|
|
|
|
# Failed
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.error("Refresh playlists/nodes failed")
|
2017-08-22 02:53:38 +10:00
|
|
|
# "Plex playlists/nodes refresh failed"
|
2018-04-18 04:18:25 +10:00
|
|
|
self.show_kodi_note(lang(39406), icon="error")
|
2017-08-22 02:53:38 +10:00
|
|
|
window('plex_dbScan', clear=True)
|
|
|
|
state.DB_SCAN = False
|
|
|
|
elif state.RUN_LIB_SCAN == 'fanart':
|
|
|
|
# Only look for missing fanart (No)
|
|
|
|
# or refresh all fanart (Yes)
|
2018-04-18 04:18:25 +10:00
|
|
|
refresh = dialog('yesno',
|
|
|
|
heading='{plex}',
|
|
|
|
line1=lang(39223),
|
|
|
|
nolabel=lang(39224),
|
|
|
|
yeslabel=lang(39225))
|
|
|
|
self.sync_fanart(missing_only=not refresh, refresh=refresh)
|
2017-08-22 02:53:38 +10:00
|
|
|
elif state.RUN_LIB_SCAN == 'textures':
|
|
|
|
state.DB_SCAN = True
|
|
|
|
window('plex_dbScan', value="true")
|
|
|
|
artwork.Artwork().fullTextureCacheSync()
|
|
|
|
window('plex_dbScan', clear=True)
|
|
|
|
state.DB_SCAN = False
|
|
|
|
else:
|
|
|
|
raise NotImplementedError('Library scan not defined: %s'
|
|
|
|
% state.RUN_LIB_SCAN)
|
2017-08-22 03:38:41 +10:00
|
|
|
# Reset
|
|
|
|
state.RUN_LIB_SCAN = None
|
2017-08-22 02:53:38 +10:00
|
|
|
|
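# Quick reference for the triage above - the state.RUN_LIB_SCAN values this
# method understands and what each one triggers (summarised from the branches
# in triage_lib_scans(); strings are descriptive only):
RUN_LIB_SCAN_ACTIONS_SKETCH = {
    'full': 'maintain_views(), then full_sync()',
    'repair': 'maintain_views(), then full_sync(repair=True)',
    'views': 'delete playlists/nodes, then maintain_views()',
    'fanart': 'sync_fanart(), optionally refreshing all external fanart',
    'textures': 'artwork.Artwork().fullTextureCacheSync()',
}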
2016-08-07 23:33:36 +10:00
|
|
|
def run(self):
|
2015-12-25 07:07:00 +11:00
|
|
|
try:
|
2018-04-18 04:18:25 +10:00
|
|
|
self._run_internal()
|
2015-12-25 07:07:00 +11:00
|
|
|
except Exception as e:
|
2017-05-17 18:09:50 +10:00
|
|
|
state.DB_SCAN = False
|
2016-09-02 03:07:28 +10:00
|
|
|
window('plex_dbScan', clear=True)
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.error('LibrarySync thread crashed. Error message: %s', e)
|
2016-04-08 17:11:03 +10:00
|
|
|
import traceback
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.error("Traceback:\n%s", traceback.format_exc())
|
2016-03-08 21:47:46 +11:00
|
|
|
# Library sync thread has crashed
|
2017-08-18 17:53:10 +10:00
|
|
|
dialog('ok', heading='{plex}', line1=lang(39400))
|
2015-12-25 07:07:00 +11:00
|
|
|
raise
|
|
|
|
|
2018-04-18 04:18:25 +10:00
|
|
|
def _run_internal(self):
|
|
|
|
LOG.info("---===### Starting LibrarySync ###===---")
|
|
|
|
initial_sync_done = False
|
|
|
|
kodi_db_version_checked = False
|
|
|
|
last_sync = 0
|
|
|
|
last_processing = 0
|
2018-05-20 22:28:56 +10:00
|
|
|
last_time_sync = 0
|
2018-04-18 04:18:25 +10:00
|
|
|
one_day_in_seconds = 60*60*24
|
2016-12-28 03:33:52 +11:00
|
|
|
# Link to Websocket queue
|
2018-01-07 01:19:12 +11:00
|
|
|
queue = state.WEBSOCKET_QUEUE
|
2016-03-25 04:52:02 +11:00
|
|
|
|
2018-04-18 04:18:25 +10:00
|
|
|
if not exists(try_encode(v.DB_VIDEO_PATH)):
|
|
|
|
# Database does not exist
|
|
|
|
LOG.error("The current Kodi version is incompatible "
|
|
|
|
"to know which Kodi versions are supported.")
|
|
|
|
LOG.error('Current Kodi version: %s', try_decode(
|
|
|
|
xbmc.getInfoLabel('System.BuildVersion')))
|
|
|
|
# "Current Kodi version is unsupported, cancel lib sync"
|
|
|
|
dialog('ok', heading='{plex}', line1=lang(39403))
|
|
|
|
return
|
2016-04-08 21:57:55 +10:00
|
|
|
|
2018-04-18 04:18:25 +10:00
|
|
|
# Do some initializing
|
2016-05-30 00:52:38 +10:00
|
|
|
# Ensure that DBs exist if called for very first time
|
2018-04-18 04:18:25 +10:00
|
|
|
self.initialize_plex_db()
|
|
|
|
# Run start up sync
|
|
|
|
state.DB_SCAN = True
|
|
|
|
window('plex_dbScan', value="true")
|
|
|
|
LOG.info("Db version: %s", settings('dbCreatedWithVersion'))
|
|
|
|
|
|
|
|
LOG.info('Refreshing video nodes and playlists now')
|
|
|
|
# Set up the paths for addon-paths (even when using direct paths)
|
|
|
|
with kodidb.GetKodiDB('video') as kodi_db:
|
|
|
|
kodi_db.setup_path_table()
|
|
|
|
window('plex_dbScan', clear=True)
|
|
|
|
state.DB_SCAN = False
|
2018-05-01 22:48:49 +10:00
|
|
|
playlist_monitor = None
|
2018-04-18 04:18:25 +10:00
|
|
|
|
|
|
|
while not self.stopped():
|
2016-04-08 17:11:03 +10:00
|
|
|
# In the event the server goes offline
|
2018-04-18 04:18:25 +10:00
|
|
|
while self.suspended():
|
|
|
|
if self.stopped():
|
2015-12-25 07:07:00 +11:00
|
|
|
# Abort was requested while waiting. We should exit
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info("###===--- LibrarySync Stopped ---===###")
|
2016-01-28 06:41:28 +11:00
|
|
|
return
|
2016-02-11 20:56:01 +11:00
|
|
|
xbmc.sleep(1000)
|
2015-12-25 07:07:00 +11:00
|
|
|
|
2018-04-18 04:18:25 +10:00
|
|
|
if not self.install_sync_done:
|
|
|
|
# Very first sync upon installation or reset of Kodi DB
|
|
|
|
state.DB_SCAN = True
|
|
|
|
window('plex_dbScan', value='true')
|
2018-05-20 22:28:56 +10:00
|
|
|
# Initialize time offset Kodi - PMS
|
|
|
|
self.sync_pms_time()
|
|
|
|
last_time_sync = utils.unix_timestamp()
|
2018-04-18 04:18:25 +10:00
|
|
|
LOG.info('Initial start-up full sync starting')
|
|
|
|
xbmc.executebuiltin('InhibitIdleShutdown(true)')
|
2018-06-08 01:11:13 +10:00
|
|
|
# Completely refresh Kodi playlists and video nodes
|
|
|
|
utils.delete_playlists()
|
|
|
|
utils.delete_nodes()
|
|
|
|
if not self.maintain_views():
|
|
|
|
LOG.error('Initial maintain_views not successful')
|
|
|
|
elif self.full_sync():
|
2018-04-18 04:18:25 +10:00
|
|
|
LOG.info('Initial start-up full sync successful')
|
|
|
|
settings('SyncInstallRunDone', value='true')
|
|
|
|
self.install_sync_done = True
|
|
|
|
settings('dbCreatedWithVersion', v.ADDON_VERSION)
|
|
|
|
self.force_dialog = False
|
|
|
|
initial_sync_done = True
|
|
|
|
kodi_db_version_checked = True
|
|
|
|
last_sync = utils.unix_timestamp()
|
2018-05-13 22:42:58 +10:00
|
|
|
self.sync_fanart()
|
2018-04-29 22:12:39 +10:00
|
|
|
self.fanartthread.start()
|
2018-05-03 16:20:55 +10:00
|
|
|
if state.SYNC_PLAYLISTS and playlists.full_sync():
|
2018-05-01 22:48:49 +10:00
|
|
|
playlist_monitor = playlists.kodi_playlist_monitor()
|
2018-04-18 04:18:25 +10:00
|
|
|
else:
|
|
|
|
LOG.error('Initial start-up full sync unsuccessful')
|
|
|
|
xbmc.executebuiltin('InhibitIdleShutdown(false)')
|
|
|
|
window('plex_dbScan', clear=True)
|
|
|
|
state.DB_SCAN = False
|
|
|
|
|
|
|
|
elif not kodi_db_version_checked:
|
2017-08-21 16:01:48 +10:00
|
|
|
# Install sync was already done, don't force-show dialogs
|
|
|
|
self.force_dialog = False
|
2015-12-25 07:07:00 +11:00
|
|
|
# Verify the validity of the database
|
2018-04-18 04:18:25 +10:00
|
|
|
current_version = settings('dbCreatedWithVersion')
|
|
|
|
if not utils.compare_version(current_version, v.MIN_DB_VERSION):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.warn("Db version out of date: %s minimum version "
|
2018-04-18 04:18:25 +10:00
|
|
|
"required: %s", current_version, v.MIN_DB_VERSION)
|
2016-03-08 21:47:46 +11:00
|
|
|
# DB out of date. Proceed to recreate?
|
2017-08-18 17:53:10 +10:00
|
|
|
resp = dialog('yesno',
|
|
|
|
heading=lang(29999),
|
|
|
|
line1=lang(39401))
|
2015-12-25 07:07:00 +11:00
|
|
|
if not resp:
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.warn("Db version out of date! USER IGNORED!")
|
2016-03-08 21:47:46 +11:00
|
|
|
# PKC may not work correctly until reset
|
2017-08-18 17:53:10 +10:00
|
|
|
dialog('ok',
|
|
|
|
heading='{plex}',
|
|
|
|
line1=lang(29999) + lang(39402))
|
2015-12-25 07:07:00 +11:00
|
|
|
else:
|
2018-05-15 04:51:14 +10:00
|
|
|
utils.reset(ask_user=False)
|
2016-03-02 03:49:16 +11:00
|
|
|
break
|
2018-04-18 04:18:25 +10:00
|
|
|
kodi_db_version_checked = True
|
|
|
|
|
|
|
|
elif not initial_sync_done:
|
|
|
|
# First sync upon PKC restart. Skipped if very first sync upon
|
|
|
|
# PKC installation has been completed
|
2017-05-17 18:09:50 +10:00
|
|
|
state.DB_SCAN = True
|
2016-05-31 16:06:42 +10:00
|
|
|
window('plex_dbScan', value="true")
|
2018-04-18 04:18:25 +10:00
|
|
|
LOG.info('Doing initial sync on Kodi startup')
|
2018-06-03 21:48:00 +10:00
|
|
|
if state.SUSPEND_SYNC:
|
|
|
|
LOG.warning('Forcing startup sync even if Kodi is playing')
|
|
|
|
state.SUSPEND_SYNC = False
|
2018-06-08 01:11:13 +10:00
|
|
|
# Completely refresh Kodi playlists and video nodes
|
|
|
|
utils.delete_playlists()
|
|
|
|
utils.delete_nodes()
|
|
|
|
if not self.maintain_views():
|
|
|
|
LOG.info('Initial maintain_views on startup unsuccessful')
|
|
|
|
elif self.full_sync():
|
2018-04-18 04:18:25 +10:00
|
|
|
initial_sync_done = True
|
|
|
|
last_sync = utils.unix_timestamp()
|
|
|
|
LOG.info('Done initial sync on Kodi startup')
|
2018-04-29 22:26:53 +10:00
|
|
|
artwork.Artwork().cache_major_artwork()
|
2018-05-13 22:42:58 +10:00
|
|
|
self.sync_fanart()
|
2018-04-29 22:26:53 +10:00
|
|
|
self.fanartthread.start()
|
2018-05-03 16:20:55 +10:00
|
|
|
if state.SYNC_PLAYLISTS and playlists.full_sync():
|
2018-05-01 22:48:49 +10:00
|
|
|
playlist_monitor = playlists.kodi_playlist_monitor()
|
2018-04-18 04:18:25 +10:00
|
|
|
else:
|
|
|
|
LOG.info('Startup sync has not yet been successful')
|
2016-05-31 16:06:42 +10:00
|
|
|
window('plex_dbScan', clear=True)
|
2017-05-17 18:09:50 +10:00
|
|
|
state.DB_SCAN = False
|
2016-01-28 06:41:28 +11:00
|
|
|
|
|
|
|
# Currently no db scan, so we can start a new scan
|
2017-05-17 18:09:50 +10:00
|
|
|
elif state.DB_SCAN is False:
|
2016-01-28 06:41:28 +11:00
|
|
|
# Full scan was requested from somewhere else, e.g. userclient
|
2017-08-22 02:53:38 +10:00
|
|
|
if state.RUN_LIB_SCAN is not None:
|
|
|
|
# Force-show dialogs since they are user-initiated
|
|
|
|
self.force_dialog = True
|
|
|
|
self.triage_lib_scans()
|
|
|
|
self.force_dialog = False
|
|
|
|
continue
|
2018-04-18 04:18:25 +10:00
|
|
|
now = utils.unix_timestamp()
|
2017-08-22 02:53:38 +10:00
|
|
|
# Standard syncs - don't force-show dialogs
|
|
|
|
self.force_dialog = False
|
2018-04-18 04:18:25 +10:00
|
|
|
if (now - last_sync > state.FULL_SYNC_INTERVALL and
|
|
|
|
not self.suspend_item_sync()):
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info('Doing scheduled full library scan')
|
2017-05-17 18:09:50 +10:00
|
|
|
state.DB_SCAN = True
|
2017-08-22 02:53:38 +10:00
|
|
|
window('plex_dbScan', value="true")
|
2018-04-18 04:18:25 +10:00
|
|
|
success = self.maintain_views()
|
|
|
|
if success:
|
|
|
|
success = self.full_sync()
|
|
|
|
if not success and not self.suspend_item_sync():
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.error('Could not finish scheduled full sync')
|
2017-08-22 02:53:38 +10:00
|
|
|
self.force_dialog = True
|
2018-04-18 04:18:25 +10:00
|
|
|
self.show_kodi_note(lang(39410),
|
|
|
|
icon='error')
|
2017-08-22 02:53:38 +10:00
|
|
|
self.force_dialog = False
|
2018-04-18 04:18:25 +10:00
|
|
|
elif success:
|
|
|
|
last_sync = now
|
|
|
|
# Full library sync finished successfully
|
|
|
|
self.show_kodi_note(lang(39407))
|
|
|
|
else:
|
|
|
|
LOG.info('Full sync interrupted')
|
2016-05-31 16:06:42 +10:00
|
|
|
window('plex_dbScan', clear=True)
|
2017-05-17 18:09:50 +10:00
|
|
|
state.DB_SCAN = False
|
2018-04-18 04:18:25 +10:00
|
|
|
elif now - last_time_sync > one_day_in_seconds:
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info('Starting daily time sync')
|
2018-04-18 04:18:25 +10:00
|
|
|
self.sync_pms_time()
|
|
|
|
last_time_sync = now
|
2018-03-10 22:58:11 +11:00
|
|
|
elif not state.BACKGROUND_SYNC_DISABLED:
|
2017-08-22 02:53:38 +10:00
|
|
|
# Check back whether we should process something
|
|
|
|
# Only do this once in a while (otherwise, potentially
|
|
|
|
# many screen refreshes lead to flickering)
|
2018-04-18 04:18:25 +10:00
|
|
|
if now - last_processing > 5:
|
|
|
|
last_processing = now
|
|
|
|
self.process_items()
|
2017-08-22 02:53:38 +10:00
|
|
|
# See if there is a PMS message we need to handle
|
|
|
|
try:
|
|
|
|
message = queue.get(block=False)
|
|
|
|
except Queue.Empty:
|
2018-04-18 04:18:25 +10:00
|
|
|
pass
|
2017-08-22 02:53:38 +10:00
|
|
|
# Got a message from PMS; process it
|
|
|
|
else:
|
2018-04-18 04:18:25 +10:00
|
|
|
self.process_message(message)
|
2017-08-22 02:53:38 +10:00
|
|
|
queue.task_done()
|
2018-04-18 04:18:25 +10:00
|
|
|
# Sleep just a bit
|
|
|
|
xbmc.sleep(10)
|
2017-08-22 02:53:38 +10:00
|
|
|
continue
|
2017-08-21 16:03:08 +10:00
|
|
|
xbmc.sleep(100)
|
2018-04-28 17:12:29 +10:00
|
|
|
# Shut down playlist monitoring
|
2018-05-01 22:48:49 +10:00
|
|
|
if playlist_monitor:
|
|
|
|
playlist_monitor.stop()
|
2016-04-10 00:57:45 +10:00
|
|
|
# doUtils could still have a session open due to interrupted sync
|
|
|
|
try:
|
2018-04-18 04:18:25 +10:00
|
|
|
DU().stopSession()
|
2016-04-10 00:57:45 +10:00
|
|
|
except:
|
|
|
|
pass
|
2018-04-16 02:33:20 +10:00
|
|
|
LOG.info("###===--- LibrarySync Stopped ---===###")
|
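# Cadence of the main loop in _run_internal() above, in one place (intervals
# as visible in the code; FULL_SYNC_INTERVALL and the background-sync switch
# come from state):
#   every 100 ms                         idle tick, check stop/suspend flags
#                                        and RUN_LIB_SCAN requests
#   every 5 s                            process queued websocket items
#   every state.FULL_SYNC_INTERVALL s    scheduled full library scan
#   every 24 h                           sync_pms_time() to refresh the
#                                        Kodi <-> PMS time offset
def interval_elapsed_sketch(last, interval, now=None):
    """Tiny helper mirroring the timestamp checks used in the loop above."""
    now = utils.unix_timestamp() if now is None else now
    return now - last > interval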