Optimize code

parent 938b82da9c
commit 2fcbc1f9b7
2 changed files with 88 additions and 121 deletions
@@ -191,12 +191,10 @@ class FullSync(common.fullsync_mixin):
                         self.process_item(xml_item)
                         if self.item_count == BATCH_SIZE:
                             break
-                # Make sure Plex DB above is closed before adding/updating
-                if self.item_count == BATCH_SIZE:
-                    self.update_library()
+                # Make sure Plex DB above is closed before adding/updating!
+                self.update_library()
                 if last:
                     break
-            self.update_library()
             reset_collections()
             return True
         except RuntimeError:
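
Note (sketch, not from this commit): update_library() is now called
unconditionally once per batch instead of only when exactly BATCH_SIZE items
accumulated, which suggests it is, or must be, safe as a no-op when nothing
is pending. A minimal illustration of that assumed contract, with made-up
names (BatchedSync; the print stands in for the real Kodi DB flush):

    BATCH_SIZE = 500  # assumed value; PKC defines its own

    class BatchedSync(object):
        def __init__(self):
            self.item_count = 0
            self.batch = []

        def process_item(self, item):
            self.batch.append(item)
            self.item_count += 1

        def update_library(self):
            # Safe to call after every inner loop: no-op unless there is
            # something to flush
            if not self.batch:
                return
            print('flushing %d items' % len(self.batch))
            del self.batch[:]
            self.item_count = 0
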
@@ -251,7 +249,7 @@ class FullSync(common.fullsync_mixin):

     def threaded_get_iterators(self, kinds, queue, all_items=False):
         """
-        PF.SectionItems is costly, so let's do it asynchronous
+        Getting iterators is costly, so let's do it asynchronously
         """
         try:
             for kind in kinds:
@@ -274,10 +272,11 @@ class FullSync(common.fullsync_mixin):
                 updated_at = section.last_sync - UPDATED_AT_SAFETY \
                     if section.last_sync else None
                 try:
-                    element.iterator = PF.SectionItems(section.section_id,
-                                                       plex_type=element.plex_type,
-                                                       updated_at=updated_at,
-                                                       last_viewed_at=None)
+                    element.iterator = PF.get_section_iterator(
+                        section.section_id,
+                        plex_type=element.plex_type,
+                        updated_at=updated_at,
+                        last_viewed_at=None)
                 except RuntimeError:
                     LOG.warn('Sync at least partially unsuccessful')
                     self.successful = False
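
Note (sketch, not from this commit): the SectionItems class is replaced by
the module-level factory get_section_iterator, added further down. A hedged
usage sketch, all values made up:

    # section 2 = e.g. the Movies library; timestamps are Unix epochs
    iterator = PF.get_section_iterator(2,
                                       plex_type=v.PLEX_TYPE_MOVIE,
                                       updated_at=1546300800,
                                       last_viewed_at=None)
    for xml_item in iterator:
        pass  # each item is an etree element for one Plex item
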
@@ -557,23 +557,7 @@ def GetAllPlexChildren(key):
     return DownloadChunks("{server}/library/metadata/%s/children" % key)


-def GetPlexSectionResults(viewId, args=None):
-    """
-    Returns a list (XML API dump) of all Plex items in the Plex
-    section with key = viewId.
-
-    Input:
-        args: optional dict to be urlencoded
-
-    Returns None if something went wrong
-    """
-    url = "{server}/library/sections/%s/all" % viewId
-    if args:
-        url = utils.extend_url(url, args)
-    return DownloadChunks(url)
-
-
-class DownloadChunk(backgroundthread.Task):
+class ThreadedDownloadChunk(backgroundthread.Task):
     """
     This task will also be executed while library sync is suspended!
     """
@@ -581,7 +565,7 @@ class DownloadChunk(backgroundthread.Task):
         self.url = url
         self.args = args
         self.callback = callback
-        super(DownloadChunk, self).__init__()
+        super(ThreadedDownloadChunk, self).__init__()

     def run(self):
         xml = DU().downloadUrl(self.url, parameters=self.args)
@@ -601,14 +585,15 @@ class DownloadGen(object):

     Yields XML etree children or raises RuntimeError at the end
     """
-    def __init__(self, url, plex_type=None, last_viewed_at=None,
-                 updated_at=None, args=None):
+    def __init__(self, url, plex_type, last_viewed_at, updated_at, args,
+                 downloader):
+        self._downloader = downloader
         self.successful = True
-        self.args = args or {}
+        self.xml = None
+        self.args = args
         self.args.update({
-            'X-Plex-Container-Size': CONTAINERSIZE,
-            'sort': 'id',  # Entries are sorted by plex_id
-            'excludeAllLeaves': 1  # PMS wont attach a first summary child
+            'X-Plex-Container-Start': 0,
+            'X-Plex-Container-Size': CONTAINERSIZE
         })
         url += '?'
         if plex_type:
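
Note (sketch, not from this commit): DownloadGen no longer decides between
blocking and threaded downloads itself; the strategy is injected as any
callable with the signature downloader(url, args, start, callback). A sketch
of a trivial stand-in (inline_downloader is invented here, e.g. for tests):

    import xml.etree.ElementTree as etree

    def inline_downloader(url, args, start, callback):
        # Fake a one-element Plex container instead of hitting a server
        args['X-Plex-Container-Start'] = start
        callback(etree.fromstring(
            '<MediaContainer totalSize="1"><Video/></MediaContainer>'))
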
@@ -618,8 +603,8 @@ class DownloadGen(object):
         if updated_at:
             url = '%supdatedAt>=%s&' % (url, updated_at)
         self.url = url[:-1]
-        self._download_chunk(start=0)
-        self.attrib = deepcopy(self.xml.attrib)
+        _blocking_download_chunk(self.url, self.args, 0, self.set_xml)
+        self.attrib = self.xml.attrib
         self.current = 0
         self.total = int(self.attrib['totalSize'])
         self.cache_factor = 10
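
Note (sketch, not from this commit): the very first chunk stays synchronous
regardless of the injected downloader, because __init__ needs the container's
totalSize before it can queue the remaining chunks. Worked example with
assumed numbers:

    import math

    CONTAINERSIZE = 200  # assumed; PKC configures its own value
    total = 1234         # int(xml.attrib['totalSize']) from the first chunk
    remaining_chunks = int(math.ceil(float(total) / CONTAINERSIZE)) - 1
    assert remaining_chunks == 6  # queued via the injected downloader
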
@@ -629,34 +614,24 @@ class DownloadGen(object):
                    self.total + CONTAINERSIZE - self.total % CONTAINERSIZE)
         for pos in range(CONTAINERSIZE, end, CONTAINERSIZE):
             self.pending_counter.append(None)
-            self._download_chunk(start=pos)
+            self._downloader(self.url, self.args, pos, self.on_chunk_downloaded)

-    def _download_chunk(self, start):
-        self.args['X-Plex-Container-Start'] = start
-        if start == 0:
-            # We need the result NOW
-            self.xml = DU().downloadUrl(self.url, parameters=self.args)
-            try:
-                self.xml.attrib
-            except AttributeError:
-                LOG.error('Error while downloading chunks: %s, args: %s',
-                          self.url, self.args)
-                raise RuntimeError('Error while downloading chunks for %s'
-                                   % self.url)
-        else:
-            task = DownloadChunk(self.url,
-                                 deepcopy(self.args),  # Beware!
-                                 self.on_chunk_downloaded)
-            backgroundthread.BGThreader.addTask(task)
+    def set_xml(self, xml):
+        self.xml = xml

     def on_chunk_downloaded(self, xml):
         if xml is not None:
-            for child in xml:
-                self.xml.append(child)
+            self.xml.extend(xml)
         else:
             self.successful = False
         self.pending_counter.pop()

+    def get(self, key, default=None):
+        """
+        Mimick etree xml's way to access xml.attrib via xml.get(key, default)
+        """
+        return self.attrib.get(key, default)
+
     def __iter__(self):
         return self
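
Note (sketch, not from this commit): etree Elements behave like lists of
their children, so self.xml.extend(xml) replaces the per-child append loop
with a single call. Quick demonstration:

    import xml.etree.ElementTree as etree

    a = etree.Element('MediaContainer')
    b = etree.fromstring('<MediaContainer><Video/><Video/></MediaContainer>')
    a.extend(b)  # same result as: for child in b: a.append(child)
    assert len(a) == 2
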
@@ -669,8 +644,11 @@ class DownloadGen(object):
             if (self.current % CONTAINERSIZE == 0 and
                     self.current <= self.total - (self.cache_factor - 1) * CONTAINERSIZE):
                 self.pending_counter.append(None)
-                self._download_chunk(
-                    start=self.current + (self.cache_factor - 1) * CONTAINERSIZE)
+                self._downloader(
+                    self.url,
+                    self.args,
+                    self.current + (self.cache_factor - 1) * CONTAINERSIZE,
+                    self.on_chunk_downloaded)
             return child
         except IndexError:
             if not self.pending_counter and not len(self.xml):
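
Note (sketch, not from this commit) on the read-ahead arithmetic: whenever
iteration crosses a container boundary, the generator requests the chunk
that lies cache_factor - 1 containers ahead, keeping roughly cache_factor
chunks in flight. With assumed numbers:

    CONTAINERSIZE = 50   # assumed for illustration
    cache_factor = 10
    current = 50         # consumer just finished the first container
    start = current + (cache_factor - 1) * CONTAINERSIZE
    assert start == 500  # the chunk fetched now, 9 containers ahead
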
@@ -679,46 +657,67 @@ class DownloadGen(object):
             else:
                 raise StopIteration()
             LOG.debug('Waiting for download to finish')
-            app.APP.monitor.waitForAbort(0.1)
+            if app.APP.monitor.waitForAbort(0.1):
+                raise StopIteration('PKC needs to exit now')

     next = __next__

-    def get(self, key, default=None):
-        return self.attrib.get(key, default)
-
-
-class SectionItems(DownloadGen):
-    """
-    Iterator object to get all items of a Plex library section
-    """
-    def __init__(self, section_id, plex_type=None, last_viewed_at=None,
-                 updated_at=None, args=None):
-        if plex_type in (v.PLEX_TYPE_EPISODE, v.PLEX_TYPE_SONG):
-            # Annoying Plex bug. You won't get all episodes otherwise
-            url = '{server}/library/sections/%s/allLeaves' % section_id
-            plex_type = None
-        else:
-            url = '{server}/library/sections/%s/all' % section_id
-        super(SectionItems, self).__init__(url, plex_type, last_viewed_at,
-                                           updated_at, args)
-
-
-class Children(DownloadGen):
-    """
-    Iterator object to get all items of a Plex library section
-    """
-    def __init__(self, plex_id):
-        super(Children, self).__init__(
-            '{server}/library/metadata/%s/children' % plex_id)
-
-
-class Leaves(DownloadGen):
-    """
-    Iterator object to get all items of a Plex library section
-    """
-    def __init__(self, section_id):
-        super(Leaves, self).__init__(
-            '{server}/library/sections/%s/allLeaves' % section_id)
+
+def _blocking_download_chunk(url, args, start, callback):
+    """
+    callback will be called with the downloaded xml (fragment)
+    """
+    args['X-Plex-Container-Start'] = start
+    xml = DU().downloadUrl(url, parameters=args)
+    try:
+        xml.attrib
+    except AttributeError:
+        LOG.error('Error while downloading chunks: %s, args: %s',
+                  url, args)
+        raise RuntimeError('Error while downloading chunks for %s'
+                           % url)
+    callback(xml)
+
+
+def _async_download_chunk(url, args, start, callback):
+    args['X-Plex-Container-Start'] = start
+    task = ThreadedDownloadChunk(url,
+                                 deepcopy(args),  # Beware!
+                                 callback)
+    backgroundthread.BGThreader.addTask(task)
+
+
+def get_section_iterator(section_id, plex_type=None, last_viewed_at=None,
+                         updated_at=None, args=None):
+    args = args or {}
+    args.update({
+        'checkFiles': 0,
+        'includeExtras': 0,  # Trailers and Extras => Extras
+        'includeReviews': 0,
+        'includeRelated': 0,  # Similar movies => Video -> Related
+        'skipRefresh': 1,  # don't scan
+        'excludeAllLeaves': 1  # PMS wont attach a first summary child
+    })
+    if plex_type == v.PLEX_TYPE_ALBUM:
+        # Kodi sorts Newest Albums by their position within the Kodi music
+        # database - great...
+        downloader = _blocking_download_chunk
+        args['sort'] = 'addedAt:asc'
+    else:
+        downloader = _async_download_chunk
+        args['sort'] = 'id'  # Entries are sorted by plex_id
+    if plex_type in (v.PLEX_TYPE_EPISODE, v.PLEX_TYPE_SONG):
+        # Annoying Plex bug. You won't get all episodes otherwise
+        url = '{server}/library/sections/%s/allLeaves' % section_id
+        plex_type = None
+    else:
+        url = '{server}/library/sections/%s/all' % section_id
+    return DownloadGen(url,
+                       plex_type,
+                       last_viewed_at,
+                       updated_at,
+                       args,
+                       downloader)


 def DownloadChunks(url):
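
Note (sketch, not from this commit): the deepcopy in _async_download_chunk
matters because the shared args dict is mutated for every chunk; each queued
task must snapshot its own start offset. Demonstration:

    from copy import deepcopy

    args = {'X-Plex-Container-Start': 0}
    snapshots = []
    for start in (0, 100, 200):
        args['X-Plex-Container-Start'] = start
        snapshots.append(deepcopy(args))
    # Without the copies, every task would read the last value (200)
    assert [s['X-Plex-Container-Start'] for s in snapshots] == [0, 100, 200]
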
@@ -767,37 +766,6 @@ def DownloadChunks(url):
     return xml


-def GetAllPlexLeaves(viewId, lastViewedAt=None, updatedAt=None):
-    """
-    Returns a list (raw XML API dump) of all Plex subitems for the key.
-    (e.g. /library/sections/2/allLeaves pointing to all TV shows)
-
-    Input:
-        viewId            Id of Plex library, e.g. '2'
-        lastViewedAt      Unix timestamp; only retrieves PMS items viewed
-                          since that point of time until now.
-        updatedAt         Unix timestamp; only retrieves PMS items updated
-                          by the PMS since that point of time until now.
-
-    If lastViewedAt and updatedAt=None, ALL PMS items are returned.
-
-    Warning: lastViewedAt and updatedAt are combined with AND by the PMS!
-
-    Relevant "master time": PMS server. I guess this COULD lead to problems,
-    e.g. when server and client are in different time zones.
-    """
-    args = []
-    url = "{server}/library/sections/%s/allLeaves" % viewId
-
-    if lastViewedAt:
-        args.append('lastViewedAt>=%s' % lastViewedAt)
-    if updatedAt:
-        args.append('updatedAt>=%s' % updatedAt)
-    if args:
-        url += '?' + '&'.join(args)
-    return DownloadChunks(url)
-
-
 def GetPlexOnDeck(viewId):
     """
     """