#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Taken from iBaa, https://github.com/iBaa/PlexConnect
Point of time: December 22, 2015

Collection of "connector functions" to Plex Media Server/MyPlex

PlexGDM:
loosely based on hippojay's plexGDM:
https://github.com/hippojay/script.plexbmc.helper... /resources/lib/plexgdm.py

Plex Media Server communication:
source (somewhat): https://github.com/hippojay/plugin.video.plexbmc
later converted from httplib to urllib2

Transcoder support:
PlexAPI_getTranscodePath() based on getTranscodeURL from pyplex/plexAPI
https://github.com/megawubs/pyplex/blob/master/plexAPI/info.py

MyPlex - Basic Authentication:
http://www.voidspace.org.uk/python/articles/urllib2.shtml
http://www.voidspace.org.uk/python/articles/authentication.shtml
http://stackoverflow.com/questions/2407126/python-urllib2-basic-auth-problem
http://stackoverflow.com/questions/111945/is-there-any-way-to-do-http-put-in-python
(and others...)
"""
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
from re import sub
from urllib import urlencode, unquote, quote
from urlparse import parse_qsl
from xbmcgui import ListItem

from .downloadutils import DownloadUtils as DU
from . import clientinfo
from . import utils
from . import path_ops
from . import plex_functions as PF
from . import plexdb_functions as plexdb
from . import kodidb_functions as kodidb
from . import variables as v
from . import state

###############################################################################
LOG = getLogger('PLEX.plex_api')
###############################################################################


def _unicode_or_none(value):
    """
    Tries to decode value to unicode. Returns None if this fails
    """
    try:
        return value.decode('utf-8')
    except TypeError:
        # e.g. Android TV's Python
        return value.decode()
    except AttributeError:
        pass


class API(object):
    """
    API(item)

    Processes a Plex media server's XML response

    item: xml.etree.ElementTree element
    """
    def __init__(self, item):
        self.item = item
        # which media part in the XML response shall we look at?
        self.part = 0
        self.mediastream = None
        self.server = utils.window('pms_server')
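
    # Illustrative usage sketch (not part of the original module): API wraps a
    # single element of a PMS XML response, e.g. one <Video> node. Assuming
    # `xml` is an xml.etree.ElementTree element downloaded from the PMS:
    #     api = API(xml[0])
    #     api.plex_id()      # e.g. u'246922'
    #     api.plex_type()    # e.g. u'movie'
    #     api.title()        # e.g. u'Some Movie'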

    def set_part_number(self, number=None):
        """
        Sets the part number to work with (used to deal with movies that come
        in several parts).
        """
        self.part = number or 0

    def plex_type(self):
        """
        Returns the type of media, e.g. 'movie' or 'clip' for trailers as
        Unicode or None.
        """
        return _unicode_or_none(self.item.get('type'))

    def playlist_type(self):
        """
        Returns the playlist type ('video', 'audio') or None
        """
        return self.item.get('playlistType')

    def updated_at(self):
        """
        Returns the last time this item was updated as unicode, e.g.
        '1524739868', or None
        """
        return self.item.get('updatedAt')

    def checksum(self):
        """
        Returns a string, not an int.
        WATCH OUT - time in Plex, not Kodi ;-)
        """
        # Include a letter to prohibit saving as an int!
        return "K%s%s" % (self.plex_id(), self.item.get('updatedAt', ''))

    def plex_id(self):
        """
        Returns the Plex ratingKey such as '246922' as Unicode or None
        """
        return _unicode_or_none(self.item.get('ratingKey'))

    def path(self, force_first_media=True, force_addon=False,
             direct_paths=None):
        """
        Returns a "fully qualified path": add-on paths or direct paths
        depending on the current settings. Will NOT validate the playurl.
        Returns unicode or None if something went wrong.

        Pass direct_paths=True if you're calling from another Plex python
        instance - because otherwise direct paths will evaluate to False!
        """
        direct_paths = direct_paths or state.DIRECT_PATHS
        filename = self.file_path(force_first_media=force_first_media)
        if (not direct_paths or force_addon or
                self.plex_type() == v.PLEX_TYPE_CLIP):
            if filename and '/' in filename:
                filename = filename.rsplit('/', 1)
            elif filename:
                filename = filename.rsplit('\\', 1)
            try:
                filename = filename[1]
            except (TypeError, IndexError):
                filename = None
            # Set plugin path and media flags using real filename
            if self.plex_type() == v.PLEX_TYPE_EPISODE:
                # need to include the plex show id in the path
                path = ('plugin://plugin.video.plexkodiconnect.tvshows/%s/'
                        % self.grandparent_id())
            else:
                path = 'plugin://%s/' % v.ADDON_TYPE[self.plex_type()]
            path = ('%s?plex_id=%s&plex_type=%s&mode=play&filename=%s'
                    % (path, self.plex_id(), self.plex_type(), filename))
        else:
            # Direct paths is set the Kodi way
            path = self.validate_playurl(filename,
                                         self.plex_type(),
                                         omit_check=True)
        return path
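
    # Illustrative example of the add-on path assembled above (values made up,
    # assuming v.PLEX_TYPE_EPISODE evaluates to 'episode'): an episode with
    # show ratingKey 456 and episode ratingKey 123 yields
    #     plugin://plugin.video.plexkodiconnect.tvshows/456/
    #         ?plex_id=123&plex_type=episode&mode=play&filename=episode.mkv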

    def path_and_plex_id(self):
        """
        Returns the Plex key such as '/library/metadata/246922' or None
        """
        return self.item.get('key')

    def plex_media_streams(self):
        """
        Returns the media streams directly from the PMS xml.
        Mind that self.mediastream and self.part are set beforehand!
        """
        return self.item[self.mediastream][self.part]

    def file_name(self, force_first_media=False):
        """
        Returns only the filename, e.g. 'movie.mkv' as unicode or None if not
        found
        """
        ans = self.file_path(force_first_media=force_first_media)
        if ans is None:
            return
        if "\\" in ans:
            # Local path
            filename = ans.rsplit("\\", 1)[1]
        else:
            try:
                # Network share
                filename = ans.rsplit("/", 1)[1]
            except IndexError:
                # E.g. certain Plex channels
                filename = None
        return filename

    def file_path(self, force_first_media=False):
        """
        Returns the direct path to this item, e.g. '\\NAS\movies\movie.mkv'
        or None

        force_first_media=True:
            will always use the first media stream, e.g. when several
            different files are present for the same PMS item
        """
        if self.mediastream is None and force_first_media is False:
            if self.mediastream_number() is None:
                return
        try:
            if force_first_media is False:
                ans = self.item[self.mediastream][self.part].attrib['file']
            else:
                ans = self.item[0][self.part].attrib['file']
        except (TypeError, AttributeError, IndexError, KeyError):
            ans = None
        if ans is not None:
            try:
                ans = utils.try_decode(unquote(ans))
            except UnicodeDecodeError:
                # Sometimes, Plex seems to have encoded in latin1
                ans = unquote(ans).decode('latin1')
        return ans

    def get_picture_path(self):
        """
        Returns the item's picture path (transcode, if necessary) as string.
        Will always use addon paths, never direct paths
        """
        key = self.item[0][0].attrib['key']
        extension = key[key.rfind('.'):].lower()
        if (utils.window('plex_force_transcode_pix') == 'true' or
                extension not in v.KODI_SUPPORTED_IMAGES):
            # Let Plex transcode
            # max width/height supported by plex image transcoder is 1920x1080
            path = self.server + PF.transcode_image_path(
                self.item[0][0].get('key'),
                utils.window('pms_token'),
                "%s%s" % (self.server, self.item[0][0].get('key')),
                1920,
                1080)
        else:
            path = self.attach_plex_token_to_url(
                '%s%s' % (utils.window('pms_server'),
                          self.item[0][0].attrib['key']))
        # Attach Plex id to url to let it be picked up by our playqueue agent
        # later
        return utils.try_encode('%s&plex_id=%s' % (path, self.plex_id()))

    def tv_show_path(self):
        """
        Returns the direct path to the TV show, e.g. '\\NAS\tv\series'
        or None
        """
        res = None
        for child in self.item:
            if child.tag == 'Location':
                res = child.get('path')
        return res

    def season_number(self):
        """
        Returns the 'index' of a PMS XML reply. Depicts e.g. the season number.
        """
        return self.item.get('index')

    def date_created(self):
        """
        Returns the date when this library item was created.

        If not found, returns 2000-01-01 10:00:00
        """
        res = self.item.get('addedAt')
        if res is not None:
            res = utils.unix_date_to_kodi(res)
        else:
            res = '2000-01-01 10:00:00'
        return res

    def viewcount(self):
        """
        Returns the play count for the item as an int or the int 0 if not found
        """
        try:
            return int(self.item.attrib['viewCount'])
        except (KeyError, ValueError):
            return 0

    def userdata(self):
        """
        Returns a dict with None if a value is missing
        {
            'Favorite': favorite,             # False, because n/a in Plex
            'PlayCount': playcount,
            'Played': played,                 # True/False
            'LastPlayedDate': lastPlayedDate,
            'Resume': resume,                 # Resume time in seconds
            'Runtime': runtime,
            'Rating': rating
        }
        """
        item = self.item.attrib
        # Default - attributes not found with Plex
        favorite = False
        try:
            playcount = int(item['viewCount'])
        except (KeyError, ValueError):
            playcount = None
        played = True if playcount else False

        try:
            last_played = utils.unix_date_to_kodi(int(item['lastViewedAt']))
        except (KeyError, ValueError):
            last_played = None

        if state.INDICATE_MEDIA_VERSIONS is True:
            userrating = 0
            for _ in self.item.findall('./Media'):
                userrating += 1
            # Don't show a value of '1'
            userrating = 0 if userrating == 1 else userrating
        else:
            try:
                userrating = int(float(item['userRating']))
            except (KeyError, ValueError):
                userrating = 0

        try:
            rating = float(item['audienceRating'])
        except (KeyError, ValueError):
            try:
                rating = float(item['rating'])
            except (KeyError, ValueError):
                rating = 0.0

        resume, runtime = self.resume_runtime()
        return {
            'Favorite': favorite,
            'PlayCount': playcount,
            'Played': played,
            'LastPlayedDate': last_played,
            'Resume': resume,
            'Runtime': runtime,
            'Rating': rating,
            'UserRating': userrating
        }

    def collection_list(self):
        """
        Returns a list of tuples of the collection id and tag, or an empty list
            [(<collection id 1>, <collection name 1>), ...]
        """
        collections = []
        for child in self.item:
            if child.tag == 'Collection':
                collections.append((child.get('id'), child.get('tag')))
        return collections

    def people(self):
        """
        Returns a dict of lists of people found.
        {
            'Director': list,
            'Writer': list,
            'Cast': list of tuples (<actor>, <role>), <role> might be ''
            'Producer': list
        }
        """
        director = []
        writer = []
        cast = []
        producer = []
        for child in self.item:
            if child.tag == 'Director':
                director.append(child.attrib['tag'])
            elif child.tag == 'Writer':
                writer.append(child.attrib['tag'])
            elif child.tag == 'Role':
                cast.append((child.attrib['tag'], child.get('role', '')))
            elif child.tag == 'Producer':
                producer.append(child.attrib['tag'])
        return {
            'Director': director,
            'Writer': writer,
            'Cast': cast,
            'Producer': producer
        }

    def people_list(self):
        """
        Returns a dict with lists of tuples:
        {
            'actor': [..., (<name>, <artwork url>, <role>, <cast order>), ...],
            'director': [..., (<name>, ), ...],
            'writer': [..., (<name>, ), ...]
        }
        Everything in unicode, except <cast order> which is an int.
        Only <artwork url> and <role> may be None if not found.

        Kodi does not yet support a Producer. People may appear several times
        per category and overall!
        """
        people = {
            'actor': [],
            'director': [],
            'writer': []
        }
        cast_order = 0
        for child in self.item:
            if child.tag == 'Role':
                people['actor'].append((child.attrib['tag'],
                                        child.get('thumb'),
                                        child.get('role'),
                                        cast_order))
                cast_order += 1
            elif child.tag == 'Writer':
                people['writer'].append((child.attrib['tag'], ))
            elif child.tag == 'Director':
                people['director'].append((child.attrib['tag'], ))
        return people

    def genre_list(self):
        """
        Returns a list of genres found. (Not a string)
        """
        genre = []
        for child in self.item:
            if child.tag == 'Genre':
                genre.append(child.attrib['tag'])
        return genre

    def guid_html_escaped(self):
        """
        Returns the 'guid' attribute, e.g.
            'com.plexapp.agents.thetvdb://76648/2/4?lang=en'
        as an HTML-escaped string or None
        """
        answ = self.item.get('guid')
        if answ is not None:
            answ = utils.escape_html(answ)
        return answ

    def provider(self, providername=None):
        """
        providername: e.g. 'imdb', 'tvdb'

        Returns the provider id, e.g. the IMDB id "tt0903624".
        Returns None if not found.
        """
        try:
            item = self.item.attrib['guid']
        except KeyError:
            return None

        if providername == 'imdb':
            regex = utils.REGEX_IMDB
        elif providername == 'tvdb':
            # originally e.g. com.plexapp.agents.thetvdb://276564?lang=en
            regex = utils.REGEX_TVDB
        else:
            return None

        provider = regex.findall(item)
        try:
            provider = provider[0]
        except IndexError:
            provider = None
        return provider
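
    # Illustrative sketch of what provider() extracts (guid formats taken from
    # the docstring examples above; values made up):
    #     'com.plexapp.agents.imdb://tt0903624?lang=en'     ->  'tt0903624'
    #     'com.plexapp.agents.thetvdb://276564?lang=en'     ->  '276564'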

    def title(self):
        """
        Returns the title of the element as unicode or 'Missing Title Name'
        """
        return utils.try_decode(self.item.get('title', 'Missing Title Name'))

    def titles(self):
        """
        Returns an item's name/title or "Missing Title Name".
        Output is the tuple
            title, sorttitle

        sorttitle = title, if no sorttitle is found
        """
        title = self.item.get('title', 'Missing Title Name')
        sorttitle = self.item.get('titleSort', title)
        return title, sorttitle

    def plot(self):
        """
        Returns the plot or None.
        """
        return self.item.get('summary')

    def tagline(self):
        """
        Returns a shorter tagline or None
        """
        return self.item.get('tagline')

    def audience_rating(self):
        """
        Returns the audience rating, 'rating' itself or 0.0
        """
        res = self.item.get('audienceRating')
        if res is None:
            res = self.item.get('rating')
        try:
            res = float(res)
        except (ValueError, TypeError):
            res = 0.0
        return res

    def year(self):
        """
        Returns the production(?) year ("year") or None
        """
        return self.item.get('year')

    def resume_point(self):
        """
        Returns the resume point in seconds as a float. 0.0 if not found
        """
        try:
            resume = float(self.item.attrib['viewOffset'])
        except (KeyError, ValueError):
            resume = 0.0
        return resume * v.PLEX_TO_KODI_TIMEFACTOR

    def runtime(self):
        """
        Returns the total duration of the element as int. 0 if not found
        """
        try:
            runtime = float(self.item.attrib['duration'])
        except (KeyError, ValueError):
            runtime = 0.0
        return int(runtime * v.PLEX_TO_KODI_TIMEFACTOR)

    def resume_runtime(self):
        """
        Resume point and runtime/totaltime in seconds.
        Time from the Plex server is measured in milliseconds.
        Kodi: seconds

        Output is the tuple:
            resume, runtime. 0 if not found
        """
        try:
            runtime = float(self.item.attrib['duration'])
        except (KeyError, ValueError):
            runtime = 0.0
        try:
            resume = float(self.item.attrib['viewOffset'])
        except (KeyError, ValueError):
            resume = 0.0
        runtime = runtime * v.PLEX_TO_KODI_TIMEFACTOR
        resume = resume * v.PLEX_TO_KODI_TIMEFACTOR
        return resume, runtime
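
    # Worked example for the conversion above (assuming
    # v.PLEX_TO_KODI_TIMEFACTOR is 1.0 / 1000, i.e. milliseconds to seconds):
    #     viewOffset=150000, duration=7200000  ->  resume=150.0, runtime=7200.0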

    def content_rating(self):
        """
        Get the content rating or None
        """
        mpaa = self.item.get('contentRating')
        if mpaa is None:
            return
        # Convert more complex cases
        if mpaa in ("NR", "UR"):
            # Kodi seems to not like NR, but will accept Rated Not Rated
            mpaa = "Rated Not Rated"
        elif mpaa.startswith('gb/'):
            mpaa = mpaa.replace('gb/', 'UK:', 1)
        return mpaa

    def country_list(self):
        """
        Returns a list of all countries found in the item.
        """
        country = []
        for child in self.item:
            if child.tag == 'Country':
                country.append(child.attrib['tag'])
        return country

    def premiere_date(self):
        """
        Returns the "originallyAvailableAt" or None
        """
        return self.item.get('originallyAvailableAt')

    def music_studio(self):
        """
        Returns the 'studio' or None
        """
        return self.item.get('studio')

    def music_studio_list(self):
        """
        Returns a list with a single entry for the studio, or an empty list
        """
        studio = []
        try:
            studio.append(self.replace_studio(self.item.attrib['studio']))
        except KeyError:
            pass
        return studio

    @staticmethod
    def replace_studio(studio_name):
        """
        Convert studio names so that Kodi properly detects them
        """
        studios = {
            'abc (us)': "ABC",
            'fox (us)': "FOX",
            'mtv (us)': "MTV",
            'showcase (ca)': "Showcase",
            'wgn america': "WGN"
        }
        return studios.get(studio_name.lower(), studio_name)

    @staticmethod
    def list_to_string(listobject):
        """
        Smart-joins the listobject into a single string using a " / " separator
        If the list is empty, an empty string is returned.
        """
        string = " / ".join(listobject)
        return string

    def parent_plex_id(self):
        """
        Returns the 'parentRatingKey' as a string or None
        """
        return self.item.get('parentRatingKey')

    def grandparent_id(self):
        """
        Returns the ratingKey for the corresponding grandparent, e.g. a TV show
        for episodes, or None
        """
        return self.item.get('grandparentRatingKey')

    def grandparent_title(self):
        """
        Returns the title for the corresponding grandparent, e.g. a TV show
        name for episodes, or None
        """
        return self.item.get('grandparentTitle')

    def episode_data(self):
        """
        Call on a single episode.

        Output: for the corresponding TV show and season:
            [
                TV show key,        Plex: 'grandparentRatingKey'
                TV show title,      Plex: 'grandparentTitle'
                TV show season,     Plex: 'parentIndex'
                Episode number,     Plex: 'index'
            ]
        """
        return (self.item.get('grandparentRatingKey'),
                self.item.get('grandparentTitle'),
                self.item.get('parentIndex'),
                self.item.get('index'))

    @staticmethod
    def attach_plex_token_to_url(url):
        """
        Returns an extended URL with the Plex token included as 'X-Plex-Token='

        url may or may not already contain a '?'
        """
        if utils.window('pms_token') == '':
            return url
        if '?' not in url:
            url = "%s?X-Plex-Token=%s" % (url, utils.window('pms_token'))
        else:
            url = "%s&X-Plex-Token=%s" % (url, utils.window('pms_token'))
        return url
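
    # Illustrative result of attach_plex_token_to_url() (server and token made
    # up):
    #     'http://192.168.0.2:32400/library/metadata/123'
    #  -> 'http://192.168.0.2:32400/library/metadata/123?X-Plex-Token=abc123'
    # '&' is used instead of '?' if the URL already carries a query string.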

    def item_id(self):
        """
        Returns current playQueueItemID or if unsuccessful the playListItemID
        as Unicode.
        If not found, None is returned
        """
        return _unicode_or_none(self.item.get('playQueueItemID') or
                                self.item.get('playListItemID'))

    def _data_from_part_or_media(self, key):
        """
        Retrieves XML data 'key' first from the active part. If unsuccessful,
        tries to retrieve the data from the Media response part.

        If all fails, None is returned.
        """
        answ = self.item[0][self.part].get(key)
        if answ is None:
            answ = self.item[0].get(key)
        return answ

    def video_codec(self):
        """
        Returns the video codec and resolution for the child and part selected.
        If any data is not found on a part-level, the Media-level data is
        returned.
        If that also fails (e.g. for old trailers), None is returned.

        Output:
        {
            'videocodec': xxx,       e.g. 'h264'
            'resolution': xxx,       e.g. '720' or '1080'
            'height': xxx,           e.g. '816'
            'width': xxx,            e.g. '1920'
            'aspectratio': xxx,      e.g. '1.78'
            'bitrate': xxx,          e.g. '10642'
            'container': xxx,        e.g. 'mkv'
            'bitDepth': xxx,         e.g. '8', '10'
        }
        """
        answ = {
            'videocodec': self._data_from_part_or_media('videoCodec'),
            'resolution': self._data_from_part_or_media('videoResolution'),
            'height': self._data_from_part_or_media('height'),
            'width': self._data_from_part_or_media('width'),
            'aspectratio': self._data_from_part_or_media('aspectratio'),
            'bitrate': self._data_from_part_or_media('bitrate'),
            'container': self._data_from_part_or_media('container'),
        }
        try:
            answ['bitDepth'] = self.item[0][self.part][self.mediastream].get(
                'bitDepth')
        except (TypeError, AttributeError, KeyError, IndexError):
            answ['bitDepth'] = None
        return answ

    def extras(self):
        """
        Returns a list of XML etree elements for each extra, e.g. a trailer.
        """
        answ = []
        for extras in self.item.iterfind('Extras'):
            for extra in extras:
                answ.append(extra)
        return answ

    def trailers(self):
        """
        Returns the URL for a single trailer (local trailer preferred; first
        trailer found returned) or an add-on path to list all Plex extras
        if the user setting showExtrasInsteadOfTrailer is set.
        Returns None if nothing is found.
        """
        url = None
        for extras in self.item.iterfind('Extras'):
            # There will always be only 1 extras element
            if (len(extras) > 0 and
                    state.SHOW_EXTRAS_INSTEAD_OF_PLAYING_TRAILER):
                return ('plugin://%s?mode=route_to_extras&plex_id=%s'
                        % (v.ADDON_ID, self.plex_id()))
            for extra in extras:
                try:
                    typus = int(extra.attrib['extraType'])
                except (KeyError, TypeError):
                    typus = None
                if typus != 1:
                    # Skip non-trailers
                    continue
                if extra.get('guid', '').startswith('file:'):
                    url = extra.get('ratingKey')
                    # Always prefer local trailers (first one listed)
                    break
                elif not url:
                    url = extra.get('ratingKey')
        if url:
            url = ('plugin://%s.movies/?plex_id=%s&plex_type=%s&mode=play'
                   % (v.ADDON_ID, url, v.PLEX_TYPE_CLIP))
        return url

    def mediastreams(self):
        """
        Returns the media streams for metadata purposes

        Output: each track is a dictionary
        {
            'video': videotrack-list,       'codec', 'height', 'width',
                                            'aspect', 'video3DFormat'
            'audio': audiotrack-list,       'codec', 'channels',
                                            'language'
            'subtitle': list of subtitle languages (or "Unknown")
        }
        """
        videotracks = []
        audiotracks = []
        subtitlelanguages = []
        try:
            # Sometimes, aspectratio is on the "toplevel"
            aspect = self.item[0].get('aspectRatio')
        except IndexError:
            # There is no stream info at all, returning empty
            return {
                'video': videotracks,
                'audio': audiotracks,
                'subtitle': subtitlelanguages
            }
        # Loop over parts
        for child in self.item[0]:
            container = child.get('container')
            # Loop over Streams
            for grandchild in child:
                stream = grandchild.attrib
                media_type = int(stream.get('streamType', 999))
                track = {}
                if media_type == 1:  # Video streams
                    if 'codec' in stream:
                        track['codec'] = stream['codec'].lower()
                        if "msmpeg4" in track['codec']:
                            track['codec'] = "divx"
                        elif "mpeg4" in track['codec']:
                            # if "simple profile" in profile or profile == "":
                            #    track['codec'] = "xvid"
                            pass
                        elif "h264" in track['codec']:
                            if container in ("mp4", "mov", "m4v"):
                                track['codec'] = "avc1"
                    track['height'] = stream.get('height')
                    track['width'] = stream.get('width')
                    # track['Video3DFormat'] = item.get('Video3DFormat')
                    track['aspect'] = stream.get('aspectRatio', aspect)
                    track['duration'] = self.resume_runtime()[1]
                    track['video3DFormat'] = None
                    videotracks.append(track)
                elif media_type == 2:  # Audio streams
                    if 'codec' in stream:
                        track['codec'] = stream['codec'].lower()
                        if ("dca" in track['codec'] and
                                "ma" in stream.get('profile', '').lower()):
                            track['codec'] = "dtshd_ma"
                    track['channels'] = stream.get('channels')
                    # 'unknown' if we cannot get language
                    track['language'] = stream.get(
                        'languageCode', utils.lang(39310)).lower()
                    audiotracks.append(track)
                elif media_type == 3:  # Subtitle streams
                    # 'unknown' if we cannot get language
                    subtitlelanguages.append(
                        stream.get('languageCode', utils.lang(39310)).lower())
        return {
            'video': videotracks,
            'audio': audiotracks,
            'subtitle': subtitlelanguages
        }
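
    # Illustrative return value of mediastreams() (values made up):
    #     {
    #         'video': [{'codec': 'h264', 'height': '1080', 'width': '1920',
    #                    'aspect': '1.78', 'duration': 7200.0,
    #                    'video3DFormat': None}],
    #         'audio': [{'codec': 'dtshd_ma', 'channels': '8',
    #                    'language': 'eng'}],
    #         'subtitle': ['eng', 'ger']
    #     }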

    def one_artwork(self, art_kind):
        """
        Returns the URL for a single piece of artwork of kind art_kind (e.g.
        'thumb'), routed through the PMS photo transcoder if necessary, or
        None/an empty value if the attribute is missing.
        """
        artwork = self.item.get(art_kind)
        if artwork and not artwork.startswith('http'):
            if '/composite/' in artwork:
                try:
                    # e.g. Plex collections where artwork already contains
                    # width and height. Need to upscale for better resolution
                    artwork, args = artwork.split('?')
                    args = dict(parse_qsl(args))
                    width = int(args.get('width', 400))
                    height = int(args.get('height', 400))
                    # Adjust to 4k resolution 3,840x2,160
                    scaling = 3840.0 / float(max(width, height))
                    width = int(scaling * width)
                    height = int(scaling * height)
                except ValueError:
                    # e.g. playlists
                    width = 3840
                    height = 3840
                artwork = '%s?width=%s&height=%s' % (artwork, width, height)
            artwork = ('%s/photo/:/transcode?width=3840&height=3840&'
                       'minSize=1&upscale=0&url=%s'
                       % (self.server, quote(artwork)))
            artwork = self.attach_plex_token_to_url(artwork)
        return artwork
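
    # Illustrative transcode URL produced by one_artwork() (server and key made
    # up):
    #     http://192.168.0.2:32400/photo/:/transcode?width=3840&height=3840&
    #         minSize=1&upscale=0&url=%2Flibrary%2Fmetadata%2F123%2Fthumb%2F456
    # with the X-Plex-Token appended by attach_plex_token_to_url().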

    def artwork(self, kodi_id=None, kodi_type=None, full_artwork=False):
        """
        Gets the URLs to the Plex artwork. Dict keys will be missing if there
        is no corresponding artwork.
        Pass kodi_id and kodi_type to grab the artwork saved in the Kodi DB
        (thus potentially more artwork, e.g. clearart, discart)

        Output ('max' version)
        {
            'thumb'
            'poster'
            'banner'
            'clearart'
            'clearlogo'
            'fanart'
        }
        'landscape' and 'icon' might be implemented later
        Passing full_artwork=True returns ALL the artwork for the item, so not
        just 'thumb' for episodes, but also season and show artwork
        """
        artworks = {}
        if self.plex_type() == v.PLEX_TYPE_EPISODE:
            # Artwork lookup for episodes is broken for addon paths
            # Episodes are a bit special: only get the thumb, because all
            # the other artwork will be saved under season and show
            # EXCEPT if you're constructing a listitem
            if not full_artwork:
                art = self.one_artwork('thumb')
                if art:
                    artworks['thumb'] = art
                return artworks
            for kodi_artwork, plex_artwork in \
                    v.KODI_TO_PLEX_ARTWORK_EPISODE.iteritems():
                art = self.one_artwork(plex_artwork)
                if art:
                    artworks[kodi_artwork] = art
            if not full_artwork:
                return artworks
            with plexdb.Get_Plex_DB() as plex_db:
                try:
                    season_id = plex_db.getItem_byId(self.plex_id())[3]
                except TypeError:
                    return artworks
            # Grab artwork from the season
            with kodidb.GetKodiDB('video') as kodi_db:
                season_art = kodi_db.get_art(season_id, v.KODI_TYPE_SEASON)
            for kodi_art in season_art:
                artworks['season.%s' % kodi_art] = season_art[kodi_art]
            # Get the show id
            with plexdb.Get_Plex_DB() as plex_db:
                try:
                    show_id = plex_db.getItem_byKodiId(season_id,
                                                       v.KODI_TYPE_SEASON)[1]
                except TypeError:
                    return artworks
            # Grab more artwork from the show
            with kodidb.GetKodiDB('video') as kodi_db:
                show_art = kodi_db.get_art(show_id, v.KODI_TYPE_SHOW)
            for kodi_art in show_art:
                artworks['tvshow.%s' % kodi_art] = show_art[kodi_art]
            return artworks

        if kodi_id:
            # in Kodi database, potentially with additional e.g. clearart
            if self.plex_type() in v.PLEX_VIDEOTYPES:
                with kodidb.GetKodiDB('video') as kodi_db:
                    return kodi_db.get_art(kodi_id, kodi_type)
            else:
                with kodidb.GetKodiDB('music') as kodi_db:
                    return kodi_db.get_art(kodi_id, kodi_type)

        # Grab artwork from Plex
        for kodi_artwork, plex_artwork in v.KODI_TO_PLEX_ARTWORK.iteritems():
            art = self.one_artwork(plex_artwork)
            if art:
                artworks[kodi_artwork] = art
        if self.plex_type() in (v.PLEX_TYPE_SONG, v.PLEX_TYPE_ALBUM):
            # Get parent item artwork if the main item is missing artwork
            if 'fanart' not in artworks:
                art = self.one_artwork('parentArt')
                if art:
                    artworks['fanart1'] = art
            if 'poster' not in artworks:
                art = self.one_artwork('parentThumb')
                if art:
                    artworks['poster'] = art
        if self.plex_type() in (v.PLEX_TYPE_SONG,
                                v.PLEX_TYPE_ALBUM,
                                v.PLEX_TYPE_ARTIST):
            # need to set poster also as thumb
            art = self.one_artwork('thumb')
            if art:
                artworks['thumb'] = art
        return artworks

    def fanart_artwork(self, artworks):
        """
        Downloads additional fanart from third party sources (well, link to
        fanart only).
        """
        external_id = self.retrieve_external_item_id()
        if external_id is not None:
            artworks = self.lookup_fanart_tv(external_id[0], artworks)
        return artworks

    def retrieve_external_item_id(self, collection=False):
        """
        Returns the tuple
            media_id [unicode]:     the item's IMDB id for movies or tvdb id
                                    for TV shows
            poster [unicode]:       path to the item's poster artwork
            background [unicode]:   path to the item's background artwork

        The last two might be None if not found. Generally None is returned
        if unsuccessful.

        If not found in the item's Plex metadata, check themoviedb.org.
        """
        item = self.item.attrib
        media_type = item.get('type')
        media_id = None
        # Return the saved Plex id's, if applicable
        # Always seek collection's ids since not provided by PMS
        if collection is False:
            if media_type == v.PLEX_TYPE_MOVIE:
                media_id = self.provider('imdb')
            elif media_type == v.PLEX_TYPE_SHOW:
                media_id = self.provider('tvdb')
            if media_id is not None:
                return media_id, None, None
            LOG.info('Plex did not provide ID for IMDB or TVDB. Start '
                     'lookup process')
        else:
            LOG.info('Start movie set/collection lookup on themoviedb with %s',
                     item.get('title', ''))

        api_key = utils.settings('themoviedbAPIKey')
        if media_type == v.PLEX_TYPE_SHOW:
            media_type = 'tv'
        title = item.get('title', '')
        # if the title contains the year, remove it, as tmdb cannot deal with
        # it - replace e.g. 'The Americans (2015)' with 'The Americans'
        title = sub(r'\s*\(\d{4}\)$', '', title, count=1)
        url = 'https://api.themoviedb.org/3/search/%s' % media_type
        parameters = {
            'api_key': api_key,
            'language': v.KODILANGUAGE,
            'query': utils.try_encode(title)
        }
        data = DU().downloadUrl(url,
                                authenticate=False,
                                parameters=parameters,
                                timeout=7)
        try:
            data.get('test')
        except AttributeError:
            LOG.warning('Could not download data from themoviedb')
            return
        if not data.get('results'):
            LOG.info('No match found on themoviedb for type: %s, title: %s',
                     media_type, title)
            return

        year = item.get('year')
        match_found = None
        # find year match
        if year:
            for entry in data['results']:
                if year in entry.get('first_air_date', ''):
                    match_found = entry
                    break
                elif year in entry.get('release_date', ''):
                    match_found = entry
                    break
        # find exact match based on title, if we haven't found a year match
        if match_found is None:
            LOG.info('No themoviedb match found using year %s', year)
            replacements = (
                ' ',
                '-',
                '&',
                ',',
                ':',
                ';'
            )
            for entry in data['results']:
                name = entry.get('name', entry.get('title', ''))
                original_name = entry.get('original_name', '')
                title_alt = title.lower()
                name_alt = name.lower()
                org_name_alt = original_name.lower()
                for replace_string in replacements:
                    title_alt = title_alt.replace(replace_string, '')
                    name_alt = name_alt.replace(replace_string, '')
                    org_name_alt = org_name_alt.replace(replace_string, '')
                if name == title or original_name == title:
                    # match found for exact title name
                    match_found = entry
                    break
                elif (name.split(' (')[0] == title or title_alt == name_alt
                        or title_alt == org_name_alt):
                    # match found with substituting some stuff
                    match_found = entry
                    break

        # if a match was not found, we accept the closest match from TMDB
        if match_found is None and data.get('results'):
            LOG.info('Using very first match from themoviedb')
            match_found = entry = data.get('results')[0]

        if match_found is None:
            LOG.info('Still no themoviedb match for type: %s, title: %s, '
                     'year: %s', media_type, title, year)
            LOG.debug('themoviedb answer was %s', data['results'])
            return

        LOG.info('Found themoviedb match for %s: %s',
                 item.get('title'), match_found)

        tmdb_id = str(entry.get('id', ''))
        if tmdb_id == '':
            LOG.error('No themoviedb ID found, aborting')
            return

        if media_type == 'multi' and entry.get('media_type'):
            media_type = entry.get('media_type')
        name = entry.get('name', entry.get('title'))
        # lookup external tmdb_id and perform artwork lookup on fanart.tv
        parameters = {'api_key': api_key}
        if media_type == 'movie':
            url = 'https://api.themoviedb.org/3/movie/%s' % tmdb_id
            parameters['append_to_response'] = 'videos'
        elif media_type == 'tv':
            url = 'https://api.themoviedb.org/3/tv/%s' % tmdb_id
            parameters['append_to_response'] = 'external_ids,videos'
        media_id, poster, background = None, None, None
        for language in [v.KODILANGUAGE, 'en']:
            parameters['language'] = language
            data = DU().downloadUrl(url,
                                    authenticate=False,
                                    parameters=parameters,
                                    timeout=7)
            try:
                data.get('test')
            except AttributeError:
                LOG.warning('Could not download %s with parameters %s',
                            url, parameters)
                continue
            if collection is False:
                if data.get('imdb_id'):
                    media_id = str(data.get('imdb_id'))
                    break
                if (data.get('external_ids') and
                        data['external_ids'].get('tvdb_id')):
                    media_id = str(data['external_ids']['tvdb_id'])
                    break
            else:
                if not data.get('belongs_to_collection'):
                    continue
                media_id = data.get('belongs_to_collection').get('id')
                if not media_id:
                    continue
                media_id = str(media_id)
                LOG.debug('Retrieved collections tmdb id %s for %s',
                          media_id, title)
                url = 'https://api.themoviedb.org/3/collection/%s' % media_id
                data = DU().downloadUrl(url,
                                        authenticate=False,
                                        parameters=parameters,
                                        timeout=7)
                try:
                    data.get('poster_path')
                except AttributeError:
                    LOG.debug('Could not find TheMovieDB poster paths for %s'
                              ' in the language %s', title, language)
                    continue
                if not poster and data.get('poster_path'):
                    poster = ('https://image.tmdb.org/t/p/original%s' %
                              data.get('poster_path'))
                if not background and data.get('backdrop_path'):
                    background = ('https://image.tmdb.org/t/p/original%s' %
                                  data.get('backdrop_path'))
        return media_id, poster, background

    def lookup_fanart_tv(self, media_id, artworks):
        """
        perform artwork lookup on fanart.tv

        media_id: IMDB id for movies, tvdb id for TV shows
        """
        api_key = utils.settings('FanArtTVAPIKey')
        typus = self.plex_type()
        if typus == v.PLEX_TYPE_SHOW:
            typus = 'tv'

        if typus == v.PLEX_TYPE_MOVIE:
            url = 'http://webservice.fanart.tv/v3/movies/%s?api_key=%s' \
                % (media_id, api_key)
        elif typus == 'tv':
            url = 'http://webservice.fanart.tv/v3/tv/%s?api_key=%s' \
                % (media_id, api_key)
        else:
            # Not supported artwork
            return artworks
        data = DU().downloadUrl(url, authenticate=False, timeout=15)
        try:
            data.get('test')
        except AttributeError:
            LOG.error('Could not download data from FanartTV')
            return artworks

        fanart_tv_types = list(v.FANART_TV_TO_KODI_TYPE)

        if typus == v.PLEX_TYPE_ARTIST:
            fanart_tv_types.append(("thumb", "folder"))
        else:
            fanart_tv_types.append(("thumb", "thumb"))

        prefixes = (
            "hd" + typus,
            "hd",
            typus,
            "",
        )
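        # fanart.tv keys are assembled below as prefix + fanart_tv_type and
        # tried from most specific to least specific, so an HD variant (e.g.
        # a key like 'hdmovielogo') would be picked up before the plain
        # 'movielogo'.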
        for fanart_tv_type, kodi_type in fanart_tv_types:
            # Skip the ones we already have
            if kodi_type in artworks:
                continue
            for prefix in prefixes:
                fanarttvimage = prefix + fanart_tv_type
                if fanarttvimage not in data:
                    continue
                # select image in preferred language
                for entry in data[fanarttvimage]:
                    if entry.get("lang") == v.KODILANGUAGE:
                        artworks[kodi_type] = \
                            entry.get("url", "").replace(' ', '%20')
                        break
                # Fall back to the first English or undefined-language entry
                # (so we're actually grabbing the more popular one)
                if kodi_type not in artworks:
                    for entry in data[fanarttvimage]:
                        if entry.get("lang") in ("en", "00"):
                            artworks[kodi_type] = \
                                entry.get("url", "").replace(' ', '%20')
                            break

        # grab extrafanarts in list
        fanartcount = 1 if 'fanart' in artworks else ''
        for prefix in prefixes:
            fanarttvimage = prefix + 'background'
            if fanarttvimage not in data:
                continue
            for entry in data[fanarttvimage]:
                if entry.get("url") is None:
                    continue
                artworks['fanart%s' % fanartcount] = \
                    entry['url'].replace(' ', '%20')
                try:
                    fanartcount += 1
                except TypeError:
                    fanartcount = 1
                if fanartcount >= v.MAX_BACKGROUND_COUNT:
                    break
        return artworks
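        # Illustrative only (made-up id and keys):
        #   lookup_fanart_tv('tt0137523', {}) might return something like
        #   {'clearlogo': 'https://assets.fanart.tv/...', 'fanart1': '...'}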

    def library_section_id(self):
        """
        Returns the id of the Plex library section (for e.g. a movies section)
        or None
        """
        return self.item.get('librarySectionID')

    def collections_match(self):
        """
        Downloads one additional xml from the PMS in order to return a list of
        tuples [(collection_id, plex_id), ...] for all collections of the
        current item's Plex library section
        The returned collection_id can then be matched against e.g. the
        collection id from the movie's metadata
        """
        xml = PF.collections(self.library_section_id())
        if xml is None:
            return []
        return [(i.get('index'), i.get('ratingKey')) for i in xml]
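        # 'index' maps to collection_id and 'ratingKey' to the collection's
        # plex_id in the tuples documented above.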

    def set_artwork(self):
        """
        Returns a dict of additional artwork URLs (poster, fanart, ...)
        gathered from themoviedb and fanart.tv; may be empty if nothing was
        found.
        Only call on movies
        """
        artworks = {}
        # Plex does not get much artwork - go ahead and get the rest from
        # fanart tv only for movie or tv show
        external_id = self.retrieve_external_item_id(collection=True)
        if external_id is not None:
            external_id, poster, background = external_id
            if poster is not None:
                artworks['poster'] = poster
            if background is not None:
                artworks['fanart'] = background
            artworks = self.lookup_fanart_tv(external_id, artworks)
        else:
            LOG.info('Did not find a set/collection ID on TheMovieDB using %s.'
                     ' Artwork will be missing.', self.titles()[0])
        return artworks

    def should_stream(self):
        """
        Returns True if the item's 'optimizedForStreaming' is set, False
        otherwise
        """
        return self.item[0].get('optimizedForStreaming') == '1'
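        # 'optimizedForStreaming' is an attribute of the item's first Media
        # element; presumably the PMS flags files it has already optimized
        # for streaming this way.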

    def mediastream_number(self):
        """
        Returns the Media stream as an int (mostly 0). Will let the user choose
        if several media streams are present for a PMS item (if settings are
        set accordingly)

        Returns None if the user aborted selection (leaving self.mediastream at
        its default of None)
        """
        # How many streams do we have?
        count = 0
        for entry in self.item.iterfind('./Media'):
            count += 1
        if (count > 1 and (
                (self.plex_type() != v.PLEX_TYPE_CLIP and
                 utils.settings('bestQuality') == 'false')
                or
                (self.plex_type() == v.PLEX_TYPE_CLIP and
                 utils.settings('bestTrailer') == 'false'))):
            # Several streams/files available.
            dialoglist = []
            for entry in self.item.iterfind('./Media'):
                # Get additional info (filename / languages)
                if 'file' in entry[0].attrib:
                    option = utils.try_decode(entry[0].attrib['file'])
                    option = path_ops.path.basename(option)
                else:
                    option = self.title() or ''
                # Languages of audio streams
                languages = []
                for stream in entry[0]:
                    if (stream.attrib['streamType'] == '1' and
                            'language' in stream.attrib):
                        language = utils.try_decode(stream.attrib['language'])
                        languages.append(language)
                languages = ', '.join(languages)
                if languages:
                    if option:
                        option = '%s (%s): ' % (option, languages)
                    else:
                        option = '%s: ' % languages
                else:
                    option = '%s ' % option
                if 'videoResolution' in entry.attrib:
                    res = utils.try_decode(entry.attrib['videoResolution'])
                    option = '%s%sp ' % (option, res)
                if 'videoCodec' in entry.attrib:
                    codec = utils.try_decode(entry.attrib['videoCodec'])
                    option = '%s%s' % (option, codec)
                option = option.strip() + ' - '
                if 'audioProfile' in entry.attrib:
                    profile = utils.try_decode(entry.attrib['audioProfile'])
                    option = '%s%s ' % (option, profile)
                if 'audioCodec' in entry.attrib:
                    codec = utils.try_decode(entry.attrib['audioCodec'])
                    option = '%s%s ' % (option, codec)
                option = utils.try_encode(option.strip())
                dialoglist.append(option)
            media = utils.dialog('select', 'Select stream', dialoglist)
            if media == -1:
                LOG.info('User cancelled media stream selection')
                return
        else:
            media = 0
        self.mediastream = media
        return media
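        # An entry built above might read, with illustrative values only:
        # 'Movie.mkv (English, German): 1080p h264 - dts ac3'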

    def transcode_video_path(self, action, quality=None):
        """
        To be called on a VIDEO level of PMS xml response!

        Transcode Video support; returns the URL to get a media started

        Input:
            action      'DirectStream' or 'Transcode'
            quality:    {
                            'videoResolution': e.g. '1024x768',
                            'videoQuality': e.g. '60',
                            'maxVideoBitrate': e.g. '2000' (in kbits)
                        }
                        (one or several of these options)
        Output:
            final URL to pull in PMS transcoder

        TODO: mediaIndex
        """
        if self.mediastream is None and self.mediastream_number() is None:
            return
        if quality is None:
            quality = {}
        xargs = clientinfo.getXArgsDeviceInfo()
        # For DirectPlay, path/key of PART is needed
        # trailers are 'clip' with PMS xmls
        if action == "DirectStream":
            path = self.item[self.mediastream][self.part].attrib['key']
            url = self.server + path
            # e.g. Trailers already feature an '?'!
            if '?' in url:
                url += '&' + urlencode(xargs)
            else:
                url += '?' + urlencode(xargs)
            return url

        # For Transcoding
        headers = {
            'X-Plex-Platform': 'Android',
            'X-Plex-Platform-Version': '7.0',
            'X-Plex-Product': 'Plex for Android',
            'X-Plex-Version': '5.8.0.475'
        }
        # Path/key to VIDEO item of xml PMS response is needed, not part
        path = self.item.attrib['key']
        transcode_path = self.server + \
            '/video/:/transcode/universal/start.m3u8?'
        args = {
            'audioBoost': utils.settings('audioBoost'),
            'autoAdjustQuality': 0,
            'directPlay': 0,
            'directStream': 1,
            'protocol': 'hls',   # seen in the wild: 'dash', 'http', 'hls'
            'session': utils.window('plex_client_Id'),
            'fastSeek': 1,
            'path': path,
            'mediaIndex': self.mediastream,
            'partIndex': self.part,
            'hasMDE': 1,
            'location': 'lan',
            'subtitleSize': utils.settings('subtitleSize')
        }
        # Look like Android to let the PMS use the transcoding profile
        xargs.update(headers)
        LOG.debug("Setting transcode quality to: %s", quality)
        args.update(quality)
        url = transcode_path + urlencode(xargs) + '&' + urlencode(args)
        return url
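        # The resulting URL has roughly this shape (parameters abridged,
        # values made up):
        #   http://<pms>:32400/video/:/transcode/universal/start.m3u8?
        #       X-Plex-Platform=Android&...&path=%2Flibrary%2Fmetadata%2F1234&
        #       mediaIndex=0&partIndex=0&protocol=hls&...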

    def cache_external_subs(self):
        """
        Downloads external subtitles temporarily to Kodi and returns a list
        of their paths
        """
        externalsubs = []
        try:
            mediastreams = self.item[0][self.part]
        except (TypeError, KeyError, IndexError):
            return
        kodiindex = 0
        fileindex = 0
        for stream in mediastreams:
            # Since plex returns all possible tracks together, have to pull
            # only external subtitles - only for these a 'key' exists
            if stream.get('streamType') != "3":
                # Not a subtitle
                continue
            # Only set for additional external subtitles NOT lying beside video
            key = stream.get('key')
            # Only set for dedicated subtitle files lying beside video
            # ext = stream.attrib.get('format')
            if key:
                # We do know the language - temporarily download
                if stream.get('languageCode') is not None:
                    path = self.download_external_subtitles(
                        "{server}%s" % key,
                        "subtitle%02d.%s.%s" % (fileindex,
                                                stream.attrib['languageCode'],
                                                stream.attrib['codec']))
                    fileindex += 1
                # We don't know the language - no need to download
                else:
                    path = self.attach_plex_token_to_url(
                        "%s%s" % (self.server, key))
                externalsubs.append(path)
                kodiindex += 1
        LOG.info('Found external subs: %s', externalsubs)
        return externalsubs
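        # Temporary files end up named e.g. 'subtitle00.eng.srt' (index,
        # language code, codec), so Kodi can still display the subtitle
        # language.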

    @staticmethod
    def download_external_subtitles(url, filename):
        """
        One cannot pass the subtitle language for ListItems. Workaround; will
        download the subtitle at url to the Kodi PKC directory in a temp dir

        Returns the path to the downloaded subtitle or None
        """
        path = path_ops.path.join(v.EXTERNAL_SUBTITLE_TEMP_PATH, filename)
        response = DU().downloadUrl(url, return_response=True)
        try:
            response.status_code
        except AttributeError:
            LOG.error('Could not temporarily download subtitle %s', url)
            return
        else:
            LOG.debug('Writing temp subtitle to %s', path)
            with open(path_ops.encode_path(path), 'wb') as filer:
                filer.write(response.content)
            return path

    def kodi_premiere_date(self):
        """
        Takes Plex' originallyAvailableAt of the form "yyyy-mm-dd" and returns
        Kodi's "dd.mm.yyyy" or None
        """
        date = self.premiere_date()
        if date is None:
            return
        try:
            date = sub(r'(\d+)-(\d+)-(\d+)', r'\3.\2.\1', date)
        except:
            date = None
        return date
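        # e.g. '2016-06-12' becomes '12.06.2016'; strings not matching the
        # yyyy-mm-dd pattern are returned unchanged by re.sub.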

    def create_listitem(self, listitem=None, append_show_title=False,
                        append_sxxexx=False):
        """
        Return a xbmcgui.ListItem() for this Plex item
        """
        if self.plex_type() == v.PLEX_TYPE_PHOTO:
            listitem = self._create_photo_listitem(listitem)
            # Only set the bare minimum of artwork
            listitem.setArt({'icon': 'DefaultPicture.png',
                             'fanart': self.one_artwork('thumb')})
        elif self.plex_type() == v.PLEX_TYPE_SONG:
            listitem = self._create_audio_listitem(listitem)
            listitem.setArt(self.artwork())
        else:
            listitem = self._create_video_listitem(listitem,
                                                   append_show_title,
                                                   append_sxxexx)
            self.add_video_streams(listitem)
            listitem.setArt(self.artwork(full_artwork=True))
        return listitem

    def _create_photo_listitem(self, listitem=None):
        """
        Use for photo items only
        """
        title, _ = self.titles()
        if listitem is None:
            listitem = ListItem(title)
        else:
            listitem.setLabel(title)
        metadata = {
            'date': self.kodi_premiere_date(),
            'size': long(self.item[0][0].get('size', 0)),
            'exif:width': self.item[0].get('width', ''),
            'exif:height': self.item[0].get('height', ''),
        }
        listitem.setInfo(type='image', infoLabels=metadata)
        listitem.setProperty('plot', self.plot())
        listitem.setProperty('plexid', self.plex_id())
        return listitem

    def _create_video_listitem(self,
                               listitem=None,
                               append_show_title=False,
                               append_sxxexx=False):
        """
        Use for video items only
        Call on a child level of PMS xml response (e.g. in a for loop)

        listitem            : existing xbmcgui.ListItem to work with
                              otherwise, a new one is created
        append_show_title   : True to append TV show title to episode title
        append_sxxexx       : True to append SxxExx to episode title

        Returns XBMC listitem for this PMS library item
        """
        title, sorttitle = self.titles()
        typus = self.plex_type()

        if listitem is None:
            listitem = ListItem(title)
        else:
            listitem.setLabel(title)
        # Necessary; Kodi won't start video otherwise!
        listitem.setProperty('IsPlayable', 'true')
        # Video items, e.g. movies and episodes or clips
        people = self.people()
        userdata = self.userdata()
        metadata = {
            'genre': self.genre_list(),
            'country': self.country_list(),
            'year': self.year(),
            'rating': self.audience_rating(),
            'playcount': userdata['PlayCount'],
            'cast': people['Cast'],
            'director': people['Director'],
            'plot': self.plot(),
            'sorttitle': sorttitle,
            'duration': userdata['Runtime'],
            'studio': self.music_studio_list(),
            'tagline': self.tagline(),
            'writer': people.get('Writer'),
            'premiered': self.premiere_date(),
            'dateadded': self.date_created(),
            'lastplayed': userdata['LastPlayedDate'],
            'mpaa': self.content_rating(),
            'aired': self.premiere_date(),
        }
        # Do NOT set resumetime - otherwise Kodi always resumes at that time
        # even if the user chose to start element from the beginning
        # listitem.setProperty('resumetime', str(userdata['Resume']))
        listitem.setProperty('totaltime', str(userdata['Runtime']))

        if typus == v.PLEX_TYPE_EPISODE:
            metadata['mediatype'] = 'episode'
            _, show, season, episode = self.episode_data()
            season = -1 if season is None else int(season)
            episode = -1 if episode is None else int(episode)
            metadata['episode'] = episode
            metadata['sortepisode'] = episode
            metadata['season'] = season
            metadata['sortseason'] = season
            metadata['tvshowtitle'] = show
            if season and episode:
                if append_sxxexx is True:
                    title = "S%.2dE%.2d - %s" % (season, episode, title)
                if append_show_title is True:
                    title = "%s - %s " % (show, title)
                if append_show_title or append_sxxexx:
                    listitem.setLabel(title)
        elif typus == v.PLEX_TYPE_MOVIE:
            metadata['mediatype'] = 'movie'
        else:
            # E.g. clips, trailers, ...
            pass

        plex_id = self.plex_id()
        listitem.setProperty('plexid', plex_id)
        with plexdb.Get_Plex_DB() as plex_db:
            kodi_id = plex_db.getItem_byId(plex_id)
        if kodi_id:
            kodi_id = kodi_id[0]
            metadata['dbid'] = kodi_id
        metadata['title'] = title
        # Expensive operation
        listitem.setInfo('video', infoLabels=metadata)
        try:
            # Add context menu entry for information screen
            listitem.addContextMenuItems([(utils.lang(30032),
                                           'XBMC.Action(Info)',)])
        except TypeError:
            # Kodi fuck-up
            pass
        return listitem
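        # Setting 'dbid' (together with 'mediatype') should let Kodi link this
        # ListItem to its own library entry instead of treating it as a plain,
        # non-library video.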

    def track_number(self):
        """
        Returns the song's track number as an int or None if not found
        """
        try:
            return int(self.item.get('index'))
        except TypeError:
            pass

    def disc_number(self):
        """
        Returns the song's disc number as an int or None if not found
        """
        try:
            return int(self.item.get('parentIndex'))
        except TypeError:
            pass

    def _create_audio_listitem(self, listitem=None):
        """
        Use for songs only
        Call on a child level of PMS xml response (e.g. in a for loop)

        listitem    : existing xbmcgui.ListItem to work with
                      otherwise, a new one is created

        Returns XBMC listitem for this PMS library item
        """
        if listitem is None:
            listitem = ListItem(self.title())
        else:
            listitem.setLabel(self.title())
        listitem.setProperty('IsPlayable', 'true')
        userdata = self.userdata()
        metadata = {
            'mediatype': 'song',
            'tracknumber': self.track_number(),
            'discnumber': self.disc_number(),
            'duration': userdata['Runtime'],
            'year': self.year(),
            # Kodi does not support list of str
            'genre': ','.join(self.genre_list()) or None,
            'album': self.item.get('parentTitle'),
            'artist': self.item.get('originalTitle') or self.grandparent_title(),
            'title': self.title(),
            'rating': self.audience_rating(),
            'playcount': userdata['PlayCount'],
            'lastplayed': userdata['LastPlayedDate'],
            # lyrics string (On a dark desert highway...)
            # userrating integer - range is 1..10
            # comment string (This is a great song)
            # listeners integer (25614)
            # musicbrainztrackid string (cd1de9af-0b71-4503-9f96-9f5efe27923c)
            # musicbrainzartistid string (d87e52c5-bb8d-4da8-b941-9f4928627dc8)
            # musicbrainzalbumid string (24944755-2f68-3778-974e-f572a9e30108)
            # musicbrainzalbumartistid string (d87e52c5-bb8d-4da8-b941-9f4928627dc8)
        }
        plex_id = self.plex_id()
        listitem.setProperty('plexid', plex_id)
        if v.KODIVERSION >= 18:
            with plexdb.Get_Plex_DB() as plex_db:
                kodi_id = plex_db.getItem_byId(plex_id)
            if kodi_id:
                kodi_id = kodi_id[0]
                metadata['dbid'] = kodi_id
        listitem.setInfo('music', infoLabels=metadata)
        return listitem

    def add_video_streams(self, listitem):
        """
        Add media stream information to xbmcgui.ListItem
        """
        for key, value in self.mediastreams().iteritems():
            if value:
                listitem.addStreamInfo(key, value)
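        # Kodi's addStreamInfo() takes 'video', 'audio' or 'subtitle' as the
        # stream type; mediastreams() is assumed to return a dict keyed that
        # way, e.g. {'video': {'codec': ..., 'width': ...}, ...}.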

    def validate_playurl(self, path, typus, force_check=False, folder=False,
                         omit_check=False):
        """
        Returns a valid path for Kodi, e.g. with '\' substituted to '\\' in
        Unicode. Returns None if this is not possible

            path        : Unicode
            typus       : Plex type from PMS xml
            force_check : Will always try to check validity of path
                          Will also skip confirmation dialog if path not found
            folder      : Set to True if path is a folder
            omit_check  : Will entirely omit validity check if True
        """
        if path is None:
            return
        typus = v.REMAP_TYPE_FROM_PLEXTYPE[typus]
        if state.REMAP_PATH is True:
            path = path.replace(getattr(state, 'remapSMB%sOrg' % typus),
                                getattr(state, 'remapSMB%sNew' % typus),
                                1)
            # There might be backslashes left over:
            path = path.replace('\\', '/')
        elif state.REPLACE_SMB_PATH is True:
            if path.startswith('\\\\'):
                path = 'smb:' + path.replace('\\', '/')
        if ((state.PATH_VERIFIED and force_check is False) or
                omit_check is True):
            return path

        # exist() needs a / or \ at the end to work for directories
        if folder is False:
            # files
            check = path_ops.exists(path)
        else:
            # directories
            if "\\" in path:
                if not path.endswith('\\'):
                    # Add the missing backslash
                    check = path_ops.exists(path + "\\")
                else:
                    check = path_ops.exists(path)
            else:
                if not path.endswith('/'):
                    check = path_ops.exists(path + "/")
                else:
                    check = path_ops.exists(path)
        if not check:
            if force_check is False:
                # Validate the path is correct with user intervention
                if self.ask_to_validate(path):
                    state.STOP_SYNC = True
                    path = None
                state.PATH_VERIFIED = True
            else:
                path = None
        elif force_check is False:
            # Only set the flag if we were not force-checking the path
            state.PATH_VERIFIED = True
        return path
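        # e.g. with REPLACE_SMB_PATH, a path like \\server\movies\film.mkv
        # becomes smb://server/movies/film.mkv (illustrative path).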

    @staticmethod
    def ask_to_validate(url):
        """
        Displays a YESNO dialog box:
            Kodi can't locate file: <url>. Please verify the path.
            You may need to verify your network credentials in the
            add-on settings or use different Plex paths. Stop syncing?

        Returns True if sync should stop, else False
        """
        LOG.warn('Cannot access file: %s', url)
        # Kodi cannot locate the file #s. Please verify your PKC settings. Stop
        # syncing?
        return utils.yesno_dialog(utils.lang(29999), utils.lang(39031) % url)
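        # utils.lang(39031) is expected to contain a '%s' placeholder that is
        # filled with the offending path.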

    @staticmethod
    def _set_listitem_artprop(listitem, arttype, path):
        if arttype in (
                'thumb', 'fanart_image', 'small_poster', 'tiny_poster',
                'medium_landscape', 'medium_poster', 'small_fanartimage',
                'medium_fanartimage', 'fanart_noindicators'):
            listitem.setProperty(arttype, path)
        else:
            listitem.setArt({arttype: path})