2018-07-13 02:46:02 +10:00
|
|
|
#!/usr/bin/env python
|
2016-01-28 02:33:02 +11:00
|
|
|
# -*- coding: utf-8 -*-
|
2018-07-13 02:46:02 +10:00
|
|
|
from __future__ import absolute_import, division, unicode_literals
|
2017-04-02 02:28:02 +10:00
|
|
|
from logging import getLogger
|
2018-02-11 03:59:20 +11:00
|
|
|
from urllib import urlencode, quote_plus
|
2016-01-30 06:07:21 +11:00
|
|
|
from ast import literal_eval
|
2016-07-21 02:36:31 +10:00
|
|
|
from urlparse import urlparse, parse_qsl
|
2016-03-15 00:51:49 +11:00
|
|
|
from copy import deepcopy
|
2018-02-11 03:59:20 +11:00
|
|
|
from time import time
|
|
|
|
from threading import Thread
|
|
|
|
from xbmc import sleep
|
|
|
|
|
2018-06-22 03:24:37 +10:00
|
|
|
from .downloadutils import DownloadUtils as DU
|
2018-11-10 00:39:43 +11:00
|
|
|
from . import backgroundthread, utils, plex_tv, variables as v
|
2016-01-28 02:33:02 +11:00
|
|
|
|
2016-09-03 01:26:17 +10:00
|
|
|
###############################################################################
|
2018-06-22 03:24:37 +10:00
|
|
|
# Module-level logger for this file
LOG = getLogger('PLEX.plex_functions')

# Number of items to request per chunk from the PMS (addon setting
# 'limitindex'); used by DownloadChunks and DownloadGen
CONTAINERSIZE = int(utils.settings('limitindex'))

# For discovery of PMS in the local LAN
PLEX_GDM_IP = '239.0.0.250'  # multicast to PMS
PLEX_GDM_PORT = 32414
PLEX_GDM_MSG = 'M-SEARCH * HTTP/1.0'
2016-09-03 01:26:17 +10:00
|
|
|
###############################################################################
|
2016-01-28 02:33:02 +11:00
|
|
|
|
2017-01-16 04:01:27 +11:00
|
|
|
|
2016-02-01 02:13:40 +11:00
|
|
|
def ConvertPlexToKodiTime(plexTime):
    """
    Translate a Plex time value into Kodi time.

    Returns an int (in seconds), or None if plexTime is None.
    """
    if plexTime is None:
        return None
    kodi_time = float(plexTime) * v.PLEX_TO_KODI_TIMEFACTOR
    return int(kodi_time)
|
2016-02-01 02:13:40 +11:00
|
|
|
|
|
|
|
|
2016-01-30 06:07:21 +11:00
|
|
|
def GetPlexKeyNumber(plexKey):
    """
    Deconstructs e.g. '/library/metadata/xxxx' to the tuple (unicode, int)

        ('library/metadata', xxxx)

    Returns (None, None) if nothing is found
    """
    matches = utils.REGEX_END_DIGITS.findall(plexKey)
    if not matches:
        return (None, None)
    path, digits = matches[0]
    return (path, utils.cast(int, digits))
|
2016-01-30 06:07:21 +11:00
|
|
|
|
|
|
|
|
|
|
|
def ParseContainerKey(containerKey):
    """
    Parses e.g. /playQueues/3045?own=1&repeat=0&window=200 to:
        'playQueues', 3045, {'window': '200', 'own': '1', 'repeat': '0'}

    Output hence: library, key, query (str, int, dict)
    """
    parsed = urlparse(containerKey)
    library, key = GetPlexKeyNumber(parsed.path)
    return library, key, dict(parse_qsl(parsed.query))
|
|
|
|
|
|
|
|
|
|
|
|
def LiteralEval(string):
    """
    Safely evaluate a string containing a Python literal, e.g. turn the
    textual representation of a dict back into a dict.
    """
    # ast.literal_eval only accepts literals - no arbitrary code execution
    return literal_eval(string)
|
|
|
|
|
|
|
|
|
2016-01-28 06:41:28 +11:00
|
|
|
def GetMethodFromPlexType(plexType):
    """
    Map a Plex media type (e.g. 'movie', 'episode') to the name of the
    method used to process it.

    Raises KeyError for an unknown plexType.
    """
    return {
        'movie': 'add_update',
        'episode': 'add_updateEpisode',
        'show': 'add_update',
        'season': 'add_updateSeason',
        'track': 'add_updateSong',
        'album': 'add_updateAlbum',
        'artist': 'add_updateArtist',
    }[plexType]
|
|
|
|
|
|
|
|
|
2018-02-11 03:59:20 +11:00
|
|
|
def GetPlexLoginFromSettings():
    """
    Read the Plex login data from the addon settings.

    Returns a dict with the keys 'plexLogin', 'plexToken', 'plexid',
    'myplexlogin' and 'plexAvatar' (strings or unicode; '' if a setting
    is not found).

    myplexlogin is 'true' if user opted to log into plex.tv (the default)
    """
    setting_names = ('plexLogin', 'plexToken', 'plexid', 'myplexlogin',
                     'plexAvatar')
    return {name: utils.settings(name) for name in setting_names}
|
|
|
|
|
|
|
|
|
|
|
|
def check_connection(url, token=None, verifySSL=None):
    """
    Checks connection to a Plex server, available at url. Can also be used
    to check for connection with plex.tv.

    Override SSL to skip the check by setting verifySSL=False
        if 'None', SSL will be checked (standard requests setting)
        if 'True', SSL settings from file settings are used (False/True)

    Input:
        url         URL to Plex server (e.g. https://192.168.1.1:32400)
        token       appropriate token to access server. If None is passed,
                    the current token is used
    Output:
        False       if server could not be reached or timeout occured
        200         if connection was successfull
        int         or other HTML status codes as received from the server
    """
    # Add '/clients' to URL because then an authentication is necessary
    # If a plex.tv URL was passed, this does not work.
    headers = {'X-Plex-Token': token} if token is not None else None
    if verifySSL is True:
        verifySSL = None if utils.settings('sslverify') == 'true' else False
    if 'plex.tv' in url:
        url = 'https://plex.tv/api/home/users'
    LOG.debug("Checking connection to server %s with verifySSL=%s",
              url, verifySSL)
    response = DU().downloadUrl(url,
                                authenticate=False,
                                headerOptions=headers,
                                verifySSL=verifySSL,
                                timeout=10)
    if response is None:
        LOG.debug("Could not connect to %s", url)
        return False
    try:
        # xml received?
        response.attrib
    except AttributeError:
        if response is True:
            # Maybe no xml but connection was successful nevertheless
            response = 200
    else:
        # Success - we downloaded an xml!
        response = 200
    # We could connect but maybe were not authenticated. No worries
    LOG.debug("Checking connection successfull. Answer: %s", response)
    return response
|
|
|
|
|
|
|
|
|
|
|
|
def discover_pms(token=None):
    """
    Optional parameter:
        token       token for plex.tv

    Returns a list of available PMS to connect to, one entry is the dict:
    {
        'machineIdentifier'     [str] unique identifier of the PMS
        'name'                  [str] name of the PMS
        'token'                 [str] token needed to access that PMS
        'ownername'             [str] name of the owner of this PMS or None if
                                the owner itself supplied tries to connect
        'product'               e.g. 'Plex Media Server' or None
        'version'               e.g. '1.11.2.4772-3e...' or None
        'device':               e.g. 'PC' or 'Windows' or None
        'platform':             e.g. 'Windows', 'Android' or None
        'local'                 [bool] True if plex.tv supplied
                                'publicAddressMatches'='1'
                                or if found using Plex GDM in the local LAN
        'owned'                 [bool] True if it's the owner's PMS
        'relay'                 [bool] True if plex.tv supplied 'relay'='1'
        'presence'              [bool] True if plex.tv supplied 'presence'='1'
        'httpsRequired'         [bool] True if plex.tv supplied
                                'httpsRequired'='1'
        'scheme'                [str] either 'http' or 'https'
        'ip':                   [str] IP of the PMS, e.g. '192.168.1.1'
        'port':                 [str] Port of the PMS, e.g. '32400'
        'baseURL':              [str] <scheme>://<ip>:<port> of the PMS
    }
    """
    LOG.info('Start discovery of Plex Media Servers')
    # Look first for local PMS in the LAN
    local_pms_list = _plex_gdm()
    LOG.debug('PMS found in the local LAN using Plex GDM: %s', local_pms_list)
    # Get PMS from plex.tv
    if token:
        LOG.info('Checking with plex.tv for more PMS to connect to')
        plex_pms_list = _pms_list_from_plex_tv(token)
        _log_pms(plex_pms_list)
    else:
        LOG.info('No plex token supplied, only checked LAN for available PMS')
        plex_pms_list = []

    # Add PMS found only in the LAN to the Plex.tv PMS list
    for pms in local_pms_list:
        for plex_pms in plex_pms_list:
            if pms['machineIdentifier'] == plex_pms['machineIdentifier']:
                # Known to plex.tv already - keep the plex.tv entry
                break
        else:
            # for/else: only reached if NO plex.tv entry matched, i.e. the
            # PMS was only found via GDM - add it to the PMS from plex.tv
            https = _pms_https_enabled('%s:%s' % (pms['ip'], pms['port']))
            if https is None:
                # Error contacting url. Skip and ignore this PMS for now
                LOG.error('Could not contact PMS %s but we should have', pms)
                continue
            elif https is True:
                pms['scheme'] = 'https'
            else:
                pms['scheme'] = 'http'
            pms['baseURL'] = '%s://%s:%s' % (pms['scheme'],
                                             pms['ip'],
                                             pms['port'])
            plex_pms_list.append(pms)
    _log_pms(plex_pms_list)
    return plex_pms_list
|
|
|
|
|
2018-05-30 18:40:58 +10:00
|
|
|
|
2018-05-30 19:24:51 +10:00
|
|
|
def _log_pms(pms_list):
    """
    Write pms_list to the debug log, truncating each PMS token so the
    full secret never ends up in the log file.
    """
    sanitized = deepcopy(pms_list)
    for entry in sanitized:
        if entry.get('token') is not None:
            entry['token'] = '%s...' % entry['token'][:5]
    LOG.debug('Found the following PMS: %s', sanitized)
|
2018-02-11 03:59:20 +11:00
|
|
|
|
|
|
|
|
|
|
|
def _plex_gdm():
    """
    PlexGDM - looks for PMS in the local LAN and returns a list of the PMS
    found (each a dict with at least 'ip', 'port', 'machineIdentifier';
    fields that GDM cannot supply are set to None).
    """
    # Import here because we might not need to do gdm because we already
    # connected to a PMS successfully in the past
    import struct
    import socket

    # setup socket for discovery -> multicast message
    gdm = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    gdm.settimeout(2.0)
    # Set the time-to-live for messages to 2 for local network
    ttl = struct.pack('b', 2)
    gdm.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl)

    return_data = []
    try:
        # Send data to the multicast group
        gdm.sendto(PLEX_GDM_MSG, (PLEX_GDM_IP, PLEX_GDM_PORT))

        # Look for responses from all recipients
        while True:
            try:
                data, server = gdm.recvfrom(1024)
                return_data.append({'from': server,
                                    'data': data.decode('utf-8')})
            except socket.timeout:
                # No more replies within 2 seconds - discovery is done
                break
    except Exception as e:
        # Probably error: (101, 'Network is unreachable')
        LOG.error(e)
        import traceback
        LOG.error("Traceback:\n%s", traceback.format_exc())
    finally:
        gdm.close()
    LOG.debug('Plex GDM returned the data: %s', return_data)
    pms_list = []
    for response in return_data:
        # Check if we had a positive HTTP response
        if '200 OK' not in response['data']:
            continue
        # Pre-fill all keys so the dict shape matches what plex.tv delivers
        pms = {
            'ip': response['from'][0],
            'scheme': None,
            'local': True,  # Since we found it using GDM
            'product': None,
            'baseURL': None,
            'name': None,
            'version': None,
            'token': None,
            'ownername': None,
            'device': None,
            'platform': None,
            'owned': None,
            'relay': None,
            'presence': True,  # Since we're talking to the PMS
            'httpsRequired': None,
        }
        # The GDM payload is a set of 'Header: value' lines - parse them
        for line in response['data'].split('\n'):
            if 'Content-Type:' in line:
                pms['product'] = utils.try_decode(line.split(':')[1].strip())
            elif 'Host:' in line:
                pms['baseURL'] = line.split(':')[1].strip()
            elif 'Name:' in line:
                pms['name'] = utils.try_decode(line.split(':')[1].strip())
            elif 'Port:' in line:
                pms['port'] = line.split(':')[1].strip()
            elif 'Resource-Identifier:' in line:
                pms['machineIdentifier'] = line.split(':')[1].strip()
            elif 'Version:' in line:
                pms['version'] = line.split(':')[1].strip()
        pms_list.append(pms)
    return pms_list
|
|
|
|
|
|
|
|
|
|
|
|
def _pms_list_from_plex_tv(token):
    """
    get Plex media Server List from plex.tv/pms/resources

    Pokes every candidate PMS concurrently (max 5 threads) and returns the
    list of PMS dicts that answered with the expected machineIdentifier.
    Returns [] if plex.tv could not be reached.
    """
    xml = DU().downloadUrl('https://plex.tv/api/resources',
                           authenticate=False,
                           parameters={'includeHttps': 1},
                           headerOptions={'X-Plex-Token': token})
    try:
        xml.attrib
    except AttributeError:
        LOG.error('Could not get list of PMS from plex.tv')
        return []

    from Queue import Queue
    queue = Queue()
    thread_queue = []

    # 2 days in seconds
    max_age_in_seconds = 2 * 60 * 60 * 24
    for device in xml.findall('Device'):
        if 'server' not in device.get('provides'):
            # No PMS - skip
            continue
        if device.find('Connection') is None:
            # no valid connection - skip
            continue
        # check MyPlex data age - skip if >2 days
        info_age = time() - int(device.get('lastSeenAt'))
        if info_age > max_age_in_seconds:
            LOG.debug("Skip server %s not seen for 2 days", device.get('name'))
            continue
        pms = {
            'machineIdentifier': device.get('clientIdentifier'),
            'name': device.get('name'),
            'token': device.get('accessToken'),
            'ownername': device.get('sourceTitle'),
            'product': device.get('product'),  # e.g. 'Plex Media Server'
            'version': device.get('productVersion'),  # e.g. '1.11.2.4772-3e..'
            'device': device.get('device'),  # e.g. 'PC' or 'Windows'
            'platform': device.get('platform'),  # e.g. 'Windows', 'Android'
            'local': device.get('publicAddressMatches') == '1',
            'owned': device.get('owned') == '1',
            'relay': device.get('relay') == '1',
            'presence': device.get('presence') == '1',
            'httpsRequired': device.get('httpsRequired') == '1',
            'connections': []
        }
        # Try a local connection first, no matter what plex.tv tells us
        for connection in device.findall('Connection'):
            if connection.get('local') == '1':
                pms['connections'].append(connection)
        # Then try non-local
        for connection in device.findall('Connection'):
            if connection.get('local') != '1':
                pms['connections'].append(connection)
        # Spawn threads to ping each PMS simultaneously
        thread = Thread(target=_poke_pms, args=(pms, queue))
        thread_queue.append(thread)

    max_threads = 5
    threads = []
    # poke PMS, own thread for each PMS
    while True:
        # Remove finished threads
        # NOTE(review): removing from `threads` while iterating it can skip
        # the element after a removed one; harmless here since any survivor
        # is swept on a later pass or joined below - but confirm if changed
        for thread in threads:
            if not thread.isAlive():
                threads.remove(thread)
        if len(threads) < max_threads:
            try:
                thread = thread_queue.pop()
            except IndexError:
                # We have done our work
                break
            else:
                thread.start()
                threads.append(thread)
        else:
            # Thread pool full - wait 50ms (xbmc.sleep) before re-checking
            sleep(50)
    # wait for requests being answered
    for thread in threads:
        thread.join()
    # declare new PMSs
    pms_list = []
    while not queue.empty():
        pms = queue.get()
        del pms['connections']
        pms_list.append(pms)
        queue.task_done()
    return pms_list
|
|
|
|
|
|
|
|
|
|
|
|
def _poke_pms(pms, queue):
    """
    Test the first entry of pms['connections'] by downloading
    <uri>/identity. On success (machineIdentifier matches), fill in
    pms['baseURL'], 'scheme', 'ip' and 'port' and put pms onto queue.
    On failure, drop that connection and recursively try the remaining
    ones; returns None in all cases.
    """
    data = pms['connections'][0].attrib
    url = data['uri']
    if data['local'] == '1' and utils.REGEX_PLEX_DIRECT.findall(url):
        # In case DNS resolve of plex.direct does not work, append a new
        # connection that will directly access the local IP (e.g. internet down)
        conn = deepcopy(pms['connections'][0])
        # Overwrite plex.direct
        conn.attrib['uri'] = '%s://%s:%s' % (data['protocol'],
                                             data['address'],
                                             data['port'])
        pms['connections'].insert(1, conn)
    try:
        protocol, address, port = url.split(':', 2)
    except ValueError:
        # e.g. .ork.plex.services uri, thanks Plex
        protocol, address = url.split(':', 1)
        port = data['port']
        url = '%s:%s' % (url, port)
    address = address.replace('/', '')
    xml = DU().downloadUrl('%s/identity' % url,
                           authenticate=False,
                           headerOptions={'X-Plex-Token': pms['token']},
                           verifySSL=False,
                           timeout=10)
    try:
        xml.attrib['machineIdentifier']
    except (AttributeError, KeyError):
        # No connection, delete the one we just tested
        del pms['connections'][0]
        if pms['connections']:
            # Still got connections left, try them
            return _poke_pms(pms, queue)
        return
    else:
        # Connection successful - correct pms?
        if xml.get('machineIdentifier') == pms['machineIdentifier']:
            # process later
            pms['baseURL'] = url
            pms['scheme'] = protocol
            pms['ip'] = address
            pms['port'] = port
            queue.put(pms)
            return
        # BUGFIX: was pms['uuid'] - that key is never set on a pms dict
        # (both _plex_gdm and _pms_list_from_plex_tv use
        # 'machineIdentifier'), so this log line raised KeyError
        LOG.info('Found a pms at %s, but the expected machineIdentifier of '
                 '%s did not match the one we found: %s',
                 url, pms['machineIdentifier'], xml.get('machineIdentifier'))
|
|
|
|
|
|
|
|
|
2016-02-01 20:33:33 +11:00
|
|
|
def GetPlexMetadata(key):
    """
    Returns raw API metadata for key as an etree XML.

    Can be called with either Plex key '/library/metadata/xxxx'metadata
    OR with the digits 'xxxx' only.

    Returns None or 401 if something went wrong
    """
    key = str(key)
    if '/library/metadata/' in key:
        url = "{server}" + key
    else:
        url = "{server}/library/metadata/" + key
    arguments = {
        'checkFiles': 0,
        'includeExtras': 1,  # Trailers and Extras => Extras
        'includeReviews': 1,
        'includeRelated': 0,  # Similar movies => Video -> Related
        'skipRefresh': 1,
        # 'includeRelatedCount': 0,
        # 'includeOnDeck': 1,
        # 'includeChapters': 1,
        # 'includePopularLeaves': 1,
        # 'includeConcerts': 1
    }
    xml = DU().downloadUrl('%s?%s' % (url, urlencode(arguments)))
    if xml == 401:
        # Either unauthorized (taken care of by doUtils) or PMS under strain
        return 401
    # Did we receive a valid XML?
    try:
        xml.attrib
    except AttributeError:
        # Nope we did not receive a valid XML
        LOG.error("Error retrieving metadata for %s", url)
        xml = None
    return xml
|
|
|
|
|
|
|
|
|
2018-10-15 04:59:11 +11:00
|
|
|
def plex_children_generator(key):
    """
    Generator yielding all children of the Plex item with ratingKey key,
    downloaded chunk-wise via download_generator.
    """
    url = '{server}/library/metadata/%s/children' % key
    for child in download_generator(url):
        yield child
|
2018-10-15 04:59:11 +11:00
|
|
|
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetAllPlexChildren(key):
    """
    Returns a list (raw xml API dump) of all Plex children for the key
    (e.g. /library/metadata/194853/children pointing to a season).

    Input:
        key             Key to a Plex item, e.g. 12345
    """
    url = "{server}/library/metadata/%s/children?" % key
    return DownloadChunks(url)
|
2016-01-28 02:33:02 +11:00
|
|
|
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetPlexSectionResults(viewId, args=None):
    """
    Returns a list (XML API dump) of all Plex items in the Plex
    section with key = viewId.

    Input:
        args:       optional dict to be urlencoded

    Returns None if something went wrong
    """
    url = "{server}/library/sections/%s/all?" % viewId
    if args:
        url = '%s%s&' % (url, urlencode(args))
    return DownloadChunks(url)
|
2016-02-13 02:53:49 +11:00
|
|
|
|
2016-02-01 20:33:33 +11:00
|
|
|
|
2018-11-10 00:39:43 +11:00
|
|
|
class DownloadChunk(backgroundthread.Task):
    """
    Background task that downloads one chunk of a PMS answer and hands the
    resulting xml (or None on failure) to the supplied callback.

    This task will also be executed while library sync is suspended!
    """
    def setup(self, url, args, callback):
        # Stash everything run() will need
        self.url = url
        self.args = args
        self.callback = callback

    def run(self):
        answer = DU().downloadUrl(self.url, parameters=self.args)
        try:
            answer.attrib
        except AttributeError:
            LOG.error('Error while downloading chunks: %s, args: %s',
                      self.url, self.args)
            answer = None
        self.callback(answer)
|
|
|
|
|
|
|
|
|
2018-10-20 23:49:04 +11:00
|
|
|
class DownloadGen(object):
    """
    Special iterator object that will yield all child xmls piece-wise. It also
    saves the original xml.attrib.

    The first chunk is downloaded synchronously; up to cache_factor further
    chunks are prefetched in the background via DownloadChunk tasks.

    Yields XML etree children or raises RuntimeError
    """
    def __init__(self, url, plex_type=None, last_viewed_at=None,
                 updated_at=None, args=None):
        self.args = args or {}
        # Append optional PMS filters to the url's query string
        url += '?'
        if plex_type:
            url = '%stype=%s&' % (url, v.PLEX_TYPE_NUMBER_FROM_PLEX_TYPE[plex_type])
        if last_viewed_at:
            url = '%slastViewedAt>=%s&' % (url, last_viewed_at)
        if updated_at:
            url = '%supdatedAt>=%s&' % (url, updated_at)
        # Drop the trailing '?' or '&'
        self.url = url[:-1]
        # Synchronous download of the very first chunk (raises RuntimeError
        # on failure)
        self._download_chunk(start=0)
        self.attrib = deepcopy(self.xml.attrib)
        self.current = 0
        self.total = int(self.attrib['totalSize'])
        # How many chunks to keep in flight/prefetched ahead of consumption
        self.cache_factor = 10
        # Will keep track whether we still have results incoming
        self.pending_counter = []
        # Prefetch the next chunks in the background (rounded up to a
        # multiple of CONTAINERSIZE)
        end = min(self.cache_factor * CONTAINERSIZE,
                  self.total + (CONTAINERSIZE - self.total % CONTAINERSIZE))
        for self.position in range(CONTAINERSIZE, end, CONTAINERSIZE):
            self._download_chunk(start=self.position)
            self.pending_counter.append(None)

    def _download_chunk(self, start):
        # NOTE(review): self.args is shared with the background task and
        # mutated per call - assumes DownloadChunk consumes it before the
        # next update; confirm if chunks ever arrive out of order
        self.args.update({
            'X-Plex-Container-Size': CONTAINERSIZE,
            'X-Plex-Container-Start': start,
            'sort': 'id',  # Entries are sorted by plex_id
            'excludeAllLeaves': 1  # PMS wont attach a first summary child
        })
        if start == 0:
            # We need the result NOW
            self.xml = DU().downloadUrl(self.url, parameters=self.args)
            try:
                self.xml.attrib
            except AttributeError:
                LOG.error('Error while downloading chunks: %s, args: %s',
                          self.url, self.args)
                raise RuntimeError('Error while downloading chunks for %s'
                                   % self.url)
        else:
            # Fire-and-forget background download; result arrives via
            # on_chunk_downloaded
            task = DownloadChunk()
            task.setup(self.url, self.args, self.on_chunk_downloaded)
            backgroundthread.BGThreader.addTask(task)

    def on_chunk_downloaded(self, xml):
        # Callback from DownloadChunk; xml is None on download failure
        if xml:
            for child in xml:
                self.xml.append(child)
        # One fewer chunk outstanding (failed chunks count as done, too)
        self.pending_counter.pop()

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol - delegate to __next__
        return self.__next__()

    def __next__(self):
        while True:
            if len(self.xml):
                self.current += 1
                # Pop the first buffered child and hand it out
                child = self.xml[0]
                self.xml.remove(child)
                # Kick off the next prefetch once a full chunk was consumed
                if (self.current % CONTAINERSIZE == 0 and
                        self.current < self.total - (self.cache_factor - 1) * CONTAINERSIZE):
                    self.pending_counter.append(None)
                    self._download_chunk(
                        start=self.current + (self.cache_factor - 1) * CONTAINERSIZE)
                return child
            # Buffer empty - wait 100ms (xbmc.sleep) for background chunks
            sleep(100)
            if not len(self.pending_counter) and not len(self.xml):
                # Nothing buffered and nothing incoming: we're done
                raise StopIteration
            LOG.debug('Waiting for download to finish')

    def get(self, key, default=None):
        # Dict-like access to the root element's attributes
        return self.attrib.get(key, default)
|
|
|
|
|
|
|
|
|
2018-10-22 01:56:13 +11:00
|
|
|
class SectionItems(DownloadGen):
    """
    Iterator object yielding all items of the Plex library section with id
    section_id, optionally filtered by plex_type, last_viewed_at or
    updated_at
    """
    def __init__(self, section_id, plex_type=None, last_viewed_at=None,
                 updated_at=None, args=None):
        super(SectionItems, self).__init__(
            '{server}/library/sections/%s/all' % section_id,
            plex_type,
            last_viewed_at,
            updated_at,
            args)
|
2018-10-22 01:56:13 +11:00
|
|
|
|
|
|
|
|
|
|
|
class Children(DownloadGen):
    """
    Iterator object to get all children of the Plex item with the supplied
    plex_id (via the /library/metadata/<id>/children endpoint)
    """
    def __init__(self, plex_id):
        super(Children, self).__init__(
            '{server}/library/metadata/%s/children' % plex_id)
|
|
|
|
|
|
|
|
|
|
|
|
class Leaves(DownloadGen):
    """
    Iterator object yielding all leaves of the Plex library section with id
    section_id (via the /library/sections/<id>/allLeaves endpoint)
    """
    def __init__(self, section_id):
        url = '{server}/library/sections/%s/allLeaves' % section_id
        super(Leaves, self).__init__(url)
|
2018-10-15 04:59:11 +11:00
|
|
|
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def DownloadChunks(url):
    """
    Downloads PMS url in chunks of CONTAINERSIZE.

    url MUST end with '?' (if no other url encoded args are present) or '&'

    Tolerates up to 9 failed chunks (each one is skipped); 10 failures
    abort the download.

    Returns a stitched-together xml or None.
    """
    xml = None
    pos = 0
    error_counter = 0
    while error_counter < 10:
        args = {
            'X-Plex-Container-Size': CONTAINERSIZE,
            'X-Plex-Container-Start': pos,
            'sort': 'id'
        }
        xmlpart = DU().downloadUrl(url + urlencode(args))
        # If something went wrong - skip in the hope that it works next time
        try:
            xmlpart.attrib
        except AttributeError:
            LOG.error('Error while downloading chunks: %s',
                      url + urlencode(args))
            pos += CONTAINERSIZE
            error_counter += 1
            continue

        # Very first run: starting xml (to retain data in xml's root!)
        if xml is None:
            xml = deepcopy(xmlpart)
            if len(xmlpart) < CONTAINERSIZE:
                # Fewer items than a full chunk: that was everything
                break
            else:
                pos += CONTAINERSIZE
                continue
        # Build answer xml - containing the entire library
        for child in xmlpart:
            xml.append(child)
        # Done as soon as we don't receive a full complement of items
        if len(xmlpart) < CONTAINERSIZE:
            break
        pos += CONTAINERSIZE
    if error_counter == 10:
        LOG.error('Fatal error while downloading chunks for %s', url)
        return None
    return xml
|
2016-01-28 02:33:02 +11:00
|
|
|
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetAllPlexLeaves(viewId, lastViewedAt=None, updatedAt=None):
    """
    Returns a list (raw XML API dump) of all Plex subitems for the key.
    (e.g. /library/sections/2/allLeaves pointing to all TV shows)

    Input:
        viewId              Id of Plex library, e.g. '2'
        lastViewedAt        Unix timestamp; only retrieves PMS items viewed
                            since that point of time until now.
        updatedAt           Unix timestamp; only retrieves PMS items updated
                            by the PMS since that point of time until now.

    If lastViewedAt and updatedAt=None, ALL PMS items are returned.

    Warning: lastViewedAt and updatedAt are combined with AND by the PMS!

    Relevant "master time": PMS server. I guess this COULD lead to problems,
    e.g. when server and client are in different time zones.
    """
    url = "{server}/library/sections/%s/allLeaves" % viewId
    filters = []
    if lastViewedAt:
        filters.append('lastViewedAt>=%s' % lastViewedAt)
    if updatedAt:
        filters.append('updatedAt>=%s' % updatedAt)
    if filters:
        url = '%s?%s&' % (url, '&'.join(filters))
    else:
        url += '?'
    return DownloadChunks(url)
|
2016-03-15 03:47:05 +11:00
|
|
|
|
2016-02-01 20:33:33 +11:00
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetPlexOnDeck(viewId):
    """
    Raw XML dump of the PMS "On Deck" listing for library section viewId,
    downloaded in chunks.
    """
    url = "{server}/library/sections/%s/onDeck?" % viewId
    return DownloadChunks(url)
|
2016-01-28 02:33:02 +11:00
|
|
|
|
|
|
|
|
2018-07-28 00:01:05 +10:00
|
|
|
def get_plex_hub():
    """
    Download the PMS hub listing ('{server}/hubs') and return whatever
    DU().downloadUrl yields (an etree xml on success).
    """
    url = '{server}/hubs'
    return DU().downloadUrl(url)
|
|
|
|
|
|
|
|
|
2017-05-06 17:45:21 +10:00
|
|
|
def get_plex_sections():
    """
    Returns all Plex sections (libraries) of the PMS as an etree xml
    """
    url = '{server}/library/sections'
    return DU().downloadUrl(url)
|
2016-02-03 23:01:13 +11:00
|
|
|
|
|
|
|
|
2018-11-06 22:33:02 +11:00
|
|
|
def init_plex_playqueue(plex_id, librarySectionUUID, mediatype='movie',
                        trailers=False):
    """
    POST a new playQueue for plex_id to the PMS and return the raw API
    metadata XML dump (e.g. including trailers); returns None on failure.
    """
    url = "{server}/playQueues"
    parameters = {
        'type': mediatype,
        'uri': ('library://{0}/item/%2Flibrary%2Fmetadata%2F{1}'.format(
            librarySectionUUID, plex_id)),
        'includeChapters': '1',
        'shuffle': '0',
        'repeat': '0'
    }
    # Only a strict boolean True enables trailer prefixing
    if trailers is True:
        parameters['extrasPrefixCount'] = utils.settings('trailerNumber')
    xml = DU().downloadUrl(url + '?' + urlencode(parameters),
                           action_type="POST")
    try:
        # A valid answer carries at least one child element
        xml[0].tag
    except (IndexError, TypeError, AttributeError):
        LOG.error("Error retrieving metadata for %s", url)
        return
    return xml
|
2016-02-07 22:38:50 +11:00
|
|
|
|
|
|
|
|
2018-02-11 03:59:20 +11:00
|
|
|
def _pms_https_enabled(url):
    """
    Returns True if the PMS can talk https, False otherwise.
    None if error occured, e.g. the connection timed out

    Call with e.g. url='192.168.0.1:32400' (NO http/https)

    This is done by GET /identity (returns an error if https is enabled and we
    are trying to use http)

    Prefers HTTPS over HTTP
    """
    # Probe https first; only fall back to http if https did not answer
    for scheme, verdict in (('https', True), ('http', False)):
        answ = DU().downloadUrl('%s://%s/identity' % (scheme, url),
                                authenticate=False,
                                verifySSL=False)
        try:
            answ.attrib
        except AttributeError:
            # No valid XML for this scheme - try the next one
            continue
        # Received a valid XML; verdict says which protocol the PMS talks
        return verdict
    LOG.error("Could not contact PMS %s", url)
    return None
|
2016-03-12 00:42:14 +11:00
|
|
|
|
|
|
|
|
2016-03-24 19:08:58 +11:00
|
|
|
def GetMachineIdentifier(url):
    """
    Returns the unique PMS machine identifier of url

    Returns None if something went wrong
    """
    xml = DU().downloadUrl('%s/identity' % url,
                           authenticate=False,
                           verifySSL=False,
                           timeout=10)
    try:
        identifier = xml.attrib['machineIdentifier']
    except (AttributeError, KeyError):
        # Either no XML at all or the attribute is missing
        LOG.error('Could not get the PMS machineIdentifier for %s', url)
        return None
    LOG.debug('Found machineIdentifier %s for the PMS %s', identifier, url)
    return identifier
|
|
|
|
|
|
|
|
|
2016-03-28 01:57:20 +11:00
|
|
|
def GetPMSStatus(token):
    """
    token: Needs to be authorized with a master Plex token
    (not a managed user token)!
    Calls /status/sessions on currently active PMS. Returns a dict with:

    'sessionKey':
    {
        'userId':       Plex ID of the user (if applicable, otherwise '')
        'username':     Plex name (if applicable, otherwise '')
        'ratingKey':    Unique Plex id of item being played
    }

    or an empty dict.
    """
    sessions = {}
    xml = DU().downloadUrl('{server}/status/sessions',
                           headerOptions={'X-Plex-Token': token})
    try:
        xml.attrib
    except AttributeError:
        # Download failed - return the empty dict
        return sessions
    for entry in xml:
        user_elem = entry.find('User')
        if user_elem is None:
            user_id, user_name = '', ''
        else:
            user_id = user_elem.attrib.get('id', '')
            user_name = user_elem.attrib.get('title', '')
        sessions[entry.attrib.get('sessionKey')] = {
            'userId': user_id,
            'username': user_name,
            'ratingKey': entry.attrib.get('ratingKey')
        }
    return sessions
|
|
|
|
|
|
|
|
|
2018-07-05 20:46:40 +10:00
|
|
|
def collections(section_id):
    """
    Returns an etree with list of collections or None.
    """
    request_url = '{server}/library/sections/%s/all' % section_id
    xml = DU().downloadUrl(request_url,
                           parameters={
                               'type': 18,  # Collections
                               'includeCollections': 1,
                           })
    try:
        xml.attrib
    except AttributeError:
        LOG.error("Error retrieving collections for %s", request_url)
        return None
    return xml
|
|
|
|
|
|
|
|
|
2016-03-12 00:42:14 +11:00
|
|
|
def scrobble(ratingKey, state):
    """
    Tells the PMS to set an item's watched state to state="watched" or
    state="unwatched". Any other state is silently ignored.
    """
    endpoints = {
        'watched': "{server}/:/scrobble?",
        'unwatched': "{server}/:/unscrobble?",
    }
    try:
        url = endpoints[state]
    except KeyError:
        # Unknown state - nothing to do
        return
    url += urlencode({'key': ratingKey,
                      'identifier': 'com.plexapp.plugins.library'})
    DU().downloadUrl(url)
    LOG.info("Toggled watched state for Plex item %s", ratingKey)
|
2016-10-23 02:15:10 +11:00
|
|
|
|
|
|
|
|
|
|
|
def delete_item_from_pms(plexid):
    """
    Deletes the item plexid from the Plex Media Server (and the harddrive!).
    Do make sure that the currently logged in user has the credentials

    Returns True if successful, False otherwise
    """
    succeeded = DU().downloadUrl('{server}/library/metadata/%s' % plexid,
                                 action_type="DELETE") is True
    if succeeded:
        LOG.info('Successfully deleted Plex id %s from the PMS', plexid)
    else:
        LOG.error('Could not delete Plex id %s from the PMS', plexid)
    return succeeded
|
2016-10-24 04:38:21 +11:00
|
|
|
|
|
|
|
|
|
|
|
def get_PMS_settings(url, token):
    """
    Retrieve the PMS' settings via <url>/:/prefs

    Call with url: scheme://ip:port
    """
    # Only send a token header if we actually have a token
    headers = {'X-Plex-Token': token} if token else None
    return DU().downloadUrl('%s/:/prefs' % url,
                            authenticate=False,
                            verifySSL=False,
                            headerOptions=headers)
|
2018-02-11 03:59:20 +11:00
|
|
|
|
|
|
|
|
|
|
|
def GetUserArtworkURL(username):
    """
    Returns the URL for the user's Avatar, or an empty string if the
    username could not be matched among the Plex home users.
    """
    url = ''
    # No break on purpose: if the name occurs twice, the last match wins
    for user in plex_tv.plex_home_users(utils.settings('plexToken')):
        if user.title == username:
            url = user.thumb
    LOG.debug("Avatar url for user %s is: %s", username, url)
    return url
|
|
|
|
|
|
|
|
|
|
|
|
def transcode_image_path(key, AuthToken, path, width, height):
    """
    Transcode Image support

    parameters:
        key
        AuthToken
        path - source path of current XML: path[srcXML]
        width
        height
    result:
        final path to image file
    """
    if key.startswith(('http://', 'https://')):
        # external address - can we get a transcoding request for external
        # images?
        path = key
    elif key.startswith('/'):
        # internal full path.
        path = 'http://127.0.0.1:32400' + key
    else:
        # internal path, add-on
        path = 'http://127.0.0.1:32400' + path + '/' + key
    path = utils.try_encode(path)
    # This is bogus (note the extra path component) but ATV is stupid when it
    # comes to caching images, it doesn't use querystrings. Fortunately PMS is
    # lenient...
    transcode_path = ('/photo/:/transcode/%sx%s/%s'
                      % (width, height, quote_plus(path)))
    query = {
        'width': width,
        'height': height,
        'url': path
    }
    if AuthToken:
        query['X-Plex-Token'] = AuthToken
    return transcode_path + '?' + urlencode(query)
|