2016-01-28 02:33:02 +11:00
|
|
|
# -*- coding: utf-8 -*-
|
2017-04-02 02:28:02 +10:00
|
|
|
from logging import getLogger
|
2016-01-28 02:33:02 +11:00
|
|
|
from urllib import urlencode
|
2016-01-30 06:07:21 +11:00
|
|
|
from ast import literal_eval
|
2016-07-21 02:36:31 +10:00
|
|
|
from urlparse import urlparse, parse_qsl
|
2016-01-30 06:07:21 +11:00
|
|
|
import re
|
2016-03-15 00:51:49 +11:00
|
|
|
from copy import deepcopy
|
2016-01-28 02:33:02 +11:00
|
|
|
|
2016-01-30 06:07:21 +11:00
|
|
|
import downloadutils
|
2016-09-03 01:26:17 +10:00
|
|
|
from utils import settings
|
2017-01-25 02:04:42 +11:00
|
|
|
from variables import PLEX_TO_KODI_TIMEFACTOR
|
2016-01-28 02:33:02 +11:00
|
|
|
|
2016-09-03 01:26:17 +10:00
|
|
|
###############################################################################
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
# Module-wide logger; prefixed with "PLEX." so all records group under the
# add-on's root logger
log = getLogger("PLEX."+__name__)

# Number of items to fetch per chunked PMS request (user setting 'limitindex')
CONTAINERSIZE = int(settings('limitindex'))

# Splits a Plex key such as '/library/metadata/1234' into the groups
# ('library/metadata', '1234')
REGEX_PLEX_KEY = re.compile(r'''/(.+)/(\d+)$''')
|
2016-09-03 01:26:17 +10:00
|
|
|
###############################################################################
|
2016-01-28 02:33:02 +11:00
|
|
|
|
2017-01-16 04:01:27 +11:00
|
|
|
|
2016-02-01 02:13:40 +11:00
|
|
|
def ConvertPlexToKodiTime(plexTime):
    """
    Converts a Plex time value to Kodi time. Returns an int (in seconds),
    or None if plexTime is None.
    """
    if plexTime is not None:
        return int(float(plexTime) * PLEX_TO_KODI_TIMEFACTOR)
    return None
|
2016-02-01 02:13:40 +11:00
|
|
|
|
|
|
|
|
2016-01-30 06:07:21 +11:00
|
|
|
def GetPlexKeyNumber(plexKey):
    """
    Deconstructs e.g. '/library/metadata/xxxx' to the tuple

        ('library/metadata', 'xxxx')

    Returns ('', '') if nothing is found (including plexKey=None)
    """
    try:
        result = REGEX_PLEX_KEY.findall(plexKey)[0]
    except (IndexError, TypeError):
        # IndexError: the regex did not match anything.
        # TypeError: plexKey was None (findall raises, not IndexError) —
        # return the documented fallback instead of crashing.
        result = ('', '')
    return result
|
|
|
|
|
|
|
|
|
|
|
|
def ParseContainerKey(containerKey):
    """
    Parses e.g. /playQueues/3045?own=1&repeat=0&window=200 to:
        'playQueues', '3045', {'window': '200', 'own': '1', 'repeat': '0'}

    Output hence: library, key, query (str, str, dict)
    """
    parsed = urlparse(containerKey)
    query = dict(parse_qsl(parsed.query))
    library, key = GetPlexKeyNumber(parsed.path)
    return library, key, query
|
|
|
|
|
|
|
|
|
|
|
|
def LiteralEval(string):
    """
    Safely evaluates a string containing a Python literal, e.g. turning
    "{'a': 1}" into the corresponding dict.
    """
    evaluated = literal_eval(string)
    return evaluated
|
|
|
|
|
|
|
|
|
2016-01-28 06:41:28 +11:00
|
|
|
def GetMethodFromPlexType(plexType):
    """
    Maps a Plex media type (e.g. 'movie', 'episode') to the name of the
    corresponding add/update method.

    Raises KeyError for unknown Plex types.
    """
    return {
        'movie': 'add_update',
        'episode': 'add_updateEpisode',
        'show': 'add_update',
        'season': 'add_updateSeason',
        'track': 'add_updateSong',
        'album': 'add_updateAlbum',
        'artist': 'add_updateArtist'
    }[plexType]
|
|
|
|
|
|
|
|
|
2016-02-01 20:33:33 +11:00
|
|
|
def GetPlexMetadata(key):
    """
    Returns raw API metadata for key as an etree XML.

    Can be called with either Plex key '/library/metadata/xxxx'metadata
    OR with the digits 'xxxx' only.

    Returns None or 401 if something went wrong
    """
    key = str(key)
    if '/library/metadata/' not in key:
        url = "{server}/library/metadata/" + key
    else:
        url = "{server}" + key
    arguments = {
        'checkFiles': 0,
        'includeExtras': 1,  # Trailers and Extras => Extras
        'includeReviews': 1,
        'includeRelated': 0,  # Similar movies => Video -> Related
        # 'includeRelatedCount': 0,
        # 'includeOnDeck': 1,
        # 'includeChapters': 1,
        # 'includePopularLeaves': 1,
        # 'includeConcerts': 1
    }
    url = '%s?%s' % (url, urlencode(arguments))
    xml = downloadutils.DownloadUtils().downloadUrl(url)
    if xml == 401:
        # Either unauthorized (taken care of by doUtils) or PMS under strain
        return 401
    # Did we receive a valid XML?
    try:
        xml.attrib
    except AttributeError:
        # Nope we did not receive a valid XML
        log.error("Error retrieving metadata for %s" % url)
        xml = None
    return xml
|
|
|
|
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetAllPlexChildren(key):
    """
    Returns a list (raw xml API dump) of all Plex children for the key.
    (e.g. /library/metadata/194853/children pointing to a season)

    Input:
        key             Key to a Plex item, e.g. 12345
    """
    url = "{server}/library/metadata/%s/children?" % key
    return DownloadChunks(url)
|
2016-01-28 02:33:02 +11:00
|
|
|
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetPlexSectionResults(viewId, args=None):
    """
    Returns a list (XML API dump) of all Plex items in the Plex
    section with key = viewId.

    Input:
        args:       optional dict to be urlencoded

    Returns None if something went wrong
    """
    url = "{server}/library/sections/%s/all?" % viewId
    if args:
        url = '%s%s&' % (url, urlencode(args))
    return DownloadChunks(url)
|
2016-02-13 02:53:49 +11:00
|
|
|
|
2016-02-01 20:33:33 +11:00
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def DownloadChunks(url):
    """
    Downloads PMS url in chunks of CONTAINERSIZE.

    url MUST end with '?' (if no other url encoded args are present) or '&'

    Returns a stitched-together xml or None.
    """
    # Accumulated answer XML; stays None until the first successful chunk
    xml = None
    # Offset of the next chunk to request (X-Plex-Container-Start)
    pos = 0
    errorCounter = 0
    # Give up after 10 failed chunks in total (not necessarily consecutive)
    while errorCounter < 10:
        args = {
            'X-Plex-Container-Size': CONTAINERSIZE,
            'X-Plex-Container-Start': pos
        }
        xmlpart = downloadutils.DownloadUtils().downloadUrl(
            url + urlencode(args))
        # If something went wrong - skip in the hope that it works next time
        # NOTE(review): pos still advances here, so a failed chunk's items
        # are dropped from the stitched result, not retried
        try:
            xmlpart.attrib
        except AttributeError:
            log.error('Error while downloading chunks: %s'
                      % (url + urlencode(args)))
            pos += CONTAINERSIZE
            errorCounter += 1
            continue

        # Very first run: starting xml (to retain data in xml's root!)
        if xml is None:
            xml = deepcopy(xmlpart)
            # A short chunk means the PMS has no more items
            if len(xmlpart) < CONTAINERSIZE:
                break
            else:
                pos += CONTAINERSIZE
                continue
        # Build answer xml - containing the entire library
        for child in xmlpart:
            xml.append(child)
        # Done as soon as we don't receive a full complement of items
        if len(xmlpart) < CONTAINERSIZE:
            break
        pos += CONTAINERSIZE
    # Hitting the error limit discards everything downloaded so far
    if errorCounter == 10:
        log.error('Fatal error while downloading chunks for %s' % url)
        return None
    return xml
|
2016-01-28 02:33:02 +11:00
|
|
|
|
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetAllPlexLeaves(viewId, lastViewedAt=None, updatedAt=None):
    """
    Returns a list (raw XML API dump) of all Plex subitems for the key.
    (e.g. /library/sections/2/allLeaves pointing to all TV shows)

    Input:
        viewId              Id of Plex library, e.g. '2'
        lastViewedAt        Unix timestamp; only retrieves PMS items viewed
                            since that point of time until now.
        updatedAt           Unix timestamp; only retrieves PMS items updated
                            by the PMS since that point of time until now.

    If lastViewedAt and updatedAt=None, ALL PMS items are returned.

    Warning: lastViewedAt and updatedAt are combined with AND by the PMS!

    Relevant "master time": PMS server. I guess this COULD lead to problems,
    e.g. when server and client are in different time zones.
    """
    url = "{server}/library/sections/%s/allLeaves" % viewId
    filters = []
    if lastViewedAt:
        filters.append('lastViewedAt>=%s' % lastViewedAt)
    if updatedAt:
        filters.append('updatedAt>=%s' % updatedAt)
    if filters:
        url = '%s?%s&' % (url, '&'.join(filters))
    else:
        url = '%s?' % url
    return DownloadChunks(url)
|
2016-03-15 03:47:05 +11:00
|
|
|
|
2016-02-01 20:33:33 +11:00
|
|
|
|
2017-04-02 02:28:02 +10:00
|
|
|
def GetPlexOnDeck(viewId):
    """
    Returns the 'onDeck' items of the Plex section viewId as a raw,
    chunk-stitched XML dump (or None on failure).
    """
    url = "{server}/library/sections/%s/onDeck?" % viewId
    return DownloadChunks(url)
|
2016-01-28 02:33:02 +11:00
|
|
|
|
|
|
|
|
2017-05-06 17:45:21 +10:00
|
|
|
def get_plex_sections():
    """
    Returns all Plex sections (libraries) of the PMS as an etree xml
    """
    url = '{server}/library/sections'
    return downloadutils.DownloadUtils().downloadUrl(url)
|
2016-02-03 23:01:13 +11:00
|
|
|
|
|
|
|
|
2017-02-19 03:04:30 +11:00
|
|
|
def init_plex_playqueue(itemid, librarySectionUUID, mediatype='movie',
                        trailers=False):
    """
    Returns raw API metadata XML dump for a playlist with e.g. trailers.

    Returns None if the PMS answer was not a valid XML with children.
    """
    params = {
        'type': mediatype,
        'uri': ('library://%s/item/%%2Flibrary%%2Fmetadata%%2F%s'
                % (librarySectionUUID, itemid)),
        'includeChapters': '1',
        'shuffle': '0',
        'repeat': '0'
    }
    if trailers is True:
        # Ask the PMS to prepend the configured number of trailers
        params['extrasPrefixCount'] = settings('trailerNumber')
    url = "{server}/playQueues"
    xml = downloadutils.DownloadUtils().downloadUrl(
        '%s?%s' % (url, urlencode(params)), action_type="POST")
    # A valid playQueue answer is an XML with at least one child element
    try:
        xml[0].tag
    except (IndexError, TypeError, AttributeError):
        log.error("Error retrieving metadata for %s" % url)
        return None
    return xml
|
2016-02-07 22:38:50 +11:00
|
|
|
|
|
|
|
|
2016-03-09 03:41:07 +11:00
|
|
|
def PMSHttpsEnabled(url):
    """
    Returns True if the PMS can talk https, False otherwise.
    None if error occured, e.g. the connection timed out

    Call with e.g. url='192.168.0.1:32400' (NO http/https)

    This is done by GET /identity (returns an error if https is enabled and we
    are trying to use http)

    Prefers HTTPS over HTTP
    """
    doUtils = downloadutils.DownloadUtils().downloadUrl
    # First attempt: HTTPS (preferred). verifySSL=False because the PMS
    # typically presents a plex.direct / self-signed certificate
    res = doUtils('https://%s/identity' % url,
                  authenticate=False,
                  verifySSL=False)
    try:
        res.attrib
    except AttributeError:
        # Might have SSL deactivated. Try with http
        res = doUtils('http://%s/identity' % url,
                      authenticate=False,
                      verifySSL=False)
        try:
            res.attrib
        except AttributeError:
            # Neither protocol produced a valid XML answer
            log.error("Could not contact PMS %s" % url)
            return None
        else:
            # Received a valid XML. Server wants to talk HTTP
            return False
    else:
        # Received a valid XML. Server wants to talk HTTPS
        return True
|
2016-03-12 00:42:14 +11:00
|
|
|
|
|
|
|
|
2016-03-24 19:08:58 +11:00
|
|
|
def GetMachineIdentifier(url):
    """
    Returns the unique PMS machine identifier of url

    Returns None if something went wrong
    """
    answ = downloadutils.DownloadUtils().downloadUrl(
        '%s/identity' % url,
        authenticate=False,
        verifySSL=False,
        timeout=10)
    try:
        machineIdentifier = answ.attrib['machineIdentifier']
    except (AttributeError, KeyError):
        # Either no valid XML received or the attribute was missing
        log.error('Could not get the PMS machineIdentifier for %s' % url)
        return None
    log.debug('Found machineIdentifier %s for the PMS %s'
              % (machineIdentifier, url))
    return machineIdentifier
|
|
|
|
|
|
|
|
|
2016-03-28 01:57:20 +11:00
|
|
|
def GetPMSStatus(token):
    """
    token: Needs to be authorized with a master Plex token
    (not a managed user token)!
    Calls /status/sessions on currently active PMS. Returns a dict with:

    'sessionKey':
    {
        'userId':       Plex ID of the user (if applicable, otherwise '')
        'username':     Plex name (if applicable, otherwise '')
        'ratingKey':    Unique Plex id of item being played
    }

    or an empty dict.
    """
    sessions = {}
    xml = downloadutils.DownloadUtils().downloadUrl(
        '{server}/status/sessions',
        headerOptions={'X-Plex-Token': token})
    try:
        xml.attrib
    except AttributeError:
        # No valid XML received - return the empty dict
        return sessions
    for entry in xml:
        user = entry.find('User')
        if user is None:
            # No user attached to this session
            userId = ''
            username = ''
        else:
            username = user.attrib.get('title', '')
            userId = user.attrib.get('id', '')
        sessions[entry.attrib.get('sessionKey')] = {
            'userId': userId,
            'username': username,
            'ratingKey': entry.attrib.get('ratingKey')
        }
    return sessions
|
|
|
|
|
|
|
|
|
2016-03-12 00:42:14 +11:00
|
|
|
def scrobble(ratingKey, state):
    """
    Tells the PMS to set an item's watched state to state="watched" or
    state="unwatched"
    """
    # Silently ignore anything that is not a recognized state
    if state not in ("watched", "unwatched"):
        return
    args = {
        'key': ratingKey,
        'identifier': 'com.plexapp.plugins.library'
    }
    if state == "watched":
        url = "{server}/:/scrobble?" + urlencode(args)
    else:
        url = "{server}/:/unscrobble?" + urlencode(args)
    downloadutils.DownloadUtils().downloadUrl(url)
    log.info("Toggled watched state for Plex item %s" % ratingKey)
|
2016-10-23 02:15:10 +11:00
|
|
|
|
|
|
|
|
|
|
|
def delete_item_from_pms(plexid):
    """
    Deletes the item plexid from the Plex Media Server (and the harddrive!).
    Do make sure that the currently logged in user has the credentials

    Returns True if successful, False otherwise
    """
    deleted = downloadutils.DownloadUtils().downloadUrl(
        '{server}/library/metadata/%s' % plexid,
        action_type="DELETE") is True
    if deleted:
        log.info('Successfully deleted Plex id %s from the PMS' % plexid)
        return True
    log.error('Could not delete Plex id %s from the PMS' % plexid)
    return False
|
2016-10-24 04:38:21 +11:00
|
|
|
|
|
|
|
|
|
|
|
def get_PMS_settings(url, token):
    """
    Retrieve the PMS' settings via <url>/:/prefs

    Call with url: scheme://ip:port
    """
    headers = {'X-Plex-Token': token} if token else None
    return downloadutils.DownloadUtils().downloadUrl(
        '%s/:/prefs' % url,
        authenticate=False,
        verifySSL=False,
        headerOptions=headers)
|