Turn urllib and urlparse unicode-safe

commit 1ac19109ba (parent b4d036ed6b)
12 changed files with 138 additions and 73 deletions
@@ -29,9 +29,13 @@ class Main():
     def __init__(self):
         LOG.debug('Full sys.argv received: %s', argv)
         # Parse parameters
-        path = unicode_paths.decode(argv[0])
+        params = dict(parse_qsl(argv[2][1:]))
         arguments = unicode_paths.decode(argv[2])
-        params = dict(parse_qsl(arguments[1:]))
+        path = unicode_paths.decode(argv[0])
+        # Ensure unicode
+        for key, value in params.iteritems():
+            params[key.decode('utf-8')] = params.pop(key)
+            params[key] = value.decode('utf-8')
         mode = params.get('mode', '')
         itemid = params.get('id', '')
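Note: the manual decoding loop above exists because Python 2's parse_qsl() hands back byte strings. A standalone sketch (hypothetical query string, not taken from the add-on) of the equivalent decode step:

# -*- coding: utf-8 -*-
# Python 2: parse_qsl() returns str keys/values, so decode them to unicode.
from urlparse import parse_qsl

params = dict(parse_qsl('mode=browseplex&query=B%C3%A4r'))
params = dict((k.decode('utf-8'), v.decode('utf-8'))
              for k, v in params.iteritems())
print repr(params['query'])  # u'B\xe4r', i.e. u'Bär'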
@@ -2,7 +2,6 @@
 # -*- coding: utf-8 -*-
 from __future__ import absolute_import, division, unicode_literals
 from logging import getLogger
-from urllib import quote_plus, unquote
 import requests

 from .kodi_db import KodiVideoDB, KodiMusicDB, KodiTextureDB
@@ -20,11 +19,11 @@ BATCH_SIZE = 500


 def double_urlencode(text):
-    return quote_plus(quote_plus(text))
+    return utils.quote_plus(utils.quote_plus(text))


 def double_urldecode(text):
-    return unquote(unquote(text))
+    return utils.unquote(utils.unquote(text))


 class ImageCachingThread(backgroundthread.KillableThread):
@@ -89,7 +88,7 @@ class ImageCachingThread(backgroundthread.KillableThread):


 def cache_url(url):
-    url = double_urlencode(utils.try_encode(url))
+    url = double_urlencode(url)
     sleeptime = 0
     while True:
         try:
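The texture-cache URL is still double-encoded, but now via the unicode-safe wrappers (added to the utils module further down in this diff), so cache_url() no longer needs the try_encode() call. A standalone sketch of the round trip, with the wrapper logic inlined so it runs on its own:

# -*- coding: utf-8 -*-
# Python 2 sketch: unicode-safe double URL-encoding, as used for Kodi's texture cache.
import urllib

def quote_plus(s, safe=''):
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    return urllib.quote_plus(s, safe).decode('utf-8')

def unquote(s):
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    return urllib.unquote(s).decode('utf-8')

url = u'http://192.168.0.2:32400/library/metadata/1/thumb/Bär'
encoded = quote_plus(quote_plus(url))       # pure-ASCII, double-encoded unicode
assert unquote(unquote(encoded)) == url     # round trip restores the original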
@@ -226,7 +226,7 @@ class Section(object):
         args = copy.deepcopy(args)
         for key, value in args.iteritems():
             args[key] = value.format(self=self)
-        return 'plugin://plugin.video.plexkodiconnect?%s' % urllib.urlencode(args)
+        return utils.extend_url('plugin://%s' % v.ADDON_ID, args)

     def to_kodi(self):
         """
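utils.extend_url() is what the call sites now rely on to glue a query string onto a base URL; its definition is not among the hunks shown here. A hypothetical sketch of the assumed behaviour (UTF-8-encode the values, urlencode, append after '?'):

# -*- coding: utf-8 -*-
# Hypothetical sketch only -- the real utils.extend_url() is not shown in this diff.
import urllib

def extend_url(url, params):
    """Append a urlencoded, UTF-8-safe query built from params to url."""
    encoded = dict((k.encode('utf-8') if isinstance(k, unicode) else k,
                    v.encode('utf-8') if isinstance(v, unicode) else v)
                   for k, v in params.iteritems())
    return '%s?%s' % (url, urllib.urlencode(encoded))

print extend_url('plugin://plugin.video.plexkodiconnect',
                 {'mode': 'browseplex', 'key': u'/library/sections/2/all'})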
@@ -201,3 +201,18 @@ def copy_tree(src, dst, *args, **kwargs):
     src = encode_path(src)
     dst = encode_path(dst)
     return dir_util.copy_tree(src, dst, *args, **kwargs)
+
+
+def basename(path):
+    """
+    Returns the filename for path [unicode] or an empty string if not
+    possible. Safer than os.path.basename, as the path may contain \\
+    instead of / or vice versa.
+    """
+    try:
+        return path.rsplit('/', 1)[1]
+    except IndexError:
+        try:
+            return path.rsplit('\\', 1)[1]
+        except IndexError:
+            return ''
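Usage note for the new basename() helper (body copied from the hunk above so the sketch runs standalone): it tries '/' first, then '\\', and returns an empty string rather than raising.

# Demo of the basename() helper added above.
def basename(path):
    try:
        return path.rsplit('/', 1)[1]
    except IndexError:
        try:
            return path.rsplit('\\', 1)[1]
        except IndexError:
            return ''

print(basename(u'special://profile/addon_data/thumbs/poster.jpg'))  # poster.jpg
print(basename(u'C:\\Kodi\\userdata\\Thumbnails\\poster.jpg'))      # poster.jpg
print(basename(u'no-separator-here'))                               # (empty string)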
@@ -2,13 +2,8 @@
 # -*- coding: utf-8 -*-
 from __future__ import absolute_import, division, unicode_literals
 from logging import getLogger
-from urlparse import parse_qsl

-from . import playback
-from . import context_entry
-from . import transfer
-from . import backgroundthread
+from . import utils, playback, context_entry, transfer, backgroundthread

 ###############################################################################
@@ -35,7 +30,7 @@ class PlaybackTask(backgroundthread.Task):
             LOG.debug('Detected 3rd party add-on call - ignoring')
             transfer.send(True)
             return
-        params = dict(parse_qsl(params))
+        params = dict(utils.parse_qsl(params))
         mode = params.get('mode')
         resolve = False if params.get('handle') == '-1' else True
         LOG.debug('Received mode: %s, params: %s', mode, params)
@@ -5,8 +5,6 @@ Collection of functions associated with Kodi and Plex playlists and playqueues
 """
 from __future__ import absolute_import, division, unicode_literals
 from logging import getLogger
-import urllib
-from urlparse import parse_qsl, urlsplit

 from .plex_api import API
 from .plex_db import PlexDB
@@ -328,12 +326,16 @@ def playlist_item_from_kodi(kodi_item):
         item.plex_uuid = db_item['plex_id']  # we dont need the uuid yet :-)
     item.file = kodi_item.get('file')
     if item.plex_id is None and item.file is not None:
-        query = dict(parse_qsl(urlsplit(item.file).query))
+        try:
+            query = item.file.split('?', 1)[1]
+        except IndexError:
+            query = ''
+        query = dict(utils.parse_qsl(query))
         item.plex_id = utils.cast(int, query.get('plex_id'))
         item.plex_type = query.get('itemType')
     if item.plex_id is None and item.file is not None:
         item.uri = ('library://whatever/item/%s'
-                    % urllib.quote(utils.try_encode(item.file), safe=''))
+                    % utils.quote(item.file, safe=''))
     else:
         # TO BE VERIFIED - PLEX DOESN'T LIKE PLAYLIST ADDS IN THIS MANNER
         item.uri = ('library://%s/item/library%%2Fmetadata%%2F%s' %
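The urlsplit() call is replaced by a plain split on the first '?', with an IndexError fallback for files that carry no query at all; standalone illustration:

# -*- coding: utf-8 -*-
# Extracting the query part of a Kodi file path without urlsplit().
file_path = u'plugin://plugin.video.plexkodiconnect/movies/?plex_id=1234&itemType=movie'
try:
    query = file_path.split('?', 1)[1]
except IndexError:
    query = ''
print query  # plex_id=1234&itemType=movie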
@@ -6,13 +6,12 @@ manipulate playlists
 """
 from __future__ import absolute_import, division, unicode_literals
 from logging import getLogger
-import urllib

 from .common import PlaylistError

 from ..plex_api import API
 from ..downloadutils import DownloadUtils as DU
-from .. import app, variables as v
+from .. import utils, app, variables as v
 ###############################################################################
 LOG = getLogger('PLEX.playlists.pms')

@@ -56,8 +55,8 @@ def initialize(playlist, plex_id):
         'type': v.PLEX_PLAYLIST_TYPE_FROM_KODI[playlist.kodi_type],
         'title': playlist.plex_name,
         'smart': 0,
-        'uri': ('library://None/item/%s' % (urllib.quote('/library/metadata/%s'
+        'uri': ('library://None/item/%s' % (utils.quote('/library/metadata/%s'
                                                          % plex_id, safe='')))
     }
     xml = DU().downloadUrl(url='{server}/playlists',
                            action_type='POST',
@@ -80,8 +79,8 @@ def add_item(playlist, plex_id):
     Raises PlaylistError if that did not work out.
     """
     params = {
-        'uri': ('library://None/item/%s' % (urllib.quote('/library/metadata/%s'
+        'uri': ('library://None/item/%s' % (utils.quote('/library/metadata/%s'
                                                          % plex_id, safe='')))
     }
     xml = DU().downloadUrl(url='{server}/playlists/%s/items' % playlist.plex_id,
                            action_type='PUT',
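Both playlist calls build the same 'uri' parameter; with the unicode-safe quote (wrapper inlined here so the sketch runs standalone) the metadata path is fully percent-encoded:

# -*- coding: utf-8 -*-
# Python 2 sketch of the playlist 'uri' parameter construction.
import urllib

def quote(s, safe='/'):
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    return urllib.quote(s, safe).decode('utf-8')

plex_id = 54321
uri = 'library://None/item/%s' % quote('/library/metadata/%s' % plex_id, safe='')
print uri  # library://None/item/%2Flibrary%2Fmetadata%2F54321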
@@ -8,7 +8,6 @@ from logging import getLogger
 from threading import Thread
 from Queue import Empty
 from socket import SHUT_RDWR
-from urllib import urlencode
 from xbmc import executebuiltin

 from .plexbmchelper import listener, plexgdm, subscribers, httppersist
@@ -96,7 +95,7 @@ class PlexCompanion(backgroundthread.KillableThread):
                 transient_token=data.get('token'))
         elif data['containerKey'].startswith('/playQueues/'):
             _, container_key, _ = PF.ParseContainerKey(data['containerKey'])
-            xml = PF.DownloadChunks('{server}/playQueues/%s?' % container_key)
+            xml = PF.DownloadChunks('{server}/playQueues/%s' % container_key)
             if xml is None:
                 # "Play error"
                 utils.dialog('notification',
@@ -133,8 +132,7 @@ class PlexCompanion(backgroundthread.KillableThread):
                 'key': '{server}%s' % data.get('key'),
                 'offset': data.get('offset')
             }
-            executebuiltin('RunPlugin(plugin://%s?%s)'
-                           % (v.ADDON_ID, urlencode(params)))
+            executebuiltin('RunPlugin(plugin://%s)' % utils.extend_url(v.ADDON_ID, params))

     @staticmethod
     def _process_playlist(data):
@@ -2,9 +2,7 @@
 # -*- coding: utf-8 -*-
 from __future__ import absolute_import, division, unicode_literals
 from logging import getLogger
-from urllib import urlencode, quote_plus
 from ast import literal_eval
-from urlparse import urlparse, parse_qsl
 from copy import deepcopy
 from time import time
 from threading import Thread
@@ -57,9 +55,9 @@ def ParseContainerKey(containerKey):

     Output hence: library, key, query (str, int, dict)
     """
-    result = urlparse(containerKey)
-    library, key = GetPlexKeyNumber(result.path)
-    query = dict(parse_qsl(result.query))
+    result = utils.urlparse(containerKey)
+    library, key = GetPlexKeyNumber(result.path.decode('utf-8'))
+    query = dict(utils.parse_qsl(result.query))
     return library, key, query

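Why the extra .decode('utf-8') on result.path: per its docstring, utils.urlparse() returns an encoded ParseResult, so path components are still byte strings, while utils.parse_qsl() decodes the query itself. The same effect shown with the stdlib:

# -*- coding: utf-8 -*-
# Python 2: urlparse() on an encoded str yields encoded (str) components.
from urlparse import urlparse, parse_qsl

container_key = u'/playQueues/1234?own=1&repeat=0'.encode('utf-8')
result = urlparse(container_key)
print type(result.path)              # <type 'str'>, hence the .decode('utf-8')
print result.path.decode('utf-8')    # u'/playQueues/1234'
print dict(parse_qsl(result.query))  # {'own': '1', 'repeat': '0'}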
@@ -480,9 +478,9 @@ def GetPlexMetadata(key, reraise=False):
         # 'includePopularLeaves': 1,
         # 'includeConcerts': 1
     }
-    url = url + '?' + urlencode(arguments)
     try:
-        xml = DU().downloadUrl(url, reraise=reraise)
+        xml = DU().downloadUrl(utils.extend_url(url, arguments),
+                               reraise=reraise)
     except exceptions.RequestException:
         # "PMS offline"
         utils.dialog('notification',
@@ -556,7 +554,7 @@ def GetAllPlexChildren(key):
     Input:
         key             Key to a Plex item, e.g. 12345
     """
-    return DownloadChunks("{server}/library/metadata/%s/children?" % key)
+    return DownloadChunks("{server}/library/metadata/%s/children" % key)


 def GetPlexSectionResults(viewId, args=None):
@@ -569,9 +567,9 @@ def GetPlexSectionResults(viewId, args=None):

     Returns None if something went wrong
     """
-    url = "{server}/library/sections/%s/all?" % viewId
+    url = "{server}/library/sections/%s/all" % viewId
     if args:
-        url += urlencode(args) + '&'
+        url = utils.extend_url(url, args)
     return DownloadChunks(url)

@@ -726,9 +724,6 @@ class Leaves(DownloadGen):
 def DownloadChunks(url):
     """
     Downloads PMS url in chunks of CONTAINERSIZE.
-
-    url MUST end with '?' (if no other url encoded args are present) or '&'
-
     Returns a stitched-together xml or None.
     """
     xml = None
@@ -740,13 +735,13 @@ def DownloadChunks(url):
             'X-Plex-Container-Start': pos,
             'sort': 'id'
         }
-        xmlpart = DU().downloadUrl(url + urlencode(args))
+        xmlpart = DU().downloadUrl(utils.extend_url(url, args))
         # If something went wrong - skip in the hope that it works next time
         try:
             xmlpart.attrib
         except AttributeError:
-            LOG.error('Error while downloading chunks: %s',
-                      url + urlencode(args))
+            LOG.error('Error while downloading chunks: %s, args: %s',
+                      url, args)
             pos += CONTAINERSIZE
             error_counter += 1
             continue
@@ -799,16 +794,14 @@ def GetAllPlexLeaves(viewId, lastViewedAt=None, updatedAt=None):
     if updatedAt:
         args.append('updatedAt>=%s' % updatedAt)
     if args:
-        url += '?' + '&'.join(args) + '&'
-    else:
-        url += '?'
+        url += '?' + '&'.join(args)
     return DownloadChunks(url)


 def GetPlexOnDeck(viewId):
     """
     """
-    return DownloadChunks("{server}/library/sections/%s/onDeck?" % viewId)
+    return DownloadChunks("{server}/library/sections/%s/onDeck" % viewId)


 def get_plex_hub():
@@ -843,7 +836,7 @@ def init_plex_playqueue(plex_id, librarySectionUUID, mediatype='movie',
     }
     if trailers is True:
         args['extrasPrefixCount'] = utils.settings('trailerNumber')
-    xml = DU().downloadUrl(url + '?' + urlencode(args), action_type="POST")
+    xml = DU().downloadUrl(utils.extend_url(url, args), action_type="POST")
     try:
         xml[0].tag
     except (IndexError, TypeError, AttributeError):
@@ -976,12 +969,12 @@ def scrobble(ratingKey, state):
         'identifier': 'com.plexapp.plugins.library'
     }
     if state == "watched":
-        url = "{server}/:/scrobble?" + urlencode(args)
+        url = '{server}/:/scrobble'
     elif state == "unwatched":
-        url = "{server}/:/unscrobble?" + urlencode(args)
+        url = '{server}/:/unscrobble'
     else:
         return
-    DU().downloadUrl(url)
+    DU().downloadUrl(utils.extend_url(url, args))
     LOG.info("Toggled watched state for Plex item %s", ratingKey)

@@ -1058,12 +1051,13 @@ def transcode_image_path(key, AuthToken, path, width, height):
         path = 'http://127.0.0.1:32400' + key
     else:  # internal path, add-on
         path = 'http://127.0.0.1:32400' + path + '/' + key
-    path = utils.try_encode(path)
     # This is bogus (note the extra path component) but ATV is stupid when it
     # comes to caching images, it doesn't use querystrings. Fortunately PMS is
     # lenient...
+    path = path.encode('utf-8')
     transcode_path = ('/photo/:/transcode/%sx%s/%s'
-                      % (width, height, quote_plus(path)))
+                      % (width, height, utils.quote_plus(path)))
+    transcode_path = transcode_path.decode('utf-8')
     args = {
         'width': width,
         'height': height,
@@ -1071,4 +1065,4 @@ def transcode_image_path(key, AuthToken, path, width, height):
     }
     if AuthToken:
         args['X-Plex-Token'] = AuthToken
-    return transcode_path + '?' + urlencode(args)
+    return utils.extend_url(transcode_path, args)
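transcode_image_path() now encodes the full image URL to UTF-8 before quoting it into the bogus extra path component, and decodes the finished transcode path back to unicode. Roughly, with the stdlib standing in for the utils wrapper:

# -*- coding: utf-8 -*-
# Python 2 sketch of the transcode-path construction above.
import urllib

path = u'http://127.0.0.1:32400/library/metadata/1/thumb/Bär'.encode('utf-8')
transcode_path = ('/photo/:/transcode/%sx%s/%s'
                  % (1920, 1080, urllib.quote_plus(path)))
transcode_path = transcode_path.decode('utf-8')
print transcode_path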
@@ -8,13 +8,8 @@ from logging import getLogger
 from re import sub
 from SocketServer import ThreadingMixIn
 from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
-from urlparse import urlparse, parse_qs
-import xbmc

-from .. import companion
-from .. import json_rpc as js
-from .. import clientinfo
-from .. import variables as v
+from .. import utils, companion, json_rpc as js, clientinfo, variables as v
 from .. import app

 ###############################################################################
@@ -102,8 +97,8 @@ class MyHandler(BaseHTTPRequestHandler):

         request_path = self.path[1:]
         request_path = sub(r"\?.*", "", request_path)
-        url = urlparse(self.path)
-        paramarrays = parse_qs(url.query)
+        parseresult = utils.urlparse(self.path)
+        paramarrays = utils.parse_qs(parseresult.query)
         params = {}
         for key in paramarrays:
             params[key] = paramarrays[key][0]
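parse_qs() maps every key to a list of values, so the handler keeps its flattening loop; for instance:

# -*- coding: utf-8 -*-
# Python 2: parse_qs() returns lists per key; the handler keeps the first value only.
from urlparse import parse_qs

paramarrays = parse_qs('commandID=5&type=video&key=%2Flibrary%2Fmetadata%2F42')
params = {}
for key in paramarrays:
    params[key] = paramarrays[key][0]
print params  # {'commandID': '5', 'type': 'video', 'key': '/library/metadata/42'}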
@@ -10,6 +10,7 @@ from datetime import datetime
 from unicodedata import normalize
 from threading import Lock
 import urllib
+import urlparse as _urlparse
 # Originally tried faster cElementTree, but does NOT work reliably with Kodi
 import xml.etree.ElementTree as etree
 # etree parse unsafe; make sure we're always receiving unicode
@@ -26,8 +27,6 @@ import xbmcgui

 from . import path_ops, variables as v

-###############################################################################
-
 LOG = getLogger('PLEX.utils')

 WINDOW = xbmcgui.Window(10000)
@@ -50,9 +49,6 @@ REGEX_MUSICPATH = re.compile(r'''^\^(.+)\$$''')
 # Grab Plex id from an URL-encoded string
 REGEX_PLEX_ID_FROM_URL = re.compile(r'''metadata%2F(\d+)''')

-###############################################################################
-# Main methods
-

 def garbageCollect():
     gc.collect(2)
@@ -326,6 +322,73 @@ def encode_dict(dictionary):
     return dictionary


+def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
+    """
+    unicode-safe way to use urlparse.parse_qs(). Pass in the query string qs
+    either as str or unicode
+    Returns a dict with lists as values; all entries unicode
+    """
+    if isinstance(qs, unicode):
+        qs = qs.encode('utf-8')
+    qs = _urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
+    return {k.decode('utf-8'): [e.decode('utf-8') for e in v]
+            for k, v in qs.iteritems()}
+
+
+def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
+    """
+    unicode-safe way to use urlparse.parse_qsl(). Pass in either str or unicode
+    Returns a list of unicode tuples
+    """
+    if isinstance(qs, unicode):
+        qs = qs.encode('utf-8')
+    qs = _urlparse.parse_qsl(qs, keep_blank_values, strict_parsing)
+    return [(x.decode('utf-8'), y.decode('utf-8')) for (x, y) in qs]
+
+
+def urlparse(url, scheme='', allow_fragments=True):
+    """
+    unicode-safe way to use urlparse.urlparse(). Pass in either str or unicode
+    CAREFUL: returns an encoded urlparse.ParseResult()!
+    """
+    if isinstance(url, unicode):
+        url = url.encode('utf-8')
+    return _urlparse.urlparse(url, scheme, allow_fragments)
+
+
+def quote(s, safe='/'):
+    """
+    unicode-safe way to use urllib.quote(). Pass in either str or unicode
+    Returns unicode
+    """
+    if isinstance(s, unicode):
+        s = s.encode('utf-8')
+    s = urllib.quote(s, safe)
+    return s.decode('utf-8')
+
+
+def quote_plus(s, safe=''):
+    """
+    unicode-safe way to use urllib.quote_plus(). Pass in either str or unicode
+    Returns unicode
+    """
+    if isinstance(s, unicode):
+        s = s.encode('utf-8')
+    s = urllib.quote_plus(s, safe)
+    return s.decode('utf-8')
+
+
+def unquote(s):
+    """
+    unicode-safe way to use urllib.unquote(). Pass in either str or unicode
+    Returns unicode
+    """
+    if isinstance(s, unicode):
+        s = s.encode('utf-8')
+    s = urllib.unquote(s)
+    return s.decode('utf-8')
+
+
 def try_encode(input_str, encoding='utf-8'):
     """
     Will try to encode input_str (in unicode) to encoding. This possibly
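These wrappers exist because the Python 2 stdlib helpers misbehave on unicode input containing non-ASCII characters: quote()/quote_plus() raise, and unquote() produces mojibake. A minimal demonstration of the failure modes (observed on CPython 2.7):

# -*- coding: utf-8 -*-
# The Python 2 pitfalls the unicode-safe wrappers above guard against.
import urllib

try:
    urllib.quote_plus(u'Bär')                       # non-ASCII unicode -> KeyError
except KeyError as err:
    print 'quote_plus on unicode raised KeyError: %r' % err

print repr(urllib.unquote(u'B%C3%A4r'))                 # u'B\xc3\xa4r' -- mojibake
print repr(urllib.unquote('B%C3%A4r').decode('utf-8'))  # u'B\xe4r' -- correct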
@@ -8,7 +8,6 @@ e.g. plugin://... calls. Hence be careful to only rely on window variables.
 """
 from __future__ import absolute_import, division, unicode_literals
 from logging import getLogger
-import urllib
 try:
     from multiprocessing.pool import ThreadPool
     SUPPORTS_POOL = True
@@ -75,10 +74,12 @@ def get_clean_image(image):
         image = thumbcache
     if image and b"image://" in image:
         image = image.replace(b"image://", b"")
-        image = urllib.unquote(image)
-        if image.endswith(b"/"):
+        image = utils.unquote(image)
+        if image.endswith("/"):
             image = image[:-1]
-        return image.decode('utf-8')
+        return image
+    else:
+        return image.decode('utf-8')


 def generate_item(xml_element):
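get_clean_image() still receives byte strings (note the b"image://" checks), but utils.unquote() already yields unicode, which is why the trailing-slash check dropped its b prefix and the decode moved to the else branch. A standalone sketch of the image:// branch, with the stdlib standing in for utils.unquote():

# -*- coding: utf-8 -*-
# Sketch of the image:// cleanup path above.
import urllib

image = b'image://smb%3a%2f%2fserver%2fmovies%2fposter.jpg/'
image = image.replace(b'image://', b'')
image = urllib.unquote(image).decode('utf-8')
if image.endswith('/'):
    image = image[:-1]
print image  # smb://server/movies/poster.jpg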
@@ -227,7 +228,7 @@ def _generate_content(xml_element):
             'key': key,
             'offset': xml_element.attrib.get('viewOffset', '0'),
         }
-        url = "plugin://%s?%s" % (v.ADDON_ID, urllib.urlencode(params))
+        url = utils.extend_url('plugin://%s' % v.ADDON_ID, params)
     elif plex_type == v.PLEX_TYPE_PHOTO:
         url = api.get_picture_path()
     else: