Turn urllib and urlparse unicode-safe
commit 1ac19109ba (parent b4d036ed6b)
12 changed files with 138 additions and 73 deletions
@@ -29,9 +29,13 @@ class Main():
    def __init__(self):
        LOG.debug('Full sys.argv received: %s', argv)
        # Parse parameters
        path = unicode_paths.decode(argv[0])
        params = dict(parse_qsl(argv[2][1:]))
        arguments = unicode_paths.decode(argv[2])
        params = dict(parse_qsl(arguments[1:]))
        path = unicode_paths.decode(argv[0])
        # Ensure unicode
        for key, value in params.iteritems():
            params[key.decode('utf-8')] = params.pop(key)
            params[key] = value.decode('utf-8')
        mode = params.get('mode', '')
        itemid = params.get('id', '')
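Illustration, not part of the commit (the query values are hypothetical): the unicode-safe parse_qsl wrapper introduced in utils further below decodes every key and value, so a params dict built this way contains only unicode:

>>> params = dict(utils.parse_qsl('mode=play&id=123&file=%C3%A4.mkv'))
>>> params['file']
u'\xe4.mkv'
>>> params.get('mode', '')
u'play'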
@@ -2,7 +2,6 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
from urllib import quote_plus, unquote
import requests

from .kodi_db import KodiVideoDB, KodiMusicDB, KodiTextureDB

@@ -20,11 +19,11 @@ BATCH_SIZE = 500


def double_urlencode(text):
    return quote_plus(quote_plus(text))
    return utils.quote_plus(utils.quote_plus(text))


def double_urldecode(text):
    return unquote(unquote(text))
    return utils.unquote(utils.unquote(text))


class ImageCachingThread(backgroundthread.KillableThread):

@@ -89,7 +88,7 @@ class ImageCachingThread(backgroundthread.KillableThread):


def cache_url(url):
    url = double_urlencode(utils.try_encode(url))
    url = double_urlencode(url)
    sleeptime = 0
    while True:
        try:
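Illustrative sketch, not part of the commit (input value hypothetical, UTF-8 assumed): double_urlencode() now applies the unicode-safe quote_plus twice, so the percent signs of the first pass are escaped again:

>>> utils.quote_plus(u'ä&b')                  # first pass
u'%C3%A4%26b'
>>> double_urlencode(u'ä&b')                  # second pass escapes the '%' signs too
u'%25C3%25A4%2526b'
>>> double_urldecode(u'%25C3%25A4%2526b')
u'\xe4&b'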
@@ -226,7 +226,7 @@ class Section(object):
        args = copy.deepcopy(args)
        for key, value in args.iteritems():
            args[key] = value.format(self=self)
        return 'plugin://plugin.video.plexkodiconnect?%s' % urllib.urlencode(args)
        return utils.extend_url('plugin://%s' % v.ADDON_ID, args)

    def to_kodi(self):
        """
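utils.extend_url() itself is not part of this diff; judging from the call sites it replaces the manual "'?' + urlencode(...)" concatenation. A minimal sketch of that assumed behaviour (hypothetical implementation, not the project's actual helper):

import urllib

def extend_url(url, params):
    # Assumed behaviour only: UTF-8-encode the unicode params, url-encode them
    # and append with '?', or with '&' if url already carries a query string.
    if not params:
        return url
    params = dict((k.encode('utf-8'), unicode(v).encode('utf-8'))
                  for k, v in params.items())
    return url + ('&' if '?' in url else '?') + urllib.urlencode(params)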
@@ -201,3 +201,18 @@ def copy_tree(src, dst, *args, **kwargs):
    src = encode_path(src)
    dst = encode_path(dst)
    return dir_util.copy_tree(src, dst, *args, **kwargs)


def basename(path):
    """
    Returns the filename for path [unicode] or an empty string if not possible.
    Safer than using os.path.basename, as we could be expecting \\ for / or
    vice versa
    """
    try:
        return path.rsplit('/', 1)[1]
    except IndexError:
        try:
            return path.rsplit('\\', 1)[1]
        except IndexError:
            return ''
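Quick illustration, not part of the commit, of the new path_ops.basename() fallback chain (example paths hypothetical):

>>> basename(u'/movies/Arrival (2016).mkv')
u'Arrival (2016).mkv'
>>> basename(u'C:\\Movies\\Arrival (2016).mkv')
u'Arrival (2016).mkv'
>>> basename(u'no-separator-here')
''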
@@ -2,13 +2,8 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
from urlparse import parse_qsl

from . import playback
from . import context_entry
from . import transfer
from . import backgroundthread
from . import utils, playback, context_entry, transfer, backgroundthread

###############################################################################

@@ -35,7 +30,7 @@ class PlaybackTask(backgroundthread.Task):
            LOG.debug('Detected 3rd party add-on call - ignoring')
            transfer.send(True)
            return
        params = dict(parse_qsl(params))
        params = dict(utils.parse_qsl(params))
        mode = params.get('mode')
        resolve = False if params.get('handle') == '-1' else True
        LOG.debug('Received mode: %s, params: %s', mode, params)
@@ -5,8 +5,6 @@ Collection of functions associated with Kodi and Plex playlists and playqueues
"""
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
import urllib
from urlparse import parse_qsl, urlsplit

from .plex_api import API
from .plex_db import PlexDB

@@ -328,12 +326,16 @@ def playlist_item_from_kodi(kodi_item):
        item.plex_uuid = db_item['plex_id']  # we dont need the uuid yet :-)
    item.file = kodi_item.get('file')
    if item.plex_id is None and item.file is not None:
        query = dict(parse_qsl(urlsplit(item.file).query))
        try:
            query = item.file.split('?', 1)[1]
        except IndexError:
            query = ''
        query = dict(utils.parse_qsl(query))
        item.plex_id = utils.cast(int, query.get('plex_id'))
        item.plex_type = query.get('itemType')
    if item.plex_id is None and item.file is not None:
        item.uri = ('library://whatever/item/%s'
                    % urllib.quote(utils.try_encode(item.file), safe=''))
                    % utils.quote(item.file, safe=''))
    else:
        # TO BE VERIFIED - PLEX DOESN'T LIKE PLAYLIST ADDS IN THIS MANNER
        item.uri = ('library://%s/item/library%%2Fmetadata%%2F%s' %
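Illustration, not part of the commit (the file path is hypothetical), of the new query extraction that replaces urlsplit():

>>> item_file = u'plugin://plugin.video.plexkodiconnect/movies/?plex_id=1234&itemType=movie'
>>> query = item_file.split('?', 1)[1]
>>> utils.parse_qsl(query)
[(u'plex_id', u'1234'), (u'itemType', u'movie')]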
@@ -6,13 +6,12 @@ manipulate playlists
"""
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
import urllib

from .common import PlaylistError

from ..plex_api import API
from ..downloadutils import DownloadUtils as DU
from .. import app, variables as v
from .. import utils, app, variables as v
###############################################################################
LOG = getLogger('PLEX.playlists.pms')

@@ -56,8 +55,8 @@ def initialize(playlist, plex_id):
        'type': v.PLEX_PLAYLIST_TYPE_FROM_KODI[playlist.kodi_type],
        'title': playlist.plex_name,
        'smart': 0,
        'uri': ('library://None/item/%s' % (urllib.quote('/library/metadata/%s'
                                                         % plex_id, safe='')))
        'uri': ('library://None/item/%s' % (utils.quote('/library/metadata/%s'
                                                        % plex_id, safe='')))
    }
    xml = DU().downloadUrl(url='{server}/playlists',
                           action_type='POST',

@@ -80,8 +79,8 @@ def add_item(playlist, plex_id):
    Raises PlaylistError if that did not work out.
    """
    params = {
        'uri': ('library://None/item/%s' % (urllib.quote('/library/metadata/%s'
                                                         % plex_id, safe='')))
        'uri': ('library://None/item/%s' % (utils.quote('/library/metadata/%s'
                                                        % plex_id, safe='')))
    }
    xml = DU().downloadUrl(url='{server}/playlists/%s/items' % playlist.plex_id,
                           action_type='PUT',
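Resulting uri for a hypothetical plex_id, for illustration only (utils.quote with safe='' percent-encodes the slashes as well):

>>> 'library://None/item/%s' % utils.quote('/library/metadata/%s' % 1234, safe='')
u'library://None/item/%2Flibrary%2Fmetadata%2F1234'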
@@ -8,7 +8,6 @@ from logging import getLogger
from threading import Thread
from Queue import Empty
from socket import SHUT_RDWR
from urllib import urlencode
from xbmc import executebuiltin

from .plexbmchelper import listener, plexgdm, subscribers, httppersist

@@ -96,7 +95,7 @@ class PlexCompanion(backgroundthread.KillableThread):
                transient_token=data.get('token'))
        elif data['containerKey'].startswith('/playQueues/'):
            _, container_key, _ = PF.ParseContainerKey(data['containerKey'])
            xml = PF.DownloadChunks('{server}/playQueues/%s?' % container_key)
            xml = PF.DownloadChunks('{server}/playQueues/%s' % container_key)
            if xml is None:
                # "Play error"
                utils.dialog('notification',

@@ -133,8 +132,7 @@ class PlexCompanion(backgroundthread.KillableThread):
            'key': '{server}%s' % data.get('key'),
            'offset': data.get('offset')
        }
        executebuiltin('RunPlugin(plugin://%s?%s)'
                       % (v.ADDON_ID, urlencode(params)))
        executebuiltin('RunPlugin(plugin://%s)' % utils.extend_url(v.ADDON_ID, params))

    @staticmethod
    def _process_playlist(data):
@@ -2,9 +2,7 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
from urllib import urlencode, quote_plus
from ast import literal_eval
from urlparse import urlparse, parse_qsl
from copy import deepcopy
from time import time
from threading import Thread

@@ -57,9 +55,9 @@ def ParseContainerKey(containerKey):

    Output hence: library, key, query (str, int, dict)
    """
    result = urlparse(containerKey)
    library, key = GetPlexKeyNumber(result.path)
    query = dict(parse_qsl(result.query))
    result = utils.urlparse(containerKey)
    library, key = GetPlexKeyNumber(result.path.decode('utf-8'))
    query = dict(utils.parse_qsl(result.query))
    return library, key, query
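Illustration, not part of the commit (the containerKey value is hypothetical): utils.urlparse() returns an encoded ParseResult, which is why result.path gets decoded explicitly above:

>>> result = utils.urlparse(u'/playQueues/1234?own=1&repeat=0')
>>> result.path
'/playQueues/1234'
>>> result.path.decode('utf-8')
u'/playQueues/1234'
>>> utils.parse_qsl(result.query)
[(u'own', u'1'), (u'repeat', u'0')]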
@@ -480,9 +478,9 @@ def GetPlexMetadata(key, reraise=False):
        # 'includePopularLeaves': 1,
        # 'includeConcerts': 1
    }
    url = url + '?' + urlencode(arguments)
    try:
        xml = DU().downloadUrl(url, reraise=reraise)
        xml = DU().downloadUrl(utils.extend_url(url, arguments),
                               reraise=reraise)
    except exceptions.RequestException:
        # "PMS offline"
        utils.dialog('notification',

@@ -556,7 +554,7 @@ def GetAllPlexChildren(key):
    Input:
        key             Key to a Plex item, e.g. 12345
    """
    return DownloadChunks("{server}/library/metadata/%s/children?" % key)
    return DownloadChunks("{server}/library/metadata/%s/children" % key)


def GetPlexSectionResults(viewId, args=None):

@@ -569,9 +567,9 @@ def GetPlexSectionResults(viewId, args=None):

    Returns None if something went wrong
    """
    url = "{server}/library/sections/%s/all?" % viewId
    url = "{server}/library/sections/%s/all" % viewId
    if args:
        url += urlencode(args) + '&'
        url = utils.extend_url(url, args)
    return DownloadChunks(url)
@@ -726,9 +724,6 @@ class Leaves(DownloadGen):
def DownloadChunks(url):
    """
    Downloads PMS url in chunks of CONTAINERSIZE.

    url MUST end with '?' (if no other url encoded args are present) or '&'

    Returns a stitched-together xml or None.
    """
    xml = None

@@ -740,13 +735,13 @@ def DownloadChunks(url):
            'X-Plex-Container-Start': pos,
            'sort': 'id'
        }
        xmlpart = DU().downloadUrl(url + urlencode(args))
        xmlpart = DU().downloadUrl(utils.extend_url(url, args))
        # If something went wrong - skip in the hope that it works next time
        try:
            xmlpart.attrib
        except AttributeError:
            LOG.error('Error while downloading chunks: %s',
                      url + urlencode(args))
            LOG.error('Error while downloading chunks: %s, args: %s',
                      url, args)
            pos += CONTAINERSIZE
            error_counter += 1
            continue
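For illustration only (the section id and chunk size are hypothetical; further container args are omitted), each loop iteration now builds its chunk URL via utils.extend_url() instead of string concatenation:

# pos = 0    ->  {server}/library/sections/1/all?X-Plex-Container-Start=0&sort=id&...
# pos = 500  ->  {server}/library/sections/1/all?X-Plex-Container-Start=500&sort=id&...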
@@ -799,16 +794,14 @@ def GetAllPlexLeaves(viewId, lastViewedAt=None, updatedAt=None):
    if updatedAt:
        args.append('updatedAt>=%s' % updatedAt)
    if args:
        url += '?' + '&'.join(args) + '&'
    else:
        url += '?'
    url += '?' + '&'.join(args)
    return DownloadChunks(url)


def GetPlexOnDeck(viewId):
    """
    """
    return DownloadChunks("{server}/library/sections/%s/onDeck?" % viewId)
    return DownloadChunks("{server}/library/sections/%s/onDeck" % viewId)


def get_plex_hub():

@@ -843,7 +836,7 @@ def init_plex_playqueue(plex_id, librarySectionUUID, mediatype='movie',
    }
    if trailers is True:
        args['extrasPrefixCount'] = utils.settings('trailerNumber')
    xml = DU().downloadUrl(url + '?' + urlencode(args), action_type="POST")
    xml = DU().downloadUrl(utils.extend_url(url, args), action_type="POST")
    try:
        xml[0].tag
    except (IndexError, TypeError, AttributeError):
@@ -976,12 +969,12 @@ def scrobble(ratingKey, state):
        'identifier': 'com.plexapp.plugins.library'
    }
    if state == "watched":
        url = "{server}/:/scrobble?" + urlencode(args)
        url = '{server}/:/scrobble'
    elif state == "unwatched":
        url = "{server}/:/unscrobble?" + urlencode(args)
        url = '{server}/:/unscrobble'
    else:
        return
    DU().downloadUrl(url)
    DU().downloadUrl(utils.extend_url(url, args))
    LOG.info("Toggled watched state for Plex item %s", ratingKey)
@@ -1058,12 +1051,13 @@ def transcode_image_path(key, AuthToken, path, width, height):
        path = 'http://127.0.0.1:32400' + key
    else:  # internal path, add-on
        path = 'http://127.0.0.1:32400' + path + '/' + key
    path = utils.try_encode(path)
    # This is bogus (note the extra path component) but ATV is stupid when it
    # comes to caching images, it doesn't use querystrings. Fortunately PMS is
    # lenient...
    path = path.encode('utf-8')
    transcode_path = ('/photo/:/transcode/%sx%s/%s'
                      % (width, height, quote_plus(path)))
                      % (width, height, utils.quote_plus(path)))
    transcode_path = transcode_path.decode('utf-8')
    args = {
        'width': width,
        'height': height,

@@ -1071,4 +1065,4 @@ def transcode_image_path(key, AuthToken, path, width, height):
    }
    if AuthToken:
        args['X-Plex-Token'] = AuthToken
    return transcode_path + '?' + urlencode(args)
    return utils.extend_url(transcode_path, args)
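Illustration, not part of the commit (key and dimensions are hypothetical), of the transcode path built with the unicode-safe quote_plus:

>>> path = u'http://127.0.0.1:32400/library/metadata/1234/thumb/5678'
>>> '/photo/:/transcode/%sx%s/%s' % (1920, 1080, utils.quote_plus(path))
u'/photo/:/transcode/1920x1080/http%3A%2F%2F127.0.0.1%3A32400%2Flibrary%2Fmetadata%2F1234%2Fthumb%2F5678'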
@@ -8,13 +8,8 @@ from logging import getLogger
from re import sub
from SocketServer import ThreadingMixIn
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from urlparse import urlparse, parse_qs
import xbmc

from .. import companion
from .. import json_rpc as js
from .. import clientinfo
from .. import variables as v
from .. import utils, companion, json_rpc as js, clientinfo, variables as v
from .. import app

###############################################################################

@@ -102,8 +97,8 @@ class MyHandler(BaseHTTPRequestHandler):

        request_path = self.path[1:]
        request_path = sub(r"\?.*", "", request_path)
        url = urlparse(self.path)
        paramarrays = parse_qs(url.query)
        parseresult = utils.urlparse(self.path)
        paramarrays = utils.parse_qs(parseresult.query)
        params = {}
        for key in paramarrays:
            params[key] = paramarrays[key][0]
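Illustration with a hypothetical Companion request (not part of the commit): utils.parse_qs() maps unicode keys to lists of unicode values, hence the [0] above:

>>> paramarrays = utils.parse_qs(u'commandID=5&key=%2Flibrary%2Fmetadata%2F1234')
>>> paramarrays['key']
[u'/library/metadata/1234']
>>> paramarrays['commandID'][0]
u'5'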
@@ -10,6 +10,7 @@ from datetime import datetime
from unicodedata import normalize
from threading import Lock
import urllib
import urlparse as _urlparse
# Originally tried faster cElementTree, but does NOT work reliably with Kodi
import xml.etree.ElementTree as etree
# etree parse unsafe; make sure we're always receiving unicode

@@ -26,8 +27,6 @@ import xbmcgui

from . import path_ops, variables as v

###############################################################################

LOG = getLogger('PLEX.utils')

WINDOW = xbmcgui.Window(10000)

@@ -50,9 +49,6 @@ REGEX_MUSICPATH = re.compile(r'''^\^(.+)\$$''')
# Grab Plex id from an URL-encoded string
REGEX_PLEX_ID_FROM_URL = re.compile(r'''metadata%2F(\d+)''')

###############################################################################
# Main methods


def garbageCollect():
    gc.collect(2)
@@ -326,6 +322,73 @@ def encode_dict(dictionary):
    return dictionary


def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
    """
    unicode-safe way to use urlparse.parse_qs(). Pass in the query string qs
    either as str or unicode
    Returns a dict with lists as values; all entries unicode
    """
    if isinstance(qs, unicode):
        qs = qs.encode('utf-8')
    qs = _urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
    return {k.decode('utf-8'): [e.decode('utf-8') for e in v]
            for k, v in qs.iteritems()}


def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
    """
    unicode-safe way to use urlparse.parse_qsl(). Pass in either str or unicode
    Returns a list of unicode tuples
    """
    if isinstance(qs, unicode):
        qs = qs.encode('utf-8')
    qs = _urlparse.parse_qsl(qs, keep_blank_values, strict_parsing)
    return [(x.decode('utf-8'), y.decode('utf-8')) for (x, y) in qs]


def urlparse(url, scheme='', allow_fragments=True):
    """
    unicode-safe way to use urlparse.urlparse(). Pass in either str or unicode
    CAREFUL: returns an encoded urlparse.ParseResult()!
    """
    if isinstance(url, unicode):
        url = url.encode('utf-8')
    return _urlparse.urlparse(url, scheme, allow_fragments)


def quote(s, safe='/'):
    """
    unicode-safe way to use urllib.quote(). Pass in either str or unicode
    Returns unicode
    """
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    s = urllib.quote(s, safe)
    return s.decode('utf-8')


def quote_plus(s, safe=''):
    """
    unicode-safe way to use urllib.quote_plus(). Pass in either str or unicode
    Returns unicode
    """
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    s = urllib.quote_plus(s, safe)
    return s.decode('utf-8')


def unquote(s):
    """
    unicode-safe way to use urllib.unquote(). Pass in either str or unicode
    Returns unicode
    """
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    s = urllib.unquote(s)
    return s.decode('utf-8')


def try_encode(input_str, encoding='utf-8'):
    """
    Will try to encode input_str (in unicode) to encoding. This possibly
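Usage sketch for the new unicode-safe wrappers, not part of the commit (all values hypothetical):

>>> utils.quote(u'/library/metadata/äöü')
u'/library/metadata/%C3%A4%C3%B6%C3%BC'
>>> utils.quote_plus(u'Der Herr der Ringe')
u'Der+Herr+der+Ringe'
>>> utils.unquote(u'%C3%A4%C3%B6%C3%BC')
u'\xe4\xf6\xfc'
>>> utils.parse_qsl(u'mode=play&plex_id=1234')
[(u'mode', u'play'), (u'plex_id', u'1234')]
>>> utils.parse_qs(u'mode=play&plex_id=1234')['plex_id']
[u'1234']
>>> utils.urlparse(u'http://192.168.0.2:32400/library/sections/1/all?type=1').query
'type=1'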
@@ -8,7 +8,6 @@ e.g. plugin://... calls. Hence be careful to only rely on window variables.
"""
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
import urllib
try:
    from multiprocessing.pool import ThreadPool
    SUPPORTS_POOL = True
@@ -75,10 +74,12 @@ def get_clean_image(image):
        image = thumbcache
    if image and b"image://" in image:
        image = image.replace(b"image://", b"")
        image = urllib.unquote(image)
        if image.endswith(b"/"):
        image = utils.unquote(image)
        if image.endswith("/"):
            image = image[:-1]
        return image.decode('utf-8')
        return image
    else:
        return image.decode('utf-8')


def generate_item(xml_element):
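Hypothetical illustration, not part of the commit, of the image:// branch after the change: utils.unquote() already hands back unicode, so no trailing decode is needed:

>>> image = b'image://http%3a%2f%2f192.168.0.2%3a32400%2flibrary%2fmetadata%2f1234%2fthumb%2f1/'
>>> image = utils.unquote(image.replace(b"image://", b""))
>>> image.endswith("/")
True
>>> image[:-1]
u'http://192.168.0.2:32400/library/metadata/1234/thumb/1'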
@@ -227,7 +228,7 @@ def _generate_content(xml_element):
            'key': key,
            'offset': xml_element.attrib.get('viewOffset', '0'),
        }
        url = "plugin://%s?%s" % (v.ADDON_ID, urllib.urlencode(params))
        url = utils.extend_url('plugin://%s' % v.ADDON_ID, params)
    elif plex_type == v.PLEX_TYPE_PHOTO:
        url = api.get_picture_path()
    else: