Compare commits
219 commits
py3-reduce
...
master
Author | SHA1 | Date | |
---|---|---|---|
|
ee1eb14476 | ||
|
9e54f59fd4 | ||
|
4cfcc4c1f8 | ||
|
5a6623a1dc | ||
|
e5fa5de670 | ||
|
191a3131e3 | ||
|
f96c246244 | ||
|
a4bf3d061a | ||
|
24c1ada5b1 | ||
|
61114e0d2e | ||
|
bdc98d0352 | ||
|
436d2e4391 | ||
|
2bc98f9ff1 | ||
|
a4fba553f3 | ||
|
097fd4cfa2 | ||
|
d80d3525b3 | ||
|
d54307ffd5 | ||
|
53e3258517 | ||
|
dae123acee | ||
|
f4a0789fc0 | ||
|
887f659b2f | ||
|
2bd692e173 | ||
|
176fa07e80 | ||
|
9da61a059f | ||
|
11d06d909e | ||
|
e96df700c1 | ||
|
057921b05e | ||
|
63bd85d5c8 | ||
|
9d6bae3957 | ||
|
2432ce5ee6 | ||
|
5a009b7ea0 | ||
|
26073e5dac | ||
|
47cd15baa0 | ||
|
560fc5b9c8 | ||
|
9495e1e27d | ||
|
5720811a7e | ||
|
45afba1840 | ||
|
cb1a3e74e0 | ||
|
76c4fba8e6 | ||
|
516a09ce56 | ||
|
41855882ab | ||
|
289266bb81 | ||
|
0490ce766e | ||
|
c182b8f5f8 | ||
|
e6a0af4621 | ||
|
ea877b55d5 | ||
|
7c2478a568 | ||
|
c99db1edff | ||
|
2f25ba2eae | ||
|
7602f02bcd | ||
|
de9c935a40 | ||
|
e049f37da9 | ||
|
ce6ab2c258 | ||
|
0f7410e0e3 | ||
|
4de0920bf5 | ||
|
2b9594dd90 | ||
|
4f75502a8a | ||
|
6bf41116cb | ||
|
6201a04513 | ||
|
74ec9eff97 | ||
|
295f403c64 | ||
|
cb8dc30c7c | ||
|
262315c3e7 | ||
|
a18b971564 | ||
|
1bd1da9f5a | ||
|
2fd91ff9d6 | ||
|
1001df5e30 | ||
|
0f2fd110db | ||
|
ada337c2c4 | ||
|
1066f857a2 | ||
|
858a33f816 | ||
|
fce964cc7b | ||
|
7c903d0c94 | ||
|
3ff97d0669 | ||
|
7553061945 | ||
|
6105a571c8 | ||
|
2484cf10ac | ||
|
f171785602 | ||
|
3e9c8c6361 | ||
|
cac32cc66a | ||
|
c4d14c02e2 | ||
|
c6056b4efc | ||
|
2c979fba57 | ||
|
f877c37e76 | ||
|
038960c538 | ||
|
cdf1514215 | ||
|
f15ef8886a | ||
|
7f8339a753 | ||
|
0cf35b7b87 | ||
|
09b0c61f11 | ||
|
675a8150cc | ||
|
a2194a5ce8 | ||
|
166b94c4cd | ||
|
46f99901cc | ||
|
36befcf46a | ||
|
abd8b04ff9 | ||
|
dbf2117a30 | ||
|
a2e08a30ec | ||
|
d38fe789b3 | ||
|
f262fba18a | ||
|
29822db781 | ||
|
7c12b7aa36 | ||
|
019bd1aeae | ||
|
c29be48cac | ||
|
4916bbb46e | ||
|
f7ae807167 | ||
|
966cf6f526 | ||
|
ce14d394d4 | ||
|
46f115de68 | ||
|
e98aca1f00 | ||
|
cb6ba50904 | ||
|
2d02f4af07 | ||
|
1493ac0c58 | ||
|
7c57dca0ec | ||
|
04e2d09835 | ||
|
fbfcffbb0c | ||
|
6b6464dac3 | ||
|
34045c0136 | ||
|
1c4b15e357 | ||
|
3d139b0929 | ||
|
4c0634bc13 | ||
|
060880e754 | ||
|
95758b5dc8 | ||
|
3d7d2d0993 | ||
|
808136bff8 | ||
|
98b6b681fd | ||
|
0a1edcd24a | ||
|
c8caf2f11b | ||
|
4bef20da32 | ||
|
886d2e5df7 | ||
|
f6c2a7c08f | ||
|
0fd7d11631 | ||
|
c69d131084 | ||
|
dc5402abcc | ||
|
9d7d33c0d0 | ||
|
1885d3fc94 | ||
|
bb7b2de44b | ||
|
f134266efc | ||
|
66771c53a2 | ||
|
16cbe430af | ||
|
8aa5890e67 | ||
|
12587a985c | ||
|
9150e168f6 | ||
|
a12e07da6a | ||
|
fad755745a | ||
|
07ed0d1105 | ||
|
f524018160 | ||
|
cf6a301d70 | ||
|
09d4ed597b | ||
|
faf8575537 | ||
|
c4cfdddb91 | ||
|
f469627d33 | ||
|
8bccff05b6 | ||
|
08bbf38128 | ||
|
474e4ac5d1 | ||
|
10326882bd | ||
|
e980de05a8 | ||
|
0051ed316e | ||
|
c79938e08b | ||
|
3e1f52802f | ||
|
06a20a8358 | ||
|
31549a1ffb | ||
|
a3d654c65c | ||
|
dad8d58824 | ||
|
e5585aec44 | ||
|
a7ffceb631 | ||
|
538832bed5 | ||
|
94e474513c | ||
|
1b56f5cef9 | ||
|
f192c0912c | ||
|
269dedf398 | ||
|
63144ba070 | ||
|
625d4c91b4 | ||
|
011d20473e | ||
|
2884054fd4 | ||
|
01fb1d5da6 | ||
|
f187111411 | ||
|
281c7d1599 | ||
|
89afd46b56 | ||
|
94a86b43c1 | ||
|
7393023fcc | ||
|
f544c4065f | ||
|
8ae2fdc10a | ||
|
b9c1aaac20 | ||
|
e887e7162b | ||
|
b2139ce150 | ||
|
22efe274a1 | ||
|
828a580031 | ||
|
151e3a5eef | ||
|
939cdd4615 | ||
|
a867acb0f8 | ||
|
acf446dcc0 | ||
|
70e6e4350e | ||
|
86dab2ab66 | ||
|
4bae675181 | ||
|
250859d3a7 | ||
|
3cc939f320 | ||
|
aac16f38b3 | ||
|
5014a0fafa | ||
|
7cf8cb59f1 | ||
|
a648d8941a | ||
|
d1fdf5d25f | ||
|
5eb1c2aacd | ||
|
9e0ac64bb9 | ||
|
5816235062 | ||
|
0982c3bae2 | ||
|
17d84c1f29 | ||
|
d096854b14 | ||
|
ddf8637bb6 | ||
|
089294681e | ||
|
a0280fdbd3 | ||
|
e2ebe98fde | ||
|
e60816c022 | ||
|
fb53ba3a0a | ||
|
27202d2ab2 | ||
|
ba6c46afac | ||
|
941ac4ef3b | ||
|
493ac7f49a | ||
|
d8dc959879 |
217 changed files with 6832 additions and 7908 deletions
|
@ -1,4 +1,5 @@
|
||||||
exclude_paths:
|
exclude_paths:
|
||||||
- 'resources/lib/watchdog/**'
|
- 'resources/lib/watchdog/**'
|
||||||
- 'resources/lib/pathtools/**'
|
- 'resources/lib/pathtools/**'
|
||||||
- 'resources/lib/defusedxml/**'
|
- 'resources/lib/pathtools/**'
|
||||||
|
- 'resources/lib/defused_etree.py'
|
||||||
|
|
12
README.md
12
README.md
|
@ -1,8 +1,8 @@
|
||||||
[![Kodi Leia stable version](https://img.shields.io/badge/Kodi_Leia_STABLE-latest-blue.svg?maxAge=60&style=flat) ](https://croneter.github.io/pkc-source/repository.plexkodiconnect.Kodi-Leia.STABLE.zip)
|
[![Kodi Leia stable version](https://img.shields.io/badge/Kodi_Leia_STABLE-latest-blue.svg?maxAge=60&style=flat) ](https://croneter.github.io/pkc-source/repository.plexkodiconnect.Kodi-Leia.STABLE.zip)
|
||||||
[![Kodi Leia beta version](https://img.shields.io/badge/Kodi_Leia_BETA-latest-red.svg?maxAge=60&style=flat) ](https://croneter.github.io/pkc-source/repository.plexkodiconnect.Kodi-Leia.BETA.zip)
|
[![Kodi Leia beta version](https://img.shields.io/badge/Kodi_Leia_BETA-latest-red.svg?maxAge=60&style=flat) ](https://croneter.github.io/pkc-source/repository.plexkodiconnect.Kodi-Leia.BETA.zip)
|
||||||
|
[![Kodi Matrix stable version](https://img.shields.io/badge/Kodi_Matrix_STABLE-latest-blue.svg?maxAge=60&style=flat) ](https://croneter.github.io/pkc-source/repository.plexkodiconnect.Kodi-Matrix.STABLE.zip)
|
||||||
[![Kodi Matrix beta version](https://img.shields.io/badge/Kodi_Matrix_BETA-latest-red.svg?maxAge=60&style=flat) ](https://croneter.github.io/pkc-source/repository.plexkodiconnect.Kodi-Matrix.BETA.zip)
|
[![Kodi Matrix beta version](https://img.shields.io/badge/Kodi_Matrix_BETA-latest-red.svg?maxAge=60&style=flat) ](https://croneter.github.io/pkc-source/repository.plexkodiconnect.Kodi-Matrix.BETA.zip)
|
||||||
|
|
||||||
|
|
||||||
[![Installation](https://img.shields.io/badge/wiki-installation-brightgreen.svg?maxAge=60&style=flat)](https://github.com/croneter/PlexKodiConnect/wiki/Installation)
|
[![Installation](https://img.shields.io/badge/wiki-installation-brightgreen.svg?maxAge=60&style=flat)](https://github.com/croneter/PlexKodiConnect/wiki/Installation)
|
||||||
[![FAQ](https://img.shields.io/badge/wiki-FAQ-brightgreen.svg?maxAge=60&style=flat)](https://github.com/croneter/PlexKodiConnect/wiki/faq)
|
[![FAQ](https://img.shields.io/badge/wiki-FAQ-brightgreen.svg?maxAge=60&style=flat)](https://github.com/croneter/PlexKodiConnect/wiki/faq)
|
||||||
[![Forum](https://img.shields.io/badge/forum-plex-orange.svg?maxAge=60&style=flat)](https://forums.plex.tv/discussion/210023/plexkodiconnect-let-kodi-talk-to-your-plex)
|
[![Forum](https://img.shields.io/badge/forum-plex-orange.svg?maxAge=60&style=flat)](https://forums.plex.tv/discussion/210023/plexkodiconnect-let-kodi-talk-to-your-plex)
|
||||||
|
@ -39,11 +39,7 @@ Unfortunately, the PKC Kodi repository had to move because it stopped working (t
|
||||||
|
|
||||||
### Download and Installation
|
### Download and Installation
|
||||||
|
|
||||||
Install PKC via the PlexKodiConnect Kodi repository download button just below (do NOT use the standard GitHub download!). Alternatively, add [https://croneter.github.io/pkc-source](https://croneter.github.io/pkc-source) as a new Kodi `Web server directory (HTTPS)` source. See the [github wiki installation manual](https://github.com/croneter/PlexKodiConnect/wiki/Installation) for a detailed guide. Please use the stable version except if you really know what you're doing. Kodi will update PKC automatically.
|
Using the Kodi file manager, add [https://croneter.github.io/pkc-source](https://croneter.github.io/pkc-source) as a new Kodi `Web server directory (HTTPS)` source, then install the PlexKodiConnect repository from this new source "from ZIP file". See the [github wiki installation manual](https://github.com/croneter/PlexKodiConnect/wiki/Installation) for a detailed guide. Kodi will update PKC automatically.
|
||||||
|
|
||||||
| Stable version | Beta version |
|
|
||||||
|----------------|--------------|
|
|
||||||
| [![stable version](https://img.shields.io/badge/stable_version-latest-blue.svg?maxAge=60&style=flat) ](https://github.com/croneter/binary_repo/raw/master/stable/repository.plexkodiconnect/repository.plexkodiconnect-1.0.2.zip) | [![beta version](https://img.shields.io/badge/beta_version-latest-red.svg?maxAge=60&style=flat) ](https://github.com/croneter/binary_repo/raw/master/beta/repository.plexkodiconnectbeta/repository.plexkodiconnectbeta-1.0.2.zip) |
|
|
||||||
|
|
||||||
### Warning
|
### Warning
|
||||||
Use at your own risk! This plugin assumes that you manage all your videos with Plex (and none with Kodi). You might lose data already stored in the Kodi video and music databases as this plugin directly changes them. Don't worry if you want Plex to manage all your media (like you should ;-)).
|
Use at your own risk! This plugin assumes that you manage all your videos with Plex (and none with Kodi). You might lose data already stored in the Kodi video and music databases as this plugin directly changes them. Don't worry if you want Plex to manage all your media (like you should ;-)).
|
||||||
|
@ -53,10 +49,10 @@ Some people argue that PKC is 'hacky' because of the way it directly accesses th
|
||||||
### PKC Features
|
### PKC Features
|
||||||
|
|
||||||
- Support for Kodi 18 Leia and Kodi 19 Matrix
|
- Support for Kodi 18 Leia and Kodi 19 Matrix
|
||||||
- Preliminary support for Kodi 19 Nexus. Keep in mind that development for Kodi Nexus has not even officially reached alpha stage - any issues you encounter are probably caused by that
|
- Preliminary support for Kodi 20 Nexus. Keep in mind that development for Kodi Nexus has not even officially reached alpha stage - any issues you encounter are probably caused by that
|
||||||
|
- [Skip intros](https://support.plex.tv/articles/skip-content/)
|
||||||
- [Amazon Alexa voice recognition](https://www.plex.tv/apps/streaming-devices/amazon-alexa)
|
- [Amazon Alexa voice recognition](https://www.plex.tv/apps/streaming-devices/amazon-alexa)
|
||||||
- [Cinema Trailers & Extras](https://support.plex.tv/articles/202934883-cinema-trailers-extras/)
|
- [Cinema Trailers & Extras](https://support.plex.tv/articles/202934883-cinema-trailers-extras/)
|
||||||
- If Plex did not provide a trailer, automatically get one using the Kodi add-on [The Movie Database](https://kodi.wiki/view/Add-on:The_Movie_Database)
|
|
||||||
- [Plex Watch Later / Plex It!](https://support.plex.tv/hc/en-us/sections/200211783-Plex-It-)
|
- [Plex Watch Later / Plex It!](https://support.plex.tv/hc/en-us/sections/200211783-Plex-It-)
|
||||||
- [Plex Companion](https://support.plex.tv/hc/en-us/sections/200276908-Plex-Companion): fling Plex media (or anything else) from other Plex devices to PlexKodiConnect
|
- [Plex Companion](https://support.plex.tv/hc/en-us/sections/200276908-Plex-Companion): fling Plex media (or anything else) from other Plex devices to PlexKodiConnect
|
||||||
- Automatically sync Plex playlists to Kodi playlists and vice-versa
|
- Automatically sync Plex playlists to Kodi playlists and vice-versa
|
||||||
|
|
310
addon.xml
310
addon.xml
|
@ -1,11 +1,12 @@
|
||||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
<addon id="plugin.video.plexkodiconnect" name="PlexKodiConnect" version="3.5.17" provider-name="croneter">
|
<addon id="plugin.video.plexkodiconnect" name="PlexKodiConnect" version="2.15.0" provider-name="croneter">
|
||||||
<requires>
|
<requires>
|
||||||
<import addon="xbmc.python" version="3.0.0"/>
|
<import addon="xbmc.python" version="2.1.0"/>
|
||||||
<import addon="script.module.requests" version="2.22.0+matrix.1" />
|
<import addon="script.module.requests" version="2.9.1" />
|
||||||
<import addon="plugin.video.plexkodiconnect.movies" version="3.0.2" />
|
<import addon="script.module.defusedxml" version="0.5.0"/>
|
||||||
<import addon="plugin.video.plexkodiconnect.tvshows" version="3.0.2" />
|
<import addon="script.module.six" />
|
||||||
<import addon="metadata.themoviedb.org.python" version="1.3.1+matrix.1" />
|
<import addon="plugin.video.plexkodiconnect.movies" version="2.1.3" />
|
||||||
|
<import addon="plugin.video.plexkodiconnect.tvshows" version="2.1.3" />
|
||||||
</requires>
|
</requires>
|
||||||
<extension point="xbmc.python.pluginsource" library="default.py">
|
<extension point="xbmc.python.pluginsource" library="default.py">
|
||||||
<provides>video audio image</provides>
|
<provides>video audio image</provides>
|
||||||
|
@ -20,12 +21,6 @@
|
||||||
</item>
|
</item>
|
||||||
</extension>
|
</extension>
|
||||||
<extension point="xbmc.addon.metadata">
|
<extension point="xbmc.addon.metadata">
|
||||||
<!-- see e.g. https://github.com/xbmc/xbmc/pull/14136 -->
|
|
||||||
<reuselanguageinvoker>true</reuselanguageinvoker>
|
|
||||||
<assets>
|
|
||||||
<icon>icon.png</icon>
|
|
||||||
<fanart>fanart.jpg</fanart>
|
|
||||||
</assets>
|
|
||||||
<summary lang="en">Native Integration of Plex into Kodi</summary>
|
<summary lang="en">Native Integration of Plex into Kodi</summary>
|
||||||
<description lang="en">Connect Kodi to your Plex Media Server. This plugin assumes that you manage all your videos with Plex (and none with Kodi). You might lose data already stored in the Kodi video and music databases (as this plugin directly changes them). Use at your own risk!</description>
|
<description lang="en">Connect Kodi to your Plex Media Server. This plugin assumes that you manage all your videos with Plex (and none with Kodi). You might lose data already stored in the Kodi video and music databases (as this plugin directly changes them). Use at your own risk!</description>
|
||||||
<disclaimer lang="en">Use at your own risk</disclaimer>
|
<disclaimer lang="en">Use at your own risk</disclaimer>
|
||||||
|
@ -93,252 +88,193 @@
|
||||||
<summary lang="ko_KR">Plex를 Kodi에 기본 통합</summary>
|
<summary lang="ko_KR">Plex를 Kodi에 기본 통합</summary>
|
||||||
<description lang="ko_KR">Kodi를 Plex Media Server에 연결합니다. 이 플러그인은 Plex로 모든 비디오를 관리하고 Kodi로는 관리하지 않는다고 가정합니다. Kodi 비디오 및 음악 데이터베이스에 이미 저장된 데이터가 손실 될 수 있습니다 (이 플러그인이 직접 변경하므로). 자신의 책임하에 사용하십시오!</description>
|
<description lang="ko_KR">Kodi를 Plex Media Server에 연결합니다. 이 플러그인은 Plex로 모든 비디오를 관리하고 Kodi로는 관리하지 않는다고 가정합니다. Kodi 비디오 및 음악 데이터베이스에 이미 저장된 데이터가 손실 될 수 있습니다 (이 플러그인이 직접 변경하므로). 자신의 책임하에 사용하십시오!</description>
|
||||||
<disclaimer lang="ko_KR">자신의 책임하에 사용</disclaimer>
|
<disclaimer lang="ko_KR">자신의 책임하에 사용</disclaimer>
|
||||||
<news>version 3.5.17 (beta only):
|
<news>version 2.15.0:
|
||||||
- Use addon.xml `reuselanguageinvoker` to turn add-on snappier
|
- versions 2.14.3-2.14.4 for everyone
|
||||||
- Fix detection of playqueue order. Thus fix PKC reporting back the playing of an old episode when using UpNext
|
- Direct Paths: Fix TypeError: "element indices must be integers" on playback startup [backport]
|
||||||
- Fix logging for playlist items not working correctly
|
- Refactor stream code and fix Kodi not activating subtitle when it should [backport]
|
||||||
|
- Add playback settings to let the user choose whether Plex or Kodi provides the default audio or subtitle stream on playback start [backport]
|
||||||
|
- Update translations from Transifex [backport]
|
||||||
|
|
||||||
version 3.5.16 (beta only):
|
version 2.14.4 (beta only):
|
||||||
- Fix playback report for widget not working if direct paths are used
|
|
||||||
|
|
||||||
version 3.5.15 (beta only):
|
|
||||||
- Re-add old Plex Companion mechanism as the new one sucks
|
|
||||||
- Fix KeyError on playback startup
|
|
||||||
|
|
||||||
version 3.5.14 (beta only):
|
|
||||||
- Fix PKC not being able to connect to plex.tv after installation
|
|
||||||
|
|
||||||
version 3.5.13 (beta only):
|
|
||||||
- Fix Kodi getting blocked and losing PMS access e.g. due to cloudflare
|
|
||||||
|
|
||||||
version 3.5.12 (beta only):
|
|
||||||
- Fix skip intro not working
|
|
||||||
- Fix playback report not working due to an IndexError
|
|
||||||
- Fix rare IndexError when trying to delete a playlist item
|
|
||||||
|
|
||||||
version 3.5.11 (beta only):
|
|
||||||
- Fix playback startup and AttributeError: 'bool' object has no attribute 'get'
|
|
||||||
|
|
||||||
version 3.5.10 (beta only):
|
|
||||||
- Tell the PMS and Plex Companion about any stream changes on the Kodi side
|
|
||||||
|
|
||||||
version 3.5.9 (beta only):
|
|
||||||
- Huge overhaul: completely new Plex Companion implementation. PKC is now available as a casting target for Plexamp. Includes refactoring of Skip Intro as well as Playqueues
|
|
||||||
- Add auto skip intro functionality
|
|
||||||
- Fix streams for videos not being set-up
|
|
||||||
- Fix generating new unique device ID for PKC not working
|
|
||||||
- Make PKC compatible with Python 3.6 again
|
|
||||||
|
|
||||||
version 3.5.8:
|
|
||||||
- Fix UnboundLocalError: local variable 'identifier' referenced before assignment
|
|
||||||
- versions 3.5.6-3.5.7 for everyone
|
|
||||||
|
|
||||||
version 3.5.7 (beta only):
|
|
||||||
- Fix Kodi JSON racing condition on playback startup and KeyError
|
|
||||||
|
|
||||||
version 3.5.6 (beta only):
|
|
||||||
- Fix Plex Companion not working by fixing some issues with PKC's http.server's BaseHTTPRequestHandler
|
|
||||||
|
|
||||||
version 3.5.5:
|
|
||||||
- Lost patience with Kodi 19: drop use of Python multiprocessing entirely
|
|
||||||
|
|
||||||
version 3.5.4:
|
|
||||||
- Fix Receiving init() missing 1 required positional argument: ‘certification_country’
|
|
||||||
- Update translations from Transifex
|
|
||||||
|
|
||||||
version 3.5.3:
|
|
||||||
- Add playback settings to let the user choose whether Plex or Kodi provides the default audio or subtitle stream on playback start
|
|
||||||
|
|
||||||
version 3.5.2:
|
|
||||||
- version 3.5.1 for everyone
|
|
||||||
|
|
||||||
version 3.5.1 (beta only):
|
|
||||||
- Refactor stream code and fix Kodi not activating subtitle when it should
|
|
||||||
- Direct Paths: Fix TypeError: "element indices must be integers" on playback startup
|
|
||||||
- Android: Fix broken Python multiprocessing module (a Kodi 19.2 bug)
|
|
||||||
- Fix logging if fanart.tv lookup fails: be less verbose
|
|
||||||
|
|
||||||
version 3.5.0:
|
|
||||||
- versions 3.4.5-3.4.7 for everyone
|
|
||||||
|
|
||||||
version 3.4.7 (beta only):
|
|
||||||
- Tell the PMS if a video's audio stream or potentially subtitle stream has changed. For subtitles, this functionality is broken due to a Kodi bug
|
- Tell the PMS if a video's audio stream or potentially subtitle stream has changed. For subtitles, this functionality is broken due to a Kodi bug
|
||||||
- Transcoding: Fix Plex burning-in subtitles when it should not
|
- Transcoding: Fix Plex burning-in subtitles when it should not
|
||||||
|
- Fix logging if fanart.tv lookup fails: be less verbose
|
||||||
- Large refactoring of playlist and playqueue code
|
- Large refactoring of playlist and playqueue code
|
||||||
- Refactor usage of a media part's id
|
- Refactor usage of a media part's id
|
||||||
|
|
||||||
version 3.4.6 (beta only):
|
version 2.14.3 (beta only):
|
||||||
- Fix RecursionError if a video lies in a root directory
|
|
||||||
|
|
||||||
version 3.4.5 (beta only):
|
|
||||||
- Implement "Reset resume position" from the Kodi context menu
|
- Implement "Reset resume position" from the Kodi context menu
|
||||||
|
|
||||||
version 3.4.4:
|
version 2.14.2:
|
||||||
- Initial compatibility with Kodi 20 Nexus. Keep in mind that development for Kodi Nexus has not even officially reached alpha stage - any issues you encounter are probably caused by that
|
- version 2.14.1 for everyone
|
||||||
- version 3.4.3 for everyone
|
|
||||||
|
|
||||||
version 3.4.3 (beta ony):
|
version 2.14.1 (beta only):
|
||||||
- Use Plex settings for audio and subtitle stream selection. This is a best guess regarding subtitles as Plex and Kodi are not sharing much info
|
- Use Plex settings for audio and subtitle stream selection. This is a best guess regarding subtitles as Plex and Kodi are not sharing much info
|
||||||
- Fix PlexKodiConnect setting the Plex subtitle to None
|
- Fix PlexKodiConnect setting the Plex subtitle to None
|
||||||
- Download landscape artwork from fanart.tv, thanks @geropan
|
- Download landscape artwork from fanart.tv, thanks @geropan
|
||||||
- Revert "Fix PlexKodiConnect changing subtitles for all videos on the PMS"
|
- Revert "Fix PlexKodiConnect changing subtitles for all videos on the PMS"
|
||||||
|
|
||||||
version 3.4.2:
|
version 2.14.0:
|
||||||
- Fix PlexKodiConnect changing or removing subtitles for every video on the PMS
|
- Fix PlexKodiConnect changing or removing subtitles for every video on the PMS
|
||||||
|
- version 2.13.1-2.13.2 for everyone
|
||||||
|
|
||||||
version 3.4.1:
|
version 2.13.2 (beta only):
|
||||||
- Fix PMS setting `List of IP addresses and networks that are allowed without auth` causing Kodi to take forever to start playback
|
|
||||||
|
|
||||||
version 3.4.0:
|
|
||||||
- Improve logging for converting Unix timestamps
|
|
||||||
- Remove dependency on script.module.defusedxml - that module is now included in PKC
|
|
||||||
- version 3.3.3-3.3.5 for everyone
|
|
||||||
|
|
||||||
version 3.3.5 (beta only):
|
|
||||||
- Rewire defusedxml and xml.etree.ElementTree: Fix AttributeError: module 'resources.lib.utils' has no attribute 'ParseError'
|
|
||||||
- Fix errors when PKC tries to edit files that don't exist yet
|
|
||||||
|
|
||||||
version 3.3.4 (beta only):
|
|
||||||
- Fix a racing condition that could lead to the sync getting stuck
|
- Fix a racing condition that could lead to the sync getting stuck
|
||||||
- Fix RecursionError: maximum recursion depth exceeded
|
- Fix RecursionError: maximum recursion depth exceeded
|
||||||
- Bump websocket client: fix AttributeError: 'NoneType' object has no attribute 'is_ssl'
|
- Websocket Fix AttributeError: 'NoneType' object has no attribute 'is_ssl'
|
||||||
|
|
||||||
version 3.3.3 (beta only):
|
version 2.13.1 (beta only):
|
||||||
- Fix a racing condition that could lead to the sync process getting stuck
|
- Fix a racing condition that could lead to the sync process getting stuck
|
||||||
- Fix likelyhood of `database is locked` error occuring
|
- Fix likelyhood of `database is locked` error occuring
|
||||||
- Fix AttributeError: module 'urllib' has no attribute 'parse'
|
|
||||||
|
version 2.13.0:
|
||||||
- Support for the Plex HAMA agent to let Kodi identify animes (using Kodi's uniqueID 'anidb')
|
- Support for the Plex HAMA agent to let Kodi identify animes (using Kodi's uniqueID 'anidb')
|
||||||
- Support forced HAMA IDs when using tvdb uniqueID
|
- Support forced HAMA IDs when using tvdb uniqueID
|
||||||
|
- version 2.12.26 for everyone
|
||||||
|
|
||||||
version 3.3.2:
|
version 2.12.26 (beta only):
|
||||||
- version 3.3.1 for everyone
|
|
||||||
|
|
||||||
version 3.3.1 (beta only):
|
|
||||||
- Add an additional Plex Hub "PKC Continue Watching" that merges the Plex Continue Watching with On Deck
|
- Add an additional Plex Hub "PKC Continue Watching" that merges the Plex Continue Watching with On Deck
|
||||||
- Fix auto-picking of video stream if several video versions are available
|
- Fix auto-picking of video stream if several video versions are available
|
||||||
- Make PKC compatible with Kodi 20 N* by using xbmcvfs for translatePath
|
|
||||||
- Update translations
|
- Update translations
|
||||||
|
|
||||||
version 3.3.0:
|
version 2.12.25:
|
||||||
WARNING: Database reset and full resync required
|
- Update websocket client to 0.59.0. Fix threading issues and AttributeErrors
|
||||||
- versions 3.2.1-3.2.4 for everyone
|
|
||||||
|
|
||||||
version 3.2.4 (beta only):
|
version 2.12.24:
|
||||||
- Fix websockets and AttributeError: 'NoneType' object has no attribute
|
- version 2.12.23 for everyone
|
||||||
|
|
||||||
version 3.2.3 (beta only):
|
version 2.12.23 (beta only):
|
||||||
- Attempt to fix websocket threading issues and AttributeError: 'NoneType' object has no attribute 'is_ssl' or 'settimeout'
|
|
||||||
- Get rid of Python arrow; hopefully fix many Python import errors (also occuring in other add-ons!)
|
|
||||||
|
|
||||||
version 3.2.2 (beta only):
|
|
||||||
- Fix videos not starting due to a TypeError
|
|
||||||
- Show warning message to remind user to use Estuary for database resets
|
|
||||||
- Update websocket client to 1.0.0
|
|
||||||
|
|
||||||
version 3.2.1 (beta only):
|
|
||||||
WARNING: Database reset and full resync required
|
|
||||||
- Fix PKC widgets not working at all in some cases
|
|
||||||
- Direct Paths: fix several issues with episodes
|
|
||||||
- New Python-dependency: arrow
|
|
||||||
|
|
||||||
version 3.2.0:
|
|
||||||
WARNING: Database reset and full resync required
|
|
||||||
- version 3.1.1-3.1.4 for everyone
|
|
||||||
|
|
||||||
version 3.1.4 (beta only):
|
|
||||||
- Fix Alexa and RuntimeError: dictionary keys changed during iteration
|
- Fix Alexa and RuntimeError: dictionary keys changed during iteration
|
||||||
- Fix AttributeError: module 'shutil' has no attribute 'copy_tree'
|
|
||||||
|
|
||||||
version 3.1.3 (beta only):
|
|
||||||
- Add PKC setting to disable verification whether we can access a media file
|
|
||||||
- Direct paths: corrections to more closely mirror Kodi's way of saving movie and tv show files to the db
|
|
||||||
- Make sure that the correct file system encoding is used for playlists
|
|
||||||
- Fix a rare AttributeError when using playlists
|
- Fix a rare AttributeError when using playlists
|
||||||
- Fix regression: fix add-on paths always falling back to direct paths
|
|
||||||
|
|
||||||
version 3.1.2 (beta only):
|
version 2.12.22:
|
||||||
- Fix ImportError: cannot import name 'dir_util' from 'distutils' on PKC startup
|
- version 2.12.20 and 2.12.21 for everyone
|
||||||
- Fix UnicodeEncodeError if Plex playlist name contains illegal chars
|
|
||||||
- Fix PKC not showing up as a casting target in some cases
|
|
||||||
|
|
||||||
version 3.1.1 (beta only):
|
version 2.12.21 (beta only):
|
||||||
- Direct paths: fix filename showing instead of full video metadata during playback
|
- Switch to new websocket implementation
|
||||||
|
- Hopefully fix RuntimeError: no add-on id "plugin.video.plexkodiconnect"
|
||||||
- Update translations
|
- Update translations
|
||||||
|
|
||||||
version 3.1.0:
|
version 2.12.20 (beta only):
|
||||||
- version 3.0.16 and 3.0.17 for everyone
|
|
||||||
- Fix resume not working if Kodi player start-up is slow
|
|
||||||
|
|
||||||
version 3.0.17 (beta only):
|
|
||||||
- Fix instantaneous background sync and Alexa not working
|
|
||||||
- Hopefully fix RuntimeError: no add-on id "plugin.video.plexkodiconnect"
|
|
||||||
- Fix error socket.timeout: timed out
|
|
||||||
|
|
||||||
version 3.0.16 (beta only):
|
|
||||||
- Add information to PKC settings for background sync and Alexa whether a connection has been successfully made
|
- Add information to PKC settings for background sync and Alexa whether a connection has been successfully made
|
||||||
|
|
||||||
version 3.0.15:
|
version 2.12.19:
|
||||||
- 3.0.14 for everyone
|
- 2.12.17 and 2.12.18 for everyone
|
||||||
- Rename skip intro skin file
|
- Rename skip intro skin file
|
||||||
|
|
||||||
version 3.0.14 (beta only):
|
version 2.12.18 (beta only):
|
||||||
- Quickly sync recently watched items before synching the playstates of the entire Plex library
|
- Quickly sync recently watched items before synching the playstates of the entire Plex library
|
||||||
- Fix TypeError: function missing required argument 'message'
|
|
||||||
- Fix PlexKodiConnect Kodi add-on icon and fanart not showing
|
|
||||||
- Improve logging for websocket JSON loads
|
- Improve logging for websocket JSON loads
|
||||||
|
|
||||||
version 3.0.13:
|
version 2.12.17 (beta only):
|
||||||
- Fix UnboundLocalError: local variable 'user' referenced before assignment
|
|
||||||
|
|
||||||
version 3.0.12:
|
|
||||||
- Sync name and user rating of a TV show season to Kodi
|
- Sync name and user rating of a TV show season to Kodi
|
||||||
- Fix rare TypeError: expected string or buffer on playback start
|
- Fix rare TypeError: expected string or buffer on playback start
|
||||||
|
|
||||||
version 3.0.11:
|
version 2.12.16:
|
||||||
- Fix TypeError: function missing required argument 'message'
|
- versions 2.12.14 and 2.12.15 for everyone
|
||||||
|
|
||||||
version 3.0.10:
|
version 2.12.15 (beta only):
|
||||||
- Fix skip intros sometimes not working due to a RuntimeError
|
- Fix skip intros sometimes not working due to a RuntimeError
|
||||||
- Update translations
|
- Update translations
|
||||||
|
|
||||||
version 3.0.9:
|
version 2.12.14:
|
||||||
- Add skip intro functionality
|
- Add skip intro functionality
|
||||||
- Fix Kodi add-on NextUp not working
|
|
||||||
|
|
||||||
version 3.0.8:
|
version 2.12.13:
|
||||||
- Fix KeyError: u'game' if Plex Arcade has been activated
|
- Fix KeyError: u'game' if Plex Arcade has been activated
|
||||||
- Fix AttributeError: 'App' object has no attribute 'threads' when sync is cancelled
|
- Fix AttributeError: 'App' object has no attribute 'threads' when sync is cancelled
|
||||||
|
|
||||||
version 3.0.7:
|
version 2.12.12:
|
||||||
- Hopefully fix rare case when sync would get stuck indefinitely
|
- Hopefully fix rare case when sync would get stuck indefinitely
|
||||||
- Fix ValueError: invalid literal for int() for invalid dates sent by Plex
|
- Fix ValueError: invalid literal for int() for invalid dates sent by Plex
|
||||||
|
- version 2.12.11 for everyone
|
||||||
|
|
||||||
version 3.0.6:
|
version 2.12.11 (beta only):
|
||||||
- Fix PKC not auto-picking audio/subtitle stream when transcoding
|
- Fix PKC not auto-picking audio/subtitle stream when transcoding
|
||||||
- Fix ValueError when deleting a music album
|
- Fix ValueError when deleting a music album
|
||||||
- Fix OSError: Invalid argument when Plex returns an invalid timestamp
|
- Fix OSError: Invalid argument when Plex returns an invalid timestamp
|
||||||
|
|
||||||
version 3.0.5:
|
version 2.12.10:
|
||||||
- Fix pictures from Plex picture libraries not working/displaying
|
- Fix pictures from Plex picture libraries not working/displaying
|
||||||
- Fix sqlite3.OperationalError on PKC upgrade
|
|
||||||
|
|
||||||
version 3.0.4:
|
version 2.12.9:
|
||||||
- Automatically look for missing movie trailers using TMDB
|
- Fix Local variable 'user' referenced before assignement
|
||||||
|
|
||||||
version 3.0.3:
|
version 2.12.8:
|
||||||
|
- version 2.12.7 for everyone
|
||||||
|
|
||||||
|
version 2.12.7 (beta only):
|
||||||
- Fix PKC suddenly using main Plex user's credentials, e.g. when the PMS address changed
|
- Fix PKC suddenly using main Plex user's credentials, e.g. when the PMS address changed
|
||||||
- Fix missing Kodi tags for movie collections/sets
|
- Fix missing Kodi tags for movie collections/sets
|
||||||
- Change `thread.isAlive` to `thread.is_alive`
|
|
||||||
|
|
||||||
version 3.0.2:
|
version 2.12.6:
|
||||||
- Fix AttributeError: module has no attribute try_decode
|
|
||||||
|
|
||||||
version 3.0.1:
|
|
||||||
- Fix rare KeyError when using PKC widgets
|
- Fix rare KeyError when using PKC widgets
|
||||||
|
- Fix suspension of artwork caching and PKC becoming unresponsive
|
||||||
|
- Update translations
|
||||||
|
- Versions 2.12.4 and 2.12.5 for everyone
|
||||||
|
|
||||||
|
version 2.12.5 (beta only):
|
||||||
|
- Greatly improve matching logic for The Movie Database if Plex does not provide an appropriate id
|
||||||
|
- Fix high transcoding resolutions not being available for Win10
|
||||||
|
- Fix rare playback progress report failing and KeyError: u'containerKey'
|
||||||
|
- Fix rare KeyError: None when trying to sync playlists
|
||||||
|
- Fix TypeError when canceling Plex sync section dialog
|
||||||
|
|
||||||
|
version 2.12.4 (beta only):
|
||||||
|
- Hopefully fix freeze during sync: Don't assign multiple sets/collections for a specific movie
|
||||||
|
- Support metadata provider ids (e.g. for IMDB) for the new Plex Movie Agent
|
||||||
|
|
||||||
|
version 2.12.3:
|
||||||
|
- Fix playback failing due to caching of subtitles with non-ascii chars
|
||||||
|
- Fix ValueError: invalid literal for int() with base 10 during show sync
|
||||||
|
- Fix UnboundLocalError when certain Plex sections are deleted or being un-synched
|
||||||
|
- New method to install PlexKodiConnect directly via an URL. You thus do not need to upload a ZIP file to Kodi anymore.
|
||||||
|
|
||||||
|
version 2.12.2:
|
||||||
|
- version 2.12.0 and 2.12.1 for everyone
|
||||||
|
- Fix regression: sync dialog not showing up when it should
|
||||||
|
|
||||||
|
version 2.12.1 (beta only):
|
||||||
|
- Fix PKC shutdown on Kodi profile switch
|
||||||
|
- Fix Kodi content type for images/photos
|
||||||
|
- Added support for custom set of safe characters when escaping paths (thanks @geropan)
|
||||||
|
- Revert "Don't allow spaces in devicename"
|
||||||
|
- Fix sync dialog showing in certain cases even though user opted out
|
||||||
|
|
||||||
|
version 2.12.0 (beta only):
|
||||||
|
- Fix websocket threads; enable PKC background sync for all Plex Home users!
|
||||||
|
- Fix PKC incorrectly marking a video as unwatched if an external player has been used
|
||||||
- Update translations
|
- Update translations
|
||||||
|
|
||||||
version 3.0.0:
|
version 2.11.7:
|
||||||
- Major upgrade from Python 2 to Python 3, allowing use of Kodi 19 Matrix
|
- Fix PKC crashing on devices running Microsoft UWP, e.g. XBox
|
||||||
|
|
||||||
|
version 2.11.6:
|
||||||
|
- Fix rare sync crash when queue was full
|
||||||
|
- Set "Auto-adjust transcoding quality" to false by default
|
||||||
|
|
||||||
|
version 2.11.5:
|
||||||
|
- Versions 2.11.0-2.11.4 for everyone
|
||||||
|
|
||||||
|
version 2.11.4 (beta only):
|
||||||
|
- Fix another TypeError: 'NoneType' object has no attribute '__getitem__', e.g. when trying to play trailers
|
||||||
|
|
||||||
|
version 2.11.3 (beta only):
|
||||||
|
- Fix TypeError: 'NoneType' object has no attribute '__getitem__', e.g. when displaying albums
|
||||||
|
|
||||||
|
version 2.11.2 (beta only):
|
||||||
|
- Refactor direct and add-on paths. Enables use of Plex music playlists synched to Kodi
|
||||||
|
|
||||||
|
version 2.11.1 (beta only):
|
||||||
|
- Rewire the set-up of audio and subtitle streams, esp. before starting a transcoding session. Fixes playback not starting at all
|
||||||
|
|
||||||
|
version 2.11.0 (beta only):
|
||||||
|
- Fix PKC not burning in (and thus not showing) subtitles when transcoding
|
||||||
|
- When transcoding, only let user choose to burn-in subtitles that can't be displayed otherwise by Kodi
|
||||||
|
- Improve PKC automatically connecting to local PMS
|
||||||
|
- Ensure that our only video transcoding target is h264
|
||||||
|
- Fix adjusted subtitle size not working when burning in subtitles
|
||||||
|
- Fix regression: burn-in subtitles picking up the last user setting instead of the current one
|
||||||
</news>
|
</news>
|
||||||
</extension>
|
</extension>
|
||||||
</addon>
|
</addon>
|
||||||
|
|
241
changelog.txt
241
changelog.txt
|
@ -1,250 +1,77 @@
|
||||||
version 3.5.17 (beta only):
|
version 2.15.0:
|
||||||
- Use addon.xml `reuselanguageinvoker` to turn add-on snappier
|
- versions 2.14.3-2.14.4 for everyone
|
||||||
- Fix detection of playqueue order. Thus fix PKC reporting back the playing of an old episode when using UpNext
|
- Direct Paths: Fix TypeError: "element indices must be integers" on playback startup [backport]
|
||||||
- Fix logging for playlist items not working correctly
|
- Refactor stream code and fix Kodi not activating subtitle when it should [backport]
|
||||||
|
- Add playback settings to let the user choose whether Plex or Kodi provides the default audio or subtitle stream on playback start [backport]
|
||||||
|
- Update translations from Transifex [backport]
|
||||||
|
|
||||||
version 3.5.16 (beta only):
|
version 2.14.4 (beta only):
|
||||||
- Fix playback report for widget not working if direct paths are used
|
|
||||||
|
|
||||||
version 3.5.15 (beta only):
|
|
||||||
- Re-add old Plex Companion mechanism as the new one sucks
|
|
||||||
- Fix KeyError on playback startup
|
|
||||||
|
|
||||||
version 3.5.14 (beta only):
|
|
||||||
- Fix PKC not being able to connect to plex.tv after installation
|
|
||||||
|
|
||||||
version 3.5.13 (beta only):
|
|
||||||
- Fix Kodi getting blocked and losing PMS access e.g. due to cloudflare
|
|
||||||
|
|
||||||
version 3.5.12 (beta only):
|
|
||||||
- Fix skip intro not working
|
|
||||||
- Fix playback report not working due to an IndexError
|
|
||||||
- Fix rare IndexError when trying to delete a playlist item
|
|
||||||
|
|
||||||
version 3.5.11 (beta only):
|
|
||||||
- Fix playback startup and AttributeError: 'bool' object has no attribute 'get'
|
|
||||||
|
|
||||||
version 3.5.10 (beta only):
|
|
||||||
- Tell the PMS and Plex Companion about any stream changes on the Kodi side
|
|
||||||
|
|
||||||
version 3.5.9 (beta only):
|
|
||||||
- Huge overhaul: completely new Plex Companion implementation. PKC is now available as a casting target for Plexamp. Includes refactoring of Skip Intro as well as Playqueues
|
|
||||||
- Add auto skip intro functionality
|
|
||||||
- Fix streams for videos not being set-up
|
|
||||||
- Fix generating new unique device ID for PKC not working
|
|
||||||
- Make PKC compatible with Python 3.6 again
|
|
||||||
|
|
||||||
version 3.5.8:
|
|
||||||
- Fix UnboundLocalError: local variable 'identifier' referenced before assignment
|
|
||||||
- versions 3.5.6-3.5.7 for everyone
|
|
||||||
|
|
||||||
version 3.5.7 (beta only):
|
|
||||||
- Fix Kodi JSON racing condition on playback startup and KeyError
|
|
||||||
|
|
||||||
version 3.5.6 (beta only):
|
|
||||||
- Fix Plex Companion not working by fixing some issues with PKC's http.server's BaseHTTPRequestHandler
|
|
||||||
|
|
||||||
version 3.5.5:
|
|
||||||
- Lost patience with Kodi 19: drop use of Python multiprocessing entirely
|
|
||||||
|
|
||||||
version 3.5.4:
|
|
||||||
- Fix Receiving init() missing 1 required positional argument: ‘certification_country’
|
|
||||||
- Update translations from Transifex
|
|
||||||
|
|
||||||
version 3.5.3:
|
|
||||||
- Add playback settings to let the user choose whether Plex or Kodi provides the default audio or subtitle stream on playback start
|
|
||||||
|
|
||||||
version 3.5.2:
|
|
||||||
- version 3.5.1 for everyone
|
|
||||||
|
|
||||||
version 3.5.1 (beta only):
|
|
||||||
- Refactor stream code and fix Kodi not activating subtitle when it should
|
|
||||||
- Direct Paths: Fix TypeError: "element indices must be integers" on playback startup
|
|
||||||
- Android: Fix broken Python multiprocessing module (a Kodi 19.2 bug)
|
|
||||||
- Fix logging if fanart.tv lookup fails: be less verbose
|
|
||||||
|
|
||||||
version 3.5.0:
|
|
||||||
- versions 3.4.5-3.4.7 for everyone
|
|
||||||
|
|
||||||
version 3.4.7 (beta only):
|
|
||||||
- Tell the PMS if a video's audio stream or potentially subtitle stream has changed. For subtitles, this functionality is broken due to a Kodi bug
|
- Tell the PMS if a video's audio stream or potentially subtitle stream has changed. For subtitles, this functionality is broken due to a Kodi bug
|
||||||
- Transcoding: Fix Plex burning-in subtitles when it should not
|
- Transcoding: Fix Plex burning-in subtitles when it should not
|
||||||
|
- Fix logging if fanart.tv lookup fails: be less verbose
|
||||||
- Large refactoring of playlist and playqueue code
|
- Large refactoring of playlist and playqueue code
|
||||||
- Refactor usage of a media part's id
|
- Refactor usage of a media part's id
|
||||||
|
|
||||||
version 3.4.6 (beta only):
|
version 2.14.3 (beta only):
|
||||||
- Fix RecursionError if a video lies in a root directory
|
|
||||||
|
|
||||||
version 3.4.5 (beta only):
|
|
||||||
- Implement "Reset resume position" from the Kodi context menu
|
- Implement "Reset resume position" from the Kodi context menu
|
||||||
|
|
||||||
version 3.4.4:
|
version 2.14.2:
|
||||||
- Initial compatibility with Kodi 20 Nexus. Keep in mind that development for Kodi Nexus has not even officially reached alpha stage - any issues you encounter are probably caused by that
|
- version 2.14.1 for everyone
|
||||||
- version 3.4.3 for everyone
|
|
||||||
|
|
||||||
version 3.4.3 (beta ony):
|
version 2.14.1 (beta only):
|
||||||
- Use Plex settings for audio and subtitle stream selection. This is a best guess regarding subtitles as Plex and Kodi are not sharing much info
|
- Use Plex settings for audio and subtitle stream selection. This is a best guess regarding subtitles as Plex and Kodi are not sharing much info
|
||||||
- Fix PlexKodiConnect setting the Plex subtitle to None
|
- Fix PlexKodiConnect setting the Plex subtitle to None
|
||||||
- Download landscape artwork from fanart.tv, thanks @geropan
|
- Download landscape artwork from fanart.tv, thanks @geropan
|
||||||
- Revert "Fix PlexKodiConnect changing subtitles for all videos on the PMS"
|
- Revert "Fix PlexKodiConnect changing subtitles for all videos on the PMS"
|
||||||
|
|
||||||
version 3.4.2:
|
version 2.14.0:
|
||||||
- Fix PlexKodiConnect changing or removing subtitles for every video on the PMS
|
- Fix PlexKodiConnect changing or removing subtitles for every video on the PMS
|
||||||
|
- version 2.13.1-2.13.2 for everyone
|
||||||
|
|
||||||
version 3.4.1:
|
version 2.13.2 (beta only):
|
||||||
- Fix PMS setting `List of IP addresses and networks that are allowed without auth` causing Kodi to take forever to start playback
|
|
||||||
|
|
||||||
version 3.4.0:
|
|
||||||
- Improve logging for converting Unix timestamps
|
|
||||||
- Remove dependency on script.module.defusedxml - that module is now included in PKC
|
|
||||||
- version 3.3.3-3.3.5 for everyone
|
|
||||||
|
|
||||||
version 3.3.5 (beta only):
|
|
||||||
- Rewire defusedxml and xml.etree.ElementTree: Fix AttributeError: module 'resources.lib.utils' has no attribute 'ParseError'
|
|
||||||
- Fix errors when PKC tries to edit files that don't exist yet
|
|
||||||
|
|
||||||
version 3.3.4 (beta only):
|
|
||||||
- Fix a racing condition that could lead to the sync getting stuck
|
- Fix a racing condition that could lead to the sync getting stuck
|
||||||
- Fix RecursionError: maximum recursion depth exceeded
|
- Fix RecursionError: maximum recursion depth exceeded
|
||||||
- Bump websocket client: fix AttributeError: 'NoneType' object has no attribute 'is_ssl'
|
- Websocket Fix AttributeError: 'NoneType' object has no attribute 'is_ssl'
|
||||||
|
|
||||||
version 3.3.3 (beta only):
|
version 2.13.1 (beta only):
|
||||||
- Fix a racing condition that could lead to the sync process getting stuck
|
- Fix a racing condition that could lead to the sync process getting stuck
|
||||||
- Fix likelyhood of `database is locked` error occuring
|
- Fix likelyhood of `database is locked` error occuring
|
||||||
- Fix AttributeError: module 'urllib' has no attribute 'parse'
|
|
||||||
|
version 2.13.0:
|
||||||
- Support for the Plex HAMA agent to let Kodi identify animes (using Kodi's uniqueID 'anidb')
|
- Support for the Plex HAMA agent to let Kodi identify animes (using Kodi's uniqueID 'anidb')
|
||||||
- Support forced HAMA IDs when using tvdb uniqueID
|
- Support forced HAMA IDs when using tvdb uniqueID
|
||||||
|
- version 2.12.26 for everyone
|
||||||
|
|
||||||
version 3.3.2:
|
version 2.12.26 (beta only):
|
||||||
- version 3.3.1 for everyone
|
|
||||||
|
|
||||||
version 3.3.1 (beta only):
|
|
||||||
- Add an additional Plex Hub "PKC Continue Watching" that merges the Plex Continue Watching with On Deck
|
- Add an additional Plex Hub "PKC Continue Watching" that merges the Plex Continue Watching with On Deck
|
||||||
- Fix auto-picking of video stream if several video versions are available
|
- Fix auto-picking of video stream if several video versions are available
|
||||||
- Make PKC compatible with Kodi 20 N* by using xbmcvfs for translatePath
|
|
||||||
- Update translations
|
- Update translations
|
||||||
|
|
||||||
version 3.3.0:
|
version 2.12.25:
|
||||||
WARNING: Database reset and full resync required
|
- Update websocket client to 0.59.0. Fix threading issues and AttributeErrors
|
||||||
- versions 3.2.1-3.2.4 for everyone
|
|
||||||
|
|
||||||
version 3.2.4 (beta only):
|
version 2.12.24:
|
||||||
- Fix websockets and AttributeError: 'NoneType' object has no attribute
|
- version 2.12.23 for everyone
|
||||||
|
|
||||||
version 3.2.3 (beta only):
|
version 2.12.23 (beta only):
|
||||||
- Attempt to fix websocket threading issues and AttributeError: 'NoneType' object has no attribute 'is_ssl' or 'settimeout'
|
|
||||||
- Get rid of Python arrow; hopefully fix many Python import errors (also occuring in other add-ons!)
|
|
||||||
|
|
||||||
version 3.2.2 (beta only):
|
|
||||||
- Fix videos not starting due to a TypeError
|
|
||||||
- Show warning message to remind user to use Estuary for database resets
|
|
||||||
- Update websocket client to 1.0.0
|
|
||||||
|
|
||||||
version 3.2.1 (beta only):
|
|
||||||
WARNING: Database reset and full resync required
|
|
||||||
- Fix PKC widgets not working at all in some cases
|
|
||||||
- Direct Paths: fix several issues with episodes
|
|
||||||
- New Python-dependency: arrow
|
|
||||||
|
|
||||||
version 3.2.0:
|
|
||||||
WARNING: Database reset and full resync required
|
|
||||||
- version 3.1.1-3.1.4 for everyone
|
|
||||||
|
|
||||||
version 3.1.4 (beta only):
|
|
||||||
- Fix Alexa and RuntimeError: dictionary keys changed during iteration
|
- Fix Alexa and RuntimeError: dictionary keys changed during iteration
|
||||||
- Fix AttributeError: module 'shutil' has no attribute 'copy_tree'
|
|
||||||
|
|
||||||
version 3.1.3 (beta only):
|
|
||||||
- Add PKC setting to disable verification whether we can access a media file
|
|
||||||
- Direct paths: corrections to more closely mirror Kodi's way of saving movie and tv show files to the db
|
|
||||||
- Make sure that the correct file system encoding is used for playlists
|
|
||||||
- Fix a rare AttributeError when using playlists
|
- Fix a rare AttributeError when using playlists
|
||||||
- Fix regression: fix add-on paths always falling back to direct paths
|
|
||||||
|
|
||||||
version 3.1.2 (beta only):
|
version 2.12.22:
|
||||||
- Fix ImportError: cannot import name 'dir_util' from 'distutils' on PKC startup
|
- version 2.12.20 and 2.12.21 for everyone
|
||||||
- Fix UnicodeEncodeError if Plex playlist name contains illegal chars
|
|
||||||
- Fix PKC not showing up as a casting target in some cases
|
|
||||||
|
|
||||||
version 3.1.1 (beta only):
|
version 2.12.21 (beta only):
|
||||||
- Direct paths: fix filename showing instead of full video metadata during playback
|
- Switch to new websocket implementation
|
||||||
|
- Hopefully fix RuntimeError: no add-on id "plugin.video.plexkodiconnect"
|
||||||
- Update translations
|
- Update translations
|
||||||
|
|
||||||
version 3.1.0:
|
version 2.12.20 (beta only):
|
||||||
- version 3.0.16 and 3.0.17 for everyone
|
|
||||||
- Fix resume not working if Kodi player start-up is slow
|
|
||||||
|
|
||||||
version 3.0.17 (beta only):
|
|
||||||
- Fix instantaneous background sync and Alexa not working
|
|
||||||
- Hopefully fix RuntimeError: no add-on id "plugin.video.plexkodiconnect"
|
|
||||||
- Fix error socket.timeout: timed out
|
|
||||||
|
|
||||||
version 3.0.16 (beta only):
|
|
||||||
- Add information to PKC settings for background sync and Alexa whether a connection has been successfully made
|
- Add information to PKC settings for background sync and Alexa whether a connection has been successfully made
|
||||||
|
|
||||||
version 3.0.15:
|
version 2.12.19:
|
||||||
- 3.0.14 for everyone
|
- 2.12.17 and 2.12.18 for everyone
|
||||||
- Rename skip intro skin file
|
- Rename skip intro skin file
|
||||||
|
|
||||||
version 3.0.14 (beta only):
|
|
||||||
- Quickly sync recently watched items before synching the playstates of the entire Plex library
|
|
||||||
- Fix TypeError: function missing required argument 'message'
|
|
||||||
- Fix PlexKodiConnect Kodi add-on icon and fanart not showing
|
|
||||||
- Improve logging for websocket JSON loads
|
|
||||||
|
|
||||||
version 3.0.13:
|
|
||||||
- Fix UnboundLocalError: local variable 'user' referenced before assignment
|
|
||||||
|
|
||||||
version 3.0.12:
|
|
||||||
- Sync name and user rating of a TV show season to Kodi
|
|
||||||
- Fix rare TypeError: expected string or buffer on playback start
|
|
||||||
|
|
||||||
version 3.0.11:
|
|
||||||
- Fix TypeError: function missing required argument 'message'
|
|
||||||
|
|
||||||
version 3.0.10:
|
|
||||||
- Fix skip intros sometimes not working due to a RuntimeError
|
|
||||||
- Update translations
|
|
||||||
|
|
||||||
version 3.0.9:
|
|
||||||
- Add skip intro functionality
|
|
||||||
- Fix Kodi add-on NextUp not working
|
|
||||||
|
|
||||||
version 3.0.8:
|
|
||||||
- Fix KeyError: u'game' if Plex Arcade has been activated
|
|
||||||
- Fix AttributeError: 'App' object has no attribute 'threads' when sync is cancelled
|
|
||||||
|
|
||||||
version 3.0.7:
|
|
||||||
- Hopefully fix rare case when sync would get stuck indefinitely
|
|
||||||
- Fix ValueError: invalid literal for int() for invalid dates sent by Plex
|
|
||||||
|
|
||||||
version 3.0.6:
|
|
||||||
- Fix PKC not auto-picking audio/subtitle stream when transcoding
|
|
||||||
- Fix ValueError when deleting a music album
|
|
||||||
- Fix OSError: Invalid argument when Plex returns an invalid timestamp
|
|
||||||
|
|
||||||
version 3.0.5:
|
|
||||||
- Fix pictures from Plex picture libraries not working/displaying
|
|
||||||
- Fix sqlite3.OperationalError on PKC upgrade
|
|
||||||
|
|
||||||
version 3.0.4:
|
|
||||||
- Automatically look for missing movie trailers using TMDB
|
|
||||||
|
|
||||||
version 3.0.3:
|
|
||||||
- Fix PKC suddenly using main Plex user's credentials, e.g. when the PMS address changed
|
|
||||||
- Fix missing Kodi tags for movie collections/sets
|
|
||||||
- Change `thread.isAlive` to `thread.is_alive`
|
|
||||||
|
|
||||||
version 3.0.2:
|
|
||||||
- Fix AttributeError: module has no attribute try_decode
|
|
||||||
|
|
||||||
version 3.0.1:
|
|
||||||
- Fix rare KeyError when using PKC widgets
|
|
||||||
- Update translations
|
|
||||||
|
|
||||||
version 3.0.0:
|
|
||||||
- Major upgrade from Python 2 to Python 3, allowing use of Kodi 19 Matrix
|
|
||||||
|
|
||||||
version 2.12.18 (beta only):
|
version 2.12.18 (beta only):
|
||||||
- Quickly sync recently watched items before synching the playstates of the entire Plex library
|
- Quickly sync recently watched items before synching the playstates of the entire Plex library
|
||||||
- Improve logging for websocket JSON loads
|
- Improve logging for websocket JSON loads
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from sys import listitem
|
from sys import listitem
|
||||||
from urllib.parse import urlencode
|
from urllib import urlencode
|
||||||
|
|
||||||
from xbmc import getCondVisibility, sleep
|
from xbmc import getCondVisibility, sleep
|
||||||
from xbmcgui import Window
|
from xbmcgui import Window
|
||||||
|
@ -10,7 +11,7 @@ from xbmcgui import Window
|
||||||
|
|
||||||
|
|
||||||
def _get_kodi_type():
|
def _get_kodi_type():
|
||||||
kodi_type = listitem.getVideoInfoTag().getMediaType()
|
kodi_type = listitem.getVideoInfoTag().getMediaType().decode('utf-8')
|
||||||
if not kodi_type:
|
if not kodi_type:
|
||||||
if getCondVisibility('Container.Content(albums)'):
|
if getCondVisibility('Container.Content(albums)'):
|
||||||
kodi_type = "album"
|
kodi_type = "album"
|
||||||
|
|
50
default.py
50
default.py
|
@ -1,29 +1,41 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
from builtins import object
|
|
||||||
|
###############################################################################
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
import logging
|
import logging
|
||||||
from sys import argv
|
from sys import argv
|
||||||
from urllib.parse import parse_qsl
|
from urlparse import parse_qsl
|
||||||
|
|
||||||
import xbmc
|
import xbmc
|
||||||
import xbmcgui
|
import xbmcgui
|
||||||
import xbmcplugin
|
import xbmcplugin
|
||||||
|
|
||||||
from resources.lib import entrypoint, utils, transfer, variables as v, loghandler
|
from resources.lib import entrypoint, utils, transfer, variables as v, loghandler
|
||||||
|
from resources.lib.tools import unicode_paths
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
loghandler.config()
|
loghandler.config()
|
||||||
LOG = logging.getLogger('PLEX.default')
|
LOG = logging.getLogger('PLEX.default')
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
class Main(object):
|
HANDLE = int(argv[1])
|
||||||
|
|
||||||
|
|
||||||
|
class Main():
|
||||||
# MAIN ENTRY POINT
|
# MAIN ENTRY POINT
|
||||||
# @utils.profiling()
|
# @utils.profiling()
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
LOG.debug('Full sys.argv received: %s', argv)
|
LOG.debug('Full sys.argv received: %s', argv)
|
||||||
# Parse parameters
|
# Parse parameters
|
||||||
params = dict(parse_qsl(argv[2][1:]))
|
params = dict(parse_qsl(argv[2][1:]))
|
||||||
arguments = argv[2]
|
arguments = unicode_paths.decode(argv[2])
|
||||||
path = argv[0]
|
path = unicode_paths.decode(argv[0])
|
||||||
|
# Ensure unicode
|
||||||
|
for key, value in params.iteritems():
|
||||||
|
params[key.decode('utf-8')] = params.pop(key)
|
||||||
|
params[key] = value.decode('utf-8')
|
||||||
mode = params.get('mode', '')
|
mode = params.get('mode', '')
|
||||||
itemid = params.get('id', '')
|
itemid = params.get('id', '')
|
||||||
|
|
||||||
|
@ -110,8 +122,7 @@ class Main(object):
|
||||||
transfer.plex_command('choose_pms_server')
|
transfer.plex_command('choose_pms_server')
|
||||||
|
|
||||||
elif mode == 'deviceid':
|
elif mode == 'deviceid':
|
||||||
LOG.info('New PKC UUID / unique device id requested')
|
self.deviceid()
|
||||||
transfer.plex_command('generate_new_uuid')
|
|
||||||
|
|
||||||
elif mode == 'fanart':
|
elif mode == 'fanart':
|
||||||
LOG.info('User requested fanarttv refresh')
|
LOG.info('User requested fanarttv refresh')
|
||||||
|
@ -151,21 +162,38 @@ class Main(object):
|
||||||
"""
|
"""
|
||||||
Start up playback_starter in main Python thread
|
Start up playback_starter in main Python thread
|
||||||
"""
|
"""
|
||||||
request = '%s&handle=%s' % (argv[2], int(argv[1]))
|
request = '%s&handle=%s' % (argv[2], HANDLE)
|
||||||
# Put the request into the 'queue'
|
# Put the request into the 'queue'
|
||||||
transfer.plex_command('PLAY-%s' % request)
|
transfer.plex_command('PLAY-%s' % request)
|
||||||
if int(argv[1]) == -1:
|
if HANDLE == -1:
|
||||||
# Handle -1 received, not waiting for main thread
|
# Handle -1 received, not waiting for main thread
|
||||||
return
|
return
|
||||||
# Wait for the result from the main PKC thread
|
# Wait for the result from the main PKC thread
|
||||||
result = transfer.wait_for_transfer(source='main')
|
result = transfer.wait_for_transfer(source='main')
|
||||||
if result is True:
|
if result is True:
|
||||||
xbmcplugin.setResolvedUrl(int(argv[1]), False, xbmcgui.ListItem())
|
xbmcplugin.setResolvedUrl(HANDLE, False, xbmcgui.ListItem())
|
||||||
# Tell main thread that we're done
|
# Tell main thread that we're done
|
||||||
transfer.send(True, target='main')
|
transfer.send(True, target='main')
|
||||||
else:
|
else:
|
||||||
# Received a xbmcgui.ListItem()
|
# Received a xbmcgui.ListItem()
|
||||||
xbmcplugin.setResolvedUrl(int(argv[1]), True, result)
|
xbmcplugin.setResolvedUrl(HANDLE, True, result)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def deviceid():
|
||||||
|
window = xbmcgui.Window(10000)
|
||||||
|
deviceId_old = window.getProperty('plex_client_Id')
|
||||||
|
from resources.lib import clientinfo
|
||||||
|
try:
|
||||||
|
deviceId = clientinfo.getDeviceId(reset=True)
|
||||||
|
except Exception as e:
|
||||||
|
LOG.error('Failed to generate a new device Id: %s' % e)
|
||||||
|
utils.messageDialog(utils.lang(29999), utils.lang(33032))
|
||||||
|
else:
|
||||||
|
LOG.info('Successfully removed old device ID: %s New deviceId:'
|
||||||
|
'%s' % (deviceId_old, deviceId))
|
||||||
|
# 'Kodi will now restart to apply the changes'
|
||||||
|
utils.messageDialog(utils.lang(29999), utils.lang(33033))
|
||||||
|
xbmc.executebuiltin('RestartApp')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|
|
@ -1676,8 +1676,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Nahradit uživatelské hodnocení počtem verzí média"
|
msgstr "Nahradit uživatelské hodnocení počtem verzí média"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1684,8 +1684,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Erstat brugerbedømmelser med antal af medieversioner"
|
msgstr "Erstat brugerbedømmelser med antal af medieversioner"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1713,8 +1713,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Benutzerbewertungen durch verfügbare Anzahl Versionen ersetzen"
|
msgstr "Benutzerbewertungen durch verfügbare Anzahl Versionen ersetzen"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1601,8 +1601,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -155,11 +155,6 @@ msgctxt "#30028"
|
||||||
msgid "PKC-only image caching completed"
|
msgid "PKC-only image caching completed"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# Warning shown when PKC switches to the Kodi default skin Estuary
|
|
||||||
msgctxt "#30029"
|
|
||||||
msgid "To ensure a smooth PlexKodiConnect experience, it is HIGHLY recommended to use Kodi's default skin \"Estuary\" for initial set-up and for possible database resets. Continue?"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
msgctxt "#30030"
|
msgctxt "#30030"
|
||||||
msgid "Port Number"
|
msgid "Port Number"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
@ -1098,11 +1093,6 @@ msgctxt "#39074"
|
||||||
msgid "TV Shows"
|
msgid "TV Shows"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# PKC Settings - Sync
|
|
||||||
msgctxt "#39075"
|
|
||||||
msgid "Verify access to media files while synching"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
# Pop-up during initial sync
|
# Pop-up during initial sync
|
||||||
msgctxt "#39076"
|
msgctxt "#39076"
|
||||||
msgid "If you use several Plex libraries of one kind, e.g. \"Kids Movies\" and \"Parents Movies\", be sure to check the Wiki: https://goo.gl/JFtQV9"
|
msgid "If you use several Plex libraries of one kind, e.g. \"Kids Movies\" and \"Parents Movies\", be sure to check the Wiki: https://goo.gl/JFtQV9"
|
||||||
|
@ -1501,8 +1491,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1699,8 +1699,3 @@ msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Reemplazar valoraciones de usuario con cantidad de versiones de medios"
|
"Reemplazar valoraciones de usuario con cantidad de versiones de medios"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1701,8 +1701,3 @@ msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Reemplazar valoraciones de usuario con cantidad de versiones de medios"
|
"Reemplazar valoraciones de usuario con cantidad de versiones de medios"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1699,8 +1699,3 @@ msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Reemplazar valoraciones de usuario con cantidad de versiones de medios"
|
"Reemplazar valoraciones de usuario con cantidad de versiones de medios"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1719,8 +1719,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Remplacer les notes d'utilisateurs par le nombre de versions du média"
|
msgstr "Remplacer les notes d'utilisateurs par le nombre de versions du média"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1723,8 +1723,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Remplacer les notes d'utilisateurs par le nombre de versions du média"
|
msgstr "Remplacer les notes d'utilisateurs par le nombre de versions du média"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1712,8 +1712,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Felhasználói osztályzatok lecserélése a médiaverziók számára"
|
msgstr "Felhasználói osztályzatok lecserélése a médiaverziók számára"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1711,8 +1711,3 @@ msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Sostituisci la valutazione contenuti con il numero delle versioni del "
|
"Sostituisci la valutazione contenuti con il numero delle versioni del "
|
||||||
"contenuto disponibili"
|
"contenuto disponibili"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1632,8 +1632,3 @@ msgstr "사용자 지정 사용자 등급을 보유하고있는 미디어 항목
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "사용자 등급을 미디어 버전 수로 대체"
|
msgstr "사용자 등급을 미디어 버전 수로 대체"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1697,8 +1697,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Pakeiskite naudotojų reitingus medijos versijų skaičiumi"
|
msgstr "Pakeiskite naudotojų reitingus medijos versijų skaičiumi"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1632,8 +1632,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1683,8 +1683,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Vervang ratings met aantal media versies"
|
msgstr "Vervang ratings met aantal media versies"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1677,8 +1677,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Erstatt rating med antall versjoner av media"
|
msgstr "Erstatt rating med antall versjoner av media"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1606,8 +1606,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1674,8 +1674,3 @@ msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Substituir classificações do utilizador com numero de versões de média"
|
"Substituir classificações do utilizador com numero de versões de média"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1677,8 +1677,3 @@ msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Substituir classificações do utilizador com numero de versões de média"
|
"Substituir classificações do utilizador com numero de versões de média"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1691,8 +1691,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Заменить пользовательский рейтинг счетчиком версий элемента"
|
msgstr "Заменить пользовательский рейтинг счетчиком версий элемента"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1685,8 +1685,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Ersätt användarbetyg med antalet mediaobjekt"
|
msgstr "Ersätt användarbetyg med antalet mediaobjekt"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1691,8 +1691,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr "Замінити користувацький рейтинг лічильником версій елемента"
|
msgstr "Замінити користувацький рейтинг лічильником версій елемента"
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1602,8 +1602,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -1598,8 +1598,3 @@ msgstr ""
|
||||||
msgctxt "#39719"
|
msgctxt "#39719"
|
||||||
msgid "Replace user ratings with number of media versions"
|
msgid "Replace user ratings with number of media versions"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
# PKC Settings - Playback
|
|
||||||
msgctxt "#39720"
|
|
||||||
msgid "Auto skip intro"
|
|
||||||
msgstr ""
|
|
||||||
|
|
|
@ -4,19 +4,18 @@
|
||||||
Used to save PKC's application state and share between modules. Be careful
|
Used to save PKC's application state and share between modules. Be careful
|
||||||
if you invoke another PKC Python instance (!!) when e.g. PKC.movies is called
|
if you invoke another PKC Python instance (!!) when e.g. PKC.movies is called
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from .account import Account
|
from .account import Account
|
||||||
from .application import App
|
from .application import App
|
||||||
from .connection import Connection
|
from .connection import Connection
|
||||||
from .libsync import Sync
|
from .libsync import Sync
|
||||||
from .playstate import PlayState
|
from .playstate import PlayState
|
||||||
from .playqueues import Playqueues
|
|
||||||
|
|
||||||
ACCOUNT = None
|
ACCOUNT = None
|
||||||
APP = None
|
APP = None
|
||||||
CONN = None
|
CONN = None
|
||||||
SYNC = None
|
SYNC = None
|
||||||
PLAYSTATE = None
|
PLAYSTATE = None
|
||||||
PLAYQUEUES = None
|
|
||||||
|
|
||||||
|
|
||||||
def init(entrypoint=False):
|
def init(entrypoint=False):
|
||||||
|
@ -24,15 +23,13 @@ def init(entrypoint=False):
|
||||||
entrypoint=True initiates only the bare minimum - for other PKC python
|
entrypoint=True initiates only the bare minimum - for other PKC python
|
||||||
instances
|
instances
|
||||||
"""
|
"""
|
||||||
global ACCOUNT, APP, CONN, SYNC, PLAYSTATE, PLAYQUEUES
|
global ACCOUNT, APP, CONN, SYNC, PLAYSTATE
|
||||||
APP = App(entrypoint)
|
APP = App(entrypoint)
|
||||||
CONN = Connection(entrypoint)
|
CONN = Connection(entrypoint)
|
||||||
ACCOUNT = Account(entrypoint)
|
ACCOUNT = Account(entrypoint)
|
||||||
SYNC = Sync(entrypoint)
|
SYNC = Sync(entrypoint)
|
||||||
if not entrypoint:
|
if not entrypoint:
|
||||||
PLAYSTATE = PlayState()
|
PLAYSTATE = PlayState()
|
||||||
PLAYQUEUES = Playqueues()
|
|
||||||
|
|
||||||
|
|
||||||
def reload():
|
def reload():
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from .. import utils
|
from .. import utils
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import queue
|
import Queue
|
||||||
from threading import Lock, RLock
|
from threading import Lock, RLock
|
||||||
|
|
||||||
import xbmc
|
import xbmc
|
||||||
|
@ -39,15 +40,15 @@ class App(object):
|
||||||
self.lock_playlists = Lock()
|
self.lock_playlists = Lock()
|
||||||
|
|
||||||
# Plex Companion Queue()
|
# Plex Companion Queue()
|
||||||
self.companion_queue = queue.Queue(maxsize=100)
|
self.companion_queue = Queue.Queue(maxsize=100)
|
||||||
# Websocket_client queue to communicate with librarysync
|
# Websocket_client queue to communicate with librarysync
|
||||||
self.websocket_queue = queue.Queue()
|
self.websocket_queue = Queue.Queue()
|
||||||
# xbmc.Monitor() instance from kodimonitor.py
|
# xbmc.Monitor() instance from kodimonitor.py
|
||||||
self.monitor = None
|
self.monitor = None
|
||||||
# xbmc.Player() instance
|
# xbmc.Player() instance
|
||||||
self.player = None
|
self.player = None
|
||||||
# Instance of MetadataThread()
|
# Instance of FanartThread()
|
||||||
self.metadata_thread = None
|
self.fanart_thread = None
|
||||||
# Instance of ImageCachingThread()
|
# Instance of ImageCachingThread()
|
||||||
self.caching_thread = None
|
self.caching_thread = None
|
||||||
# Dialog to skip intro
|
# Dialog to skip intro
|
||||||
|
@ -61,24 +62,24 @@ class App(object):
|
||||||
def is_playing_video(self):
|
def is_playing_video(self):
|
||||||
return self.player.isPlayingVideo() == 1
|
return self.player.isPlayingVideo() == 1
|
||||||
|
|
||||||
def register_metadata_thread(self, thread):
|
def register_fanart_thread(self, thread):
|
||||||
self.metadata_thread = thread
|
self.fanart_thread = thread
|
||||||
self.threads.append(thread)
|
self.threads.append(thread)
|
||||||
|
|
||||||
def deregister_metadata_thread(self, thread):
|
def deregister_fanart_thread(self, thread):
|
||||||
self.metadata_thread.unblock_callers()
|
self.fanart_thread.unblock_callers()
|
||||||
self.metadata_thread = None
|
self.fanart_thread = None
|
||||||
self.threads.remove(thread)
|
self.threads.remove(thread)
|
||||||
|
|
||||||
def suspend_metadata_thread(self, block=True):
|
def suspend_fanart_thread(self, block=True):
|
||||||
try:
|
try:
|
||||||
self.metadata_thread.suspend(block=block)
|
self.fanart_thread.suspend(block=block)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def resume_metadata_thread(self):
|
def resume_fanart_thread(self):
|
||||||
try:
|
try:
|
||||||
self.metadata_thread.resume()
|
self.fanart_thread.resume()
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import secrets
|
|
||||||
|
|
||||||
from .. import utils, json_rpc as js
|
from .. import utils, json_rpc as js, variables as v
|
||||||
|
|
||||||
LOG = getLogger('PLEX.connection')
|
LOG = getLogger('PLEX.connection')
|
||||||
|
|
||||||
|
@ -38,36 +38,22 @@ class Connection(object):
|
||||||
PKC needs Kodi webserver to work correctly
|
PKC needs Kodi webserver to work correctly
|
||||||
"""
|
"""
|
||||||
LOG.debug('Loading Kodi webserver details')
|
LOG.debug('Loading Kodi webserver details')
|
||||||
if not utils.settings('enableTextureCache') == 'true':
|
# Kodi webserver details
|
||||||
LOG.info('Artwork caching disabled')
|
if js.get_setting('services.webserver') in (None, False):
|
||||||
return
|
# Enable the webserver, it is disabled
|
||||||
self.webserver_password = js.get_setting('services.webserverpassword')
|
|
||||||
if not self.webserver_password:
|
|
||||||
LOG.warn('No password set for the Kodi web server. Generating a '
|
|
||||||
'new random password')
|
|
||||||
self.webserver_password = secrets.token_urlsafe(16)
|
|
||||||
js.set_setting('services.webserverpassword', self.webserver_password)
|
|
||||||
if not js.get_setting('services.webserver'):
|
|
||||||
# The Kodi webserver is needed for artwork caching. PKC already set
|
|
||||||
# a strong, random password automatically if you haven't done so
|
|
||||||
# already. Please confirm the next dialog that you want to enable
|
|
||||||
# the webserver now with Yes.
|
|
||||||
utils.messageDialog(utils.lang(29999), utils.lang(30004))
|
|
||||||
# Enable the webserver, it is disabled. Will force a Kodi pop-up
|
|
||||||
js.set_setting('services.webserver', True)
|
js.set_setting('services.webserver', True)
|
||||||
if not js.get_setting('services.webserver'):
|
|
||||||
LOG.warn('User chose to not enable Kodi webserver')
|
|
||||||
utils.settings('enableTextureCache', value='false')
|
|
||||||
self.webserver_host = 'localhost'
|
self.webserver_host = 'localhost'
|
||||||
self.webserver_port = js.get_setting('services.webserverport')
|
self.webserver_port = js.get_setting('services.webserverport')
|
||||||
self.webserver_username = js.get_setting('services.webserverusername')
|
self.webserver_username = js.get_setting('services.webserverusername')
|
||||||
|
self.webserver_password = js.get_setting('services.webserverpassword')
|
||||||
|
|
||||||
def load(self):
|
def load(self):
|
||||||
LOG.debug('Loading connection settings')
|
LOG.debug('Loading connection settings')
|
||||||
# Shall we verify SSL certificates? "None" will leave SSL enabled
|
# Shall we verify SSL certificates? "None" will leave SSL enabled
|
||||||
# Ignore this setting for Kodi >= 18 as Kodi 18 is much stricter
|
# Ignore this setting for Kodi >= 18 as Kodi 18 is much stricter
|
||||||
# with checking SSL certs
|
# with checking SSL certs
|
||||||
self.verify_ssl_cert = None
|
self.verify_ssl_cert = None if v.KODIVERSION >= 18 or utils.settings('sslverify') == 'true' \
|
||||||
|
else False
|
||||||
# Do we have an ssl certificate for PKC we need to use?
|
# Do we have an ssl certificate for PKC we need to use?
|
||||||
self.ssl_cert_path = utils.settings('sslcert') \
|
self.ssl_cert_path = utils.settings('sslcert') \
|
||||||
if utils.settings('sslcert') != 'None' else None
|
if utils.settings('sslcert') != 'None' else None
|
||||||
|
@ -88,7 +74,8 @@ class Connection(object):
|
||||||
self.server_name, self.machine_identifier, self.server)
|
self.server_name, self.machine_identifier, self.server)
|
||||||
|
|
||||||
def load_entrypoint(self):
|
def load_entrypoint(self):
|
||||||
self.verify_ssl_cert = None
|
self.verify_ssl_cert = None if v.KODIVERSION >= 18 or utils.settings('sslverify') == 'true' \
|
||||||
|
else False
|
||||||
self.ssl_cert_path = utils.settings('sslcert') \
|
self.ssl_cert_path = utils.settings('sslcert') \
|
||||||
if utils.settings('sslcert') != 'None' else None
|
if utils.settings('sslcert') != 'None' else None
|
||||||
self.https = utils.settings('https') == 'true'
|
self.https = utils.settings('https') == 'true'
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
|
||||||
from .. import utils
|
from .. import utils
|
||||||
|
|
||||||
|
|
||||||
|
@ -69,8 +71,6 @@ class Sync(object):
|
||||||
self.run_lib_scan = None
|
self.run_lib_scan = None
|
||||||
# Set if user decided to cancel sync
|
# Set if user decided to cancel sync
|
||||||
self.stop_sync = False
|
self.stop_sync = False
|
||||||
# Do we check whether we can access a media file?
|
|
||||||
self.check_media_file_existence = False
|
|
||||||
# Could we access the paths?
|
# Could we access the paths?
|
||||||
self.path_verified = False
|
self.path_verified = False
|
||||||
|
|
||||||
|
@ -94,8 +94,6 @@ class Sync(object):
|
||||||
|
|
||||||
def load(self):
|
def load(self):
|
||||||
self.direct_paths = utils.settings('useDirectPaths') == '1'
|
self.direct_paths = utils.settings('useDirectPaths') == '1'
|
||||||
self.check_media_file_existence = \
|
|
||||||
utils.settings('check_media_file_existence') == '1'
|
|
||||||
self.enable_music = utils.settings('enableMusic') == 'true'
|
self.enable_music = utils.settings('enableMusic') == 'true'
|
||||||
self.artwork = utils.settings('usePlexArtwork') == 'true'
|
self.artwork = utils.settings('usePlexArtwork') == 'true'
|
||||||
self.replace_smb_path = utils.settings('replaceSMB') == 'true'
|
self.replace_smb_path = utils.settings('replaceSMB') == 'true'
|
||||||
|
@ -109,7 +107,7 @@ class Sync(object):
|
||||||
self.remapSMBphotoOrg = remove_trailing_slash(utils.settings('remapSMBphotoOrg'))
|
self.remapSMBphotoOrg = remove_trailing_slash(utils.settings('remapSMBphotoOrg'))
|
||||||
self.remapSMBphotoNew = remove_trailing_slash(utils.settings('remapSMBphotoNew'))
|
self.remapSMBphotoNew = remove_trailing_slash(utils.settings('remapSMBphotoNew'))
|
||||||
self.escape_path = utils.settings('escapePath') == 'true'
|
self.escape_path = utils.settings('escapePath') == 'true'
|
||||||
self.escape_path_safe_chars = utils.settings('escapePathSafeChars')
|
self.escape_path_safe_chars = utils.settings('escapePathSafeChars').encode('utf-8')
|
||||||
self.indicate_media_versions = utils.settings('indicate_media_versions') == "true"
|
self.indicate_media_versions = utils.settings('indicate_media_versions') == "true"
|
||||||
self.sync_specific_plex_playlists = utils.settings('syncSpecificPlexPlaylists') == 'true'
|
self.sync_specific_plex_playlists = utils.settings('syncSpecificPlexPlaylists') == 'true'
|
||||||
self.sync_specific_kodi_playlists = utils.settings('syncSpecificKodiPlaylists') == 'true'
|
self.sync_specific_kodi_playlists = utils.settings('syncSpecificKodiPlaylists') == 'true'
|
||||||
|
|
|
@ -1,230 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
from logging import getLogger
|
|
||||||
|
|
||||||
import xbmc
|
|
||||||
|
|
||||||
from .. import variables as v
|
|
||||||
|
|
||||||
|
|
||||||
LOG = getLogger('PLEX.playqueue')
|
|
||||||
|
|
||||||
|
|
||||||
class Playqueue(object):
|
|
||||||
"""
|
|
||||||
PKC object to represent PMS playQueues and Kodi playlist for queueing
|
|
||||||
|
|
||||||
playlistid = None [int] Kodi playlist id (0, 1, 2)
|
|
||||||
type = None [str] Kodi type: 'audio', 'video', 'picture'
|
|
||||||
kodi_pl = None Kodi xbmc.PlayList object
|
|
||||||
items = [] [list] of Playlist_Items
|
|
||||||
id = None [str] Plex playQueueID, unique Plex identifier
|
|
||||||
version = None [int] Plex version of the playQueue
|
|
||||||
selectedItemID = None
|
|
||||||
[str] Plex selectedItemID, playing element in queue
|
|
||||||
selectedItemOffset = None
|
|
||||||
[str] Offset of the playing element in queue
|
|
||||||
shuffled = 0 [int] 0: not shuffled, 1: ??? 2: ???
|
|
||||||
repeat = 0 [int] 0: not repeated, 1: ??? 2: ???
|
|
||||||
|
|
||||||
If Companion playback is initiated by another user:
|
|
||||||
plex_transient_token = None
|
|
||||||
"""
|
|
||||||
kind = 'playQueue'
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.id = None
|
|
||||||
self.type = None
|
|
||||||
self.playlistid = None
|
|
||||||
self.kodi_pl = None
|
|
||||||
self.items = []
|
|
||||||
self.version = None
|
|
||||||
self.selectedItemID = None
|
|
||||||
self.selectedItemOffset = None
|
|
||||||
self.shuffled = 0
|
|
||||||
self.repeat = 0
|
|
||||||
self.plex_transient_token = None
|
|
||||||
# Need a hack for detecting swaps of elements
|
|
||||||
self.old_kodi_pl = []
|
|
||||||
# Did PKC itself just change the playqueue so the PKC playqueue monitor
|
|
||||||
# should not pick up any changes?
|
|
||||||
self.pkc_edit = False
|
|
||||||
# Workaround to avoid endless loops of detecting PL clears
|
|
||||||
self._clear_list = []
|
|
||||||
# To keep track if Kodi playback was initiated from a Kodi playlist
|
|
||||||
# There are a couple of pitfalls, unfortunately...
|
|
||||||
self.kodi_playlist_playback = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
answ = ("{{"
|
|
||||||
"'playlistid': {self.playlistid}, "
|
|
||||||
"'id': {self.id}, "
|
|
||||||
"'version': {self.version}, "
|
|
||||||
"'type': '{self.type}', "
|
|
||||||
"'selectedItemID': {self.selectedItemID}, "
|
|
||||||
"'selectedItemOffset': {self.selectedItemOffset}, "
|
|
||||||
"'shuffled': {self.shuffled}, "
|
|
||||||
"'repeat': {self.repeat}, "
|
|
||||||
"'kodi_playlist_playback': {self.kodi_playlist_playback}, "
|
|
||||||
"'pkc_edit': {self.pkc_edit}, ".format(self=self))
|
|
||||||
# Since list.__repr__ will return string, not unicode
|
|
||||||
return answ + "'items': {self.items}}}".format(self=self)
|
|
||||||
|
|
||||||
def is_pkc_clear(self):
|
|
||||||
"""
|
|
||||||
Returns True if PKC has cleared the Kodi playqueue just recently.
|
|
||||||
Then this clear will be ignored from now on
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
self._clear_list.pop()
|
|
||||||
except IndexError:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
def clear(self, kodi=True):
|
|
||||||
"""
|
|
||||||
Resets the playlist object to an empty playlist.
|
|
||||||
|
|
||||||
Pass kodi=False in order to NOT clear the Kodi playqueue
|
|
||||||
"""
|
|
||||||
# kodi monitor's on_clear method will only be called if there were some
|
|
||||||
# items to begin with
|
|
||||||
if kodi and self.kodi_pl.size() != 0:
|
|
||||||
self._clear_list.append(None)
|
|
||||||
self.kodi_pl.clear() # Clear Kodi playlist object
|
|
||||||
self.items = []
|
|
||||||
self.id = None
|
|
||||||
self.version = None
|
|
||||||
self.selectedItemID = None
|
|
||||||
self.selectedItemOffset = None
|
|
||||||
self.shuffled = 0
|
|
||||||
self.repeat = 0
|
|
||||||
self.plex_transient_token = None
|
|
||||||
self.old_kodi_pl = []
|
|
||||||
self.kodi_playlist_playback = False
|
|
||||||
LOG.debug('Playlist cleared: %s', self)
|
|
||||||
|
|
||||||
def position_from_plex_id(self, plex_id):
|
|
||||||
"""
|
|
||||||
Returns the position [int] for the very first item with plex_id [int]
|
|
||||||
(Plex seems uncapable of adding the same element multiple times to a
|
|
||||||
playqueue or playlist)
|
|
||||||
|
|
||||||
Raises KeyError if not found
|
|
||||||
"""
|
|
||||||
for position, item in enumerate(self.items):
|
|
||||||
if item.plex_id == plex_id:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
raise KeyError('Did not find plex_id %s in %s', plex_id, self)
|
|
||||||
return position
|
|
||||||
|
|
||||||
|
|
||||||
class Playqueues(list):
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
super().__init__()
|
|
||||||
for i, typus in enumerate((v.KODI_PLAYLIST_TYPE_AUDIO,
|
|
||||||
v.KODI_PLAYLIST_TYPE_VIDEO,
|
|
||||||
v.KODI_PLAYLIST_TYPE_PHOTO)):
|
|
||||||
playqueue = Playqueue()
|
|
||||||
playqueue.playlistid = i
|
|
||||||
playqueue.type = typus
|
|
||||||
# Initialize each Kodi playlist
|
|
||||||
if typus == v.KODI_PLAYLIST_TYPE_AUDIO:
|
|
||||||
playqueue.kodi_pl = xbmc.PlayList(xbmc.PLAYLIST_MUSIC)
|
|
||||||
elif typus == v.KODI_PLAYLIST_TYPE_VIDEO:
|
|
||||||
playqueue.kodi_pl = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
|
|
||||||
else:
|
|
||||||
# Currently, only video or audio playqueues available
|
|
||||||
playqueue.kodi_pl = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
|
|
||||||
# Overwrite 'picture' with 'photo'
|
|
||||||
playqueue.type = v.KODI_TYPE_PHOTO
|
|
||||||
self.append(playqueue)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def audio(self):
|
|
||||||
return self[0]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def video(self):
|
|
||||||
return self[1]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def photo(self):
|
|
||||||
return self[2]
|
|
||||||
|
|
||||||
def from_kodi_playlist_type(self, kodi_playlist_type):
|
|
||||||
"""
|
|
||||||
Returns the playqueue according to the kodi_playlist_type ('video',
|
|
||||||
'audio', 'picture') passed in
|
|
||||||
"""
|
|
||||||
if kodi_playlist_type == v.KODI_PLAYLIST_TYPE_AUDIO:
|
|
||||||
return self[0]
|
|
||||||
elif kodi_playlist_type == v.KODI_PLAYLIST_TYPE_VIDEO:
|
|
||||||
return self[1]
|
|
||||||
elif kodi_playlist_type == v.KODI_PLAYLIST_TYPE_PHOTO:
|
|
||||||
return self[2]
|
|
||||||
else:
|
|
||||||
raise ValueError('Unknown kodi_playlist_type: %s' % kodi_playlist_type)
|
|
||||||
|
|
||||||
def from_kodi_type(self, kodi_type):
|
|
||||||
"""
|
|
||||||
Pass in the kodi_type (e.g. the string 'movie') to get the correct
|
|
||||||
playqueue (either video, audio or picture)
|
|
||||||
"""
|
|
||||||
if kodi_type == v.KODI_TYPE_VIDEO:
|
|
||||||
return self[1]
|
|
||||||
elif kodi_type == v.KODI_TYPE_MOVIE:
|
|
||||||
return self[1]
|
|
||||||
elif kodi_type == v.KODI_TYPE_EPISODE:
|
|
||||||
return self[1]
|
|
||||||
elif kodi_type == v.KODI_TYPE_SEASON:
|
|
||||||
return self[1]
|
|
||||||
elif kodi_type == v.KODI_TYPE_SHOW:
|
|
||||||
return self[1]
|
|
||||||
elif kodi_type == v.KODI_TYPE_CLIP:
|
|
||||||
return self[1]
|
|
||||||
elif kodi_type == v.KODI_TYPE_SONG:
|
|
||||||
return self[0]
|
|
||||||
elif kodi_type == v.KODI_TYPE_ALBUM:
|
|
||||||
return self[0]
|
|
||||||
elif kodi_type == v.KODI_TYPE_ARTIST:
|
|
||||||
return self[0]
|
|
||||||
elif kodi_type == v.KODI_TYPE_AUDIO:
|
|
||||||
return self[0]
|
|
||||||
elif kodi_type == v.KODI_TYPE_PHOTO:
|
|
||||||
return self[2]
|
|
||||||
else:
|
|
||||||
raise ValueError('Unknown kodi_type: %s' % kodi_type)
|
|
||||||
|
|
||||||
def from_plex_type(self, plex_type):
|
|
||||||
"""
|
|
||||||
Pass in the plex_type (e.g. the string 'movie') to get the correct
|
|
||||||
playqueue (either video, audio or picture)
|
|
||||||
"""
|
|
||||||
if plex_type == v.PLEX_TYPE_VIDEO:
|
|
||||||
return self[1]
|
|
||||||
elif plex_type == v.PLEX_TYPE_MOVIE:
|
|
||||||
return self[1]
|
|
||||||
elif plex_type == v.PLEX_TYPE_EPISODE:
|
|
||||||
return self[1]
|
|
||||||
elif plex_type == v.PLEX_TYPE_SEASON:
|
|
||||||
return self[1]
|
|
||||||
elif plex_type == v.PLEX_TYPE_SHOW:
|
|
||||||
return self[1]
|
|
||||||
elif plex_type == v.PLEX_TYPE_CLIP:
|
|
||||||
return self[1]
|
|
||||||
elif plex_type == v.PLEX_TYPE_SONG:
|
|
||||||
return self[0]
|
|
||||||
elif plex_type == v.PLEX_TYPE_ALBUM:
|
|
||||||
return self[0]
|
|
||||||
elif plex_type == v.PLEX_TYPE_ARTIST:
|
|
||||||
return self[0]
|
|
||||||
elif plex_type == v.PLEX_TYPE_AUDIO:
|
|
||||||
return self[0]
|
|
||||||
elif plex_type == v.PLEX_TYPE_PHOTO:
|
|
||||||
return self[2]
|
|
||||||
else:
|
|
||||||
raise ValueError('Unknown plex_type: %s' % plex_type)
|
|
|
@ -1,5 +1,8 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
|
||||||
|
|
||||||
class PlayState(object):
|
class PlayState(object):
|
||||||
# "empty" dict for the PLAYER_STATES above. Use copy.deepcopy to duplicate!
|
# "empty" dict for the PLAYER_STATES above. Use copy.deepcopy to duplicate!
|
||||||
template = {
|
template = {
|
||||||
|
@ -44,6 +47,12 @@ class PlayState(object):
|
||||||
1: {},
|
1: {},
|
||||||
2: {}
|
2: {}
|
||||||
}
|
}
|
||||||
|
# The LAST playstate once playback is finished
|
||||||
|
self.old_player_states = {
|
||||||
|
0: {},
|
||||||
|
1: {},
|
||||||
|
2: {}
|
||||||
|
}
|
||||||
self.played_info = {}
|
self.played_info = {}
|
||||||
|
|
||||||
# Currently playing PKC item, a PlaylistItem()
|
# Currently playing PKC item, a PlaylistItem()
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
|
@ -100,7 +101,10 @@ def cache_url(url, should_suspend=None):
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
requests.head(
|
requests.head(
|
||||||
url=f'http://{app.CONN.webserver_username}:{app.CONN.webserver_password}@{app.CONN.webserver_host}:{app.CONN.webserver_port}/image/image://{url}',
|
url="http://%s:%s/image/image://%s"
|
||||||
|
% (app.CONN.webserver_host,
|
||||||
|
app.CONN.webserver_port,
|
||||||
|
url),
|
||||||
auth=(app.CONN.webserver_username,
|
auth=(app.CONN.webserver_username,
|
||||||
app.CONN.webserver_password),
|
app.CONN.webserver_password),
|
||||||
timeout=TIMEOUT)
|
timeout=TIMEOUT)
|
||||||
|
|
|
@ -1,12 +1,12 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
from time import time as _time
|
from time import time as _time
|
||||||
import threading
|
import threading
|
||||||
import queue
|
import Queue
|
||||||
import heapq
|
import heapq
|
||||||
from collections import deque
|
from collections import deque
|
||||||
from functools import total_ordering
|
|
||||||
|
|
||||||
from . import utils, app, variables as v
|
from . import utils, app, variables as v
|
||||||
|
|
||||||
|
@ -113,7 +113,7 @@ class KillableThread(threading.Thread):
|
||||||
self._suspension_reached.set()
|
self._suspension_reached.set()
|
||||||
|
|
||||||
|
|
||||||
class ProcessingQueue(queue.Queue, object):
|
class ProcessingQueue(Queue.Queue, object):
|
||||||
"""
|
"""
|
||||||
Queue of queues that processes a queue completely before moving on to the
|
Queue of queues that processes a queue completely before moving on to the
|
||||||
next queue. There's one queue per Section(). You need to initialize each
|
next queue. There's one queue per Section(). You need to initialize each
|
||||||
|
@ -192,7 +192,7 @@ class ProcessingQueue(queue.Queue, object):
|
||||||
self._sections.append(section)
|
self._sections.append(section)
|
||||||
self._queues.append(
|
self._queues.append(
|
||||||
OrderedQueue() if section.plex_type == v.PLEX_TYPE_ALBUM
|
OrderedQueue() if section.plex_type == v.PLEX_TYPE_ALBUM
|
||||||
else queue.Queue())
|
else Queue.Queue())
|
||||||
if self._current_section is None:
|
if self._current_section is None:
|
||||||
self._activate_next_section()
|
self._activate_next_section()
|
||||||
|
|
||||||
|
@ -217,7 +217,7 @@ class ProcessingQueue(queue.Queue, object):
|
||||||
return item[1]
|
return item[1]
|
||||||
|
|
||||||
|
|
||||||
class OrderedQueue(queue.PriorityQueue, object):
|
class OrderedQueue(Queue.PriorityQueue, object):
|
||||||
"""
|
"""
|
||||||
Queue that enforces an order on the items it returns. An item you push
|
Queue that enforces an order on the items it returns. An item you push
|
||||||
onto the queue must be a tuple
|
onto the queue must be a tuple
|
||||||
|
@ -233,15 +233,15 @@ class OrderedQueue(queue.PriorityQueue, object):
|
||||||
self.next_index = 0
|
self.next_index = 0
|
||||||
super(OrderedQueue, self).__init__(maxsize)
|
super(OrderedQueue, self).__init__(maxsize)
|
||||||
|
|
||||||
def _qsize(self):
|
def _qsize(self, len=len):
|
||||||
try:
|
try:
|
||||||
return len(self.queue) if self.queue[0][0] == self.next_index else 0
|
return len(self.queue) if self.queue[0][0] == self.next_index else 0
|
||||||
except IndexError:
|
except IndexError:
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
def _get(self):
|
def _get(self, heappop=heapq.heappop):
|
||||||
self.next_index += 1
|
self.next_index += 1
|
||||||
return heapq.heappop(self.queue)
|
return heappop(self.queue)
|
||||||
|
|
||||||
|
|
||||||
class Tasks(list):
|
class Tasks(list):
|
||||||
|
@ -260,20 +260,14 @@ class Tasks(list):
|
||||||
self.pop().cancel()
|
self.pop().cancel()
|
||||||
|
|
||||||
|
|
||||||
@total_ordering
|
|
||||||
class Task(object):
|
class Task(object):
|
||||||
def __init__(self, priority=None):
|
def __init__(self, priority=None):
|
||||||
self.priority = priority
|
self.priority = priority
|
||||||
self._canceled = False
|
self._canceled = False
|
||||||
self.finished = False
|
self.finished = False
|
||||||
|
|
||||||
def __lt__(self, other):
|
def __cmp__(self, other):
|
||||||
"""Magic method Task<Other Task; compares the tasks' priorities."""
|
return self.priority - other.priority
|
||||||
return self.priority - other.priority > 0
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
"""Magic method Task=Other Task; compares the tasks' priorities."""
|
|
||||||
return self.priority == other.priority
|
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
BGThreader.addTask(self)
|
BGThreader.addTask(self)
|
||||||
|
@ -314,10 +308,10 @@ class FunctionAsTask(Task):
|
||||||
self._callback(result)
|
self._callback(result)
|
||||||
|
|
||||||
|
|
||||||
class MutablePriorityQueue(queue.PriorityQueue):
|
class MutablePriorityQueue(Queue.PriorityQueue):
|
||||||
def _get(self):
|
def _get(self, heappop=heapq.heappop):
|
||||||
self.queue.sort()
|
self.queue.sort()
|
||||||
return heapq.heappop(self.queue)
|
return heappop(self.queue)
|
||||||
|
|
||||||
def lowest(self):
|
def lowest(self):
|
||||||
"""Return the lowest priority item in the queue (not reliable!)."""
|
"""Return the lowest priority item in the queue (not reliable!)."""
|
||||||
|
@ -357,7 +351,7 @@ class BackgroundWorker(object):
|
||||||
return self._abort or app.APP.monitor.abortRequested()
|
return self._abort or app.APP.monitor.abortRequested()
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
if self._thread and self._thread.is_alive():
|
if self._thread and self._thread.isAlive():
|
||||||
return
|
return
|
||||||
|
|
||||||
self._thread = KillableThread(target=self._queueLoop, name='BACKGROUND-WORKER({0})'.format(self.name))
|
self._thread = KillableThread(target=self._queueLoop, name='BACKGROUND-WORKER({0})'.format(self.name))
|
||||||
|
@ -374,7 +368,7 @@ class BackgroundWorker(object):
|
||||||
self._runTask(self._task)
|
self._runTask(self._task)
|
||||||
self._queue.task_done()
|
self._queue.task_done()
|
||||||
self._task = None
|
self._task = None
|
||||||
except queue.Empty:
|
except Queue.Empty:
|
||||||
LOG.debug('(%s): Idle', self.name)
|
LOG.debug('(%s): Idle', self.name)
|
||||||
|
|
||||||
def shutdown(self, block=True):
|
def shutdown(self, block=True):
|
||||||
|
@ -383,13 +377,13 @@ class BackgroundWorker(object):
|
||||||
if self._task:
|
if self._task:
|
||||||
self._task.cancel()
|
self._task.cancel()
|
||||||
|
|
||||||
if block and self._thread and self._thread.is_alive():
|
if block and self._thread and self._thread.isAlive():
|
||||||
LOG.debug('thread (%s): Waiting...', self.name)
|
LOG.debug('thread (%s): Waiting...', self.name)
|
||||||
self._thread.join()
|
self._thread.join()
|
||||||
LOG.debug('thread (%s): Done', self.name)
|
LOG.debug('thread (%s): Done', self.name)
|
||||||
|
|
||||||
def working(self):
|
def working(self):
|
||||||
return self._thread and self._thread.is_alive()
|
return self._thread and self._thread.isAlive()
|
||||||
|
|
||||||
|
|
||||||
class NonstoppingBackgroundWorker(BackgroundWorker):
|
class NonstoppingBackgroundWorker(BackgroundWorker):
|
||||||
|
@ -414,7 +408,7 @@ class NonstoppingBackgroundWorker(BackgroundWorker):
|
||||||
return self._working
|
return self._working
|
||||||
|
|
||||||
|
|
||||||
class BackgroundThreader(object):
|
class BackgroundThreader:
|
||||||
def __init__(self, name=None, worker=BackgroundWorker, worker_count=6):
|
def __init__(self, name=None, worker=BackgroundWorker, worker_count=6):
|
||||||
self.name = name
|
self.name = name
|
||||||
self._queue = MutablePriorityQueue()
|
self._queue = MutablePriorityQueue()
|
||||||
|
@ -494,7 +488,7 @@ class BackgroundThreader(object):
|
||||||
qitem.priority = lowest - 1
|
qitem.priority = lowest - 1
|
||||||
|
|
||||||
|
|
||||||
class ThreaderManager(object):
|
class ThreaderManager:
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
worker=NonstoppingBackgroundWorker,
|
worker=NonstoppingBackgroundWorker,
|
||||||
worker_count=WORKER_COUNT):
|
worker_count=WORKER_COUNT):
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
import xbmc
|
import xbmc
|
||||||
|
@ -29,6 +30,7 @@ def getXArgsDeviceInfo(options=None, include_token=True):
|
||||||
"""
|
"""
|
||||||
xargs = {
|
xargs = {
|
||||||
'Accept': '*/*',
|
'Accept': '*/*',
|
||||||
|
'Connection': 'keep-alive',
|
||||||
"Content-Type": "application/x-www-form-urlencoded",
|
"Content-Type": "application/x-www-form-urlencoded",
|
||||||
# "Access-Control-Allow-Origin": "*",
|
# "Access-Control-Allow-Origin": "*",
|
||||||
'Accept-Language': xbmc.getLanguage(xbmc.ISO_639_1),
|
'Accept-Language': xbmc.getLanguage(xbmc.ISO_639_1),
|
||||||
|
@ -41,8 +43,6 @@ def getXArgsDeviceInfo(options=None, include_token=True):
|
||||||
'X-Plex-Version': v.ADDON_VERSION,
|
'X-Plex-Version': v.ADDON_VERSION,
|
||||||
'X-Plex-Client-Identifier': getDeviceId(),
|
'X-Plex-Client-Identifier': getDeviceId(),
|
||||||
'X-Plex-Provides': 'client,controller,player,pubsub-player',
|
'X-Plex-Provides': 'client,controller,player,pubsub-player',
|
||||||
'X-Plex-Protocol': '1.0',
|
|
||||||
'Cache-Control': 'no-cache'
|
|
||||||
}
|
}
|
||||||
if include_token and utils.window('pms_token'):
|
if include_token and utils.window('pms_token'):
|
||||||
xargs['X-Plex-Token'] = utils.window('pms_token')
|
xargs['X-Plex-Token'] = utils.window('pms_token')
|
||||||
|
@ -51,22 +51,6 @@ def getXArgsDeviceInfo(options=None, include_token=True):
|
||||||
return xargs
|
return xargs
|
||||||
|
|
||||||
|
|
||||||
def generate_device_id():
|
|
||||||
LOG.info("Generating a new deviceid.")
|
|
||||||
from uuid import uuid4
|
|
||||||
client_id = str(uuid4())
|
|
||||||
utils.settings('plex_client_Id', value=client_id)
|
|
||||||
v.PKC_MACHINE_IDENTIFIER = client_id
|
|
||||||
utils.window('plex_client_Id', value=client_id)
|
|
||||||
LOG.info("Unique device Id plex_client_Id generated: %s", client_id)
|
|
||||||
# IF WE EXIT KODI NOW, THE SETTING WON'T STICK!
|
|
||||||
# 'Kodi will now restart to apply the changes'
|
|
||||||
# utils.messageDialog(utils.lang(29999), utils.lang(33033))
|
|
||||||
# xbmc.executebuiltin('RestartApp')
|
|
||||||
utils.messageDialog(utils.lang(29999), 'Please restart Kodi now!')
|
|
||||||
return client_id
|
|
||||||
|
|
||||||
|
|
||||||
def getDeviceId(reset=False):
|
def getDeviceId(reset=False):
|
||||||
"""
|
"""
|
||||||
Returns a unique Plex client id "X-Plex-Client-Identifier" from Kodi
|
Returns a unique Plex client id "X-Plex-Client-Identifier" from Kodi
|
||||||
|
@ -75,18 +59,28 @@ def getDeviceId(reset=False):
|
||||||
|
|
||||||
If id does not exist, create one and save in Kodi settings file.
|
If id does not exist, create one and save in Kodi settings file.
|
||||||
"""
|
"""
|
||||||
if reset:
|
if reset is True:
|
||||||
return generate_device_id()
|
v.PKC_MACHINE_IDENTIFIER = None
|
||||||
|
utils.window('plex_client_Id', clear=True)
|
||||||
|
utils.settings('plex_client_Id', value="")
|
||||||
|
|
||||||
client_id = v.PKC_MACHINE_IDENTIFIER
|
client_id = v.PKC_MACHINE_IDENTIFIER
|
||||||
if client_id:
|
if client_id:
|
||||||
return client_id
|
return client_id
|
||||||
|
|
||||||
client_id = utils.settings('plex_client_Id')
|
client_id = utils.settings('plex_client_Id')
|
||||||
if client_id != "":
|
# Because Kodi appears to cache file settings!!
|
||||||
|
if client_id != "" and reset is False:
|
||||||
v.PKC_MACHINE_IDENTIFIER = client_id
|
v.PKC_MACHINE_IDENTIFIER = client_id
|
||||||
utils.window('plex_client_Id', value=client_id)
|
utils.window('plex_client_Id', value=client_id)
|
||||||
LOG.info("Unique device Id plex_client_Id loaded: %s", client_id)
|
LOG.info("Unique device Id plex_client_Id loaded: %s", client_id)
|
||||||
return client_id
|
return client_id
|
||||||
else:
|
|
||||||
return generate_device_id()
|
LOG.info("Generating a new deviceid.")
|
||||||
|
from uuid import uuid4
|
||||||
|
client_id = str(uuid4())
|
||||||
|
utils.settings('plex_client_Id', value=client_id)
|
||||||
|
v.PKC_MACHINE_IDENTIFIER = client_id
|
||||||
|
utils.window('plex_client_Id', value=client_id)
|
||||||
|
LOG.info("Unique device Id plex_client_Id generated: %s", client_id)
|
||||||
|
return client_id
|
||||||
|
|
|
@ -3,13 +3,12 @@
|
||||||
"""
|
"""
|
||||||
Processes Plex companion inputs from the plexbmchelper to Kodi commands
|
Processes Plex companion inputs from the plexbmchelper to Kodi commands
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
from xbmc import Player
|
from xbmc import Player
|
||||||
|
|
||||||
from . import plex_functions as PF
|
from . import playqueue as PQ, plex_functions as PF
|
||||||
from . import json_rpc as js
|
from . import json_rpc as js, variables as v, app
|
||||||
from . import variables as v
|
|
||||||
from . import app
|
|
||||||
|
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
|
||||||
|
@ -29,8 +28,8 @@ def skip_to(params):
|
||||||
LOG.debug('Skipping to playQueueItemID %s, plex_id %s',
|
LOG.debug('Skipping to playQueueItemID %s, plex_id %s',
|
||||||
playqueue_item_id, plex_id)
|
playqueue_item_id, plex_id)
|
||||||
found = True
|
found = True
|
||||||
for player in list(js.get_players().values()):
|
for player in js.get_players().values():
|
||||||
playqueue = app.PLAYQUEUES[player['playerid']]
|
playqueue = PQ.PLAYQUEUES[player['playerid']]
|
||||||
for i, item in enumerate(playqueue.items):
|
for i, item in enumerate(playqueue.items):
|
||||||
if item.id == playqueue_item_id:
|
if item.id == playqueue_item_id:
|
||||||
found = True
|
found = True
|
||||||
|
@ -80,7 +79,6 @@ def process_command(request_path, params):
|
||||||
js.set_volume(int(params['volume']))
|
js.set_volume(int(params['volume']))
|
||||||
else:
|
else:
|
||||||
LOG.error('Unknown parameters: %s', params)
|
LOG.error('Unknown parameters: %s', params)
|
||||||
return False
|
|
||||||
elif request_path == "player/playback/play":
|
elif request_path == "player/playback/play":
|
||||||
js.play()
|
js.play()
|
||||||
elif request_path == "player/playback/pause":
|
elif request_path == "player/playback/pause":
|
||||||
|
@ -88,7 +86,7 @@ def process_command(request_path, params):
|
||||||
elif request_path == "player/playback/stop":
|
elif request_path == "player/playback/stop":
|
||||||
js.stop()
|
js.stop()
|
||||||
elif request_path == "player/playback/seekTo":
|
elif request_path == "player/playback/seekTo":
|
||||||
js.seek_to(float(params.get('offset', 0.0)) / 1000.0)
|
js.seek_to(int(params.get('offset', 0)))
|
||||||
elif request_path == "player/playback/stepForward":
|
elif request_path == "player/playback/stepForward":
|
||||||
js.smallforward()
|
js.smallforward()
|
||||||
elif request_path == "player/playback/stepBack":
|
elif request_path == "player/playback/stepBack":
|
||||||
|
@ -120,5 +118,3 @@ def process_command(request_path, params):
|
||||||
})
|
})
|
||||||
else:
|
else:
|
||||||
LOG.error('Unknown request path: %s', request_path)
|
LOG.error('Unknown request path: %s', request_path)
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import xbmcgui
|
import xbmcgui
|
||||||
|
|
||||||
|
@ -59,14 +60,14 @@ class ContextMenu(xbmcgui.WindowXMLDialog):
|
||||||
if action in (ACTION_SELECT_ITEM, ACTION_MOUSE_LEFT_CLICK):
|
if action in (ACTION_SELECT_ITEM, ACTION_MOUSE_LEFT_CLICK):
|
||||||
if self.getFocusId() == LIST:
|
if self.getFocusId() == LIST:
|
||||||
option = self.list_.getSelectedItem()
|
option = self.list_.getSelectedItem()
|
||||||
self.selected_option = option.getLabel()
|
self.selected_option = option.getLabel().decode('utf-8')
|
||||||
LOG.info('option selected: %s', self.selected_option)
|
LOG.info('option selected: %s', self.selected_option)
|
||||||
self.close()
|
self.close()
|
||||||
|
|
||||||
def _add_editcontrol(self, x, y, height, width, password=None):
|
def _add_editcontrol(self, x, y, height, width, password=None):
|
||||||
media = path_ops.path.join(
|
media = path_ops.path.join(
|
||||||
v.ADDON_PATH, 'resources', 'skins', 'default', 'media')
|
v.ADDON_PATH, 'resources', 'skins', 'default', 'media')
|
||||||
filename = path_ops.path.join(media, 'white.png')
|
filename = utils.try_encode(path_ops.path.join(media, 'white.png'))
|
||||||
control = xbmcgui.ControlImage(0, 0, 0, 0,
|
control = xbmcgui.ControlImage(0, 0, 0, 0,
|
||||||
filename=filename,
|
filename=filename,
|
||||||
aspectRatio=0,
|
aspectRatio=0,
|
||||||
|
|
|
@ -1,16 +1,14 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import xbmc
|
import xbmc
|
||||||
import xbmcgui
|
import xbmcgui
|
||||||
|
|
||||||
from .plex_api import API
|
from .plex_api import API
|
||||||
from .plex_db import PlexDB
|
from .plex_db import PlexDB
|
||||||
from . import context
|
from . import context, plex_functions as PF, playqueue as PQ
|
||||||
from . import plex_functions as PF
|
from . import utils, variables as v, app
|
||||||
from . import utils
|
|
||||||
from . import variables as v
|
|
||||||
from . import app
|
|
||||||
|
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
|
||||||
|
@ -94,7 +92,7 @@ class ContextMenu(object):
|
||||||
options.append(OPTIONS['Addon'])
|
options.append(OPTIONS['Addon'])
|
||||||
context_menu = context.ContextMenu(
|
context_menu = context.ContextMenu(
|
||||||
"script-plex-context.xml",
|
"script-plex-context.xml",
|
||||||
v.ADDON_PATH,
|
utils.try_encode(v.ADDON_PATH),
|
||||||
"default",
|
"default",
|
||||||
"1080i")
|
"1080i")
|
||||||
context_menu.set_options(options)
|
context_menu.set_options(options)
|
||||||
|
@ -128,24 +126,25 @@ class ContextMenu(object):
|
||||||
"""
|
"""
|
||||||
delete = True
|
delete = True
|
||||||
if utils.settings('skipContextMenu') != "true":
|
if utils.settings('skipContextMenu') != "true":
|
||||||
if not utils.dialog("yesno", heading="{plex}", message=utils.lang(33041)):
|
if not utils.dialog("yesno", heading="{plex}", line1=utils.lang(33041)):
|
||||||
LOG.info("User skipped deletion for: %s", self.plex_id)
|
LOG.info("User skipped deletion for: %s", self.plex_id)
|
||||||
delete = False
|
delete = False
|
||||||
if delete:
|
if delete:
|
||||||
LOG.info("Deleting Plex item with id %s", self.plex_id)
|
LOG.info("Deleting Plex item with id %s", self.plex_id)
|
||||||
if PF.delete_item_from_pms(self.plex_id) is False:
|
if PF.delete_item_from_pms(self.plex_id) is False:
|
||||||
utils.dialog("ok", heading="{plex}", message=utils.lang(30414))
|
utils.dialog("ok", heading="{plex}", line1=utils.lang(30414))
|
||||||
|
|
||||||
def _PMS_play(self):
|
def _PMS_play(self):
|
||||||
"""
|
"""
|
||||||
For using direct paths: Initiates playback using the PMS
|
For using direct paths: Initiates playback using the PMS
|
||||||
"""
|
"""
|
||||||
playqueue = app.PLAYQUEUES.from_kodi_type(self.kodi_type)
|
playqueue = PQ.get_playqueue_from_type(
|
||||||
|
v.KODI_PLAYLIST_TYPE_FROM_KODI_TYPE[self.kodi_type])
|
||||||
playqueue.clear()
|
playqueue.clear()
|
||||||
app.PLAYSTATE.context_menu_play = True
|
app.PLAYSTATE.context_menu_play = True
|
||||||
handle = self.api.fullpath(force_addon=True)[0]
|
handle = self.api.fullpath(force_addon=True)[0]
|
||||||
handle = f'RunPlugin({handle})'
|
handle = 'RunPlugin(%s)' % handle
|
||||||
xbmc.executebuiltin(handle)
|
xbmc.executebuiltin(handle.encode('utf-8'))
|
||||||
|
|
||||||
def _extras(self):
|
def _extras(self):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -27,7 +27,7 @@ def catch_operationalerrors(method):
|
||||||
try:
|
try:
|
||||||
return method(self, *args, **kwargs)
|
return method(self, *args, **kwargs)
|
||||||
except sqlite3.OperationalError as err:
|
except sqlite3.OperationalError as err:
|
||||||
if err.args[0] and 'database is locked' not in err.args[0]:
|
if 'database is locked' not in err:
|
||||||
# Not an error we want to catch, so reraise it
|
# Not an error we want to catch, so reraise it
|
||||||
raise
|
raise
|
||||||
attempts -= 1
|
attempts -= 1
|
||||||
|
|
39
resources/lib/defused_etree.py
Normal file
39
resources/lib/defused_etree.py
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
"""
|
||||||
|
xml.etree.ElementTree tries to encode with text.encode('ascii') - which is
|
||||||
|
just plain BS. This etree will always return unicode, not string
|
||||||
|
"""
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
# Originally tried faster cElementTree, but does NOT work reliably with Kodi
|
||||||
|
from defusedxml.ElementTree import DefusedXMLParser, _generate_etree_functions
|
||||||
|
|
||||||
|
from xml.etree.ElementTree import TreeBuilder as _TreeBuilder
|
||||||
|
from xml.etree.ElementTree import parse as _parse
|
||||||
|
from xml.etree.ElementTree import iterparse as _iterparse
|
||||||
|
from xml.etree.ElementTree import tostring
|
||||||
|
|
||||||
|
|
||||||
|
class UnicodeXMLParser(DefusedXMLParser):
|
||||||
|
"""
|
||||||
|
PKC Hack to ensure we're always receiving unicode, not str
|
||||||
|
"""
|
||||||
|
@staticmethod
|
||||||
|
def _fixtext(text):
|
||||||
|
"""
|
||||||
|
Do NOT try to convert every entry to str with entry.encode('ascii')!
|
||||||
|
"""
|
||||||
|
return text
|
||||||
|
|
||||||
|
|
||||||
|
# aliases
|
||||||
|
XMLTreeBuilder = XMLParse = UnicodeXMLParser
|
||||||
|
|
||||||
|
parse, iterparse, fromstring = _generate_etree_functions(UnicodeXMLParser,
|
||||||
|
_TreeBuilder, _parse,
|
||||||
|
_iterparse)
|
||||||
|
XML = fromstring
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['XML', 'XMLParse', 'XMLTreeBuilder', 'fromstring', 'iterparse',
|
||||||
|
'parse', 'tostring']
|
|
@ -1,188 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013-2020 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xml.etree.ElementTree facade
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import warnings
|
|
||||||
from xml.etree.ElementTree import ParseError
|
|
||||||
from xml.etree.ElementTree import TreeBuilder as _TreeBuilder
|
|
||||||
from xml.etree.ElementTree import parse as _parse
|
|
||||||
from xml.etree.ElementTree import tostring
|
|
||||||
|
|
||||||
import importlib
|
|
||||||
|
|
||||||
|
|
||||||
from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden
|
|
||||||
|
|
||||||
__origin__ = "xml.etree.ElementTree"
|
|
||||||
|
|
||||||
|
|
||||||
def _get_py3_cls():
|
|
||||||
"""Python 3.3 hides the pure Python code but defusedxml requires it.
|
|
||||||
|
|
||||||
The code is based on test.support.import_fresh_module().
|
|
||||||
"""
|
|
||||||
pymodname = "xml.etree.ElementTree"
|
|
||||||
cmodname = "_elementtree"
|
|
||||||
|
|
||||||
pymod = sys.modules.pop(pymodname, None)
|
|
||||||
cmod = sys.modules.pop(cmodname, None)
|
|
||||||
|
|
||||||
sys.modules[cmodname] = None
|
|
||||||
try:
|
|
||||||
pure_pymod = importlib.import_module(pymodname)
|
|
||||||
finally:
|
|
||||||
# restore module
|
|
||||||
sys.modules[pymodname] = pymod
|
|
||||||
if cmod is not None:
|
|
||||||
sys.modules[cmodname] = cmod
|
|
||||||
else:
|
|
||||||
sys.modules.pop(cmodname, None)
|
|
||||||
# restore attribute on original package
|
|
||||||
etree_pkg = sys.modules["xml.etree"]
|
|
||||||
if pymod is not None:
|
|
||||||
etree_pkg.ElementTree = pymod
|
|
||||||
elif hasattr(etree_pkg, "ElementTree"):
|
|
||||||
del etree_pkg.ElementTree
|
|
||||||
|
|
||||||
_XMLParser = pure_pymod.XMLParser
|
|
||||||
_iterparse = pure_pymod.iterparse
|
|
||||||
# patch pure module to use ParseError from C extension
|
|
||||||
pure_pymod.ParseError = ParseError
|
|
||||||
|
|
||||||
return _XMLParser, _iterparse
|
|
||||||
|
|
||||||
|
|
||||||
_XMLParser, _iterparse = _get_py3_cls()
|
|
||||||
|
|
||||||
_sentinel = object()
|
|
||||||
|
|
||||||
|
|
||||||
class DefusedXMLParser(_XMLParser):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
html=_sentinel,
|
|
||||||
target=None,
|
|
||||||
encoding=None,
|
|
||||||
forbid_dtd=False,
|
|
||||||
forbid_entities=True,
|
|
||||||
forbid_external=True,
|
|
||||||
):
|
|
||||||
super().__init__(target=target, encoding=encoding)
|
|
||||||
if html is not _sentinel:
|
|
||||||
# the 'html' argument has been deprecated and ignored in all
|
|
||||||
# supported versions of Python. Python 3.8 finally removed it.
|
|
||||||
if html:
|
|
||||||
raise TypeError("'html=True' is no longer supported.")
|
|
||||||
else:
|
|
||||||
warnings.warn(
|
|
||||||
"'html' keyword argument is no longer supported. Pass "
|
|
||||||
"in arguments as keyword arguments.",
|
|
||||||
category=DeprecationWarning,
|
|
||||||
)
|
|
||||||
|
|
||||||
self.forbid_dtd = forbid_dtd
|
|
||||||
self.forbid_entities = forbid_entities
|
|
||||||
self.forbid_external = forbid_external
|
|
||||||
parser = self.parser
|
|
||||||
if self.forbid_dtd:
|
|
||||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
|
||||||
if self.forbid_entities:
|
|
||||||
parser.EntityDeclHandler = self.defused_entity_decl
|
|
||||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
|
||||||
if self.forbid_external:
|
|
||||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
|
||||||
|
|
||||||
def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
|
|
||||||
raise DTDForbidden(name, sysid, pubid)
|
|
||||||
|
|
||||||
def defused_entity_decl(
|
|
||||||
self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
|
|
||||||
):
|
|
||||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
|
||||||
|
|
||||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
|
|
||||||
# expat 1.2
|
|
||||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover
|
|
||||||
|
|
||||||
def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
|
|
||||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
|
||||||
|
|
||||||
|
|
||||||
# aliases
|
|
||||||
# XMLParse is a typo, keep it for backwards compatibility
|
|
||||||
XMLTreeBuilder = XMLParse = XMLParser = DefusedXMLParser
|
|
||||||
|
|
||||||
|
|
||||||
def parse(source, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True):
|
|
||||||
if parser is None:
|
|
||||||
parser = DefusedXMLParser(
|
|
||||||
target=_TreeBuilder(),
|
|
||||||
forbid_dtd=forbid_dtd,
|
|
||||||
forbid_entities=forbid_entities,
|
|
||||||
forbid_external=forbid_external,
|
|
||||||
)
|
|
||||||
return _parse(source, parser)
|
|
||||||
|
|
||||||
|
|
||||||
def iterparse(
|
|
||||||
source,
|
|
||||||
events=None,
|
|
||||||
parser=None,
|
|
||||||
forbid_dtd=False,
|
|
||||||
forbid_entities=True,
|
|
||||||
forbid_external=True,
|
|
||||||
):
|
|
||||||
if parser is None:
|
|
||||||
parser = DefusedXMLParser(
|
|
||||||
target=_TreeBuilder(),
|
|
||||||
forbid_dtd=forbid_dtd,
|
|
||||||
forbid_entities=forbid_entities,
|
|
||||||
forbid_external=forbid_external,
|
|
||||||
)
|
|
||||||
return _iterparse(source, events, parser)
|
|
||||||
|
|
||||||
|
|
||||||
def fromstring(text, forbid_dtd=False, forbid_entities=True, forbid_external=True):
|
|
||||||
parser = DefusedXMLParser(
|
|
||||||
target=_TreeBuilder(),
|
|
||||||
forbid_dtd=forbid_dtd,
|
|
||||||
forbid_entities=forbid_entities,
|
|
||||||
forbid_external=forbid_external,
|
|
||||||
)
|
|
||||||
parser.feed(text)
|
|
||||||
return parser.close()
|
|
||||||
|
|
||||||
|
|
||||||
XML = fromstring
|
|
||||||
|
|
||||||
|
|
||||||
def fromstringlist(sequence, forbid_dtd=False, forbid_entities=True, forbid_external=True):
|
|
||||||
parser = DefusedXMLParser(
|
|
||||||
target=_TreeBuilder(),
|
|
||||||
forbid_dtd=forbid_dtd,
|
|
||||||
forbid_entities=forbid_entities,
|
|
||||||
forbid_external=forbid_external,
|
|
||||||
)
|
|
||||||
for text in sequence:
|
|
||||||
parser.feed(text)
|
|
||||||
return parser.close()
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"ParseError",
|
|
||||||
"XML",
|
|
||||||
"XMLParse",
|
|
||||||
"XMLParser",
|
|
||||||
"XMLTreeBuilder",
|
|
||||||
"fromstring",
|
|
||||||
"fromstringlist",
|
|
||||||
"iterparse",
|
|
||||||
"parse",
|
|
||||||
"tostring",
|
|
||||||
]
|
|
|
@ -1,67 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defuse XML bomb denial of service vulnerabilities
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
from .common import (
|
|
||||||
DefusedXmlException,
|
|
||||||
DTDForbidden,
|
|
||||||
EntitiesForbidden,
|
|
||||||
ExternalReferenceForbidden,
|
|
||||||
NotSupportedError,
|
|
||||||
_apply_defusing,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def defuse_stdlib():
|
|
||||||
"""Monkey patch and defuse all stdlib packages
|
|
||||||
|
|
||||||
:warning: The monkey patch is an EXPERIMETNAL feature.
|
|
||||||
"""
|
|
||||||
defused = {}
|
|
||||||
|
|
||||||
with warnings.catch_warnings():
|
|
||||||
from . import cElementTree
|
|
||||||
from . import ElementTree
|
|
||||||
from . import minidom
|
|
||||||
from . import pulldom
|
|
||||||
from . import sax
|
|
||||||
from . import expatbuilder
|
|
||||||
from . import expatreader
|
|
||||||
from . import xmlrpc
|
|
||||||
|
|
||||||
xmlrpc.monkey_patch()
|
|
||||||
defused[xmlrpc] = None
|
|
||||||
|
|
||||||
defused_mods = [
|
|
||||||
cElementTree,
|
|
||||||
ElementTree,
|
|
||||||
minidom,
|
|
||||||
pulldom,
|
|
||||||
sax,
|
|
||||||
expatbuilder,
|
|
||||||
expatreader,
|
|
||||||
]
|
|
||||||
|
|
||||||
for defused_mod in defused_mods:
|
|
||||||
stdlib_mod = _apply_defusing(defused_mod)
|
|
||||||
defused[defused_mod] = stdlib_mod
|
|
||||||
|
|
||||||
return defused
|
|
||||||
|
|
||||||
|
|
||||||
__version__ = "0.8.0.dev1"
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"DefusedXmlException",
|
|
||||||
"DTDForbidden",
|
|
||||||
"EntitiesForbidden",
|
|
||||||
"ExternalReferenceForbidden",
|
|
||||||
"NotSupportedError",
|
|
||||||
]
|
|
|
@ -1,47 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xml.etree.cElementTree
|
|
||||||
"""
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
# This module is an alias for ElementTree just like xml.etree.cElementTree
|
|
||||||
from .ElementTree import (
|
|
||||||
XML,
|
|
||||||
XMLParse,
|
|
||||||
XMLParser,
|
|
||||||
XMLTreeBuilder,
|
|
||||||
fromstring,
|
|
||||||
fromstringlist,
|
|
||||||
iterparse,
|
|
||||||
parse,
|
|
||||||
tostring,
|
|
||||||
DefusedXMLParser,
|
|
||||||
ParseError,
|
|
||||||
)
|
|
||||||
|
|
||||||
__origin__ = "xml.etree.cElementTree"
|
|
||||||
|
|
||||||
|
|
||||||
warnings.warn(
|
|
||||||
"defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.",
|
|
||||||
category=DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"ParseError",
|
|
||||||
"XML",
|
|
||||||
"XMLParse",
|
|
||||||
"XMLParser",
|
|
||||||
"XMLTreeBuilder",
|
|
||||||
"fromstring",
|
|
||||||
"fromstringlist",
|
|
||||||
"iterparse",
|
|
||||||
"parse",
|
|
||||||
"tostring",
|
|
||||||
# backwards compatibility
|
|
||||||
"DefusedXMLParser",
|
|
||||||
]
|
|
|
@ -1,85 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013-2020 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Common constants, exceptions and helpe functions
|
|
||||||
"""
|
|
||||||
import sys
|
|
||||||
import xml.parsers.expat
|
|
||||||
|
|
||||||
PY3 = True
|
|
||||||
|
|
||||||
# Fail early when pyexpat is not installed correctly
|
|
||||||
if not hasattr(xml.parsers.expat, "ParserCreate"):
|
|
||||||
raise ImportError("pyexpat") # pragma: no cover
|
|
||||||
|
|
||||||
|
|
||||||
class DefusedXmlException(ValueError):
|
|
||||||
"""Base exception"""
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return str(self)
|
|
||||||
|
|
||||||
|
|
||||||
class DTDForbidden(DefusedXmlException):
|
|
||||||
"""Document type definition is forbidden"""
|
|
||||||
|
|
||||||
def __init__(self, name, sysid, pubid):
|
|
||||||
super().__init__()
|
|
||||||
self.name = name
|
|
||||||
self.sysid = sysid
|
|
||||||
self.pubid = pubid
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
|
|
||||||
return tpl.format(self.name, self.sysid, self.pubid)
|
|
||||||
|
|
||||||
|
|
||||||
class EntitiesForbidden(DefusedXmlException):
|
|
||||||
"""Entity definition is forbidden"""
|
|
||||||
|
|
||||||
def __init__(self, name, value, base, sysid, pubid, notation_name):
|
|
||||||
super().__init__()
|
|
||||||
self.name = name
|
|
||||||
self.value = value
|
|
||||||
self.base = base
|
|
||||||
self.sysid = sysid
|
|
||||||
self.pubid = pubid
|
|
||||||
self.notation_name = notation_name
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
|
|
||||||
return tpl.format(self.name, self.sysid, self.pubid)
|
|
||||||
|
|
||||||
|
|
||||||
class ExternalReferenceForbidden(DefusedXmlException):
|
|
||||||
"""Resolving an external reference is forbidden"""
|
|
||||||
|
|
||||||
def __init__(self, context, base, sysid, pubid):
|
|
||||||
super().__init__()
|
|
||||||
self.context = context
|
|
||||||
self.base = base
|
|
||||||
self.sysid = sysid
|
|
||||||
self.pubid = pubid
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
|
|
||||||
return tpl.format(self.sysid, self.pubid)
|
|
||||||
|
|
||||||
|
|
||||||
class NotSupportedError(DefusedXmlException):
|
|
||||||
"""The operation is not supported"""
|
|
||||||
|
|
||||||
|
|
||||||
def _apply_defusing(defused_mod):
|
|
||||||
assert defused_mod is sys.modules[defused_mod.__name__]
|
|
||||||
stdlib_name = defused_mod.__origin__
|
|
||||||
__import__(stdlib_name, {}, {}, ["*"])
|
|
||||||
stdlib_mod = sys.modules[stdlib_name]
|
|
||||||
stdlib_names = set(dir(stdlib_mod))
|
|
||||||
for name, obj in vars(defused_mod).items():
|
|
||||||
if name.startswith("_") or name not in stdlib_names:
|
|
||||||
continue
|
|
||||||
setattr(stdlib_mod, name, obj)
|
|
||||||
return stdlib_mod
|
|
|
@ -1,107 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xml.dom.expatbuilder
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
from xml.dom.expatbuilder import ExpatBuilder as _ExpatBuilder
|
|
||||||
from xml.dom.expatbuilder import Namespaces as _Namespaces
|
|
||||||
|
|
||||||
from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden
|
|
||||||
|
|
||||||
__origin__ = "xml.dom.expatbuilder"
|
|
||||||
|
|
||||||
|
|
||||||
class DefusedExpatBuilder(_ExpatBuilder):
|
|
||||||
"""Defused document builder"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, options=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
|
|
||||||
):
|
|
||||||
_ExpatBuilder.__init__(self, options)
|
|
||||||
self.forbid_dtd = forbid_dtd
|
|
||||||
self.forbid_entities = forbid_entities
|
|
||||||
self.forbid_external = forbid_external
|
|
||||||
|
|
||||||
def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
|
|
||||||
raise DTDForbidden(name, sysid, pubid)
|
|
||||||
|
|
||||||
def defused_entity_decl(
|
|
||||||
self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
|
|
||||||
):
|
|
||||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
|
||||||
|
|
||||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
|
|
||||||
# expat 1.2
|
|
||||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover
|
|
||||||
|
|
||||||
def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
|
|
||||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
|
||||||
|
|
||||||
def install(self, parser):
|
|
||||||
_ExpatBuilder.install(self, parser)
|
|
||||||
|
|
||||||
if self.forbid_dtd:
|
|
||||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
|
||||||
if self.forbid_entities:
|
|
||||||
# if self._options.entities:
|
|
||||||
parser.EntityDeclHandler = self.defused_entity_decl
|
|
||||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
|
||||||
if self.forbid_external:
|
|
||||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
|
||||||
|
|
||||||
|
|
||||||
class DefusedExpatBuilderNS(_Namespaces, DefusedExpatBuilder):
|
|
||||||
"""Defused document builder that supports namespaces."""
|
|
||||||
|
|
||||||
def install(self, parser):
|
|
||||||
DefusedExpatBuilder.install(self, parser)
|
|
||||||
if self._options.namespace_declarations:
|
|
||||||
parser.StartNamespaceDeclHandler = self.start_namespace_decl_handler
|
|
||||||
|
|
||||||
def reset(self):
|
|
||||||
DefusedExpatBuilder.reset(self)
|
|
||||||
self._initNamespaces()
|
|
||||||
|
|
||||||
|
|
||||||
def parse(file, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True):
|
|
||||||
"""Parse a document, returning the resulting Document node.
|
|
||||||
|
|
||||||
'file' may be either a file name or an open file object.
|
|
||||||
"""
|
|
||||||
if namespaces:
|
|
||||||
build_builder = DefusedExpatBuilderNS
|
|
||||||
else:
|
|
||||||
build_builder = DefusedExpatBuilder
|
|
||||||
builder = build_builder(
|
|
||||||
forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external
|
|
||||||
)
|
|
||||||
|
|
||||||
if isinstance(file, str):
|
|
||||||
fp = open(file, "rb")
|
|
||||||
try:
|
|
||||||
result = builder.parseFile(fp)
|
|
||||||
finally:
|
|
||||||
fp.close()
|
|
||||||
else:
|
|
||||||
result = builder.parseFile(file)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def parseString(
|
|
||||||
string, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True
|
|
||||||
):
|
|
||||||
"""Parse a document from a string, returning the resulting
|
|
||||||
Document node.
|
|
||||||
"""
|
|
||||||
if namespaces:
|
|
||||||
build_builder = DefusedExpatBuilderNS
|
|
||||||
else:
|
|
||||||
build_builder = DefusedExpatBuilder
|
|
||||||
builder = build_builder(
|
|
||||||
forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external
|
|
||||||
)
|
|
||||||
return builder.parseString(string)
|
|
|
@ -1,61 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xml.sax.expatreader
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
from xml.sax.expatreader import ExpatParser as _ExpatParser
|
|
||||||
|
|
||||||
from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden
|
|
||||||
|
|
||||||
__origin__ = "xml.sax.expatreader"
|
|
||||||
|
|
||||||
|
|
||||||
class DefusedExpatParser(_ExpatParser):
|
|
||||||
"""Defused SAX driver for the pyexpat C module."""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
namespaceHandling=0,
|
|
||||||
bufsize=2 ** 16 - 20,
|
|
||||||
forbid_dtd=False,
|
|
||||||
forbid_entities=True,
|
|
||||||
forbid_external=True,
|
|
||||||
):
|
|
||||||
super().__init__(namespaceHandling, bufsize)
|
|
||||||
self.forbid_dtd = forbid_dtd
|
|
||||||
self.forbid_entities = forbid_entities
|
|
||||||
self.forbid_external = forbid_external
|
|
||||||
|
|
||||||
def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
|
|
||||||
raise DTDForbidden(name, sysid, pubid)
|
|
||||||
|
|
||||||
def defused_entity_decl(
|
|
||||||
self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
|
|
||||||
):
|
|
||||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
|
||||||
|
|
||||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
|
|
||||||
# expat 1.2
|
|
||||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover
|
|
||||||
|
|
||||||
def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
|
|
||||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
|
||||||
|
|
||||||
def reset(self):
|
|
||||||
super().reset()
|
|
||||||
parser = self._parser
|
|
||||||
if self.forbid_dtd:
|
|
||||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
|
||||||
if self.forbid_entities:
|
|
||||||
parser.EntityDeclHandler = self.defused_entity_decl
|
|
||||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
|
||||||
if self.forbid_external:
|
|
||||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
|
||||||
|
|
||||||
|
|
||||||
def create_parser(*args, **kwargs):
|
|
||||||
return DefusedExpatParser(*args, **kwargs)
|
|
|
@ -1,153 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""DEPRECATED Example code for lxml.etree protection
|
|
||||||
|
|
||||||
The code has NO protection against decompression bombs.
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
import threading
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
from lxml import etree as _etree
|
|
||||||
|
|
||||||
from .common import DTDForbidden, EntitiesForbidden, NotSupportedError
|
|
||||||
|
|
||||||
LXML3 = _etree.LXML_VERSION[0] >= 3
|
|
||||||
|
|
||||||
__origin__ = "lxml.etree"
|
|
||||||
|
|
||||||
tostring = _etree.tostring
|
|
||||||
|
|
||||||
|
|
||||||
warnings.warn(
|
|
||||||
"defusedxml.lxml is no longer supported and will be removed in a future release.",
|
|
||||||
category=DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class RestrictedElement(_etree.ElementBase):
|
|
||||||
"""A restricted Element class that filters out instances of some classes"""
|
|
||||||
|
|
||||||
__slots__ = ()
|
|
||||||
# blacklist = (etree._Entity, etree._ProcessingInstruction, etree._Comment)
|
|
||||||
blacklist = _etree._Entity
|
|
||||||
|
|
||||||
def _filter(self, iterator):
|
|
||||||
blacklist = self.blacklist
|
|
||||||
for child in iterator:
|
|
||||||
if isinstance(child, blacklist):
|
|
||||||
continue
|
|
||||||
yield child
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
iterator = super(RestrictedElement, self).__iter__()
|
|
||||||
return self._filter(iterator)
|
|
||||||
|
|
||||||
def iterchildren(self, tag=None, reversed=False):
|
|
||||||
iterator = super(RestrictedElement, self).iterchildren(tag=tag, reversed=reversed)
|
|
||||||
return self._filter(iterator)
|
|
||||||
|
|
||||||
def iter(self, tag=None, *tags):
|
|
||||||
iterator = super(RestrictedElement, self).iter(tag=tag, *tags)
|
|
||||||
return self._filter(iterator)
|
|
||||||
|
|
||||||
def iterdescendants(self, tag=None, *tags):
|
|
||||||
iterator = super(RestrictedElement, self).iterdescendants(tag=tag, *tags)
|
|
||||||
return self._filter(iterator)
|
|
||||||
|
|
||||||
def itersiblings(self, tag=None, preceding=False):
|
|
||||||
iterator = super(RestrictedElement, self).itersiblings(tag=tag, preceding=preceding)
|
|
||||||
return self._filter(iterator)
|
|
||||||
|
|
||||||
def getchildren(self):
|
|
||||||
iterator = super(RestrictedElement, self).__iter__()
|
|
||||||
return list(self._filter(iterator))
|
|
||||||
|
|
||||||
def getiterator(self, tag=None):
|
|
||||||
iterator = super(RestrictedElement, self).getiterator(tag)
|
|
||||||
return self._filter(iterator)
|
|
||||||
|
|
||||||
|
|
||||||
class GlobalParserTLS(threading.local):
|
|
||||||
"""Thread local context for custom parser instances"""
|
|
||||||
|
|
||||||
parser_config = {
|
|
||||||
"resolve_entities": False,
|
|
||||||
# 'remove_comments': True,
|
|
||||||
# 'remove_pis': True,
|
|
||||||
}
|
|
||||||
|
|
||||||
element_class = RestrictedElement
|
|
||||||
|
|
||||||
def createDefaultParser(self):
|
|
||||||
parser = _etree.XMLParser(**self.parser_config)
|
|
||||||
element_class = self.element_class
|
|
||||||
if self.element_class is not None:
|
|
||||||
lookup = _etree.ElementDefaultClassLookup(element=element_class)
|
|
||||||
parser.set_element_class_lookup(lookup)
|
|
||||||
return parser
|
|
||||||
|
|
||||||
def setDefaultParser(self, parser):
|
|
||||||
self._default_parser = parser
|
|
||||||
|
|
||||||
def getDefaultParser(self):
|
|
||||||
parser = getattr(self, "_default_parser", None)
|
|
||||||
if parser is None:
|
|
||||||
parser = self.createDefaultParser()
|
|
||||||
self.setDefaultParser(parser)
|
|
||||||
return parser
|
|
||||||
|
|
||||||
|
|
||||||
_parser_tls = GlobalParserTLS()
|
|
||||||
getDefaultParser = _parser_tls.getDefaultParser
|
|
||||||
|
|
||||||
|
|
||||||
def check_docinfo(elementtree, forbid_dtd=False, forbid_entities=True):
|
|
||||||
"""Check docinfo of an element tree for DTD and entity declarations
|
|
||||||
|
|
||||||
The check for entity declarations needs lxml 3 or newer. lxml 2.x does
|
|
||||||
not support dtd.iterentities().
|
|
||||||
"""
|
|
||||||
docinfo = elementtree.docinfo
|
|
||||||
if docinfo.doctype:
|
|
||||||
if forbid_dtd:
|
|
||||||
raise DTDForbidden(docinfo.doctype, docinfo.system_url, docinfo.public_id)
|
|
||||||
if forbid_entities and not LXML3:
|
|
||||||
# lxml < 3 has no iterentities()
|
|
||||||
raise NotSupportedError("Unable to check for entity declarations " "in lxml 2.x")
|
|
||||||
|
|
||||||
if forbid_entities:
|
|
||||||
for dtd in docinfo.internalDTD, docinfo.externalDTD:
|
|
||||||
if dtd is None:
|
|
||||||
continue
|
|
||||||
for entity in dtd.iterentities():
|
|
||||||
raise EntitiesForbidden(entity.name, entity.content, None, None, None, None)
|
|
||||||
|
|
||||||
|
|
||||||
def parse(source, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True):
|
|
||||||
if parser is None:
|
|
||||||
parser = getDefaultParser()
|
|
||||||
elementtree = _etree.parse(source, parser, base_url=base_url)
|
|
||||||
check_docinfo(elementtree, forbid_dtd, forbid_entities)
|
|
||||||
return elementtree
|
|
||||||
|
|
||||||
|
|
||||||
def fromstring(text, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True):
|
|
||||||
if parser is None:
|
|
||||||
parser = getDefaultParser()
|
|
||||||
rootelement = _etree.fromstring(text, parser, base_url=base_url)
|
|
||||||
elementtree = rootelement.getroottree()
|
|
||||||
check_docinfo(elementtree, forbid_dtd, forbid_entities)
|
|
||||||
return rootelement
|
|
||||||
|
|
||||||
|
|
||||||
XML = fromstring
|
|
||||||
|
|
||||||
|
|
||||||
def iterparse(*args, **kwargs):
|
|
||||||
raise NotSupportedError("defused lxml.etree.iterparse not available")
|
|
|
@ -1,63 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xml.dom.minidom
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
from xml.dom.minidom import _do_pulldom_parse
|
|
||||||
from . import expatbuilder as _expatbuilder
|
|
||||||
from . import pulldom as _pulldom
|
|
||||||
|
|
||||||
__origin__ = "xml.dom.minidom"
|
|
||||||
|
|
||||||
|
|
||||||
def parse(
|
|
||||||
file, parser=None, bufsize=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
|
|
||||||
):
|
|
||||||
"""Parse a file into a DOM by filename or file object."""
|
|
||||||
if parser is None and not bufsize:
|
|
||||||
return _expatbuilder.parse(
|
|
||||||
file,
|
|
||||||
forbid_dtd=forbid_dtd,
|
|
||||||
forbid_entities=forbid_entities,
|
|
||||||
forbid_external=forbid_external,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return _do_pulldom_parse(
|
|
||||||
_pulldom.parse,
|
|
||||||
(file,),
|
|
||||||
{
|
|
||||||
"parser": parser,
|
|
||||||
"bufsize": bufsize,
|
|
||||||
"forbid_dtd": forbid_dtd,
|
|
||||||
"forbid_entities": forbid_entities,
|
|
||||||
"forbid_external": forbid_external,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def parseString(
|
|
||||||
string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
|
|
||||||
):
|
|
||||||
"""Parse a file into a DOM from a string."""
|
|
||||||
if parser is None:
|
|
||||||
return _expatbuilder.parseString(
|
|
||||||
string,
|
|
||||||
forbid_dtd=forbid_dtd,
|
|
||||||
forbid_entities=forbid_entities,
|
|
||||||
forbid_external=forbid_external,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return _do_pulldom_parse(
|
|
||||||
_pulldom.parseString,
|
|
||||||
(string,),
|
|
||||||
{
|
|
||||||
"parser": parser,
|
|
||||||
"forbid_dtd": forbid_dtd,
|
|
||||||
"forbid_entities": forbid_entities,
|
|
||||||
"forbid_external": forbid_external,
|
|
||||||
},
|
|
||||||
)
|
|
|
@ -1,41 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xml.dom.pulldom
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
from xml.dom.pulldom import parse as _parse
|
|
||||||
from xml.dom.pulldom import parseString as _parseString
|
|
||||||
from .sax import make_parser
|
|
||||||
|
|
||||||
__origin__ = "xml.dom.pulldom"
|
|
||||||
|
|
||||||
|
|
||||||
def parse(
|
|
||||||
stream_or_string,
|
|
||||||
parser=None,
|
|
||||||
bufsize=None,
|
|
||||||
forbid_dtd=False,
|
|
||||||
forbid_entities=True,
|
|
||||||
forbid_external=True,
|
|
||||||
):
|
|
||||||
if parser is None:
|
|
||||||
parser = make_parser()
|
|
||||||
parser.forbid_dtd = forbid_dtd
|
|
||||||
parser.forbid_entities = forbid_entities
|
|
||||||
parser.forbid_external = forbid_external
|
|
||||||
return _parse(stream_or_string, parser, bufsize)
|
|
||||||
|
|
||||||
|
|
||||||
def parseString(
|
|
||||||
string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
|
|
||||||
):
|
|
||||||
if parser is None:
|
|
||||||
parser = make_parser()
|
|
||||||
parser.forbid_dtd = forbid_dtd
|
|
||||||
parser.forbid_entities = forbid_entities
|
|
||||||
parser.forbid_external = forbid_external
|
|
||||||
return _parseString(string, parser)
|
|
|
@ -1,60 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xml.sax
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
from xml.sax import InputSource as _InputSource
|
|
||||||
from xml.sax import ErrorHandler as _ErrorHandler
|
|
||||||
|
|
||||||
from . import expatreader
|
|
||||||
|
|
||||||
__origin__ = "xml.sax"
|
|
||||||
|
|
||||||
|
|
||||||
def parse(
|
|
||||||
source,
|
|
||||||
handler,
|
|
||||||
errorHandler=_ErrorHandler(),
|
|
||||||
forbid_dtd=False,
|
|
||||||
forbid_entities=True,
|
|
||||||
forbid_external=True,
|
|
||||||
):
|
|
||||||
parser = make_parser()
|
|
||||||
parser.setContentHandler(handler)
|
|
||||||
parser.setErrorHandler(errorHandler)
|
|
||||||
parser.forbid_dtd = forbid_dtd
|
|
||||||
parser.forbid_entities = forbid_entities
|
|
||||||
parser.forbid_external = forbid_external
|
|
||||||
parser.parse(source)
|
|
||||||
|
|
||||||
|
|
||||||
def parseString(
|
|
||||||
string,
|
|
||||||
handler,
|
|
||||||
errorHandler=_ErrorHandler(),
|
|
||||||
forbid_dtd=False,
|
|
||||||
forbid_entities=True,
|
|
||||||
forbid_external=True,
|
|
||||||
):
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
if errorHandler is None:
|
|
||||||
errorHandler = _ErrorHandler()
|
|
||||||
parser = make_parser()
|
|
||||||
parser.setContentHandler(handler)
|
|
||||||
parser.setErrorHandler(errorHandler)
|
|
||||||
parser.forbid_dtd = forbid_dtd
|
|
||||||
parser.forbid_entities = forbid_entities
|
|
||||||
parser.forbid_external = forbid_external
|
|
||||||
|
|
||||||
inpsrc = _InputSource()
|
|
||||||
inpsrc.setByteStream(BytesIO(string))
|
|
||||||
parser.parse(inpsrc)
|
|
||||||
|
|
||||||
|
|
||||||
def make_parser(parser_list=[]):
|
|
||||||
return expatreader.create_parser()
|
|
|
@ -1,144 +0,0 @@
|
||||||
# defusedxml
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
|
||||||
# Licensed to PSF under a Contributor Agreement.
|
|
||||||
# See https://www.python.org/psf/license for licensing details.
|
|
||||||
"""Defused xmlrpclib
|
|
||||||
|
|
||||||
Also defuses gzip bomb
|
|
||||||
"""
|
|
||||||
from __future__ import print_function, absolute_import
|
|
||||||
|
|
||||||
import io
|
|
||||||
|
|
||||||
from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden
|
|
||||||
|
|
||||||
__origin__ = "xmlrpc.client"
|
|
||||||
from xmlrpc.client import ExpatParser
|
|
||||||
from xmlrpc import client as xmlrpc_client
|
|
||||||
from xmlrpc import server as xmlrpc_server
|
|
||||||
from xmlrpc.client import gzip_decode as _orig_gzip_decode
|
|
||||||
from xmlrpc.client import GzipDecodedResponse as _OrigGzipDecodedResponse
|
|
||||||
|
|
||||||
try:
|
|
||||||
import gzip
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
gzip = None
|
|
||||||
|
|
||||||
|
|
||||||
# Limit maximum request size to prevent resource exhaustion DoS
|
|
||||||
# Also used to limit maximum amount of gzip decoded data in order to prevent
|
|
||||||
# decompression bombs
|
|
||||||
# A value of -1 or smaller disables the limit
|
|
||||||
MAX_DATA = 30 * 1024 * 1024 # 30 MB
|
|
||||||
|
|
||||||
|
|
||||||
def defused_gzip_decode(data, limit=None):
|
|
||||||
"""gzip encoded data -> unencoded data
|
|
||||||
|
|
||||||
Decode data using the gzip content encoding as described in RFC 1952
|
|
||||||
"""
|
|
||||||
if not gzip: # pragma: no cover
|
|
||||||
raise NotImplementedError
|
|
||||||
if limit is None:
|
|
||||||
limit = MAX_DATA
|
|
||||||
f = io.BytesIO(data)
|
|
||||||
gzf = gzip.GzipFile(mode="rb", fileobj=f)
|
|
||||||
try:
|
|
||||||
if limit < 0: # no limit
|
|
||||||
decoded = gzf.read()
|
|
||||||
else:
|
|
||||||
decoded = gzf.read(limit + 1)
|
|
||||||
except IOError: # pragma: no cover
|
|
||||||
raise ValueError("invalid data")
|
|
||||||
f.close()
|
|
||||||
gzf.close()
|
|
||||||
if limit >= 0 and len(decoded) > limit:
|
|
||||||
raise ValueError("max gzipped payload length exceeded")
|
|
||||||
return decoded
|
|
||||||
|
|
||||||
|
|
||||||
class DefusedGzipDecodedResponse(gzip.GzipFile if gzip else object):
|
|
||||||
"""a file-like object to decode a response encoded with the gzip
|
|
||||||
method, as described in RFC 1952.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, response, limit=None):
|
|
||||||
# response doesn't support tell() and read(), required by
|
|
||||||
# GzipFile
|
|
||||||
if not gzip: # pragma: no cover
|
|
||||||
raise NotImplementedError
|
|
||||||
self.limit = limit = limit if limit is not None else MAX_DATA
|
|
||||||
if limit < 0: # no limit
|
|
||||||
data = response.read()
|
|
||||||
self.readlength = None
|
|
||||||
else:
|
|
||||||
data = response.read(limit + 1)
|
|
||||||
self.readlength = 0
|
|
||||||
if limit >= 0 and len(data) > limit:
|
|
||||||
raise ValueError("max payload length exceeded")
|
|
||||||
self.stringio = io.BytesIO(data)
|
|
||||||
super().__init__(mode="rb", fileobj=self.stringio)
|
|
||||||
|
|
||||||
def read(self, n):
|
|
||||||
if self.limit >= 0:
|
|
||||||
left = self.limit - self.readlength
|
|
||||||
n = min(n, left + 1)
|
|
||||||
data = gzip.GzipFile.read(self, n)
|
|
||||||
self.readlength += len(data)
|
|
||||||
if self.readlength > self.limit:
|
|
||||||
raise ValueError("max payload length exceeded")
|
|
||||||
return data
|
|
||||||
else:
|
|
||||||
return super().read(n)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
super().close()
|
|
||||||
self.stringio.close()
|
|
||||||
|
|
||||||
|
|
||||||
class DefusedExpatParser(ExpatParser):
|
|
||||||
def __init__(self, target, forbid_dtd=False, forbid_entities=True, forbid_external=True):
|
|
||||||
super().__init__(target)
|
|
||||||
self.forbid_dtd = forbid_dtd
|
|
||||||
self.forbid_entities = forbid_entities
|
|
||||||
self.forbid_external = forbid_external
|
|
||||||
parser = self._parser
|
|
||||||
if self.forbid_dtd:
|
|
||||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
|
||||||
if self.forbid_entities:
|
|
||||||
parser.EntityDeclHandler = self.defused_entity_decl
|
|
||||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
|
||||||
if self.forbid_external:
|
|
||||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
|
||||||
|
|
||||||
def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
|
|
||||||
raise DTDForbidden(name, sysid, pubid)
|
|
||||||
|
|
||||||
def defused_entity_decl(
|
|
||||||
self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
|
|
||||||
):
|
|
||||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
|
||||||
|
|
||||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
|
|
||||||
# expat 1.2
|
|
||||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover
|
|
||||||
|
|
||||||
def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
|
|
||||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
|
||||||
|
|
||||||
|
|
||||||
def monkey_patch():
|
|
||||||
xmlrpc_client.FastParser = DefusedExpatParser
|
|
||||||
xmlrpc_client.GzipDecodedResponse = DefusedGzipDecodedResponse
|
|
||||||
xmlrpc_client.gzip_decode = defused_gzip_decode
|
|
||||||
if xmlrpc_server:
|
|
||||||
xmlrpc_server.gzip_decode = defused_gzip_decode
|
|
||||||
|
|
||||||
|
|
||||||
def unmonkey_patch():
|
|
||||||
xmlrpc_client.FastParser = None
|
|
||||||
xmlrpc_client.GzipDecodedResponse = _OrigGzipDecodedResponse
|
|
||||||
xmlrpc_client.gzip_decode = _orig_gzip_decode
|
|
||||||
if xmlrpc_server:
|
|
||||||
xmlrpc_server.gzip_decode = _orig_gzip_decode
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import requests
|
import requests
|
||||||
import requests.exceptions as exceptions
|
import requests.exceptions as exceptions
|
||||||
|
@ -17,7 +18,7 @@ LOG = getLogger('PLEX.download')
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
|
||||||
|
|
||||||
class DownloadUtils(object):
|
class DownloadUtils():
|
||||||
"""
|
"""
|
||||||
Manages any up/downloads with PKC. Careful to initiate correctly
|
Manages any up/downloads with PKC. Careful to initiate correctly
|
||||||
Use startSession() to initiate.
|
Use startSession() to initiate.
|
||||||
|
@ -263,7 +264,7 @@ class DownloadUtils(object):
|
||||||
# 201: Created
|
# 201: Created
|
||||||
try:
|
try:
|
||||||
# xml response
|
# xml response
|
||||||
r = utils.etree.fromstring(r.content)
|
r = utils.defused_etree.fromstring(r.content)
|
||||||
return r
|
return r
|
||||||
except Exception:
|
except Exception:
|
||||||
r.encoding = 'utf-8'
|
r.encoding = 'utf-8'
|
||||||
|
|
|
@ -4,10 +4,10 @@
|
||||||
Loads of different functions called in SEPARATE Python instances through
|
Loads of different functions called in SEPARATE Python instances through
|
||||||
e.g. plugin://... calls. Hence be careful to only rely on window variables.
|
e.g. plugin://... calls. Hence be careful to only rely on window variables.
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import sys
|
import sys
|
||||||
import copy
|
import copy
|
||||||
import xml.etree.ElementTree as etree
|
|
||||||
|
|
||||||
import xbmc
|
import xbmc
|
||||||
import xbmcplugin
|
import xbmcplugin
|
||||||
|
@ -88,10 +88,12 @@ def directory_item(label, path, folder=True):
|
||||||
Adds a xbmcplugin.addDirectoryItem() directory itemlistitem
|
Adds a xbmcplugin.addDirectoryItem() directory itemlistitem
|
||||||
"""
|
"""
|
||||||
listitem = ListItem(label, path=path)
|
listitem = ListItem(label, path=path)
|
||||||
|
listitem.setThumbnailImage(
|
||||||
|
"special://home/addons/plugin.video.plexkodiconnect/icon.png")
|
||||||
listitem.setArt(
|
listitem.setArt(
|
||||||
{'landscape':'special://home/addons/plugin.video.plexkodiconnect/fanart.jpg',
|
{"fanart": "special://home/addons/plugin.video.plexkodiconnect/fanart.jpg"})
|
||||||
'fanart': 'special://home/addons/plugin.video.plexkodiconnect/fanart.jpg',
|
listitem.setArt(
|
||||||
'thumb': 'special://home/addons/plugin.video.plexkodiconnect/icon.png'})
|
{"landscape":"special://home/addons/plugin.video.plexkodiconnect/fanart.jpg"})
|
||||||
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
||||||
url=path,
|
url=path,
|
||||||
listitem=listitem,
|
listitem=listitem,
|
||||||
|
@ -247,10 +249,12 @@ def show_listing(xml, plex_type=None, section_id=None, synched=True, key=None):
|
||||||
widgets.KEY = key
|
widgets.KEY = key
|
||||||
# Process all items to show
|
# Process all items to show
|
||||||
all_items = mass_api(xml)
|
all_items = mass_api(xml)
|
||||||
all_items = [widgets.generate_item(api) for api in all_items]
|
all_items = utils.process_method_on_list(widgets.generate_item, all_items)
|
||||||
all_items = [widgets.prepare_listitem(item) for item in all_items]
|
all_items = utils.process_method_on_list(widgets.prepare_listitem,
|
||||||
|
all_items)
|
||||||
# fill that listing...
|
# fill that listing...
|
||||||
all_items = [widgets.create_listitem(item) for item in all_items]
|
all_items = utils.process_method_on_list(widgets.create_listitem,
|
||||||
|
all_items)
|
||||||
xbmcplugin.addDirectoryItems(int(sys.argv[1]), all_items, len(all_items))
|
xbmcplugin.addDirectoryItems(int(sys.argv[1]), all_items, len(all_items))
|
||||||
# end directory listing
|
# end directory listing
|
||||||
xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_UNSORTED)
|
xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_UNSORTED)
|
||||||
|
@ -284,7 +288,7 @@ def get_video_files(plex_id, params):
|
||||||
app.init(entrypoint=True)
|
app.init(entrypoint=True)
|
||||||
item = PF.GetPlexMetadata(plex_id)
|
item = PF.GetPlexMetadata(plex_id)
|
||||||
try:
|
try:
|
||||||
path = item[0][0][0].attrib['file']
|
path = utils.try_decode(item[0][0][0].attrib['file'])
|
||||||
except (TypeError, IndexError, AttributeError, KeyError):
|
except (TypeError, IndexError, AttributeError, KeyError):
|
||||||
LOG.error('Could not get file path for item %s', plex_id)
|
LOG.error('Could not get file path for item %s', plex_id)
|
||||||
return xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
return xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
||||||
|
@ -300,14 +304,15 @@ def get_video_files(plex_id, params):
|
||||||
if path_ops.exists(path):
|
if path_ops.exists(path):
|
||||||
for root, dirs, files in path_ops.walk(path):
|
for root, dirs, files in path_ops.walk(path):
|
||||||
for directory in dirs:
|
for directory in dirs:
|
||||||
item_path = path_ops.path.join(root, directory)
|
item_path = utils.try_encode(path_ops.path.join(root,
|
||||||
|
directory))
|
||||||
listitem = ListItem(item_path, path=item_path)
|
listitem = ListItem(item_path, path=item_path)
|
||||||
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
||||||
url=item_path,
|
url=item_path,
|
||||||
listitem=listitem,
|
listitem=listitem,
|
||||||
isFolder=True)
|
isFolder=True)
|
||||||
for file in files:
|
for file in files:
|
||||||
item_path = path_ops.path.join(root, file)
|
item_path = utils.try_encode(path_ops.path.join(root, file))
|
||||||
listitem = ListItem(item_path, path=item_path)
|
listitem = ListItem(item_path, path=item_path)
|
||||||
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
||||||
url=file,
|
url=file,
|
||||||
|
@ -352,20 +357,23 @@ def extra_fanart(plex_id, plex_path):
|
||||||
backdrops = api.artwork()['Backdrop']
|
backdrops = api.artwork()['Backdrop']
|
||||||
for count, backdrop in enumerate(backdrops):
|
for count, backdrop in enumerate(backdrops):
|
||||||
# Same ordering as in artwork
|
# Same ordering as in artwork
|
||||||
art_file = path_ops.path.join(fanart_dir, "fanart%.3d.jpg" % count)
|
art_file = utils.try_encode(path_ops.path.join(
|
||||||
|
fanart_dir, "fanart%.3d.jpg" % count))
|
||||||
listitem = ListItem("%.3d" % count, path=art_file)
|
listitem = ListItem("%.3d" % count, path=art_file)
|
||||||
xbmcplugin.addDirectoryItem(
|
xbmcplugin.addDirectoryItem(
|
||||||
handle=int(sys.argv[1]),
|
handle=int(sys.argv[1]),
|
||||||
url=art_file,
|
url=art_file,
|
||||||
listitem=listitem)
|
listitem=listitem)
|
||||||
path_ops.copyfile(backdrop, art_file)
|
path_ops.copyfile(backdrop, utils.try_decode(art_file))
|
||||||
else:
|
else:
|
||||||
LOG.info("Found cached backdrop.")
|
LOG.info("Found cached backdrop.")
|
||||||
# Use existing cached images
|
# Use existing cached images
|
||||||
fanart_dir = fanart_dir
|
fanart_dir = utils.try_decode(fanart_dir)
|
||||||
for root, _, files in path_ops.walk(fanart_dir):
|
for root, _, files in path_ops.walk(fanart_dir):
|
||||||
|
root = utils.decode_path(root)
|
||||||
for file in files:
|
for file in files:
|
||||||
art_file = path_ops.path.join(root, file)
|
file = utils.decode_path(file)
|
||||||
|
art_file = utils.try_encode(path_ops.path.join(root, file))
|
||||||
listitem = ListItem(file, path=art_file)
|
listitem = ListItem(file, path=art_file)
|
||||||
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),
|
||||||
url=art_file,
|
url=art_file,
|
||||||
|
@ -497,7 +505,7 @@ def browse_plex(key=None, plex_type=None, section_id=None, synched=True,
|
||||||
if prompt is None:
|
if prompt is None:
|
||||||
# User cancelled
|
# User cancelled
|
||||||
return
|
return
|
||||||
prompt = prompt.strip()
|
prompt = prompt.strip().decode('utf-8')
|
||||||
args['query'] = prompt
|
args['query'] = prompt
|
||||||
xml = DU().downloadUrl(utils.extend_url('{server}%s' % key, args))
|
xml = DU().downloadUrl(utils.extend_url('{server}%s' % key, args))
|
||||||
try:
|
try:
|
||||||
|
@ -508,7 +516,7 @@ def browse_plex(key=None, plex_type=None, section_id=None, synched=True,
|
||||||
return
|
return
|
||||||
if xml[0].tag == 'Hub':
|
if xml[0].tag == 'Hub':
|
||||||
# E.g. when hitting the endpoint '/hubs/search'
|
# E.g. when hitting the endpoint '/hubs/search'
|
||||||
answ = etree.Element(xml.tag, attrib=xml.attrib)
|
answ = utils.etree.Element(xml.tag, attrib=xml.attrib)
|
||||||
for hub in xml:
|
for hub in xml:
|
||||||
if not utils.cast(int, hub.get('size')):
|
if not utils.cast(int, hub.get('size')):
|
||||||
# Empty category
|
# Empty category
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
|
||||||
|
|
||||||
class PlaylistError(Exception):
|
class PlaylistError(Exception):
|
||||||
|
|
|
@ -1,10 +1,12 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from xbmc import executebuiltin
|
from xbmc import executebuiltin
|
||||||
|
|
||||||
from . import utils
|
from . import utils
|
||||||
|
from .utils import etree
|
||||||
from . import path_ops
|
from . import path_ops
|
||||||
from . import migration
|
from . import migration
|
||||||
from .downloadutils import DownloadUtils as DU, exceptions
|
from .downloadutils import DownloadUtils as DU, exceptions
|
||||||
|
@ -13,7 +15,6 @@ from . import plex_tv
|
||||||
from . import json_rpc as js
|
from . import json_rpc as js
|
||||||
from . import app
|
from . import app
|
||||||
from . import variables as v
|
from . import variables as v
|
||||||
from . import sources
|
|
||||||
|
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
|
||||||
|
@ -211,7 +212,8 @@ class InitialSetup(object):
|
||||||
not set before
|
not set before
|
||||||
"""
|
"""
|
||||||
answer = True
|
answer = True
|
||||||
chk = PF.check_connection(app.CONN.server, verifySSL=True)
|
chk = PF.check_connection(app.CONN.server,
|
||||||
|
verifySSL=True if v.KODIVERSION >= 18 else False)
|
||||||
if chk is False:
|
if chk is False:
|
||||||
LOG.warn('Could not reach PMS %s', app.CONN.server)
|
LOG.warn('Could not reach PMS %s', app.CONN.server)
|
||||||
answer = False
|
answer = False
|
||||||
|
@ -239,13 +241,18 @@ class InitialSetup(object):
|
||||||
"""
|
"""
|
||||||
Checks for server's connectivity. Returns check_connection result
|
Checks for server's connectivity. Returns check_connection result
|
||||||
"""
|
"""
|
||||||
|
if server['local']:
|
||||||
|
# Deactive SSL verification if the server is local for Kodi 17
|
||||||
|
verifySSL = True if v.KODIVERSION >= 18 else False
|
||||||
|
else:
|
||||||
|
verifySSL = True
|
||||||
if not server['token']:
|
if not server['token']:
|
||||||
# Plex GDM: we only get the token from plex.tv after
|
# Plex GDM: we only get the token from plex.tv after
|
||||||
# Sign-in to plex.tv
|
# Sign-in to plex.tv
|
||||||
server['token'] = utils.settings('plexToken') or None
|
server['token'] = utils.settings('plexToken') or None
|
||||||
return PF.check_connection(server['baseURL'],
|
return PF.check_connection(server['baseURL'],
|
||||||
token=server['token'],
|
token=server['token'],
|
||||||
verifySSL=True)
|
verifySSL=verifySSL)
|
||||||
|
|
||||||
def pick_pms(self, showDialog=False, inform_of_search=False):
|
def pick_pms(self, showDialog=False, inform_of_search=False):
|
||||||
"""
|
"""
|
||||||
|
@ -403,7 +410,7 @@ class InitialSetup(object):
|
||||||
utils.messageDialog(
|
utils.messageDialog(
|
||||||
utils.lang(29999),
|
utils.lang(29999),
|
||||||
'%s %s\n%s' % (utils.lang(39013),
|
'%s %s\n%s' % (utils.lang(39013),
|
||||||
server['name'],
|
server['name'].decode('utf-8'),
|
||||||
utils.lang(39014)))
|
utils.lang(39014)))
|
||||||
if self.plex_tv_sign_in() is False:
|
if self.plex_tv_sign_in() is False:
|
||||||
# Exit while loop if user cancels
|
# Exit while loop if user cancels
|
||||||
|
@ -454,6 +461,31 @@ class InitialSetup(object):
|
||||||
server['machineIdentifier'], server['ip'], server['port'],
|
server['machineIdentifier'], server['ip'], server['port'],
|
||||||
server['scheme'])
|
server['scheme'])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _add_sources(root, extension):
|
||||||
|
changed = False
|
||||||
|
count = 2
|
||||||
|
for source in root.findall('.//path'):
|
||||||
|
if source.text == extension:
|
||||||
|
count -= 1
|
||||||
|
if count == 0:
|
||||||
|
# sources already set
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# Missing smb:// occurences, re-add.
|
||||||
|
changed = True
|
||||||
|
for _ in range(0, count):
|
||||||
|
source = etree.SubElement(root, 'source')
|
||||||
|
etree.SubElement(
|
||||||
|
source,
|
||||||
|
'name').text = "PlexKodiConnect Masterlock Hack"
|
||||||
|
etree.SubElement(
|
||||||
|
source,
|
||||||
|
'path',
|
||||||
|
{'pathversion': "1"}).text = extension
|
||||||
|
etree.SubElement(source, 'allowsharing').text = "true"
|
||||||
|
return changed
|
||||||
|
|
||||||
def setup(self):
|
def setup(self):
|
||||||
"""
|
"""
|
||||||
Initial setup. Run once upon startup.
|
Initial setup. Run once upon startup.
|
||||||
|
@ -495,7 +527,20 @@ class InitialSetup(object):
|
||||||
LOG.info('Current Kodi video memory cache in bytes: %s', cache)
|
LOG.info('Current Kodi video memory cache in bytes: %s', cache)
|
||||||
utils.settings('kodi_video_cache', value=cache)
|
utils.settings('kodi_video_cache', value=cache)
|
||||||
|
|
||||||
reboot = sources.pkc_sources_hack() or reboot
|
# Hack to make PKC Kodi master lock compatible
|
||||||
|
try:
|
||||||
|
with utils.XmlKodiSetting('sources.xml',
|
||||||
|
force_create=True,
|
||||||
|
top_element='sources') as xml:
|
||||||
|
changed = False
|
||||||
|
for extension in ('smb://', 'nfs://'):
|
||||||
|
root = xml.set_setting(['video'])
|
||||||
|
changed = self._add_sources(root, extension) or changed
|
||||||
|
if changed:
|
||||||
|
xml.write_xml = True
|
||||||
|
reboot = True
|
||||||
|
except utils.ParseError:
|
||||||
|
pass
|
||||||
|
|
||||||
# Do we need to migrate stuff?
|
# Do we need to migrate stuff?
|
||||||
migration.check_migration()
|
migration.check_migration()
|
||||||
|
@ -504,7 +549,9 @@ class InitialSetup(object):
|
||||||
|
|
||||||
# Display a warning if Kodi puts ALL movies into the queue, basically
|
# Display a warning if Kodi puts ALL movies into the queue, basically
|
||||||
# breaking playback reporting for PKC
|
# breaking playback reporting for PKC
|
||||||
|
warn = False
|
||||||
settings = js.settings_getsettingvalue('videoplayer.autoplaynextitem')
|
settings = js.settings_getsettingvalue('videoplayer.autoplaynextitem')
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
# Answer for videoplayer.autoplaynextitem:
|
# Answer for videoplayer.autoplaynextitem:
|
||||||
# [{u'label': u'Music videos', u'value': 0},
|
# [{u'label': u'Music videos', u'value': 0},
|
||||||
# {u'label': u'TV shows', u'value': 1},
|
# {u'label': u'TV shows', u'value': 1},
|
||||||
|
@ -512,6 +559,12 @@ class InitialSetup(object):
|
||||||
# {u'label': u'Movies', u'value': 3},
|
# {u'label': u'Movies', u'value': 3},
|
||||||
# {u'label': u'Uncategorized', u'value': 4}]
|
# {u'label': u'Uncategorized', u'value': 4}]
|
||||||
if 1 in settings or 2 in settings or 3 in settings:
|
if 1 in settings or 2 in settings or 3 in settings:
|
||||||
|
warn = True
|
||||||
|
else:
|
||||||
|
# Kodi Krypton: answer is boolean
|
||||||
|
if settings:
|
||||||
|
warn = True
|
||||||
|
if warn:
|
||||||
LOG.warn('Kodi setting videoplayer.autoplaynextitem is: %s',
|
LOG.warn('Kodi setting videoplayer.autoplaynextitem is: %s',
|
||||||
settings)
|
settings)
|
||||||
if utils.settings('warned_setting_videoplayer.autoplaynextitem') == 'false':
|
if utils.settings('warned_setting_videoplayer.autoplaynextitem') == 'false':
|
||||||
|
@ -521,6 +574,7 @@ class InitialSetup(object):
|
||||||
# Warning: Kodi setting "Play next video automatically" is
|
# Warning: Kodi setting "Play next video automatically" is
|
||||||
# enabled. This could break PKC. Deactivate?
|
# enabled. This could break PKC. Deactivate?
|
||||||
if utils.yesno_dialog(utils.lang(29999), utils.lang(30003)):
|
if utils.yesno_dialog(utils.lang(29999), utils.lang(30003)):
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
for i in (1, 2, 3):
|
for i in (1, 2, 3):
|
||||||
try:
|
try:
|
||||||
settings.remove(i)
|
settings.remove(i)
|
||||||
|
@ -528,6 +582,9 @@ class InitialSetup(object):
|
||||||
pass
|
pass
|
||||||
js.settings_setsettingvalue('videoplayer.autoplaynextitem',
|
js.settings_setsettingvalue('videoplayer.autoplaynextitem',
|
||||||
settings)
|
settings)
|
||||||
|
else:
|
||||||
|
js.settings_setsettingvalue('videoplayer.autoplaynextitem',
|
||||||
|
False)
|
||||||
# Set any video library updates to happen in the background in order to
|
# Set any video library updates to happen in the background in order to
|
||||||
# hide "Compressing database"
|
# hide "Compressing database"
|
||||||
js.settings_setsettingvalue('videolibrary.backgroundupdate', True)
|
js.settings_setsettingvalue('videolibrary.backgroundupdate', True)
|
||||||
|
@ -575,11 +632,7 @@ class InitialSetup(object):
|
||||||
app.ACCOUNT.load()
|
app.ACCOUNT.load()
|
||||||
app.SYNC.load()
|
app.SYNC.load()
|
||||||
return
|
return
|
||||||
|
|
||||||
LOG.info('Showing install questions')
|
LOG.info('Showing install questions')
|
||||||
if not utils.default_kodi_skin_warning_message():
|
|
||||||
LOG.info('Aborting initial setup due to skin')
|
|
||||||
return
|
|
||||||
# Additional settings where the user needs to choose
|
# Additional settings where the user needs to choose
|
||||||
# Direct paths (\\NAS\mymovie.mkv) or addon (http)?
|
# Direct paths (\\NAS\mymovie.mkv) or addon (http)?
|
||||||
goto_settings = False
|
goto_settings = False
|
||||||
|
@ -634,10 +687,10 @@ class InitialSetup(object):
|
||||||
|
|
||||||
# If you use several Plex libraries of one kind, e.g. "Kids Movies" and
|
# If you use several Plex libraries of one kind, e.g. "Kids Movies" and
|
||||||
# "Parents Movies", be sure to check https://goo.gl/JFtQV9
|
# "Parents Movies", be sure to check https://goo.gl/JFtQV9
|
||||||
# dialog.ok(heading=utils.lang(29999), message=utils.lang(39076))
|
# dialog.ok(heading=utils.lang(29999), line1=utils.lang(39076))
|
||||||
|
|
||||||
# Need to tell about our image source for collections: themoviedb.org
|
# Need to tell about our image source for collections: themoviedb.org
|
||||||
# dialog.ok(heading=utils.lang(29999), message=utils.lang(39717))
|
# dialog.ok(heading=utils.lang(29999), line1=utils.lang(39717))
|
||||||
# Make sure that we only ask these questions upon first installation
|
# Make sure that we only ask these questions upon first installation
|
||||||
utils.settings('InstallQuestionsAnswered', value='true')
|
utils.settings('InstallQuestionsAnswered', value='true')
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from .movies import Movie
|
from .movies import Movie
|
||||||
from .tvshows import Show, Season, Episode
|
from .tvshows import Show, Season, Episode
|
||||||
from .music import Artist, Album, Song
|
from .music import Artist, Album, Song
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
from ntpath import dirname
|
from ntpath import dirname
|
||||||
|
|
||||||
|
@ -161,7 +162,7 @@ class ItemBase(object):
|
||||||
Returns a dict of the Kodi ids: {<provider>: <kodi_unique_id>}
|
Returns a dict of the Kodi ids: {<provider>: <kodi_unique_id>}
|
||||||
"""
|
"""
|
||||||
kodi_unique_ids = api.guids.copy()
|
kodi_unique_ids = api.guids.copy()
|
||||||
for provider, provider_id in api.guids.items():
|
for provider, provider_id in api.guids.iteritems():
|
||||||
kodi_unique_ids[provider] = self.kodidb.add_uniqueid(
|
kodi_unique_ids[provider] = self.kodidb.add_uniqueid(
|
||||||
kodi_id,
|
kodi_id,
|
||||||
api.kodi_type,
|
api.kodi_type,
|
||||||
|
|
|
@ -1,26 +1,14 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import re
|
|
||||||
import os
|
|
||||||
import string
|
|
||||||
|
|
||||||
from .common import ItemBase
|
from .common import ItemBase
|
||||||
from ..plex_api import API
|
from ..plex_api import API
|
||||||
from .. import app, variables as v, plex_functions as PF
|
from .. import app, variables as v, plex_functions as PF
|
||||||
from ..path_ops import append_os_sep
|
|
||||||
|
|
||||||
LOG = getLogger('PLEX.movies')
|
LOG = getLogger('PLEX.movies')
|
||||||
|
|
||||||
# Tolerance in years if comparing videos as equal
|
|
||||||
VIDEOYEAR_TOLERANCE = 1
|
|
||||||
PUNCTUATION_TRANSLATION = {ord(char): None for char in string.punctuation}
|
|
||||||
# Punctuation removed in original strings!!
|
|
||||||
# Matches '2010 The Year We Make Contact 1984'
|
|
||||||
# from '2010 The Year We Make Contact 1984 720p webrip'
|
|
||||||
REGEX_MOVIENAME_AND_YEAR = re.compile(
|
|
||||||
r'''(.+)((?:19|20)\d{2}).*(?!((19|20)\d{2}))''')
|
|
||||||
|
|
||||||
|
|
||||||
class Movie(ItemBase):
|
class Movie(ItemBase):
|
||||||
"""
|
"""
|
||||||
|
@ -50,39 +38,11 @@ class Movie(ItemBase):
|
||||||
|
|
||||||
fullpath, path, filename = api.fullpath()
|
fullpath, path, filename = api.fullpath()
|
||||||
if app.SYNC.direct_paths and not fullpath.startswith('http'):
|
if app.SYNC.direct_paths and not fullpath.startswith('http'):
|
||||||
if api.subtype:
|
kodi_pathid = self.kodidb.add_path(path,
|
||||||
# E.g. homevideos, which have "subtype" flag set
|
|
||||||
# Homevideo directories need to be flat by Plex' instructions
|
|
||||||
library_path, video_path = path, path
|
|
||||||
else:
|
|
||||||
# Normal movie libraries
|
|
||||||
library_path, video_path, filename = split_movie_path(fullpath)
|
|
||||||
if library_path == video_path:
|
|
||||||
# "Flat" folder structure where e.g. movies lie all in 1 dir
|
|
||||||
# E.g.
|
|
||||||
# 'C:\\Movies\\Pulp Fiction (1994).mkv'
|
|
||||||
kodi_pathid = self.kodidb.add_path(library_path,
|
|
||||||
content='movies',
|
content='movies',
|
||||||
scraper='metadata.local')
|
scraper='metadata.local')
|
||||||
path = library_path
|
|
||||||
kodi_parent_pathid = kodi_pathid
|
|
||||||
else:
|
|
||||||
# Plex library contains folders named identical to the
|
|
||||||
# video file, e.g.
|
|
||||||
# 'C:\\Movies\\Pulp Fiction (1994)\\Pulp Fiction (1994).mkv'
|
|
||||||
# Add the "parent" path for the Plex library
|
|
||||||
kodi_parent_pathid = self.kodidb.add_path(
|
|
||||||
library_path,
|
|
||||||
content='movies',
|
|
||||||
scraper='metadata.local')
|
|
||||||
# Add this movie's path
|
|
||||||
kodi_pathid = self.kodidb.add_path(
|
|
||||||
video_path,
|
|
||||||
id_parent_path=kodi_parent_pathid)
|
|
||||||
path = video_path
|
|
||||||
else:
|
else:
|
||||||
kodi_pathid = self.kodidb.get_path(path)
|
kodi_pathid = self.kodidb.get_path(path)
|
||||||
kodi_parent_pathid = kodi_pathid
|
|
||||||
|
|
||||||
if update_item:
|
if update_item:
|
||||||
LOG.info('UPDATE movie plex_id: %s - %s', plex_id, api.title())
|
LOG.info('UPDATE movie plex_id: %s - %s', plex_id, api.title())
|
||||||
|
@ -146,8 +106,8 @@ class Movie(ItemBase):
|
||||||
api.list_to_string(api.studios()),
|
api.list_to_string(api.studios()),
|
||||||
api.trailer(),
|
api.trailer(),
|
||||||
api.list_to_string(api.countries()),
|
api.list_to_string(api.countries()),
|
||||||
path,
|
fullpath,
|
||||||
kodi_parent_pathid,
|
kodi_pathid,
|
||||||
api.premiere_date(),
|
api.premiere_date(),
|
||||||
api.userrating())
|
api.userrating())
|
||||||
|
|
||||||
|
@ -173,7 +133,6 @@ class Movie(ItemBase):
|
||||||
kodi_id=kodi_id,
|
kodi_id=kodi_id,
|
||||||
kodi_fileid=file_id,
|
kodi_fileid=file_id,
|
||||||
kodi_pathid=kodi_pathid,
|
kodi_pathid=kodi_pathid,
|
||||||
trailer_synced=bool(api.trailer()),
|
|
||||||
last_sync=self.last_sync)
|
last_sync=self.last_sync)
|
||||||
|
|
||||||
def remove(self, plex_id, plex_type=None):
|
def remove(self, plex_id, plex_type=None):
|
||||||
|
@ -284,73 +243,3 @@ class Movie(ItemBase):
|
||||||
return unique_ids.get('imdb',
|
return unique_ids.get('imdb',
|
||||||
unique_ids.get('tmdb',
|
unique_ids.get('tmdb',
|
||||||
unique_ids.get('tvdb')))
|
unique_ids.get('tvdb')))
|
||||||
|
|
||||||
|
|
||||||
def split_movie_path(path):
|
|
||||||
"""
|
|
||||||
Implements Plex' video naming convention for movies:
|
|
||||||
https://support.plex.tv/articles/naming-and-organizing-your-movie-media-files/
|
|
||||||
|
|
||||||
Splits a video's path into its librarypath, potential video folder, and
|
|
||||||
filename.
|
|
||||||
E.g. path = 'C:\\Movies\\Pulp Fiction (1994)\\Pulp Fiction (1994).mkv'
|
|
||||||
returns the tuple
|
|
||||||
('C:\\Movies\\',
|
|
||||||
'C:\\Movies\\Pulp Fiction (1994)\\',
|
|
||||||
'Pulp Fiction (1994).mkv')
|
|
||||||
|
|
||||||
E.g. path = 'C:\\Movies\\Pulp Fiction (1994).mkv'
|
|
||||||
returns the tuple
|
|
||||||
('C:\\Movies\\',
|
|
||||||
'C:\\Movies\\',
|
|
||||||
'Pulp Fiction (1994).mkv')
|
|
||||||
"""
|
|
||||||
basename, filename = os.path.split(path)
|
|
||||||
library_path, videofolder = os.path.split(basename)
|
|
||||||
|
|
||||||
clean_filename = _clean_name(os.path.splitext(filename)[0])
|
|
||||||
clean_videofolder = _clean_name(videofolder)
|
|
||||||
|
|
||||||
try:
|
|
||||||
parsed_filename = _parse_videoname_and_year(clean_filename)
|
|
||||||
except (TypeError, IndexError):
|
|
||||||
LOG.warn('Could not parse video path, be sure to follow the Plex '
|
|
||||||
'naming guidelines!! We failed to parse this path: %s', path)
|
|
||||||
# Be on the safe side and assume that the movie folder structure is
|
|
||||||
# flat
|
|
||||||
return append_os_sep(basename), append_os_sep(basename), filename
|
|
||||||
try:
|
|
||||||
parsed_videofolder = _parse_videoname_and_year(clean_videofolder)
|
|
||||||
except (TypeError, IndexError):
|
|
||||||
# e.g. no year to parse => flat structure
|
|
||||||
return append_os_sep(basename), append_os_sep(basename), filename
|
|
||||||
if _parsed_names_alike(parsed_filename, parsed_videofolder):
|
|
||||||
# e.g.
|
|
||||||
# filename = The Master.(2012).720p.Blu-ray.axed.mkv
|
|
||||||
# videofolder = The Master 2012
|
|
||||||
# or
|
|
||||||
# filename = National Lampoon's Christmas Vacation (1989)
|
|
||||||
# [x264-Bluray-1080p DTS-2.0]
|
|
||||||
# videofolder = Christmas Vacation 1989
|
|
||||||
return append_os_sep(library_path), append_os_sep(basename), filename
|
|
||||||
else:
|
|
||||||
# Flat movie file-stuctrue, all movies in one big directory
|
|
||||||
return append_os_sep(basename), append_os_sep(basename), filename
|
|
||||||
|
|
||||||
|
|
||||||
def _parsed_names_alike(name1, name2):
|
|
||||||
return (abs(name2[1] - name1[1]) <= VIDEOYEAR_TOLERANCE and
|
|
||||||
(name1[0] in name2[0] or name2[0] in name1[0]))
|
|
||||||
|
|
||||||
|
|
||||||
def _clean_name(name):
|
|
||||||
"""
|
|
||||||
Returns name with all whitespaces (regex "\\s") and punctuation
|
|
||||||
(string.punctuation) characters removed; all characters in lowercase
|
|
||||||
"""
|
|
||||||
return re.sub('\\s', '', name).translate(PUNCTUATION_TRANSLATION).lower()
|
|
||||||
|
|
||||||
|
|
||||||
def _parse_videoname_and_year(name):
|
|
||||||
parsed = REGEX_MOVIENAME_AND_YEAR.search(name)
|
|
||||||
return parsed[1], int(parsed[2])
|
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from .common import ItemBase
|
from .common import ItemBase
|
||||||
|
@ -126,12 +127,16 @@ class MusicMixin(object):
|
||||||
# Check whether we have orphaned path entries
|
# Check whether we have orphaned path entries
|
||||||
if not self.kodidb.path_id_from_song(kodi_id):
|
if not self.kodidb.path_id_from_song(kodi_id):
|
||||||
self.kodidb.remove_path(path_id)
|
self.kodidb.remove_path(path_id)
|
||||||
|
if v.KODIVERSION < 18:
|
||||||
|
self.kodidb.remove_albuminfosong(kodi_id)
|
||||||
self.kodidb.delete_artwork(kodi_id, v.KODI_TYPE_SONG)
|
self.kodidb.delete_artwork(kodi_id, v.KODI_TYPE_SONG)
|
||||||
|
|
||||||
def remove_album(self, kodi_id):
|
def remove_album(self, kodi_id):
|
||||||
'''
|
'''
|
||||||
Remove an album
|
Remove an album
|
||||||
'''
|
'''
|
||||||
|
if v.KODIVERSION < 18:
|
||||||
|
self.kodidb.delete_album_from_album_genre(kodi_id)
|
||||||
self.kodidb.remove_album(kodi_id)
|
self.kodidb.remove_album(kodi_id)
|
||||||
self.kodidb.delete_artwork(kodi_id, v.KODI_TYPE_ALBUM)
|
self.kodidb.delete_artwork(kodi_id, v.KODI_TYPE_ALBUM)
|
||||||
|
|
||||||
|
@ -171,6 +176,16 @@ class Artist(MusicMixin, ItemBase):
|
||||||
|
|
||||||
if app.SYNC.artwork:
|
if app.SYNC.artwork:
|
||||||
artworks = api.artwork()
|
artworks = api.artwork()
|
||||||
|
if 'poster' in artworks:
|
||||||
|
thumb = "<thumb>%s</thumb>" % artworks['poster']
|
||||||
|
else:
|
||||||
|
thumb = None
|
||||||
|
if 'fanart' in artworks:
|
||||||
|
fanart = "<fanart>%s</fanart>" % artworks['fanart']
|
||||||
|
else:
|
||||||
|
fanart = None
|
||||||
|
else:
|
||||||
|
thumb, fanart = None, None
|
||||||
|
|
||||||
# UPDATE THE ARTIST #####
|
# UPDATE THE ARTIST #####
|
||||||
if update_item:
|
if update_item:
|
||||||
|
@ -185,6 +200,8 @@ class Artist(MusicMixin, ItemBase):
|
||||||
kodi_id = self.kodidb.add_artist(api.title(), musicBrainzId)
|
kodi_id = self.kodidb.add_artist(api.title(), musicBrainzId)
|
||||||
self.kodidb.update_artist(api.list_to_string(api.genres()),
|
self.kodidb.update_artist(api.list_to_string(api.genres()),
|
||||||
api.plot(),
|
api.plot(),
|
||||||
|
thumb,
|
||||||
|
fanart,
|
||||||
timing.unix_date_to_kodi(self.last_sync),
|
timing.unix_date_to_kodi(self.last_sync),
|
||||||
kodi_id)
|
kodi_id)
|
||||||
if app.SYNC.artwork:
|
if app.SYNC.artwork:
|
||||||
|
@ -264,46 +281,76 @@ class Album(MusicMixin, ItemBase):
|
||||||
genre = api.list_to_string(api.genres())
|
genre = api.list_to_string(api.genres())
|
||||||
if app.SYNC.artwork:
|
if app.SYNC.artwork:
|
||||||
artworks = api.artwork()
|
artworks = api.artwork()
|
||||||
|
if 'poster' in artworks:
|
||||||
|
thumb = "<thumb>%s</thumb>" % artworks['poster']
|
||||||
|
else:
|
||||||
|
thumb = None
|
||||||
|
else:
|
||||||
|
thumb = None
|
||||||
|
|
||||||
# UPDATE THE ALBUM #####
|
# UPDATE THE ALBUM #####
|
||||||
if update_item:
|
if update_item:
|
||||||
LOG.info("UPDATE album plex_id: %s - Name: %s", plex_id, name)
|
LOG.info("UPDATE album plex_id: %s - Name: %s", plex_id, name)
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
self.kodidb.update_album(name,
|
self.kodidb.update_album(name,
|
||||||
musicBrainzId,
|
musicBrainzId,
|
||||||
api.artist_name(),
|
api.artist_name(),
|
||||||
genre,
|
genre,
|
||||||
api.premiere_date(),
|
api.year(),
|
||||||
# TODO: as soon as Plex supports the original
|
|
||||||
# release date (Kodi: strOrigReleaseDate)
|
|
||||||
api.premiere_date(),
|
|
||||||
compilation,
|
compilation,
|
||||||
api.plot(),
|
api.plot(),
|
||||||
|
thumb,
|
||||||
api.list_to_string(api.studios()),
|
api.list_to_string(api.studios()),
|
||||||
api.kodi_type,
|
|
||||||
api.userrating(),
|
api.userrating(),
|
||||||
timing.unix_date_to_kodi(self.last_sync),
|
timing.unix_date_to_kodi(self.last_sync),
|
||||||
api.kodi_type,
|
'album',
|
||||||
|
kodi_id)
|
||||||
|
else:
|
||||||
|
self.kodidb.update_album_17(name,
|
||||||
|
musicBrainzId,
|
||||||
|
api.artist_name(),
|
||||||
|
genre,
|
||||||
|
api.year(),
|
||||||
|
compilation,
|
||||||
|
api.plot(),
|
||||||
|
thumb,
|
||||||
|
api.list_to_string(api.studios()),
|
||||||
|
api.userrating(),
|
||||||
|
timing.unix_date_to_kodi(self.last_sync),
|
||||||
|
'album',
|
||||||
kodi_id)
|
kodi_id)
|
||||||
# OR ADD THE ALBUM #####
|
# OR ADD THE ALBUM #####
|
||||||
else:
|
else:
|
||||||
LOG.info("ADD album plex_id: %s - Name: %s", plex_id, name)
|
LOG.info("ADD album plex_id: %s - Name: %s", plex_id, name)
|
||||||
kodi_id = self.kodidb.new_album_id()
|
kodi_id = self.kodidb.new_album_id()
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
self.kodidb.add_album(kodi_id,
|
self.kodidb.add_album(kodi_id,
|
||||||
name,
|
name,
|
||||||
musicBrainzId,
|
musicBrainzId,
|
||||||
api.artist_name(),
|
api.artist_name(),
|
||||||
genre,
|
genre,
|
||||||
api.premiere_date(),
|
api.year(),
|
||||||
# TODO: as soon as Plex supports the original
|
|
||||||
# release date (Kodi: strOrigReleaseDate)
|
|
||||||
api.premiere_date(),
|
|
||||||
compilation,
|
compilation,
|
||||||
api.plot(),
|
api.plot(),
|
||||||
|
thumb,
|
||||||
api.list_to_string(api.studios()),
|
api.list_to_string(api.studios()),
|
||||||
api.kodi_type,
|
|
||||||
api.userrating(),
|
api.userrating(),
|
||||||
timing.unix_date_to_kodi(self.last_sync),
|
timing.unix_date_to_kodi(self.last_sync),
|
||||||
api.kodi_type)
|
'album')
|
||||||
|
else:
|
||||||
|
self.kodidb.add_album_17(kodi_id,
|
||||||
|
name,
|
||||||
|
musicBrainzId,
|
||||||
|
api.artist_name(),
|
||||||
|
genre,
|
||||||
|
api.year(),
|
||||||
|
compilation,
|
||||||
|
api.plot(),
|
||||||
|
thumb,
|
||||||
|
api.list_to_string(api.studios()),
|
||||||
|
api.userrating(),
|
||||||
|
timing.unix_date_to_kodi(self.last_sync),
|
||||||
|
'album')
|
||||||
self.kodidb.add_albumartist(artist_id, kodi_id, api.artist_name())
|
self.kodidb.add_albumartist(artist_id, kodi_id, api.artist_name())
|
||||||
if app.SYNC.artwork:
|
if app.SYNC.artwork:
|
||||||
self.kodidb.modify_artwork(artworks,
|
self.kodidb.modify_artwork(artworks,
|
||||||
|
@ -385,19 +432,30 @@ class Song(MusicMixin, ItemBase):
|
||||||
# No album found, create a single's album
|
# No album found, create a single's album
|
||||||
LOG.info('Creating singles album')
|
LOG.info('Creating singles album')
|
||||||
parent_id = self.kodidb.new_album_id()
|
parent_id = self.kodidb.new_album_id()
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
self.kodidb.add_album(kodi_id,
|
self.kodidb.add_album(kodi_id,
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
genre,
|
genre,
|
||||||
api.premiere_date(),
|
api.year(),
|
||||||
# TODO: as soon as Plex supports the original
|
None,
|
||||||
# release date (Kodi: strOrigReleaseDate)
|
None,
|
||||||
api.premiere_date(),
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
timing.unix_date_to_kodi(self.last_sync),
|
||||||
|
'single')
|
||||||
|
else:
|
||||||
|
self.kodidb.add_album_17(kodi_id,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
genre,
|
||||||
|
api.year(),
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
'single',
|
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
timing.unix_date_to_kodi(self.last_sync),
|
timing.unix_date_to_kodi(self.last_sync),
|
||||||
|
@ -457,31 +515,43 @@ class Song(MusicMixin, ItemBase):
|
||||||
moods.append(entry.attrib['tag'])
|
moods.append(entry.attrib['tag'])
|
||||||
mood = api.list_to_string(moods)
|
mood = api.list_to_string(moods)
|
||||||
_, path, filename = api.fullpath()
|
_, path, filename = api.fullpath()
|
||||||
audio_codec = api.audio_codec()
|
|
||||||
# UPDATE THE SONG #####
|
# UPDATE THE SONG #####
|
||||||
if update_item:
|
if update_item:
|
||||||
LOG.info("UPDATE song plex_id: %s - %s", plex_id, title)
|
LOG.info("UPDATE song plex_id: %s - %s", plex_id, title)
|
||||||
# Use dummy strHash '123' for Kodi
|
# Use dummy strHash '123' for Kodi
|
||||||
self.kodidb.update_path(path, kodi_pathid)
|
self.kodidb.update_path(path, kodi_pathid)
|
||||||
|
# Update the song entry
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
|
# Kodi Leia
|
||||||
self.kodidb.update_song(parent_id,
|
self.kodidb.update_song(parent_id,
|
||||||
artists,
|
artists,
|
||||||
genre,
|
genre,
|
||||||
title,
|
title,
|
||||||
track,
|
track,
|
||||||
api.runtime(),
|
api.runtime(),
|
||||||
api.premiere_date(),
|
year,
|
||||||
# TODO: as soon as Plex supports the original
|
filename,
|
||||||
# release date (Kodi: strOrigReleaseDate)
|
api.viewcount(),
|
||||||
api.premiere_date(),
|
api.lastplayed(),
|
||||||
|
api.userrating(),
|
||||||
|
comment,
|
||||||
|
mood,
|
||||||
|
api.date_created(),
|
||||||
|
kodi_id)
|
||||||
|
else:
|
||||||
|
self.kodidb.update_song_17(parent_id,
|
||||||
|
artists,
|
||||||
|
genre,
|
||||||
|
title,
|
||||||
|
track,
|
||||||
|
api.runtime(),
|
||||||
|
year,
|
||||||
filename,
|
filename,
|
||||||
api.viewcount(),
|
api.viewcount(),
|
||||||
api.lastplayed(),
|
api.lastplayed(),
|
||||||
api.userrating(),
|
api.userrating(),
|
||||||
comment,
|
comment,
|
||||||
mood,
|
mood,
|
||||||
audio_codec['bitrate'] or 0,
|
|
||||||
audio_codec['samplingrate'] or 0,
|
|
||||||
audio_codec['channels'] or 0,
|
|
||||||
api.date_created(),
|
api.date_created(),
|
||||||
kodi_id)
|
kodi_id)
|
||||||
# OR ADD THE SONG #####
|
# OR ADD THE SONG #####
|
||||||
|
@ -489,6 +559,9 @@ class Song(MusicMixin, ItemBase):
|
||||||
LOG.info("ADD song plex_id: %s - %s", plex_id, title)
|
LOG.info("ADD song plex_id: %s - %s", plex_id, title)
|
||||||
# Add path
|
# Add path
|
||||||
kodi_pathid = self.kodidb.add_path(path)
|
kodi_pathid = self.kodidb.add_path(path)
|
||||||
|
# Create the song entry
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
|
# Kodi Leia
|
||||||
self.kodidb.add_song(kodi_id,
|
self.kodidb.add_song(kodi_id,
|
||||||
parent_id,
|
parent_id,
|
||||||
kodi_pathid,
|
kodi_pathid,
|
||||||
|
@ -497,10 +570,7 @@ class Song(MusicMixin, ItemBase):
|
||||||
title,
|
title,
|
||||||
track,
|
track,
|
||||||
api.runtime(),
|
api.runtime(),
|
||||||
api.premiere_date(),
|
year,
|
||||||
# TODO: as soon as Plex supports the original
|
|
||||||
# release date (Kodi: strOrigReleaseDate)
|
|
||||||
api.premiere_date(),
|
|
||||||
filename,
|
filename,
|
||||||
musicBrainzId,
|
musicBrainzId,
|
||||||
api.viewcount(),
|
api.viewcount(),
|
||||||
|
@ -509,10 +579,33 @@ class Song(MusicMixin, ItemBase):
|
||||||
0,
|
0,
|
||||||
0,
|
0,
|
||||||
mood,
|
mood,
|
||||||
audio_codec['bitrate'] or 0,
|
|
||||||
audio_codec['samplingrate'] or 0,
|
|
||||||
audio_codec['channels'] or 0,
|
|
||||||
api.date_created())
|
api.date_created())
|
||||||
|
else:
|
||||||
|
self.kodidb.add_song_17(kodi_id,
|
||||||
|
parent_id,
|
||||||
|
kodi_pathid,
|
||||||
|
artists,
|
||||||
|
genre,
|
||||||
|
title,
|
||||||
|
track,
|
||||||
|
api.runtime(),
|
||||||
|
year,
|
||||||
|
filename,
|
||||||
|
musicBrainzId,
|
||||||
|
api.viewcount(),
|
||||||
|
api.lastplayed(),
|
||||||
|
api.userrating(),
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
mood,
|
||||||
|
api.date_created())
|
||||||
|
if v.KODIVERSION < 18:
|
||||||
|
# Link song to album
|
||||||
|
self.kodidb.add_albuminfosong(kodi_id,
|
||||||
|
parent_id,
|
||||||
|
track,
|
||||||
|
title,
|
||||||
|
api.runtime())
|
||||||
# Link song to artists
|
# Link song to artists
|
||||||
artist_name = api.grandparent_title()
|
artist_name = api.grandparent_title()
|
||||||
# Do the actual linking
|
# Do the actual linking
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from .common import ItemBase, process_path
|
from .common import ItemBase, process_path
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
Collection of functions using the Kodi JSON RPC interface.
|
Collection of functions using the Kodi JSON RPC interface.
|
||||||
See http://kodi.wiki/view/JSON-RPC_API
|
See http://kodi.wiki/view/JSON-RPC_API
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from json import loads, dumps
|
from json import loads, dumps
|
||||||
from xbmc import executeJSONRPC
|
from xbmc import executeJSONRPC
|
||||||
|
|
||||||
|
@ -84,7 +85,7 @@ def get_player_ids():
|
||||||
Returns a list of all the active Kodi player ids (usually 3) as int
|
Returns a list of all the active Kodi player ids (usually 3) as int
|
||||||
"""
|
"""
|
||||||
ret = []
|
ret = []
|
||||||
for player in list(get_players().values()):
|
for player in get_players().values():
|
||||||
ret.append(player['playerid'])
|
ret.append(player['playerid'])
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
@ -169,12 +170,12 @@ def stop():
|
||||||
|
|
||||||
def seek_to(offset):
|
def seek_to(offset):
|
||||||
"""
|
"""
|
||||||
Seeks all Kodi players to offset [int] in seconds
|
Seeks all Kodi players to offset [int] in milliseconds
|
||||||
"""
|
"""
|
||||||
for playerid in get_player_ids():
|
for playerid in get_player_ids():
|
||||||
return JsonRPC("Player.Seek").execute(
|
return JsonRPC("Player.Seek").execute(
|
||||||
{"playerid": playerid,
|
{"playerid": playerid,
|
||||||
"value": {'time': timing.millis_to_kodi_time(int(offset * 1000))}})
|
"value": timing.millis_to_kodi_time(offset)})
|
||||||
|
|
||||||
|
|
||||||
def smallforward():
|
def smallforward():
|
||||||
|
@ -429,15 +430,6 @@ def get_current_audio_stream_index(playerid):
|
||||||
'properties': ['currentaudiostream']})['result']['currentaudiostream']['index']
|
'properties': ['currentaudiostream']})['result']['currentaudiostream']['index']
|
||||||
|
|
||||||
|
|
||||||
def get_current_video_stream_index(playerid):
|
|
||||||
"""
|
|
||||||
Returns the currently active video stream index [int]
|
|
||||||
"""
|
|
||||||
return JsonRPC('Player.GetProperties').execute({
|
|
||||||
'playerid': playerid,
|
|
||||||
'properties': ['currentvideostream']})['result']['currentvideostream']['index']
|
|
||||||
|
|
||||||
|
|
||||||
def get_current_subtitle_stream_index(playerid):
|
def get_current_subtitle_stream_index(playerid):
|
||||||
"""
|
"""
|
||||||
Returns the currently active subtitle stream index [int] or None if there
|
Returns the currently active subtitle stream index [int] or None if there
|
||||||
|
@ -629,15 +621,6 @@ def item_details(kodi_id, kodi_type):
|
||||||
ret = JsonRPC(json).execute({'%sid' % kodi_type: kodi_id,
|
ret = JsonRPC(json).execute({'%sid' % kodi_type: kodi_id,
|
||||||
'properties': fields})
|
'properties': fields})
|
||||||
try:
|
try:
|
||||||
ret = ret['result']['%sdetails' % kodi_type]
|
return ret['result']['%sdetails' % kodi_type]
|
||||||
except (KeyError, TypeError):
|
except (KeyError, TypeError):
|
||||||
return {}
|
return {}
|
||||||
if kodi_type == v.KODI_TYPE_SHOW:
|
|
||||||
# append watched counts to tvshow details
|
|
||||||
ret["extraproperties"] = {
|
|
||||||
"totalseasons": str(ret["season"]),
|
|
||||||
"totalepisodes": str(ret["episode"]),
|
|
||||||
"watchedepisodes": str(ret["watchedepisodes"]),
|
|
||||||
"unwatchedepisodes": str(ret["episode"] - ret["watchedepisodes"])
|
|
||||||
}
|
|
||||||
return ret
|
|
||||||
|
|
|
@ -5,45 +5,88 @@ script.module.metadatautils
|
||||||
kodi_constants.py
|
kodi_constants.py
|
||||||
Several common constants for use with Kodi json api
|
Several common constants for use with Kodi json api
|
||||||
'''
|
'''
|
||||||
FIELDS_BASE = ["dateadded", "file", "lastplayed", "plot", "title", "art", "playcount"]
|
FIELDS_BASE = ['dateadded', 'file', 'lastplayed', 'plot', 'title', 'art',
|
||||||
FIELDS_FILE = FIELDS_BASE + ["streamdetails", "director", "resume", "runtime"]
|
'playcount']
|
||||||
FIELDS_MOVIES = FIELDS_FILE + ["plotoutline", "sorttitle", "cast", "votes", "showlink", "top250", "trailer", "year",
|
FIELDS_FILE = FIELDS_BASE + ['streamdetails', 'director', 'resume', 'runtime']
|
||||||
"country", "studio", "set", "genre", "mpaa", "setid", "rating", "tag", "tagline",
|
FIELDS_MOVIES = FIELDS_FILE + ['plotoutline', 'sorttitle', 'cast', 'votes',
|
||||||
"writer", "originaltitle",
|
'showlink', 'top250', 'trailer', 'year', 'country', 'studio', 'set',
|
||||||
"imdbnumber"]
|
'genre', 'mpaa', 'setid', 'rating', 'tag', 'tagline', 'writer',
|
||||||
FIELDS_MOVIES.append("uniqueid")
|
'originaltitle', 'imdbnumber', 'uniqueid']
|
||||||
FIELDS_TVSHOWS = FIELDS_BASE + ["sorttitle", "mpaa", "premiered", "year", "episode", "watchedepisodes", "votes",
|
FIELDS_TVSHOWS = FIELDS_BASE + ['sorttitle', 'mpaa', 'premiered', 'year',
|
||||||
"rating", "studio", "season", "genre", "cast", "episodeguide", "tag", "originaltitle",
|
'episode', 'watchedepisodes', 'votes', 'rating', 'studio', 'season',
|
||||||
"imdbnumber"]
|
'genre', 'cast', 'episodeguide', 'tag', 'originaltitle', 'imdbnumber']
|
||||||
FIELDS_SEASON = ['art', 'playcount', 'season', 'showtitle', 'episode',
|
FIELDS_SEASON = ['art', 'playcount', 'season', 'showtitle', 'episode',
|
||||||
'tvshowid', 'watchedepisodes', 'userrating', 'fanart', 'thumbnail']
|
'tvshowid', 'watchedepisodes', 'userrating', 'fanart', 'thumbnail']
|
||||||
FIELDS_EPISODES = FIELDS_FILE + ["cast", "productioncode", "rating", "votes", "episode", "showtitle", "tvshowid",
|
FIELDS_EPISODES = FIELDS_FILE + ['cast', 'productioncode', 'rating', 'votes',
|
||||||
"season", "firstaired", "writer", "originaltitle"]
|
'episode', 'showtitle', 'tvshowid', 'season', 'firstaired', 'writer',
|
||||||
FIELDS_MUSICVIDEOS = FIELDS_FILE + ["genre", "artist", "tag", "album", "track", "studio", "year"]
|
'originaltitle']
|
||||||
FIELDS_FILES = FIELDS_FILE + ["plotoutline", "sorttitle", "cast", "votes", "trailer", "year", "country", "studio",
|
FIELDS_MUSICVIDEOS = FIELDS_FILE + ['genre', 'artist', 'tag', 'album', 'track',
|
||||||
"genre", "mpaa", "rating", "tagline", "writer", "originaltitle", "imdbnumber",
|
'studio', 'year']
|
||||||
"premiered", "episode", "showtitle",
|
FIELDS_FILES = FIELDS_FILE + ['plotoutline', 'sorttitle', 'cast', 'votes',
|
||||||
"firstaired", "watchedepisodes", "duration", "season"]
|
'trailer', 'year', 'country', 'studio', 'genre', 'mpaa', 'rating',
|
||||||
FIELDS_SONGS = ["artist", "displayartist", "title", "rating", "fanart", "thumbnail", "duration", "disc",
|
'tagline', 'writer', 'originaltitle', 'imdbnumber', 'premiered', 'episode',
|
||||||
"playcount", "comment", "file", "album", "lastplayed", "genre", "musicbrainzartistid", "track",
|
'showtitle', 'firstaired', 'watchedepisodes', 'duration', 'season']
|
||||||
"dateadded"]
|
FIELDS_SONGS = ['artist', 'displayartist', 'title', 'rating', 'fanart',
|
||||||
FIELDS_ALBUMS = ["title", "fanart", "thumbnail", "genre", "displayartist", "artist",
|
'thumbnail', 'duration', 'disc', 'playcount', 'comment', 'file', 'album',
|
||||||
"musicbrainzalbumartistid", "year", "rating", "artistid", "musicbrainzalbumid", "theme", "description",
|
'lastplayed', 'genre', 'musicbrainzartistid', 'track', 'dateadded']
|
||||||
"type", "style", "playcount", "albumlabel", "mood", "dateadded"]
|
FIELDS_ALBUMS = ['title', 'fanart', 'thumbnail', 'genre', 'displayartist',
|
||||||
FIELDS_ARTISTS = ["born", "formed", "died", "style", "yearsactive", "mood", "fanart", "thumbnail",
|
'artist', 'musicbrainzalbumartistid', 'year', 'rating', 'artistid',
|
||||||
"musicbrainzartistid", "disbanded", "description", "instrument"]
|
'musicbrainzalbumid', 'theme', 'description', 'type', 'style', 'playcount',
|
||||||
FIELDS_RECORDINGS = ["art", "channel", "directory", "endtime", "file", "genre", "icon", "playcount", "plot",
|
'albumlabel', 'mood', 'dateadded']
|
||||||
"plotoutline", "resume", "runtime", "starttime", "streamurl", "title"]
|
FIELDS_ARTISTS = ['born', 'formed', 'died', 'style', 'yearsactive', 'mood',
|
||||||
FIELDS_CHANNELS = ["broadcastnow", "channeltype", "hidden", "locked", "lastplayed", "thumbnail", "channel"]
|
'fanart', 'thumbnail', 'musicbrainzartistid', 'disbanded', 'description',
|
||||||
|
'instrument']
|
||||||
|
FIELDS_RECORDINGS = ['art', 'channel', 'directory', 'endtime', 'file', 'genre',
|
||||||
|
'icon', 'playcount', 'plot', 'plotoutline', 'resume', 'runtime',
|
||||||
|
'starttime', 'streamurl', 'title']
|
||||||
|
FIELDS_CHANNELS = ['broadcastnow', 'channeltype', 'hidden', 'locked',
|
||||||
|
'lastplayed', 'thumbnail', 'channel']
|
||||||
|
|
||||||
FILTER_UNWATCHED = {"operator": "lessthan", "field": "playcount", "value": "1"}
|
FILTER_UNWATCHED = {
|
||||||
FILTER_WATCHED = {"operator": "isnot", "field": "playcount", "value": "0"}
|
'operator': 'lessthan',
|
||||||
FILTER_RATING = {"operator": "greaterthan", "field": "rating", "value": "7"}
|
'field': 'playcount',
|
||||||
FILTER_RATING_MUSIC = {"operator": "greaterthan", "field": "rating", "value": "3"}
|
'value': '1'
|
||||||
FILTER_INPROGRESS = {"operator": "true", "field": "inprogress", "value": ""}
|
}
|
||||||
SORT_RATING = {"method": "rating", "order": "descending"}
|
FILTER_WATCHED = {
|
||||||
SORT_RANDOM = {"method": "random", "order": "descending"}
|
'operator': 'isnot',
|
||||||
SORT_TITLE = {"method": "title", "order": "ascending"}
|
'field': 'playcount',
|
||||||
SORT_DATEADDED = {"method": "dateadded", "order": "descending"}
|
'value': '0'
|
||||||
SORT_LASTPLAYED = {"method": "lastplayed", "order": "descending"}
|
}
|
||||||
SORT_EPISODE = {"method": "episode"}
|
FILTER_RATING = {
|
||||||
|
'operator': 'greaterthan',
|
||||||
|
'field': 'rating',
|
||||||
|
'value': '7'
|
||||||
|
}
|
||||||
|
FILTER_RATING_MUSIC = {
|
||||||
|
'operator': 'greaterthan',
|
||||||
|
'field': 'rating',
|
||||||
|
'value': '3'
|
||||||
|
}
|
||||||
|
FILTER_INPROGRESS = {
|
||||||
|
'operator': 'true',
|
||||||
|
'field': 'inprogress',
|
||||||
|
'value': ''
|
||||||
|
}
|
||||||
|
SORT_RATING = {
|
||||||
|
'method': 'rating',
|
||||||
|
'order': 'descending'
|
||||||
|
}
|
||||||
|
SORT_RANDOM = {
|
||||||
|
'method': 'random',
|
||||||
|
'order': 'descending'
|
||||||
|
}
|
||||||
|
SORT_TITLE = {
|
||||||
|
'method': 'title',
|
||||||
|
'order': 'ascending'
|
||||||
|
}
|
||||||
|
SORT_DATEADDED = {
|
||||||
|
'method': 'dateadded',
|
||||||
|
'order': 'descending'
|
||||||
|
}
|
||||||
|
SORT_LASTPLAYED = {
|
||||||
|
'method': 'lastplayed',
|
||||||
|
'order': 'descending'
|
||||||
|
}
|
||||||
|
SORT_EPISODE = {
|
||||||
|
'method': 'episode'
|
||||||
|
}
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from .common import KODIDB_LOCK
|
from .common import KODIDB_LOCK
|
||||||
|
@ -20,6 +21,7 @@ def kodiid_from_filename(path, kodi_type=None, db_type=None):
|
||||||
Returns None, <kodi_type> if not possible
|
Returns None, <kodi_type> if not possible
|
||||||
"""
|
"""
|
||||||
kodi_id = None
|
kodi_id = None
|
||||||
|
path = utils.try_decode(path)
|
||||||
# Make sure path ends in either '/' or '\'
|
# Make sure path ends in either '/' or '\'
|
||||||
# We CANNOT use path_ops.path.join as this can result in \ where we need /
|
# We CANNOT use path_ops.path.join as this can result in \ where we need /
|
||||||
try:
|
try:
|
||||||
|
@ -72,7 +74,7 @@ def reset_cached_images():
|
||||||
for path in paths:
|
for path in paths:
|
||||||
new_path = path_ops.translate_path('special://thumbnails/%s' % path)
|
new_path = path_ops.translate_path('special://thumbnails/%s' % path)
|
||||||
try:
|
try:
|
||||||
path_ops.makedirs(new_path)
|
path_ops.makedirs(path_ops.encode_path(new_path))
|
||||||
except OSError as err:
|
except OSError as err:
|
||||||
LOG.warn('Could not create thumbnail directory %s: %s',
|
LOG.warn('Could not create thumbnail directory %s: %s',
|
||||||
new_path, err)
|
new_path, err)
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from threading import Lock
|
from threading import Lock
|
||||||
|
|
||||||
from .. import db, path_ops
|
from .. import db, path_ops
|
||||||
|
@ -64,7 +65,7 @@ class KodiDBBase(object):
|
||||||
"""
|
"""
|
||||||
Pass in an artworks dict (see PlexAPI) to set an items artwork.
|
Pass in an artworks dict (see PlexAPI) to set an items artwork.
|
||||||
"""
|
"""
|
||||||
for kodi_art, url in artworks.items():
|
for kodi_art, url in artworks.iteritems():
|
||||||
self.add_art(url, kodi_id, kodi_type, kodi_art)
|
self.add_art(url, kodi_id, kodi_type, kodi_art)
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
|
@ -83,7 +84,7 @@ class KodiDBBase(object):
|
||||||
"""
|
"""
|
||||||
Pass in an artworks dict (see PlexAPI) to set an items artwork.
|
Pass in an artworks dict (see PlexAPI) to set an items artwork.
|
||||||
"""
|
"""
|
||||||
for kodi_art, url in artworks.items():
|
for kodi_art, url in artworks.iteritems():
|
||||||
self.modify_art(url, kodi_id, kodi_type, kodi_art)
|
self.modify_art(url, kodi_id, kodi_type, kodi_art)
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from . import common
|
from . import common
|
||||||
|
@ -48,6 +49,7 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
strRole)
|
strRole)
|
||||||
VALUES (?, ?)
|
VALUES (?, ?)
|
||||||
''', (1, 'Artist'))
|
''', (1, 'Artist'))
|
||||||
|
if v.KODIVERSION >= 18:
|
||||||
self.cursor.execute('DELETE FROM versiontagscan')
|
self.cursor.execute('DELETE FROM versiontagscan')
|
||||||
self.cursor.execute('''
|
self.cursor.execute('''
|
||||||
INSERT INTO versiontagscan(
|
INSERT INTO versiontagscan(
|
||||||
|
@ -158,6 +160,87 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
self.cursor.execute('SELECT COALESCE(MAX(idAlbum), 0) FROM album')
|
self.cursor.execute('SELECT COALESCE(MAX(idAlbum), 0) FROM album')
|
||||||
return self.cursor.fetchone()[0] + 1
|
return self.cursor.fetchone()[0] + 1
|
||||||
|
|
||||||
|
@db.catch_operationalerrors
|
||||||
|
def add_album_17(self, *args):
|
||||||
|
"""
|
||||||
|
strReleaseType: 'album' or 'single'
|
||||||
|
"""
|
||||||
|
if app.SYNC.artwork:
|
||||||
|
self.cursor.execute('''
|
||||||
|
INSERT INTO album(
|
||||||
|
idAlbum,
|
||||||
|
strAlbum,
|
||||||
|
strMusicBrainzAlbumID,
|
||||||
|
strArtists,
|
||||||
|
strGenres,
|
||||||
|
iYear,
|
||||||
|
bCompilation,
|
||||||
|
strReview,
|
||||||
|
strImage,
|
||||||
|
strLabel,
|
||||||
|
iUserrating,
|
||||||
|
lastScraped,
|
||||||
|
strReleaseType)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
''', (args))
|
||||||
|
else:
|
||||||
|
args = list(args)
|
||||||
|
del args[8]
|
||||||
|
self.cursor.execute('''
|
||||||
|
INSERT INTO album(
|
||||||
|
idAlbum,
|
||||||
|
strAlbum,
|
||||||
|
strMusicBrainzAlbumID,
|
||||||
|
strArtists,
|
||||||
|
strGenres,
|
||||||
|
iYear,
|
||||||
|
bCompilation,
|
||||||
|
strReview,
|
||||||
|
strLabel,
|
||||||
|
iUserrating,
|
||||||
|
lastScraped,
|
||||||
|
strReleaseType)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
''', (args))
|
||||||
|
|
||||||
|
@db.catch_operationalerrors
|
||||||
|
def update_album_17(self, *args):
|
||||||
|
if app.SYNC.artwork:
|
||||||
|
self.cursor.execute('''
|
||||||
|
UPDATE album
|
||||||
|
SET strAlbum = ?,
|
||||||
|
strMusicBrainzAlbumID = ?,
|
||||||
|
strArtists = ?,
|
||||||
|
strGenres = ?,
|
||||||
|
iYear = ?,
|
||||||
|
bCompilation = ?,
|
||||||
|
strReview = ?,
|
||||||
|
strImage = ?,
|
||||||
|
strLabel = ?,
|
||||||
|
iUserrating = ?,
|
||||||
|
lastScraped = ?,
|
||||||
|
strReleaseType = ?
|
||||||
|
WHERE idAlbum = ?
|
||||||
|
''', (args))
|
||||||
|
else:
|
||||||
|
args = list(args)
|
||||||
|
del args[7]
|
||||||
|
self.cursor.execute('''
|
||||||
|
UPDATE album
|
||||||
|
SET strAlbum = ?,
|
||||||
|
strMusicBrainzAlbumID = ?,
|
||||||
|
strArtists = ?,
|
||||||
|
strGenres = ?,
|
||||||
|
iYear = ?,
|
||||||
|
bCompilation = ?,
|
||||||
|
strReview = ?,
|
||||||
|
strLabel = ?,
|
||||||
|
iUserrating = ?,
|
||||||
|
lastScraped = ?,
|
||||||
|
strReleaseType = ?
|
||||||
|
WHERE idAlbum = ?
|
||||||
|
''', (args))
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
def add_album(self, *args):
|
def add_album(self, *args):
|
||||||
"""
|
"""
|
||||||
|
@ -171,16 +254,15 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
strMusicBrainzAlbumID,
|
strMusicBrainzAlbumID,
|
||||||
strArtistDisp,
|
strArtistDisp,
|
||||||
strGenres,
|
strGenres,
|
||||||
strReleaseDate,
|
iYear,
|
||||||
strOrigReleaseDate,
|
|
||||||
bCompilation,
|
bCompilation,
|
||||||
strReview,
|
strReview,
|
||||||
|
strImage,
|
||||||
strLabel,
|
strLabel,
|
||||||
strType,
|
|
||||||
iUserrating,
|
iUserrating,
|
||||||
lastScraped,
|
lastScraped,
|
||||||
strReleaseType)
|
strReleaseType)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
''', (args))
|
''', (args))
|
||||||
else:
|
else:
|
||||||
args = list(args)
|
args = list(args)
|
||||||
|
@ -192,16 +274,14 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
strMusicBrainzAlbumID,
|
strMusicBrainzAlbumID,
|
||||||
strArtistDisp,
|
strArtistDisp,
|
||||||
strGenres,
|
strGenres,
|
||||||
strReleaseDate,
|
iYear,
|
||||||
strOrigReleaseDate,
|
|
||||||
bCompilation,
|
bCompilation,
|
||||||
strReview,
|
strReview,
|
||||||
strLabel,
|
strLabel,
|
||||||
strType,
|
|
||||||
iUserrating,
|
iUserrating,
|
||||||
lastScraped,
|
lastScraped,
|
||||||
strReleaseType)
|
strReleaseType)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
''', (args))
|
''', (args))
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
|
@ -213,12 +293,11 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
strMusicBrainzAlbumID = ?,
|
strMusicBrainzAlbumID = ?,
|
||||||
strArtistDisp = ?,
|
strArtistDisp = ?,
|
||||||
strGenres = ?,
|
strGenres = ?,
|
||||||
strReleaseDate = ?,
|
iYear = ?,
|
||||||
strOrigReleaseDate = ?,
|
|
||||||
bCompilation = ?,
|
bCompilation = ?,
|
||||||
strReview = ?,
|
strReview = ?,
|
||||||
|
strImage = ?,
|
||||||
strLabel = ?,
|
strLabel = ?,
|
||||||
strType = ?,
|
|
||||||
iUserrating = ?,
|
iUserrating = ?,
|
||||||
lastScraped = ?,
|
lastScraped = ?,
|
||||||
strReleaseType = ?
|
strReleaseType = ?
|
||||||
|
@ -233,8 +312,7 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
strMusicBrainzAlbumID = ?,
|
strMusicBrainzAlbumID = ?,
|
||||||
strArtistDisp = ?,
|
strArtistDisp = ?,
|
||||||
strGenres = ?,
|
strGenres = ?,
|
||||||
strReleaseDate = ?,
|
iYear = ?,
|
||||||
strOrigReleaseDate = ?,
|
|
||||||
bCompilation = ?,
|
bCompilation = ?,
|
||||||
strReview = ?,
|
strReview = ?,
|
||||||
strLabel = ?,
|
strLabel = ?,
|
||||||
|
@ -317,8 +395,7 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
strTitle,
|
strTitle,
|
||||||
iTrack,
|
iTrack,
|
||||||
iDuration,
|
iDuration,
|
||||||
strReleaseDate,
|
iYear,
|
||||||
strOrigReleaseDate,
|
|
||||||
strFileName,
|
strFileName,
|
||||||
strMusicBrainzTrackID,
|
strMusicBrainzTrackID,
|
||||||
iTimesPlayed,
|
iTimesPlayed,
|
||||||
|
@ -327,11 +404,33 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
iStartOffset,
|
iStartOffset,
|
||||||
iEndOffset,
|
iEndOffset,
|
||||||
mood,
|
mood,
|
||||||
iBitRate,
|
|
||||||
iSampleRate,
|
|
||||||
iChannels,
|
|
||||||
dateAdded)
|
dateAdded)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
''', (args))
|
||||||
|
|
||||||
|
@db.catch_operationalerrors
|
||||||
|
def add_song_17(self, *args):
|
||||||
|
self.cursor.execute('''
|
||||||
|
INSERT INTO song(
|
||||||
|
idSong,
|
||||||
|
idAlbum,
|
||||||
|
idPath,
|
||||||
|
strArtists,
|
||||||
|
strGenres,
|
||||||
|
strTitle,
|
||||||
|
iTrack,
|
||||||
|
iDuration,
|
||||||
|
iYear,
|
||||||
|
strFileName,
|
||||||
|
strMusicBrainzTrackID,
|
||||||
|
iTimesPlayed,
|
||||||
|
lastplayed,
|
||||||
|
rating,
|
||||||
|
iStartOffset,
|
||||||
|
iEndOffset,
|
||||||
|
mood,
|
||||||
|
dateAdded)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
''', (args))
|
''', (args))
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
|
@ -344,17 +443,13 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
strTitle = ?,
|
strTitle = ?,
|
||||||
iTrack = ?,
|
iTrack = ?,
|
||||||
iDuration = ?,
|
iDuration = ?,
|
||||||
strReleaseDate = ?,
|
iYear = ?,
|
||||||
strOrigReleaseDate = ?,
|
|
||||||
strFilename = ?,
|
strFilename = ?,
|
||||||
iTimesPlayed = ?,
|
iTimesPlayed = ?,
|
||||||
lastplayed = ?,
|
lastplayed = ?,
|
||||||
rating = ?,
|
rating = ?,
|
||||||
comment = ?,
|
comment = ?,
|
||||||
mood = ?,
|
mood = ?,
|
||||||
iBitRate = ?,
|
|
||||||
iSampleRate = ?,
|
|
||||||
iChannels = ?,
|
|
||||||
dateAdded = ?
|
dateAdded = ?
|
||||||
WHERE idSong = ?
|
WHERE idSong = ?
|
||||||
''', (args))
|
''', (args))
|
||||||
|
@ -368,6 +463,27 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
WHERE idSong = ?
|
WHERE idSong = ?
|
||||||
''', (args))
|
''', (args))
|
||||||
|
|
||||||
|
@db.catch_operationalerrors
|
||||||
|
def update_song_17(self, *args):
|
||||||
|
self.cursor.execute('''
|
||||||
|
UPDATE song
|
||||||
|
SET idAlbum = ?,
|
||||||
|
strArtists = ?,
|
||||||
|
strGenres = ?,
|
||||||
|
strTitle = ?,
|
||||||
|
iTrack = ?,
|
||||||
|
iDuration = ?,
|
||||||
|
iYear = ?,
|
||||||
|
strFilename = ?,
|
||||||
|
iTimesPlayed = ?,
|
||||||
|
lastplayed = ?,
|
||||||
|
rating = ?,
|
||||||
|
comment = ?,
|
||||||
|
mood = ?,
|
||||||
|
dateAdded = ?
|
||||||
|
WHERE idSong = ?
|
||||||
|
''', (args))
|
||||||
|
|
||||||
def path_id_from_song(self, kodi_id):
|
def path_id_from_song(self, kodi_id):
|
||||||
self.cursor.execute('SELECT idPath FROM song WHERE idSong = ? LIMIT 1',
|
self.cursor.execute('SELECT idPath FROM song WHERE idSong = ? LIMIT 1',
|
||||||
(kodi_id, ))
|
(kodi_id, ))
|
||||||
|
@ -411,6 +527,19 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
def update_artist(self, *args):
|
def update_artist(self, *args):
|
||||||
|
if app.SYNC.artwork:
|
||||||
|
self.cursor.execute('''
|
||||||
|
UPDATE artist
|
||||||
|
SET strGenres = ?,
|
||||||
|
strBiography = ?,
|
||||||
|
strImage = ?,
|
||||||
|
strFanart = ?,
|
||||||
|
lastScraped = ?
|
||||||
|
WHERE idArtist = ?
|
||||||
|
''', (args))
|
||||||
|
else:
|
||||||
|
args = list(args)
|
||||||
|
del args[3], args[2]
|
||||||
self.cursor.execute('''
|
self.cursor.execute('''
|
||||||
UPDATE artist
|
UPDATE artist
|
||||||
SET strGenres = ?,
|
SET strGenres = ?,
|
||||||
|
@ -439,6 +568,22 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
VALUES (?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?)
|
||||||
''', (artist_id, song_id, 1, 0, artist_name))
|
''', (artist_id, song_id, 1, 0, artist_name))
|
||||||
|
|
||||||
|
@db.catch_operationalerrors
|
||||||
|
def add_albuminfosong(self, song_id, album_id, track_no, track_title,
|
||||||
|
runtime):
|
||||||
|
"""
|
||||||
|
Kodi 17 only
|
||||||
|
"""
|
||||||
|
self.cursor.execute('''
|
||||||
|
INSERT OR REPLACE INTO albuminfosong(
|
||||||
|
idAlbumInfoSong,
|
||||||
|
idAlbumInfo,
|
||||||
|
iTrack,
|
||||||
|
strTitle,
|
||||||
|
iDuration)
|
||||||
|
VALUES (?, ?, ?, ?, ?)
|
||||||
|
''', (song_id, album_id, track_no, track_title, runtime))
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
def update_userrating(self, kodi_id, kodi_type, userrating):
|
def update_userrating(self, kodi_id, kodi_type, userrating):
|
||||||
"""
|
"""
|
||||||
|
@ -466,6 +611,9 @@ class KodiMusicDB(common.KodiDBBase):
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
def remove_album(self, kodi_id):
|
def remove_album(self, kodi_id):
|
||||||
|
if v.KODIVERSION < 18:
|
||||||
|
self.cursor.execute('DELETE FROM albuminfosong WHERE idAlbumInfo = ?',
|
||||||
|
(kodi_id, ))
|
||||||
self.cursor.execute('DELETE FROM album_artist WHERE idAlbum = ?',
|
self.cursor.execute('DELETE FROM album_artist WHERE idAlbum = ?',
|
||||||
(kodi_id, ))
|
(kodi_id, ))
|
||||||
self.cursor.execute('DELETE FROM album WHERE idAlbum = ?', (kodi_id, ))
|
self.cursor.execute('DELETE FROM album WHERE idAlbum = ?', (kodi_id, ))
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
|
||||||
from . import common
|
from . import common
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
from sqlite3 import IntegrityError
|
from sqlite3 import IntegrityError
|
||||||
|
|
||||||
|
@ -45,15 +46,13 @@ class KodiVideoDB(common.KodiDBBase):
|
||||||
strContent,
|
strContent,
|
||||||
strScraper,
|
strScraper,
|
||||||
noUpdate,
|
noUpdate,
|
||||||
exclude,
|
exclude)
|
||||||
allAudio)
|
VALUES (?, ?, ?, ?, ?)
|
||||||
VALUES (?, ?, ?, ?, ?, ?)
|
|
||||||
'''
|
'''
|
||||||
self.cursor.execute(query, (path,
|
self.cursor.execute(query, (path,
|
||||||
kind,
|
kind,
|
||||||
'metadata.local',
|
'metadata.local',
|
||||||
1,
|
1,
|
||||||
0,
|
|
||||||
0))
|
0))
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
|
@ -62,8 +61,9 @@ class KodiVideoDB(common.KodiDBBase):
|
||||||
Video DB: Adds all subdirectories to path table while setting a "trail"
|
Video DB: Adds all subdirectories to path table while setting a "trail"
|
||||||
of parent path ids
|
of parent path ids
|
||||||
"""
|
"""
|
||||||
parentpath = path_ops.path.split(path_ops.path.split(path)[0])[0]
|
parentpath = path_ops.path.abspath(
|
||||||
parentpath = path_ops.append_os_sep(parentpath)
|
path_ops.path.join(path,
|
||||||
|
path_ops.decode_path(path_ops.path.pardir)))
|
||||||
pathid = self.get_path(parentpath)
|
pathid = self.get_path(parentpath)
|
||||||
if pathid is None:
|
if pathid is None:
|
||||||
self.cursor.execute('''
|
self.cursor.execute('''
|
||||||
|
@ -110,13 +110,11 @@ class KodiVideoDB(common.KodiDBBase):
|
||||||
idParentPath,
|
idParentPath,
|
||||||
strContent,
|
strContent,
|
||||||
strScraper,
|
strScraper,
|
||||||
noUpdate,
|
noUpdate)
|
||||||
exclude,
|
VALUES (?, ?, ?, ?, ?, ?)
|
||||||
allAudio)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
||||||
''',
|
''',
|
||||||
(path, date_added, id_parent_path, content,
|
(path, date_added, id_parent_path, content,
|
||||||
scraper, 1, 0, 0))
|
scraper, 1))
|
||||||
pathid = self.cursor.lastrowid
|
pathid = self.cursor.lastrowid
|
||||||
return pathid
|
return pathid
|
||||||
|
|
||||||
|
@ -320,7 +318,7 @@ class KodiVideoDB(common.KodiDBBase):
|
||||||
for the elmement kodi_id, kodi_type.
|
for the elmement kodi_id, kodi_type.
|
||||||
Will also delete a freshly orphaned actor entry.
|
Will also delete a freshly orphaned actor entry.
|
||||||
"""
|
"""
|
||||||
for kind, people_list in people.items():
|
for kind, people_list in people.iteritems():
|
||||||
self._add_people_kind(kodi_id, kodi_type, kind, people_list)
|
self._add_people_kind(kodi_id, kodi_type, kind, people_list)
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
|
@ -365,7 +363,7 @@ class KodiVideoDB(common.KodiDBBase):
|
||||||
for kind, people_list in (people if people else
|
for kind, people_list in (people if people else
|
||||||
{'actor': [],
|
{'actor': [],
|
||||||
'director': [],
|
'director': [],
|
||||||
'writer': []}).items():
|
'writer': []}).iteritems():
|
||||||
self._modify_people_kind(kodi_id, kodi_type, kind, people_list)
|
self._modify_people_kind(kodi_id, kodi_type, kind, people_list)
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
|
@ -481,31 +479,6 @@ class KodiVideoDB(common.KodiDBBase):
|
||||||
(kodi_id, kodi_type))
|
(kodi_id, kodi_type))
|
||||||
return dict(self.cursor.fetchall())
|
return dict(self.cursor.fetchall())
|
||||||
|
|
||||||
def get_trailer(self, kodi_id, kodi_type):
|
|
||||||
"""
|
|
||||||
Returns the trailer's URL for kodi_type from the Kodi database or None
|
|
||||||
"""
|
|
||||||
if kodi_type == v.KODI_TYPE_MOVIE:
|
|
||||||
self.cursor.execute('SELECT c19 FROM movie WHERE idMovie=?',
|
|
||||||
(kodi_id, ))
|
|
||||||
else:
|
|
||||||
raise NotImplementedError(f'trailers for {kodi_type} not implemented')
|
|
||||||
try:
|
|
||||||
return self.cursor.fetchone()[0]
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
|
||||||
def set_trailer(self, kodi_id, kodi_type, url):
|
|
||||||
"""
|
|
||||||
Writes the trailer's url to the Kodi DB
|
|
||||||
"""
|
|
||||||
if kodi_type == v.KODI_TYPE_MOVIE:
|
|
||||||
self.cursor.execute('UPDATE movie SET c19=? WHERE idMovie=?',
|
|
||||||
(url, kodi_id))
|
|
||||||
else:
|
|
||||||
raise NotImplementedError(f'trailers for {kodi_type} not implemented')
|
|
||||||
|
|
||||||
@db.catch_operationalerrors
|
@db.catch_operationalerrors
|
||||||
def modify_streams(self, fileid, streamdetails=None, runtime=None):
|
def modify_streams(self, fileid, streamdetails=None, runtime=None):
|
||||||
"""
|
"""
|
||||||
|
@ -611,8 +584,6 @@ class KodiVideoDB(common.KodiDBBase):
|
||||||
identifier = 'idMovie'
|
identifier = 'idMovie'
|
||||||
elif kodi_type == v.KODI_TYPE_EPISODE:
|
elif kodi_type == v.KODI_TYPE_EPISODE:
|
||||||
identifier = 'idEpisode'
|
identifier = 'idEpisode'
|
||||||
else:
|
|
||||||
return
|
|
||||||
self.cursor.execute('SELECT idFile FROM %s WHERE %s = ? LIMIT 1'
|
self.cursor.execute('SELECT idFile FROM %s WHERE %s = ? LIMIT 1'
|
||||||
% (kodi_type, identifier), (kodi_id, ))
|
% (kodi_type, identifier), (kodi_id, ))
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
"""
|
"""
|
||||||
PKC Kodi Monitoring implementation
|
PKC Kodi Monitoring implementation
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
from json import loads
|
from json import loads
|
||||||
import copy
|
import copy
|
||||||
|
@ -17,7 +18,7 @@ from .kodi_db import KodiVideoDB
|
||||||
from . import kodi_db
|
from . import kodi_db
|
||||||
from .downloadutils import DownloadUtils as DU
|
from .downloadutils import DownloadUtils as DU
|
||||||
from . import utils, timing, plex_functions as PF
|
from . import utils, timing, plex_functions as PF
|
||||||
from . import json_rpc as js, playlist_func as PL
|
from . import json_rpc as js, playqueue as PQ, playlist_func as PL
|
||||||
from . import backgroundthread, app, variables as v
|
from . import backgroundthread, app, variables as v
|
||||||
from . import exceptions
|
from . import exceptions
|
||||||
|
|
||||||
|
@ -31,9 +32,11 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self._already_slept = False
|
self._already_slept = False
|
||||||
|
self._switched_to_plex_streams = True
|
||||||
xbmc.Monitor.__init__(self)
|
xbmc.Monitor.__init__(self)
|
||||||
for playerid in app.PLAYSTATE.player_states:
|
for playerid in app.PLAYSTATE.player_states:
|
||||||
app.PLAYSTATE.player_states[playerid] = copy.deepcopy(app.PLAYSTATE.template)
|
app.PLAYSTATE.player_states[playerid] = copy.deepcopy(app.PLAYSTATE.template)
|
||||||
|
app.PLAYSTATE.old_player_states[playerid] = copy.deepcopy(app.PLAYSTATE.template)
|
||||||
LOG.info("Kodi monitor started.")
|
LOG.info("Kodi monitor started.")
|
||||||
|
|
||||||
def onScanStarted(self, library):
|
def onScanStarted(self, library):
|
||||||
|
@ -59,7 +62,7 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
Called when a bunch of different stuff happens on the Kodi side
|
Called when a bunch of different stuff happens on the Kodi side
|
||||||
"""
|
"""
|
||||||
if data:
|
if data:
|
||||||
data = loads(data)
|
data = loads(data, 'utf-8')
|
||||||
LOG.debug("Method: %s Data: %s", method, data)
|
LOG.debug("Method: %s Data: %s", method, data)
|
||||||
|
|
||||||
if method == "Player.OnPlay":
|
if method == "Player.OnPlay":
|
||||||
|
@ -139,7 +142,7 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
u'playlistid': 1,
|
u'playlistid': 1,
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
playqueue = app.PLAYQUEUES[data['playlistid']]
|
playqueue = PQ.PLAYQUEUES[data['playlistid']]
|
||||||
if not playqueue.is_pkc_clear():
|
if not playqueue.is_pkc_clear():
|
||||||
playqueue.pkc_edit = True
|
playqueue.pkc_edit = True
|
||||||
playqueue.clear(kodi=False)
|
playqueue.clear(kodi=False)
|
||||||
|
@ -215,7 +218,7 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
play_info = json.loads(play_info)
|
play_info = json.loads(play_info)
|
||||||
app.APP.player.stop()
|
app.APP.player.stop()
|
||||||
handle = 'RunPlugin(%s)' % play_info.get('handle')
|
handle = 'RunPlugin(%s)' % play_info.get('handle')
|
||||||
xbmc.executebuiltin(handle)
|
xbmc.executebuiltin(handle.encode('utf-8'))
|
||||||
|
|
||||||
def PlayBackStart(self, data):
|
def PlayBackStart(self, data):
|
||||||
"""
|
"""
|
||||||
|
@ -255,7 +258,7 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
if not playerid:
|
if not playerid:
|
||||||
LOG.error('Coud not get playerid for data %s', data)
|
LOG.error('Coud not get playerid for data %s', data)
|
||||||
return
|
return
|
||||||
playqueue = app.PLAYQUEUES[playerid]
|
playqueue = PQ.PLAYQUEUES[playerid]
|
||||||
info = js.get_player_props(playerid)
|
info = js.get_player_props(playerid)
|
||||||
if playqueue.kodi_playlist_playback:
|
if playqueue.kodi_playlist_playback:
|
||||||
# Kodi will tell us the wrong position - of the playlist, not the
|
# Kodi will tell us the wrong position - of the playlist, not the
|
||||||
|
@ -308,7 +311,7 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
initialize = False
|
initialize = False
|
||||||
if initialize:
|
if initialize:
|
||||||
LOG.debug('Need to initialize Plex and PKC playqueue')
|
LOG.debug('Need to initialize Plex and PKC playqueue')
|
||||||
if not kodi_id or not kodi_type or not path:
|
if not kodi_id or not kodi_type:
|
||||||
kodi_id, kodi_type, path = self._json_item(playerid)
|
kodi_id, kodi_type, path = self._json_item(playerid)
|
||||||
plex_id, plex_type = self._get_ids(kodi_id, kodi_type, path)
|
plex_id, plex_type = self._get_ids(kodi_id, kodi_type, path)
|
||||||
if not plex_id:
|
if not plex_id:
|
||||||
|
@ -325,7 +328,7 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
container_key = None
|
container_key = None
|
||||||
if info['playlistid'] != -1:
|
if info['playlistid'] != -1:
|
||||||
# -1 is Kodi's answer if there is no playlist
|
# -1 is Kodi's answer if there is no playlist
|
||||||
container_key = app.PLAYQUEUES[playerid].id
|
container_key = PQ.PLAYQUEUES[playerid].id
|
||||||
if container_key is not None:
|
if container_key is not None:
|
||||||
container_key = '/playQueues/%s' % container_key
|
container_key = '/playQueues/%s' % container_key
|
||||||
elif plex_id is not None:
|
elif plex_id is not None:
|
||||||
|
@ -343,7 +346,6 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
# Mechanik for Plex skip intro feature
|
# Mechanik for Plex skip intro feature
|
||||||
if utils.settings('enableSkipIntro') == 'true':
|
if utils.settings('enableSkipIntro') == 'true':
|
||||||
status['intro_markers'] = item.api.intro_markers()
|
status['intro_markers'] = item.api.intro_markers()
|
||||||
item.playerid = playerid
|
|
||||||
# Remember the currently playing item
|
# Remember the currently playing item
|
||||||
app.PLAYSTATE.item = item
|
app.PLAYSTATE.item = item
|
||||||
# Remember that this player has been active
|
# Remember that this player has been active
|
||||||
|
@ -364,10 +366,7 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
# Workaround for the Kodi add-on Up Next
|
# Workaround for the Kodi add-on Up Next
|
||||||
if not app.SYNC.direct_paths:
|
if not app.SYNC.direct_paths:
|
||||||
_notify_upnext(item)
|
_notify_upnext(item)
|
||||||
|
self._switched_to_plex_streams = False
|
||||||
if playerid == v.KODI_VIDEO_PLAYER_ID:
|
|
||||||
task = InitVideoStreams(item)
|
|
||||||
backgroundthread.BGThreader.addTask(task)
|
|
||||||
|
|
||||||
def _on_av_change(self, data):
|
def _on_av_change(self, data):
|
||||||
"""
|
"""
|
||||||
|
@ -377,8 +376,28 @@ class KodiMonitor(xbmc.Monitor):
|
||||||
Example data as returned by Kodi:
|
Example data as returned by Kodi:
|
||||||
{'item': {'id': 5, 'type': 'movie'},
|
{'item': {'id': 5, 'type': 'movie'},
|
||||||
'player': {'playerid': 1, 'speed': 1}}
|
'player': {'playerid': 1, 'speed': 1}}
|
||||||
|
|
||||||
|
PICKING UP CHANGES ON SUBTITLES IS CURRENTLY BROKEN ON THE KODI SIDE!
|
||||||
|
Kodi subs will never change. Also see json_rpc.py
|
||||||
"""
|
"""
|
||||||
pass
|
playerid = data['player']['playerid']
|
||||||
|
if not playerid == v.KODI_VIDEO_PLAYER_ID:
|
||||||
|
# We're just messing with Kodi's videoplayer
|
||||||
|
return
|
||||||
|
item = app.PLAYSTATE.item
|
||||||
|
if item is None:
|
||||||
|
# Player might've quit
|
||||||
|
return
|
||||||
|
if not self._switched_to_plex_streams:
|
||||||
|
# We need to switch to the Plex streams ONCE upon playback start
|
||||||
|
# after onavchange has been fired
|
||||||
|
if utils.settings('audioStreamPick') == '0':
|
||||||
|
item.switch_to_plex_stream('audio')
|
||||||
|
if utils.settings('subtitleStreamPick') == '0':
|
||||||
|
item.switch_to_plex_stream('subtitle')
|
||||||
|
self._switched_to_plex_streams = True
|
||||||
|
else:
|
||||||
|
item.on_av_change(playerid)
|
||||||
|
|
||||||
|
|
||||||
def _playback_cleanup(ended=False):
|
def _playback_cleanup(ended=False):
|
||||||
|
@ -397,6 +416,8 @@ def _playback_cleanup(ended=False):
|
||||||
app.CONN.plex_transient_token = None
|
app.CONN.plex_transient_token = None
|
||||||
for playerid in app.PLAYSTATE.active_players:
|
for playerid in app.PLAYSTATE.active_players:
|
||||||
status = app.PLAYSTATE.player_states[playerid]
|
status = app.PLAYSTATE.player_states[playerid]
|
||||||
|
# Remember the last played item later
|
||||||
|
app.PLAYSTATE.old_player_states[playerid] = copy.deepcopy(status)
|
||||||
# Stop transcoding
|
# Stop transcoding
|
||||||
if status['playmethod'] == v.PLAYBACK_METHOD_TRANSCODE:
|
if status['playmethod'] == v.PLAYBACK_METHOD_TRANSCODE:
|
||||||
LOG.debug('Tell the PMS to stop transcoding')
|
LOG.debug('Tell the PMS to stop transcoding')
|
||||||
|
@ -605,11 +626,11 @@ def _notify_upnext(item):
|
||||||
}
|
}
|
||||||
_complete_artwork_keys(info[key])
|
_complete_artwork_keys(info[key])
|
||||||
info['play_info'] = {'handle': next_api.fullpath(force_addon=True)[0]}
|
info['play_info'] = {'handle': next_api.fullpath(force_addon=True)[0]}
|
||||||
sender = v.ADDON_ID
|
sender = v.ADDON_ID.encode('utf-8')
|
||||||
method = 'upnext_data'
|
method = 'upnext_data'.encode('utf-8')
|
||||||
data = binascii.hexlify(json.dumps(info).encode('utf-8'))
|
data = binascii.hexlify(json.dumps(info))
|
||||||
data = '\\"[\\"{0}\\"]\\"'.format(data)
|
data = '\\"[\\"{0}\\"]\\"'.format(data)
|
||||||
xbmc.executebuiltin(f'NotifyAll({sender}, {method}, {data})')
|
xbmc.executebuiltin('NotifyAll(%s, %s, %s)' % (sender, method, data))
|
||||||
|
|
||||||
|
|
||||||
def _videolibrary_onupdate(data):
|
def _videolibrary_onupdate(data):
|
||||||
|
@ -662,19 +683,3 @@ def _videolibrary_onupdate(data):
|
||||||
PF.scrobble(db_item['plex_id'], 'watched')
|
PF.scrobble(db_item['plex_id'], 'watched')
|
||||||
else:
|
else:
|
||||||
PF.scrobble(db_item['plex_id'], 'unwatched')
|
PF.scrobble(db_item['plex_id'], 'unwatched')
|
||||||
|
|
||||||
|
|
||||||
class InitVideoStreams(backgroundthread.Task):
|
|
||||||
"""
|
|
||||||
The Kodi player takes forever to initialize all streams Especially
|
|
||||||
subtitles, apparently. No way to tell when Kodi is done :-(
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, item):
|
|
||||||
self.item = item
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
if app.APP.monitor.waitForAbort(5):
|
|
||||||
return
|
|
||||||
self.item.init_streams()
|
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
|
||||||
from .full_sync import start
|
from .full_sync import start
|
||||||
from .websocket import store_websocket_message, process_websocket_messages, \
|
from .websocket import store_websocket_message, process_websocket_messages, \
|
||||||
WEBSOCKET_MESSAGES, PLAYSTATE_SESSIONS
|
WEBSOCKET_MESSAGES, PLAYSTATE_SESSIONS
|
||||||
from .common import update_kodi_library, PLAYLIST_SYNC_ENABLED
|
from .common import update_kodi_library, PLAYLIST_SYNC_ENABLED
|
||||||
from .additional_metadata import MetadataThread, ProcessMetadataTask
|
from .fanart import FanartThread, FanartTask
|
||||||
from .sections import force_full_sync, delete_files, clear_window_vars
|
from .sections import force_full_sync, delete_files, clear_window_vars
|
||||||
|
|
|
@ -1,112 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
from logging import getLogger
|
|
||||||
|
|
||||||
from . import additional_metadata_tmdb
|
|
||||||
from ..plex_db import PlexDB
|
|
||||||
from .. import backgroundthread, utils
|
|
||||||
from .. import variables as v, app
|
|
||||||
from ..exceptions import ProcessingNotDone
|
|
||||||
|
|
||||||
|
|
||||||
logger = getLogger('PLEX.sync.metadata')
|
|
||||||
|
|
||||||
BATCH_SIZE = 500
|
|
||||||
|
|
||||||
SUPPORTED_METADATA = {
|
|
||||||
v.PLEX_TYPE_MOVIE: (
|
|
||||||
('missing_trailers', additional_metadata_tmdb.process_trailers),
|
|
||||||
('missing_fanart', additional_metadata_tmdb.process_fanart),
|
|
||||||
),
|
|
||||||
v.PLEX_TYPE_SHOW: (
|
|
||||||
('missing_fanart', additional_metadata_tmdb.process_fanart),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def processing_is_activated(item_getter):
|
|
||||||
"""Checks the PKC settings whether processing is even activated."""
|
|
||||||
if item_getter == 'missing_fanart':
|
|
||||||
return utils.settings('FanartTV') == 'true'
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class MetadataThread(backgroundthread.KillableThread):
|
|
||||||
"""This will potentially take hours!"""
|
|
||||||
def __init__(self, callback, refresh=False):
|
|
||||||
self.callback = callback
|
|
||||||
self.refresh = refresh
|
|
||||||
super(MetadataThread, self).__init__()
|
|
||||||
|
|
||||||
def should_suspend(self):
|
|
||||||
return self._suspended or app.APP.is_playing_video
|
|
||||||
|
|
||||||
def _process_in_batches(self, item_getter, processor, plex_type):
|
|
||||||
offset = 0
|
|
||||||
while True:
|
|
||||||
with PlexDB() as plexdb:
|
|
||||||
# Keep DB connection open only for a short period of time!
|
|
||||||
if self.refresh:
|
|
||||||
# Simply grab every single item if we want to refresh
|
|
||||||
func = plexdb.every_plex_id
|
|
||||||
else:
|
|
||||||
func = getattr(plexdb, item_getter)
|
|
||||||
batch = list(func(plex_type, offset, BATCH_SIZE))
|
|
||||||
for plex_id in batch:
|
|
||||||
# Do the actual, time-consuming processing
|
|
||||||
if self.should_suspend() or self.should_cancel():
|
|
||||||
raise ProcessingNotDone()
|
|
||||||
processor(plex_id, plex_type, self.refresh)
|
|
||||||
if len(batch) < BATCH_SIZE:
|
|
||||||
break
|
|
||||||
offset += BATCH_SIZE
|
|
||||||
|
|
||||||
def _loop(self):
|
|
||||||
for plex_type in SUPPORTED_METADATA:
|
|
||||||
for item_getter, processor in SUPPORTED_METADATA[plex_type]:
|
|
||||||
if not processing_is_activated(item_getter):
|
|
||||||
continue
|
|
||||||
self._process_in_batches(item_getter, processor, plex_type)
|
|
||||||
|
|
||||||
def _run(self):
|
|
||||||
finished = False
|
|
||||||
while not finished:
|
|
||||||
try:
|
|
||||||
self._loop()
|
|
||||||
except ProcessingNotDone:
|
|
||||||
finished = False
|
|
||||||
else:
|
|
||||||
finished = True
|
|
||||||
if self.wait_while_suspended():
|
|
||||||
break
|
|
||||||
logger.info('MetadataThread finished completely: %s', finished)
|
|
||||||
self.callback(finished)
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
logger.info('Starting MetadataThread')
|
|
||||||
app.APP.register_metadata_thread(self)
|
|
||||||
try:
|
|
||||||
self._run()
|
|
||||||
except Exception:
|
|
||||||
utils.ERROR(notify=True)
|
|
||||||
finally:
|
|
||||||
app.APP.deregister_metadata_thread(self)
|
|
||||||
|
|
||||||
|
|
||||||
class ProcessMetadataTask(backgroundthread.Task):
|
|
||||||
"""This task will also be executed while library sync is suspended!"""
|
|
||||||
def setup(self, plex_id, plex_type, refresh=False):
|
|
||||||
self.plex_id = plex_id
|
|
||||||
self.plex_type = plex_type
|
|
||||||
self.refresh = refresh
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
if self.plex_type not in SUPPORTED_METADATA:
|
|
||||||
return
|
|
||||||
for item_getter, processor in SUPPORTED_METADATA[self.plex_type]:
|
|
||||||
if self.should_cancel():
|
|
||||||
# Just don't process this item at all. Next full sync will
|
|
||||||
# take care of it
|
|
||||||
return
|
|
||||||
if not processing_is_activated(item_getter):
|
|
||||||
continue
|
|
||||||
processor(self.plex_id, self.plex_type, self.refresh)
|
|
|
@ -1,159 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import xbmcvfs
|
|
||||||
import xbmcaddon
|
|
||||||
|
|
||||||
from ..plex_api import API
|
|
||||||
from ..kodi_db import KodiVideoDB
|
|
||||||
from ..plex_db import PlexDB
|
|
||||||
from .. import itemtypes, plex_functions as PF, utils, variables as v
|
|
||||||
|
|
||||||
# Import the existing Kodi add-on metadata.themoviedb.org.python
|
|
||||||
__ADDON__ = xbmcaddon.Addon(id='metadata.themoviedb.org.python')
|
|
||||||
__TEMP_PATH__ = os.path.join(__ADDON__.getAddonInfo('path'), 'python', 'lib')
|
|
||||||
__BASE__ = xbmcvfs.translatePath(__TEMP_PATH__)
|
|
||||||
sys.path.append(__BASE__)
|
|
||||||
import tmdbscraper.tmdb as tmdb
|
|
||||||
|
|
||||||
logger = logging.getLogger('PLEX.metadata_movies')
|
|
||||||
PREFER_KODI_COLLECTION_ART = utils.settings('PreferKodiCollectionArt') == 'false'
|
|
||||||
TMDB_SUPPORTED_IDS = ('tmdb', 'imdb')
|
|
||||||
|
|
||||||
|
|
||||||
def get_tmdb_scraper(settings):
|
|
||||||
language = settings.getSettingString('language')
|
|
||||||
certcountry = settings.getSettingString('tmdbcertcountry')
|
|
||||||
# Simplify this in the future
|
|
||||||
# See https://github.com/croneter/PlexKodiConnect/issues/1657
|
|
||||||
search_language = settings.getSettingString('searchlanguage')
|
|
||||||
if search_language:
|
|
||||||
return tmdb.TMDBMovieScraper(settings, language, certcountry, search_language)
|
|
||||||
else:
|
|
||||||
return tmdb.TMDBMovieScraper(settings, language, certcountry)
|
|
||||||
|
|
||||||
|
|
||||||
def get_tmdb_details(unique_ids):
|
|
||||||
settings = xbmcaddon.Addon(id='metadata.themoviedb.org.python')
|
|
||||||
details = get_tmdb_scraper(settings).get_details(unique_ids)
|
|
||||||
if 'error' in details:
|
|
||||||
logger.debug('Could not get tmdb details for %s. Error: %s',
|
|
||||||
unique_ids, details)
|
|
||||||
return details
|
|
||||||
|
|
||||||
|
|
||||||
def process_trailers(plex_id, plex_type, refresh=False):
|
|
||||||
done = True
|
|
||||||
try:
|
|
||||||
with PlexDB() as plexdb:
|
|
||||||
db_item = plexdb.item_by_id(plex_id, plex_type)
|
|
||||||
if not db_item:
|
|
||||||
logger.error('Could not get Kodi id for %s %s', plex_type, plex_id)
|
|
||||||
done = False
|
|
||||||
return
|
|
||||||
with KodiVideoDB() as kodidb:
|
|
||||||
trailer = kodidb.get_trailer(db_item['kodi_id'],
|
|
||||||
db_item['kodi_type'])
|
|
||||||
if trailer and (trailer.startswith(f'plugin://{v.ADDON_ID}') or
|
|
||||||
not refresh):
|
|
||||||
# No need to get a trailer
|
|
||||||
return
|
|
||||||
logger.debug('Processing trailer for %s %s', plex_type, plex_id)
|
|
||||||
xml = PF.GetPlexMetadata(plex_id)
|
|
||||||
try:
|
|
||||||
xml[0].attrib
|
|
||||||
except (TypeError, IndexError, AttributeError):
|
|
||||||
logger.warn('Could not get metadata for %s. Skipping that %s '
|
|
||||||
'for now', plex_id, plex_type)
|
|
||||||
done = False
|
|
||||||
return
|
|
||||||
api = API(xml[0])
|
|
||||||
if (not api.guids or
|
|
||||||
not [x for x in api.guids if x in TMDB_SUPPORTED_IDS]):
|
|
||||||
logger.debug('No unique ids found for %s %s, cannot get a trailer',
|
|
||||||
plex_type, api.title())
|
|
||||||
return
|
|
||||||
trailer = get_tmdb_details(api.guids)
|
|
||||||
trailer = trailer.get('info', {}).get('trailer')
|
|
||||||
if trailer:
|
|
||||||
with KodiVideoDB() as kodidb:
|
|
||||||
kodidb.set_trailer(db_item['kodi_id'],
|
|
||||||
db_item['kodi_type'],
|
|
||||||
trailer)
|
|
||||||
logger.debug('Found a new trailer for %s %s: %s',
|
|
||||||
plex_type, api.title(), trailer)
|
|
||||||
else:
|
|
||||||
logger.debug('No trailer found for %s %s', plex_type, api.title())
|
|
||||||
finally:
|
|
||||||
if done is True:
|
|
||||||
with PlexDB() as plexdb:
|
|
||||||
plexdb.set_trailer_synced(plex_id, plex_type)
|
|
||||||
|
|
||||||
|
|
||||||
def process_fanart(plex_id, plex_type, refresh=False):
|
|
||||||
"""
|
|
||||||
Will look for additional fanart for the plex_type item with plex_id.
|
|
||||||
Will check if we already got all artwork and only look if some are indeed
|
|
||||||
missing.
|
|
||||||
Will set the fanart_synced flag in the Plex DB if successful.
|
|
||||||
"""
|
|
||||||
done = True
|
|
||||||
try:
|
|
||||||
artworks = None
|
|
||||||
with PlexDB() as plexdb:
|
|
||||||
db_item = plexdb.item_by_id(plex_id, plex_type)
|
|
||||||
if not db_item:
|
|
||||||
logger.error('Could not get Kodi id for %s %s', plex_type, plex_id)
|
|
||||||
done = False
|
|
||||||
return
|
|
||||||
if not refresh:
|
|
||||||
with KodiVideoDB() as kodidb:
|
|
||||||
artworks = kodidb.get_art(db_item['kodi_id'],
|
|
||||||
db_item['kodi_type'])
|
|
||||||
# Check if we even need to get additional art
|
|
||||||
for key in v.ALL_KODI_ARTWORK:
|
|
||||||
if key not in artworks:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
xml = PF.GetPlexMetadata(plex_id)
|
|
||||||
try:
|
|
||||||
xml[0].attrib
|
|
||||||
except (TypeError, IndexError, AttributeError):
|
|
||||||
logger.debug('Could not get metadata for %s %s. Skipping that '
|
|
||||||
'item for now', plex_type, plex_id)
|
|
||||||
done = False
|
|
||||||
return
|
|
||||||
api = API(xml[0])
|
|
||||||
if artworks is None:
|
|
||||||
artworks = api.artwork()
|
|
||||||
# Get additional missing artwork from fanart artwork sites
|
|
||||||
artworks = api.fanart_artwork(artworks)
|
|
||||||
with itemtypes.ITEMTYPE_FROM_PLEXTYPE[plex_type](None) as context:
|
|
||||||
context.set_fanart(artworks,
|
|
||||||
db_item['kodi_id'],
|
|
||||||
db_item['kodi_type'])
|
|
||||||
# Additional fanart for sets/collections
|
|
||||||
if plex_type == v.PLEX_TYPE_MOVIE:
|
|
||||||
for _, setname in api.collections():
|
|
||||||
logger.debug('Getting artwork for movie set %s', setname)
|
|
||||||
with KodiVideoDB() as kodidb:
|
|
||||||
setid = kodidb.create_collection(setname)
|
|
||||||
external_set_artwork = api.set_artwork()
|
|
||||||
if external_set_artwork and PREFER_KODI_COLLECTION_ART:
|
|
||||||
kodi_artwork = api.artwork(kodi_id=setid,
|
|
||||||
kodi_type=v.KODI_TYPE_SET)
|
|
||||||
for art in kodi_artwork:
|
|
||||||
if art in external_set_artwork:
|
|
||||||
del external_set_artwork[art]
|
|
||||||
with itemtypes.Movie(None) as movie:
|
|
||||||
movie.kodidb.modify_artwork(external_set_artwork,
|
|
||||||
setid,
|
|
||||||
v.KODI_TYPE_SET)
|
|
||||||
finally:
|
|
||||||
if done is True:
|
|
||||||
with PlexDB() as plexdb:
|
|
||||||
plexdb.set_fanart_synced(plex_id, plex_type)
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import xbmc
|
import xbmc
|
||||||
|
|
||||||
|
|
154
resources/lib/library_sync/fanart.py
Normal file
154
resources/lib/library_sync/fanart.py
Normal file
|
@ -0,0 +1,154 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
from logging import getLogger
|
||||||
|
|
||||||
|
from ..plex_api import API
|
||||||
|
from ..plex_db import PlexDB
|
||||||
|
from ..kodi_db import KodiVideoDB
|
||||||
|
from .. import backgroundthread, utils
|
||||||
|
from .. import itemtypes, plex_functions as PF, variables as v, app
|
||||||
|
|
||||||
|
|
||||||
|
LOG = getLogger('PLEX.sync.fanart')
|
||||||
|
|
||||||
|
SUPPORTED_TYPES = (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW)
|
||||||
|
SYNC_FANART = (utils.settings('FanartTV') == 'true' and
|
||||||
|
utils.settings('usePlexArtwork') == 'true')
|
||||||
|
PREFER_KODI_COLLECTION_ART = utils.settings('PreferKodiCollectionArt') == 'false'
|
||||||
|
BATCH_SIZE = 500
|
||||||
|
|
||||||
|
|
||||||
|
class FanartThread(backgroundthread.KillableThread):
|
||||||
|
"""
|
||||||
|
This will potentially take hours!
|
||||||
|
"""
|
||||||
|
def __init__(self, callback, refresh=False):
|
||||||
|
self.callback = callback
|
||||||
|
self.refresh = refresh
|
||||||
|
super(FanartThread, self).__init__()
|
||||||
|
|
||||||
|
def should_suspend(self):
|
||||||
|
return self._suspended or app.APP.is_playing_video
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
LOG.info('Starting FanartThread')
|
||||||
|
app.APP.register_fanart_thread(self)
|
||||||
|
try:
|
||||||
|
self._run()
|
||||||
|
except Exception:
|
||||||
|
utils.ERROR(notify=True)
|
||||||
|
finally:
|
||||||
|
app.APP.deregister_fanart_thread(self)
|
||||||
|
|
||||||
|
def _loop(self):
|
||||||
|
for typus in SUPPORTED_TYPES:
|
||||||
|
offset = 0
|
||||||
|
while True:
|
||||||
|
with PlexDB() as plexdb:
|
||||||
|
# Keep DB connection open only for a short period of time!
|
||||||
|
if self.refresh:
|
||||||
|
batch = list(plexdb.every_plex_id(typus,
|
||||||
|
offset,
|
||||||
|
BATCH_SIZE))
|
||||||
|
else:
|
||||||
|
batch = list(plexdb.missing_fanart(typus,
|
||||||
|
offset,
|
||||||
|
BATCH_SIZE))
|
||||||
|
for plex_id in batch:
|
||||||
|
# Do the actual, time-consuming processing
|
||||||
|
if self.should_suspend() or self.should_cancel():
|
||||||
|
return False
|
||||||
|
process_fanart(plex_id, typus, self.refresh)
|
||||||
|
if len(batch) < BATCH_SIZE:
|
||||||
|
break
|
||||||
|
offset += BATCH_SIZE
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _run(self):
|
||||||
|
finished = False
|
||||||
|
while not finished:
|
||||||
|
finished = self._loop()
|
||||||
|
if self.wait_while_suspended():
|
||||||
|
break
|
||||||
|
LOG.info('FanartThread finished: %s', finished)
|
||||||
|
self.callback(finished)
|
||||||
|
|
||||||
|
|
||||||
|
class FanartTask(backgroundthread.Task):
|
||||||
|
"""
|
||||||
|
This task will also be executed while library sync is suspended!
|
||||||
|
"""
|
||||||
|
def setup(self, plex_id, plex_type, refresh=False):
|
||||||
|
self.plex_id = plex_id
|
||||||
|
self.plex_type = plex_type
|
||||||
|
self.refresh = refresh
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
process_fanart(self.plex_id, self.plex_type, self.refresh)
|
||||||
|
|
||||||
|
|
||||||
|
def process_fanart(plex_id, plex_type, refresh=False):
|
||||||
|
"""
|
||||||
|
Will look for additional fanart for the plex_type item with plex_id.
|
||||||
|
Will check if we already got all artwork and only look if some are indeed
|
||||||
|
missing.
|
||||||
|
Will set the fanart_synced flag in the Plex DB if successful.
|
||||||
|
"""
|
||||||
|
done = False
|
||||||
|
try:
|
||||||
|
artworks = None
|
||||||
|
with PlexDB() as plexdb:
|
||||||
|
db_item = plexdb.item_by_id(plex_id,
|
||||||
|
plex_type)
|
||||||
|
if not db_item:
|
||||||
|
LOG.error('Could not get Kodi id for plex id %s', plex_id)
|
||||||
|
return
|
||||||
|
if not refresh:
|
||||||
|
with KodiVideoDB() as kodidb:
|
||||||
|
artworks = kodidb.get_art(db_item['kodi_id'],
|
||||||
|
db_item['kodi_type'])
|
||||||
|
# Check if we even need to get additional art
|
||||||
|
for key in v.ALL_KODI_ARTWORK:
|
||||||
|
if key not in artworks:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
done = True
|
||||||
|
return
|
||||||
|
xml = PF.GetPlexMetadata(plex_id)
|
||||||
|
try:
|
||||||
|
xml[0].attrib
|
||||||
|
except (TypeError, IndexError, AttributeError):
|
||||||
|
LOG.warn('Could not get metadata for %s. Skipping that item '
|
||||||
|
'for now', plex_id)
|
||||||
|
return
|
||||||
|
api = API(xml[0])
|
||||||
|
if artworks is None:
|
||||||
|
artworks = api.artwork()
|
||||||
|
# Get additional missing artwork from fanart artwork sites
|
||||||
|
artworks = api.fanart_artwork(artworks)
|
||||||
|
with itemtypes.ITEMTYPE_FROM_PLEXTYPE[plex_type](None) as context:
|
||||||
|
context.set_fanart(artworks,
|
||||||
|
db_item['kodi_id'],
|
||||||
|
db_item['kodi_type'])
|
||||||
|
# Additional fanart for sets/collections
|
||||||
|
if plex_type == v.PLEX_TYPE_MOVIE:
|
||||||
|
for _, setname in api.collections():
|
||||||
|
LOG.debug('Getting artwork for movie set %s', setname)
|
||||||
|
with KodiVideoDB() as kodidb:
|
||||||
|
setid = kodidb.create_collection(setname)
|
||||||
|
external_set_artwork = api.set_artwork()
|
||||||
|
if external_set_artwork and PREFER_KODI_COLLECTION_ART:
|
||||||
|
kodi_artwork = api.artwork(kodi_id=setid,
|
||||||
|
kodi_type=v.KODI_TYPE_SET)
|
||||||
|
for art in kodi_artwork:
|
||||||
|
if art in external_set_artwork:
|
||||||
|
del external_set_artwork[art]
|
||||||
|
with itemtypes.Movie(None) as movie:
|
||||||
|
movie.kodidb.modify_artwork(external_set_artwork,
|
||||||
|
setid,
|
||||||
|
v.KODI_TYPE_SET)
|
||||||
|
done = True
|
||||||
|
finally:
|
||||||
|
if done is True:
|
||||||
|
with PlexDB() as plexdb:
|
||||||
|
plexdb.set_fanart_synced(plex_id, plex_type)
|
|
@ -1,6 +1,7 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
from queue import Full
|
from Queue import Full
|
||||||
|
|
||||||
from . import common, sections
|
from . import common, sections
|
||||||
from ..plex_db import PlexDB
|
from ..plex_db import PlexDB
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import queue
|
import Queue
|
||||||
|
|
||||||
import xbmcgui
|
import xbmcgui
|
||||||
|
|
||||||
|
@ -79,7 +80,7 @@ class FullSync(common.LibrarySyncMixin, bg.KillableThread):
|
||||||
@utils.log_time
|
@utils.log_time
|
||||||
def process_new_and_changed_items(self, section_queue, processing_queue):
|
def process_new_and_changed_items(self, section_queue, processing_queue):
|
||||||
LOG.debug('Start working')
|
LOG.debug('Start working')
|
||||||
get_metadata_queue = queue.Queue(maxsize=BACKLOG_QUEUE_SIZE)
|
get_metadata_queue = Queue.Queue(maxsize=BACKLOG_QUEUE_SIZE)
|
||||||
scanner_thread = FillMetadataQueue(self.repair,
|
scanner_thread = FillMetadataQueue(self.repair,
|
||||||
section_queue,
|
section_queue,
|
||||||
get_metadata_queue,
|
get_metadata_queue,
|
||||||
|
@ -192,7 +193,7 @@ class FullSync(common.LibrarySyncMixin, bg.KillableThread):
|
||||||
LOG.debug('Exiting threaded_get_generators')
|
LOG.debug('Exiting threaded_get_generators')
|
||||||
|
|
||||||
def full_library_sync(self):
|
def full_library_sync(self):
|
||||||
section_queue = queue.Queue()
|
section_queue = Queue.Queue()
|
||||||
processing_queue = bg.ProcessingQueue(maxsize=XML_QUEUE_SIZE)
|
processing_queue = bg.ProcessingQueue(maxsize=XML_QUEUE_SIZE)
|
||||||
kinds = [
|
kinds = [
|
||||||
(v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_MOVIE),
|
(v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_MOVIE),
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from . import common
|
from . import common
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import urllib.request, urllib.parse, urllib.error
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
|
import urllib
|
||||||
import copy
|
import copy
|
||||||
|
|
||||||
import xml.etree.ElementTree as etree
|
from ..utils import etree
|
||||||
from .. import variables as v, utils
|
from .. import variables as v, utils
|
||||||
|
|
||||||
ICON_PATH = 'special://home/addons/plugin.video.plexkodiconnect/icon.png'
|
ICON_PATH = 'special://home/addons/plugin.video.plexkodiconnect/icon.png'
|
||||||
|
@ -55,7 +56,7 @@ NODE_TYPES = {
|
||||||
{
|
{
|
||||||
'mode': 'browseplex',
|
'mode': 'browseplex',
|
||||||
'key': ('/library/sections/{self.section_id}&%s'
|
'key': ('/library/sections/{self.section_id}&%s'
|
||||||
% urllib.parse.urlencode({'sort': 'rating:desc'})),
|
% urllib.urlencode({'sort': 'rating:desc'})),
|
||||||
'section_id': '{self.section_id}'
|
'section_id': '{self.section_id}'
|
||||||
},
|
},
|
||||||
v.CONTENT_TYPE_MOVIE,
|
v.CONTENT_TYPE_MOVIE,
|
||||||
|
@ -83,7 +84,7 @@ NODE_TYPES = {
|
||||||
{
|
{
|
||||||
'mode': 'browseplex',
|
'mode': 'browseplex',
|
||||||
'key': ('/library/sections/{self.section_id}&%s'
|
'key': ('/library/sections/{self.section_id}&%s'
|
||||||
% urllib.parse.urlencode({'sort': 'random'})),
|
% urllib.urlencode({'sort': 'random'})),
|
||||||
'section_id': '{self.section_id}'
|
'section_id': '{self.section_id}'
|
||||||
},
|
},
|
||||||
v.CONTENT_TYPE_MOVIE,
|
v.CONTENT_TYPE_MOVIE,
|
||||||
|
@ -153,7 +154,7 @@ NODE_TYPES = {
|
||||||
{
|
{
|
||||||
'mode': 'browseplex',
|
'mode': 'browseplex',
|
||||||
'key': ('/library/sections/{self.section_id}&%s'
|
'key': ('/library/sections/{self.section_id}&%s'
|
||||||
% urllib.parse.urlencode({'sort': 'rating:desc'})),
|
% urllib.urlencode({'sort': 'rating:desc'})),
|
||||||
'section_id': '{self.section_id}'
|
'section_id': '{self.section_id}'
|
||||||
},
|
},
|
||||||
v.CONTENT_TYPE_SHOW,
|
v.CONTENT_TYPE_SHOW,
|
||||||
|
@ -181,7 +182,7 @@ NODE_TYPES = {
|
||||||
{
|
{
|
||||||
'mode': 'browseplex',
|
'mode': 'browseplex',
|
||||||
'key': ('/library/sections/{self.section_id}&%s'
|
'key': ('/library/sections/{self.section_id}&%s'
|
||||||
% urllib.parse.urlencode({'sort': 'random'})),
|
% urllib.urlencode({'sort': 'random'})),
|
||||||
'section_id': '{self.section_id}'
|
'section_id': '{self.section_id}'
|
||||||
},
|
},
|
||||||
v.CONTENT_TYPE_SHOW,
|
v.CONTENT_TYPE_SHOW,
|
||||||
|
@ -191,7 +192,7 @@ NODE_TYPES = {
|
||||||
{
|
{
|
||||||
'mode': 'browseplex',
|
'mode': 'browseplex',
|
||||||
'key': ('/library/sections/{self.section_id}/recentlyViewed&%s'
|
'key': ('/library/sections/{self.section_id}/recentlyViewed&%s'
|
||||||
% urllib.parse.urlencode({'type': v.PLEX_TYPE_NUMBER_FROM_PLEX_TYPE[v.PLEX_TYPE_EPISODE]})),
|
% urllib.urlencode({'type': v.PLEX_TYPE_NUMBER_FROM_PLEX_TYPE[v.PLEX_TYPE_EPISODE]})),
|
||||||
'section_id': '{self.section_id}'
|
'section_id': '{self.section_id}'
|
||||||
},
|
},
|
||||||
v.CONTENT_TYPE_EPISODE,
|
v.CONTENT_TYPE_EPISODE,
|
||||||
|
@ -235,7 +236,7 @@ def node_pms(section, node_name, args):
|
||||||
else:
|
else:
|
||||||
folder = False
|
folder = False
|
||||||
xml = etree.Element('node',
|
xml = etree.Element('node',
|
||||||
attrib={'order': str(section.order),
|
attrib={'order': unicode(section.order),
|
||||||
'type': 'folder' if folder else 'filter'})
|
'type': 'folder' if folder else 'filter'})
|
||||||
etree.SubElement(xml, 'label').text = node_name
|
etree.SubElement(xml, 'label').text = node_name
|
||||||
etree.SubElement(xml, 'icon').text = ICON_PATH
|
etree.SubElement(xml, 'icon').text = ICON_PATH
|
||||||
|
@ -248,7 +249,7 @@ def node_ondeck(section, node_name):
|
||||||
"""
|
"""
|
||||||
For movies only - returns in-progress movies sorted by last played
|
For movies only - returns in-progress movies sorted by last played
|
||||||
"""
|
"""
|
||||||
xml = etree.Element('node', attrib={'order': str(section.order),
|
xml = etree.Element('node', attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
@ -269,7 +270,7 @@ def node_ondeck(section, node_name):
|
||||||
|
|
||||||
def node_recent(section, node_name):
|
def node_recent(section, node_name):
|
||||||
xml = etree.Element('node',
|
xml = etree.Element('node',
|
||||||
attrib={'order': str(section.order),
|
attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
@ -298,7 +299,7 @@ def node_recent(section, node_name):
|
||||||
|
|
||||||
|
|
||||||
def node_all(section, node_name):
|
def node_all(section, node_name):
|
||||||
xml = etree.Element('node', attrib={'order': str(section.order),
|
xml = etree.Element('node', attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
@ -315,7 +316,7 @@ def node_all(section, node_name):
|
||||||
|
|
||||||
|
|
||||||
def node_recommended(section, node_name):
|
def node_recommended(section, node_name):
|
||||||
xml = etree.Element('node', attrib={'order': str(section.order),
|
xml = etree.Element('node', attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
@ -336,7 +337,7 @@ def node_recommended(section, node_name):
|
||||||
|
|
||||||
|
|
||||||
def node_genres(section, node_name):
|
def node_genres(section, node_name):
|
||||||
xml = etree.Element('node', attrib={'order': str(section.order),
|
xml = etree.Element('node', attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
@ -354,7 +355,7 @@ def node_genres(section, node_name):
|
||||||
|
|
||||||
|
|
||||||
def node_sets(section, node_name):
|
def node_sets(section, node_name):
|
||||||
xml = etree.Element('node', attrib={'order': str(section.order),
|
xml = etree.Element('node', attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
@ -373,7 +374,7 @@ def node_sets(section, node_name):
|
||||||
|
|
||||||
|
|
||||||
def node_random(section, node_name):
|
def node_random(section, node_name):
|
||||||
xml = etree.Element('node', attrib={'order': str(section.order),
|
xml = etree.Element('node', attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
@ -391,7 +392,7 @@ def node_random(section, node_name):
|
||||||
|
|
||||||
|
|
||||||
def node_lastplayed(section, node_name):
|
def node_lastplayed(section, node_name):
|
||||||
xml = etree.Element('node', attrib={'order': str(section.order),
|
xml = etree.Element('node', attrib={'order': unicode(section.order),
|
||||||
'type': 'filter'})
|
'type': 'filter'})
|
||||||
etree.SubElement(xml, 'match').text = 'all'
|
etree.SubElement(xml, 'match').text = 'all'
|
||||||
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from . import common, sections
|
from . import common, sections
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import copy
|
import copy
|
||||||
|
|
||||||
|
@ -9,7 +10,7 @@ from ..plex_api import API
|
||||||
from .. import kodi_db
|
from .. import kodi_db
|
||||||
from .. import itemtypes, path_ops
|
from .. import itemtypes, path_ops
|
||||||
from .. import plex_functions as PF, music, utils, variables as v, app
|
from .. import plex_functions as PF, music, utils, variables as v, app
|
||||||
import xml.etree.ElementTree as etree
|
from ..utils import etree
|
||||||
|
|
||||||
LOG = getLogger('PLEX.sync.sections')
|
LOG = getLogger('PLEX.sync.sections')
|
||||||
|
|
||||||
|
@ -20,10 +21,10 @@ SHOULD_CANCEL = None
|
||||||
LIBRARY_PATH = path_ops.translate_path('special://profile/library/video/')
|
LIBRARY_PATH = path_ops.translate_path('special://profile/library/video/')
|
||||||
# The video library might not yet exist for this user - create it
|
# The video library might not yet exist for this user - create it
|
||||||
if not path_ops.exists(LIBRARY_PATH):
|
if not path_ops.exists(LIBRARY_PATH):
|
||||||
path_ops.copytree(
|
path_ops.copy_tree(
|
||||||
src=path_ops.translate_path('special://xbmc/system/library/video'),
|
src=path_ops.translate_path('special://xbmc/system/library/video'),
|
||||||
dst=LIBRARY_PATH,
|
dst=LIBRARY_PATH,
|
||||||
copy_function=path_ops.shutil.copyfile)
|
preserve_mode=0) # dont copy permission bits so we have write access!
|
||||||
PLAYLISTS_PATH = path_ops.translate_path("special://profile/playlists/video/")
|
PLAYLISTS_PATH = path_ops.translate_path("special://profile/playlists/video/")
|
||||||
if not path_ops.exists(PLAYLISTS_PATH):
|
if not path_ops.exists(PLAYLISTS_PATH):
|
||||||
path_ops.makedirs(PLAYLISTS_PATH)
|
path_ops.makedirs(PLAYLISTS_PATH)
|
||||||
|
@ -95,16 +96,18 @@ class Section(object):
|
||||||
"'plex_type': '{self.plex_type}', "
|
"'plex_type': '{self.plex_type}', "
|
||||||
"'sync_to_kodi': {self.sync_to_kodi}, "
|
"'sync_to_kodi': {self.sync_to_kodi}, "
|
||||||
"'last_sync': {self.last_sync}"
|
"'last_sync': {self.last_sync}"
|
||||||
"}}").format(self=self)
|
"}}").format(self=self).encode('utf-8')
|
||||||
|
__str__ = __repr__
|
||||||
|
|
||||||
def __bool__(self):
|
def __nonzero__(self):
|
||||||
"""bool(Section) returns True if section_id, name and section_type are set."""
|
|
||||||
return (self.section_id is not None and
|
return (self.section_id is not None and
|
||||||
self.name is not None and
|
self.name is not None and
|
||||||
self.section_type is not None)
|
self.section_type is not None)
|
||||||
|
|
||||||
def __eq__(self, section):
|
def __eq__(self, section):
|
||||||
"""Sections compare equal if their section_id, name and plex_type (first prio) OR section_type (if there is no plex_type is set) compare equal.
|
"""
|
||||||
|
Sections compare equal if their section_id, name and plex_type (first
|
||||||
|
prio) OR section_type (if there is no plex_type is set) compare equal
|
||||||
"""
|
"""
|
||||||
if not isinstance(section, Section):
|
if not isinstance(section, Section):
|
||||||
return False
|
return False
|
||||||
|
@ -236,7 +239,7 @@ class Section(object):
|
||||||
{key: '{self.<Section attribute>}'}
|
{key: '{self.<Section attribute>}'}
|
||||||
"""
|
"""
|
||||||
args = copy.deepcopy(args)
|
args = copy.deepcopy(args)
|
||||||
for key, value in args.items():
|
for key, value in args.iteritems():
|
||||||
args[key] = value.format(self=self)
|
args[key] = value.format(self=self)
|
||||||
return utils.extend_url('plugin://%s' % v.ADDON_ID, args)
|
return utils.extend_url('plugin://%s' % v.ADDON_ID, args)
|
||||||
|
|
||||||
|
@ -265,7 +268,7 @@ class Section(object):
|
||||||
args = {
|
args = {
|
||||||
'mode': 'browseplex',
|
'mode': 'browseplex',
|
||||||
'key': '/library/sections/%s' % self.section_id,
|
'key': '/library/sections/%s' % self.section_id,
|
||||||
'section_id': str(self.section_id)
|
'section_id': unicode(self.section_id)
|
||||||
}
|
}
|
||||||
if not self.sync_to_kodi:
|
if not self.sync_to_kodi:
|
||||||
args['synched'] = 'false'
|
args['synched'] = 'false'
|
||||||
|
@ -276,7 +279,7 @@ class Section(object):
|
||||||
args = {
|
args = {
|
||||||
'mode': 'browseplex',
|
'mode': 'browseplex',
|
||||||
'key': '/library/sections/%s/all' % self.section_id,
|
'key': '/library/sections/%s/all' % self.section_id,
|
||||||
'section_id': str(self.section_id)
|
'section_id': unicode(self.section_id)
|
||||||
}
|
}
|
||||||
if not self.sync_to_kodi:
|
if not self.sync_to_kodi:
|
||||||
args['synched'] = 'false'
|
args['synched'] = 'false'
|
||||||
|
@ -318,7 +321,7 @@ class Section(object):
|
||||||
if not path_ops.exists(path_ops.path.join(self.path, 'index.xml')):
|
if not path_ops.exists(path_ops.path.join(self.path, 'index.xml')):
|
||||||
LOG.debug('Creating index.xml for section %s', self.name)
|
LOG.debug('Creating index.xml for section %s', self.name)
|
||||||
xml = etree.Element('node',
|
xml = etree.Element('node',
|
||||||
attrib={'order': str(self.order)})
|
attrib={'order': unicode(self.order)})
|
||||||
etree.SubElement(xml, 'label').text = self.name
|
etree.SubElement(xml, 'label').text = self.name
|
||||||
etree.SubElement(xml, 'icon').text = self.icon or nodes.ICON_PATH
|
etree.SubElement(xml, 'icon').text = self.icon or nodes.ICON_PATH
|
||||||
self._write_xml(xml, 'index.xml')
|
self._write_xml(xml, 'index.xml')
|
||||||
|
@ -711,7 +714,7 @@ def _clear_window_vars(index):
|
||||||
utils.window('%s.path' % node, clear=True)
|
utils.window('%s.path' % node, clear=True)
|
||||||
utils.window('%s.id' % node, clear=True)
|
utils.window('%s.id' % node, clear=True)
|
||||||
# Just clear everything here, ignore the plex_type
|
# Just clear everything here, ignore the plex_type
|
||||||
for typus in (x[0] for y in list(nodes.NODE_TYPES.values()) for x in y):
|
for typus in (x[0] for y in nodes.NODE_TYPES.values() for x in y):
|
||||||
for kind in WINDOW_ARGS:
|
for kind in WINDOW_ARGS:
|
||||||
node = 'Plex.nodes.%s.%s.%s' % (index, typus, kind)
|
node = 'Plex.nodes.%s.%s.%s' % (index, typus, kind)
|
||||||
utils.window(node, clear=True)
|
utils.window(node, clear=True)
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from .common import update_kodi_library, PLAYLIST_SYNC_ENABLED
|
from .common import update_kodi_library, PLAYLIST_SYNC_ENABLED
|
||||||
from .additional_metadata import ProcessMetadataTask
|
from .fanart import SYNC_FANART, FanartTask
|
||||||
from ..plex_api import API
|
from ..plex_api import API
|
||||||
from ..plex_db import PlexDB
|
from ..plex_db import PlexDB
|
||||||
from .. import kodi_db
|
from .. import kodi_db
|
||||||
|
@ -84,8 +85,9 @@ def process_websocket_messages():
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
successful, video, music = process_new_item_message(message)
|
successful, video, music = process_new_item_message(message)
|
||||||
if successful:
|
if (successful and SYNC_FANART and
|
||||||
task = ProcessMetadataTask()
|
message['plex_type'] in (v.PLEX_TYPE_MOVIE, v.PLEX_TYPE_SHOW)):
|
||||||
|
task = FanartTask()
|
||||||
task.setup(message['plex_id'],
|
task.setup(message['plex_id'],
|
||||||
message['plex_type'],
|
message['plex_type'],
|
||||||
refresh=False)
|
refresh=False)
|
||||||
|
@ -158,7 +160,7 @@ def store_timeline_message(data):
|
||||||
continue
|
continue
|
||||||
status = int(message['state'])
|
status = int(message['state'])
|
||||||
if typus == 'playlist' and PLAYLIST_SYNC_ENABLED:
|
if typus == 'playlist' and PLAYLIST_SYNC_ENABLED:
|
||||||
playlists.websocket(plex_id=str(message['itemID']),
|
playlists.websocket(plex_id=unicode(message['itemID']),
|
||||||
status=status)
|
status=status)
|
||||||
elif status == 9:
|
elif status == 9:
|
||||||
# Immediately and always process deletions (as the PMS will
|
# Immediately and always process deletions (as the PMS will
|
||||||
|
|
|
@ -1,17 +1,34 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
import logging
|
import logging
|
||||||
import xbmc
|
import xbmc
|
||||||
###############################################################################
|
###############################################################################
|
||||||
LEVELS = {
|
LEVELS = {
|
||||||
logging.ERROR: xbmc.LOGERROR,
|
logging.ERROR: xbmc.LOGERROR,
|
||||||
logging.WARNING: xbmc.LOGWARNING,
|
logging.WARNING: xbmc.LOGWARNING,
|
||||||
logging.INFO: xbmc.LOGINFO,
|
logging.INFO: xbmc.LOGNOTICE,
|
||||||
logging.DEBUG: xbmc.LOGDEBUG
|
logging.DEBUG: xbmc.LOGDEBUG
|
||||||
}
|
}
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def try_encode(uniString, encoding='utf-8'):
|
||||||
|
"""
|
||||||
|
Will try to encode uniString (in unicode) to encoding. This possibly
|
||||||
|
fails with e.g. Android TV's Python, which does not accept arguments for
|
||||||
|
string.encode()
|
||||||
|
"""
|
||||||
|
if isinstance(uniString, str):
|
||||||
|
# already encoded
|
||||||
|
return uniString
|
||||||
|
try:
|
||||||
|
uniString = uniString.encode(encoding, "ignore")
|
||||||
|
except TypeError:
|
||||||
|
uniString = uniString.encode()
|
||||||
|
return uniString
|
||||||
|
|
||||||
|
|
||||||
def config():
|
def config():
|
||||||
logger = logging.getLogger('PLEX')
|
logger = logging.getLogger('PLEX')
|
||||||
logger.addHandler(LogHandler())
|
logger.addHandler(LogHandler())
|
||||||
|
@ -21,7 +38,13 @@ def config():
|
||||||
class LogHandler(logging.StreamHandler):
|
class LogHandler(logging.StreamHandler):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
logging.StreamHandler.__init__(self)
|
logging.StreamHandler.__init__(self)
|
||||||
self.setFormatter(logging.Formatter(fmt='%(name)s: %(message)s'))
|
self.setFormatter(logging.Formatter(fmt=b"%(name)s: %(message)s"))
|
||||||
|
|
||||||
def emit(self, record):
|
def emit(self, record):
|
||||||
|
if isinstance(record.msg, unicode):
|
||||||
|
record.msg = record.msg.encode('utf-8')
|
||||||
|
try:
|
||||||
xbmc.log(self.format(record), level=LEVELS[record.levelno])
|
xbmc.log(self.format(record), level=LEVELS[record.levelno])
|
||||||
|
except UnicodeEncodeError:
|
||||||
|
xbmc.log(try_encode(self.format(record)),
|
||||||
|
level=LEVELS[record.levelno])
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
|
|
||||||
from . import variables as v
|
from . import variables as v
|
||||||
|
@ -22,13 +23,80 @@ def check_migration():
|
||||||
LOG.info('Already migrated to PKC version %s' % v.ADDON_VERSION)
|
LOG.info('Already migrated to PKC version %s' % v.ADDON_VERSION)
|
||||||
return
|
return
|
||||||
|
|
||||||
if not utils.compare_version(last_migration, '3.0.4'):
|
if not utils.compare_version(last_migration, '1.8.2'):
|
||||||
LOG.info('Migrating to version 3.0.4')
|
LOG.info('Migrating to version 1.8.1')
|
||||||
# Add an additional column `trailer_synced` in the Plex movie table
|
# Set the new PKC theMovieDB key
|
||||||
|
utils.settings('themoviedbAPIKey',
|
||||||
|
value='19c90103adb9e98f2172c6a6a3d85dc4')
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.0.25'):
|
||||||
|
LOG.info('Migrating to version 2.0.24')
|
||||||
|
# Need to re-connect with PMS to pick up on plex.direct URIs
|
||||||
|
utils.settings('ipaddress', value='')
|
||||||
|
utils.settings('port', value='')
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.7.6'):
|
||||||
|
LOG.info('Migrating to version 2.7.5')
|
||||||
|
from .library_sync.sections import delete_files
|
||||||
|
delete_files()
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.8.3'):
|
||||||
|
LOG.info('Migrating to version 2.8.2')
|
||||||
|
from .library_sync import sections
|
||||||
|
sections.clear_window_vars()
|
||||||
|
sections.delete_videonode_files()
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.8.7'):
|
||||||
|
LOG.info('Migrating to version 2.8.6')
|
||||||
|
# Need to delete the UNIQUE index that prevents creating several
|
||||||
|
# playlist entries with the same kodi_hash
|
||||||
from .plex_db import PlexDB
|
from .plex_db import PlexDB
|
||||||
with PlexDB() as plexdb:
|
with PlexDB() as plexdb:
|
||||||
query = 'ALTER TABLE movie ADD trailer_synced BOOLEAN'
|
plexdb.cursor.execute('DROP INDEX IF EXISTS ix_playlists_3')
|
||||||
plexdb.cursor.execute(query)
|
|
||||||
# Index will be automatically recreated on next PKC startup
|
# Index will be automatically recreated on next PKC startup
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.8.9'):
|
||||||
|
LOG.info('Migrating to version 2.8.8')
|
||||||
|
from .library_sync import sections
|
||||||
|
sections.clear_window_vars()
|
||||||
|
sections.delete_videonode_files()
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.9.3'):
|
||||||
|
LOG.info('Migrating to version 2.9.2')
|
||||||
|
# Re-sync all playlists to Kodi
|
||||||
|
from .playlists import remove_synced_playlists
|
||||||
|
remove_synced_playlists()
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.9.7'):
|
||||||
|
LOG.info('Migrating to version 2.9.6')
|
||||||
|
# Allow for a new "Direct Stream" setting (number 2), so shift the
|
||||||
|
# last setting for "force transcoding"
|
||||||
|
current_playback_type = utils.cast(int, utils.settings('playType')) or 0
|
||||||
|
if current_playback_type == 2:
|
||||||
|
current_playback_type = 3
|
||||||
|
utils.settings('playType', value=str(current_playback_type))
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.9.8'):
|
||||||
|
LOG.info('Migrating to version 2.9.7')
|
||||||
|
# Force-scan every single item in the library - seems like we could
|
||||||
|
# loose some recently added items otherwise
|
||||||
|
# Caused by 65a921c3cc2068c4a34990d07289e2958f515156
|
||||||
|
from . import library_sync
|
||||||
|
library_sync.force_full_sync()
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.11.3'):
|
||||||
|
LOG.info('Migrating to version 2.11.2')
|
||||||
|
# Re-sync all playlists to Kodi
|
||||||
|
from .playlists import remove_synced_playlists
|
||||||
|
remove_synced_playlists()
|
||||||
|
|
||||||
|
if not utils.compare_version(last_migration, '2.12.2'):
|
||||||
|
LOG.info('Migrating to version 2.12.1')
|
||||||
|
# Sign user out to make sure he needs to sign in again
|
||||||
|
utils.settings('username', value='')
|
||||||
|
utils.settings('userid', value='')
|
||||||
|
utils.settings('plex_restricteduser', value='')
|
||||||
|
utils.settings('accessToken', value='')
|
||||||
|
utils.settings('plexAvatar', value='')
|
||||||
|
|
||||||
utils.settings('last_migrated_PKC_version', value=v.ADDON_VERSION)
|
utils.settings('last_migrated_PKC_version', value=v.ADDON_VERSION)
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
from logging import getLogger
|
from logging import getLogger
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
|
|
@ -14,25 +14,41 @@ WARNING: os.path won't really work with smb paths (possibly others). For
|
||||||
xbmcvfs functions to work with smb paths, they need to be both in passwords.xml
|
xbmcvfs functions to work with smb paths, they need to be both in passwords.xml
|
||||||
as well as sources.xml
|
as well as sources.xml
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import, division, unicode_literals
|
||||||
import shutil
|
import shutil
|
||||||
import os
|
import os
|
||||||
from os import path # allows to use path_ops.path.join, for example
|
from os import path # allows to use path_ops.path.join, for example
|
||||||
|
from distutils import dir_util
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
import xbmc
|
||||||
import xbmcvfs
|
import xbmcvfs
|
||||||
|
|
||||||
|
from .tools import unicode_paths
|
||||||
|
|
||||||
# Kodi seems to encode in utf-8 in ALL cases (unlike e.g. the OS filesystem)
|
# Kodi seems to encode in utf-8 in ALL cases (unlike e.g. the OS filesystem)
|
||||||
KODI_ENCODING = 'utf-8'
|
KODI_ENCODING = 'utf-8'
|
||||||
REGEX_FILE_NUMBERING = re.compile(r'''_(\d\d)\.\w+$''')
|
REGEX_FILE_NUMBERING = re.compile(r'''_(\d\d)\.\w+$''')
|
||||||
|
|
||||||
|
|
||||||
def append_os_sep(path):
|
def encode_path(path):
|
||||||
"""
|
"""
|
||||||
Appends either a '\\' or '/' - IRRELEVANT of the host OS!! (os.path.join is
|
Filenames and paths are not necessarily utf-8 encoded. Use this function
|
||||||
dependant on the host OS)
|
instead of try_encode/trydecode if working with filenames and paths!
|
||||||
|
(os.walk only feeds on encoded paths. sys.getfilesystemencoding returns None
|
||||||
|
for Raspberry Pi)
|
||||||
"""
|
"""
|
||||||
separator = '/' if '/' in path else '\\'
|
return unicode_paths.encode(path)
|
||||||
return path if path.endswith(separator) else path + separator
|
|
||||||
|
|
||||||
|
def decode_path(path):
|
||||||
|
"""
|
||||||
|
Filenames and paths are not necessarily utf-8 encoded. Use this function
|
||||||
|
instead of try_encode/trydecode if working with filenames and paths!
|
||||||
|
(os.walk only feeds on encoded paths. sys.getfilesystemencoding returns None
|
||||||
|
for Raspberry Pi)
|
||||||
|
"""
|
||||||
|
return unicode_paths.decode(path)
|
||||||
|
|
||||||
|
|
||||||
def translate_path(path):
|
def translate_path(path):
|
||||||
|
@ -41,7 +57,8 @@ def translate_path(path):
|
||||||
e.g. Converts 'special://masterprofile/script_data'
|
e.g. Converts 'special://masterprofile/script_data'
|
||||||
-> '/home/user/XBMC/UserData/script_data' on Linux.
|
-> '/home/user/XBMC/UserData/script_data' on Linux.
|
||||||
"""
|
"""
|
||||||
return xbmcvfs.translatePath(path)
|
translated = xbmc.translatePath(path.encode(KODI_ENCODING, 'strict'))
|
||||||
|
return translated.decode(KODI_ENCODING, 'strict')
|
||||||
|
|
||||||
|
|
||||||
def exists(path):
|
def exists(path):
|
||||||
|
@ -49,7 +66,7 @@ def exists(path):
|
||||||
Returns True if the path [unicode] exists. Folders NEED a trailing slash or
|
Returns True if the path [unicode] exists. Folders NEED a trailing slash or
|
||||||
backslash!!
|
backslash!!
|
||||||
"""
|
"""
|
||||||
return xbmcvfs.exists(path) == 1
|
return xbmcvfs.exists(path.encode(KODI_ENCODING, 'strict')) == 1
|
||||||
|
|
||||||
|
|
||||||
def rmtree(path, *args, **kwargs):
|
def rmtree(path, *args, **kwargs):
|
||||||
|
@ -63,12 +80,12 @@ def rmtree(path, *args, **kwargs):
|
||||||
is false and onerror is None, an exception is raised.
|
is false and onerror is None, an exception is raised.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
return shutil.rmtree(path, *args, **kwargs)
|
return shutil.rmtree(encode_path(path), *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def copyfile(src, dst):
|
def copyfile(src, dst):
|
||||||
"""Copy data from src to dst"""
|
"""Copy data from src to dst"""
|
||||||
return shutil.copyfile(src, dst)
|
return shutil.copyfile(encode_path(src), encode_path(dst))
|
||||||
|
|
||||||
|
|
||||||
def makedirs(path, *args, **kwargs):
|
def makedirs(path, *args, **kwargs):
|
||||||
|
@ -78,7 +95,7 @@ def makedirs(path, *args, **kwargs):
|
||||||
mkdir, except that any intermediate path segment (not just the rightmost)
|
mkdir, except that any intermediate path segment (not just the rightmost)
|
||||||
will be created if it does not exist. This is recursive.
|
will be created if it does not exist. This is recursive.
|
||||||
"""
|
"""
|
||||||
return os.makedirs(path, *args, **kwargs)
|
return os.makedirs(encode_path(path), *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def remove(path):
|
def remove(path):
|
||||||
|
@ -90,7 +107,7 @@ def remove(path):
|
||||||
removed but the storage allocated to the file is not made available until
|
removed but the storage allocated to the file is not made available until
|
||||||
the original file is no longer in use.
|
the original file is no longer in use.
|
||||||
"""
|
"""
|
||||||
return os.remove(path)
|
return os.remove(encode_path(path))
|
||||||
|
|
||||||
|
|
||||||
def walk(top, topdown=True, onerror=None, followlinks=False):
|
def walk(top, topdown=True, onerror=None, followlinks=False):
|
||||||
|
@ -153,57 +170,40 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# Get all the results from os.walk and store them in a list
|
# Get all the results from os.walk and store them in a list
|
||||||
walker = list(os.walk(top,
|
walker = list(os.walk(encode_path(top),
|
||||||
topdown,
|
topdown,
|
||||||
onerror,
|
onerror,
|
||||||
followlinks))
|
followlinks))
|
||||||
for top, dirs, nondirs in walker:
|
for top, dirs, nondirs in walker:
|
||||||
yield (top,
|
yield (decode_path(top),
|
||||||
[x for x in dirs],
|
[decode_path(x) for x in dirs],
|
||||||
[x for x in nondirs])
|
[decode_path(x) for x in nondirs])
|
||||||
|
|
||||||
|
|
||||||
def copytree(src, dst, *args, **kwargs):
|
def copy_tree(src, dst, *args, **kwargs):
|
||||||
"""
|
"""
|
||||||
Recursively copy an entire directory tree rooted at src to a directory named
|
Copy an entire directory tree 'src' to a new location 'dst'.
|
||||||
dst and return the destination directory. dirs_exist_ok dictates whether to
|
|
||||||
raise an exception in case dst or any missing parent directory already
|
|
||||||
exists.
|
|
||||||
|
|
||||||
Permissions and times of directories are copied with copystat(), individual
|
Both 'src' and 'dst' must be directory names. If 'src' is not a
|
||||||
files are copied using copy2().
|
directory, raise DistutilsFileError. If 'dst' does not exist, it is
|
||||||
|
created with 'mkpath()'. The end result of the copy is that every
|
||||||
|
file in 'src' is copied to 'dst', and directories under 'src' are
|
||||||
|
recursively copied to 'dst'. Return the list of files that were
|
||||||
|
copied or might have been copied, using their output name. The
|
||||||
|
return value is unaffected by 'update' or 'dry_run': it is simply
|
||||||
|
the list of all files under 'src', with the names changed to be
|
||||||
|
under 'dst'.
|
||||||
|
|
||||||
If symlinks is true, symbolic links in the source tree are represented as
|
'preserve_mode' and 'preserve_times' are the same as for
|
||||||
symbolic links in the new tree and the metadata of the original links will
|
'copy_file'; note that they only apply to regular files, not to
|
||||||
be copied as far as the platform allows; if false or omitted, the contents
|
directories. If 'preserve_symlinks' is true, symlinks will be
|
||||||
and metadata of the linked files are copied to the new tree.
|
copied as symlinks (on platforms that support them!); otherwise
|
||||||
|
(the default), the destination of the symlink will be copied.
|
||||||
When symlinks is false, if the file pointed by the symlink doesn’t exist, an
|
'update' and 'verbose' are the same as for 'copy_file'.
|
||||||
exception will be added in the list of errors raised in an Error exception
|
|
||||||
at the end of the copy process. You can set the optional
|
|
||||||
ignore_dangling_symlinks flag to true if you want to silence this exception.
|
|
||||||
Notice that this option has no effect on platforms that don’t support
|
|
||||||
os.symlink().
|
|
||||||
|
|
||||||
If ignore is given, it must be a callable that will receive as its arguments
|
|
||||||
the directory being visited by copytree(), and a list of its contents, as
|
|
||||||
returned by os.listdir(). Since copytree() is called recursively, the ignore
|
|
||||||
callable will be called once for each directory that is copied. The callable
|
|
||||||
must return a sequence of directory and file names relative to the current
|
|
||||||
directory (i.e. a subset of the items in its second argument); these names
|
|
||||||
will then be ignored in the copy process. ignore_patterns() can be used to
|
|
||||||
create such a callable that ignores names based on glob-style patterns.
|
|
||||||
|
|
||||||
If exception(s) occur, an Error is raised with a list of reasons.
|
|
||||||
|
|
||||||
If copy_function is given, it must be a callable that will be used to copy
|
|
||||||
each file. It will be called with the source path and the destination path
|
|
||||||
as arguments. By default, copy2() is used, but any function that supports
|
|
||||||
the same signature (like copy()) can be used.
|
|
||||||
|
|
||||||
Raises an auditing event shutil.copytree with arguments src, dst.
|
|
||||||
"""
|
"""
|
||||||
return shutil.copytree(src, dst, *args, **kwargs)
|
src = encode_path(src)
|
||||||
|
dst = encode_path(dst)
|
||||||
|
return dir_util.copy_tree(src, dst, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def basename(path):
|
def basename(path):
|
||||||
|
|
|
@ -38,6 +38,7 @@ Functions
|
||||||
.. autofunction:: real_absolute_path
|
.. autofunction:: real_absolute_path
|
||||||
.. autofunction:: parent_dir_path
|
.. autofunction:: parent_dir_path
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
|
@ -71,7 +72,7 @@ def get_dir_walker(recursive, topdown=True, followlinks=False):
|
||||||
try:
|
try:
|
||||||
yield next(os.walk(path, topdown=topdown, followlinks=followlinks))
|
yield next(os.walk(path, topdown=topdown, followlinks=followlinks))
|
||||||
except NameError:
|
except NameError:
|
||||||
yield next(os.walk(path, topdown=topdown, followlinks=followlinks)) #IGNORE:E1101
|
yield os.walk(path, topdown=topdown, followlinks=followlinks).next() #IGNORE:E1101
|
||||||
return walk
|
return walk
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -33,6 +33,7 @@ Functions
|
||||||
.. autofunction:: match_path_against
|
.. autofunction:: match_path_against
|
||||||
.. autofunction:: filter_paths
|
.. autofunction:: filter_paths
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from fnmatch import fnmatch, fnmatchcase
|
from fnmatch import fnmatch, fnmatchcase
|
||||||
|
|
||||||
__all__ = ['match_path',
|
__all__ = ['match_path',
|
||||||
|
|
|
@ -1,35 +0,0 @@
|
||||||
"""
|
|
||||||
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .__version__ import __author__, __copyright__, __email__, __license__, __version__
|
|
||||||
from ._common import (
|
|
||||||
Platform,
|
|
||||||
ascii_symbols,
|
|
||||||
normalize_platform,
|
|
||||||
replace_unprintable_char,
|
|
||||||
unprintable_ascii_chars,
|
|
||||||
validate_null_string,
|
|
||||||
validate_pathtype,
|
|
||||||
)
|
|
||||||
from ._filename import FileNameSanitizer, is_valid_filename, sanitize_filename, validate_filename
|
|
||||||
from ._filepath import (
|
|
||||||
FilePathSanitizer,
|
|
||||||
is_valid_filepath,
|
|
||||||
sanitize_file_path,
|
|
||||||
sanitize_filepath,
|
|
||||||
validate_file_path,
|
|
||||||
validate_filepath,
|
|
||||||
)
|
|
||||||
from ._ltsv import sanitize_ltsv_label, validate_ltsv_label
|
|
||||||
from ._symbol import replace_symbol, validate_symbol
|
|
||||||
from .error import (
|
|
||||||
ErrorReason,
|
|
||||||
InvalidCharError,
|
|
||||||
InvalidLengthError,
|
|
||||||
InvalidReservedNameError,
|
|
||||||
NullNameError,
|
|
||||||
ReservedNameError,
|
|
||||||
ValidationError,
|
|
||||||
ValidReservedNameError,
|
|
||||||
)
|
|
|
@ -1,6 +0,0 @@
|
||||||
__author__ = "Tsuyoshi Hombashi"
|
|
||||||
__copyright__ = "Copyright 2016, {}".format(__author__)
|
|
||||||
__license__ = "MIT License"
|
|
||||||
__version__ = "2.4.1"
|
|
||||||
__maintainer__ = __author__
|
|
||||||
__email__ = "tsuyoshi.hombashi@gmail.com"
|
|
|
@ -1,137 +0,0 @@
|
||||||
"""
|
|
||||||
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
|
|
||||||
"""
|
|
||||||
|
|
||||||
import abc
|
|
||||||
import os
|
|
||||||
from typing import Optional, Tuple, cast
|
|
||||||
|
|
||||||
from ._common import PathType, Platform, PlatformType, normalize_platform, unprintable_ascii_chars
|
|
||||||
from .error import ReservedNameError, ValidationError
|
|
||||||
|
|
||||||
|
|
||||||
class BaseFile:
|
|
||||||
_INVALID_PATH_CHARS = "".join(unprintable_ascii_chars)
|
|
||||||
_INVALID_FILENAME_CHARS = _INVALID_PATH_CHARS + "/"
|
|
||||||
_INVALID_WIN_PATH_CHARS = _INVALID_PATH_CHARS + ':*?"<>|\t\n\r\x0b\x0c'
|
|
||||||
_INVALID_WIN_FILENAME_CHARS = _INVALID_FILENAME_CHARS + _INVALID_WIN_PATH_CHARS + "\\"
|
|
||||||
|
|
||||||
_ERROR_MSG_TEMPLATE = "invalid char found: invalids=({invalid}), value={value}"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def platform(self) -> Platform:
|
|
||||||
return self.__platform
|
|
||||||
|
|
||||||
@property
|
|
||||||
def reserved_keywords(self) -> Tuple[str, ...]:
|
|
||||||
return tuple()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def min_len(self) -> int:
|
|
||||||
return self._min_len
|
|
||||||
|
|
||||||
@property
|
|
||||||
def max_len(self) -> int:
|
|
||||||
return self._max_len
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
min_len: Optional[int],
|
|
||||||
max_len: Optional[int],
|
|
||||||
check_reserved: bool,
|
|
||||||
platform_max_len: Optional[int] = None,
|
|
||||||
platform: PlatformType = None,
|
|
||||||
) -> None:
|
|
||||||
self.__platform = normalize_platform(platform)
|
|
||||||
self._check_reserved = check_reserved
|
|
||||||
|
|
||||||
if min_len is None:
|
|
||||||
min_len = 1
|
|
||||||
self._min_len = max(min_len, 1)
|
|
||||||
|
|
||||||
if platform_max_len is None:
|
|
||||||
platform_max_len = self._get_default_max_path_len()
|
|
||||||
|
|
||||||
if max_len in [None, -1]:
|
|
||||||
self._max_len = platform_max_len
|
|
||||||
else:
|
|
||||||
self._max_len = cast(int, max_len)
|
|
||||||
|
|
||||||
self._max_len = min(self._max_len, platform_max_len)
|
|
||||||
self._validate_max_len()
|
|
||||||
|
|
||||||
def _is_posix(self) -> bool:
|
|
||||||
return self.platform == Platform.POSIX
|
|
||||||
|
|
||||||
def _is_universal(self) -> bool:
|
|
||||||
return self.platform == Platform.UNIVERSAL
|
|
||||||
|
|
||||||
def _is_linux(self) -> bool:
|
|
||||||
return self.platform == Platform.LINUX
|
|
||||||
|
|
||||||
def _is_windows(self) -> bool:
|
|
||||||
return self.platform == Platform.WINDOWS
|
|
||||||
|
|
||||||
def _is_macos(self) -> bool:
|
|
||||||
return self.platform == Platform.MACOS
|
|
||||||
|
|
||||||
def _validate_max_len(self) -> None:
|
|
||||||
if self.max_len < 1:
|
|
||||||
raise ValueError("max_len must be greater or equals to one")
|
|
||||||
|
|
||||||
if self.min_len > self.max_len:
|
|
||||||
raise ValueError("min_len must be lower than max_len")
|
|
||||||
|
|
||||||
def _get_default_max_path_len(self) -> int:
|
|
||||||
if self._is_linux():
|
|
||||||
return 4096
|
|
||||||
|
|
||||||
if self._is_windows():
|
|
||||||
return 260
|
|
||||||
|
|
||||||
if self._is_posix() or self._is_macos():
|
|
||||||
return 1024
|
|
||||||
|
|
||||||
return 260 # universal
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractValidator(BaseFile, metaclass=abc.ABCMeta):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def validate(self, value: PathType) -> None: # pragma: no cover
|
|
||||||
pass
|
|
||||||
|
|
||||||
def is_valid(self, value: PathType) -> bool:
|
|
||||||
try:
|
|
||||||
self.validate(value)
|
|
||||||
except (TypeError, ValidationError):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _is_reserved_keyword(self, value: str) -> bool:
|
|
||||||
return value in self.reserved_keywords
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractSanitizer(BaseFile, metaclass=abc.ABCMeta):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def sanitize(self, value: PathType, replacement_text: str = "") -> PathType: # pragma: no cover
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class BaseValidator(AbstractValidator):
|
|
||||||
def _validate_reserved_keywords(self, name: str) -> None:
|
|
||||||
if not self._check_reserved:
|
|
||||||
return
|
|
||||||
|
|
||||||
root_name = self.__extract_root_name(name)
|
|
||||||
if self._is_reserved_keyword(root_name.upper()):
|
|
||||||
raise ReservedNameError(
|
|
||||||
"'{}' is a reserved name".format(root_name),
|
|
||||||
reusable_name=False,
|
|
||||||
reserved_name=root_name,
|
|
||||||
platform=self.platform,
|
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def __extract_root_name(path: str) -> str:
|
|
||||||
return os.path.splitext(os.path.basename(path))[0]
|
|
|
@ -1,147 +0,0 @@
|
||||||
"""
|
|
||||||
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
|
|
||||||
"""
|
|
||||||
|
|
||||||
import enum
|
|
||||||
import platform
|
|
||||||
import re
|
|
||||||
import string
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, List, Optional, Union, cast
|
|
||||||
|
|
||||||
|
|
||||||
_re_whitespaces = re.compile(r"^[\s]+$")
|
|
||||||
|
|
||||||
|
|
||||||
@enum.unique
|
|
||||||
class Platform(enum.Enum):
|
|
||||||
POSIX = "POSIX"
|
|
||||||
UNIVERSAL = "universal"
|
|
||||||
|
|
||||||
LINUX = "Linux"
|
|
||||||
WINDOWS = "Windows"
|
|
||||||
MACOS = "macOS"
|
|
||||||
|
|
||||||
|
|
||||||
PathType = Union[str, Path]
|
|
||||||
PlatformType = Union[str, Platform, None]
|
|
||||||
|
|
||||||
|
|
||||||
def is_pathlike_obj(value: PathType) -> bool:
|
|
||||||
return isinstance(value, Path)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_pathtype(
|
|
||||||
text: PathType, allow_whitespaces: bool = False, error_msg: Optional[str] = None
|
|
||||||
) -> None:
|
|
||||||
from .error import ErrorReason, ValidationError
|
|
||||||
|
|
||||||
if _is_not_null_string(text) or is_pathlike_obj(text):
|
|
||||||
return
|
|
||||||
|
|
||||||
if allow_whitespaces and _re_whitespaces.search(str(text)):
|
|
||||||
return
|
|
||||||
|
|
||||||
if is_null_string(text):
|
|
||||||
if not error_msg:
|
|
||||||
error_msg = "the value must be a not empty"
|
|
||||||
|
|
||||||
raise ValidationError(
|
|
||||||
description=error_msg,
|
|
||||||
reason=ErrorReason.NULL_NAME,
|
|
||||||
)
|
|
||||||
|
|
||||||
raise TypeError("text must be a string: actual={}".format(type(text)))
|
|
||||||
|
|
||||||
|
|
||||||
def validate_null_string(text: PathType, error_msg: Optional[str] = None) -> None:
|
|
||||||
# Deprecated: alias to validate_pathtype
|
|
||||||
validate_pathtype(text, False, error_msg)
|
|
||||||
|
|
||||||
|
|
||||||
def preprocess(name: PathType) -> str:
|
|
||||||
if is_pathlike_obj(name):
|
|
||||||
name = str(name)
|
|
||||||
|
|
||||||
return cast(str, name)
|
|
||||||
|
|
||||||
|
|
||||||
def is_null_string(value: Any) -> bool:
|
|
||||||
if value is None:
|
|
||||||
return True
|
|
||||||
|
|
||||||
try:
|
|
||||||
return len(value.strip()) == 0
|
|
||||||
except AttributeError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _is_not_null_string(value: Any) -> bool:
|
|
||||||
try:
|
|
||||||
return len(value.strip()) > 0
|
|
||||||
except AttributeError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _get_unprintable_ascii_chars() -> List[str]:
|
|
||||||
return [chr(c) for c in range(128) if chr(c) not in string.printable]
|
|
||||||
|
|
||||||
|
|
||||||
unprintable_ascii_chars = tuple(_get_unprintable_ascii_chars())
|
|
||||||
|
|
||||||
|
|
||||||
def _get_ascii_symbols() -> List[str]:
|
|
||||||
symbol_list = [] # type: List[str]
|
|
||||||
|
|
||||||
for i in range(128):
|
|
||||||
c = chr(i)
|
|
||||||
|
|
||||||
if c in unprintable_ascii_chars or c in string.digits + string.ascii_letters:
|
|
||||||
continue
|
|
||||||
|
|
||||||
symbol_list.append(c)
|
|
||||||
|
|
||||||
return symbol_list
|
|
||||||
|
|
||||||
|
|
||||||
ascii_symbols = tuple(_get_ascii_symbols())
|
|
||||||
|
|
||||||
__RE_UNPRINTABLE_CHARS = re.compile(
|
|
||||||
"[{}]".format(re.escape("".join(unprintable_ascii_chars))), re.UNICODE
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def replace_unprintable_char(text: str, replacement_text: str = "") -> str:
|
|
||||||
try:
|
|
||||||
return __RE_UNPRINTABLE_CHARS.sub(replacement_text, text)
|
|
||||||
except (TypeError, AttributeError):
|
|
||||||
raise TypeError("text must be a string")
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_platform(name: PlatformType) -> Platform:
|
|
||||||
if isinstance(name, Platform):
|
|
||||||
return name
|
|
||||||
|
|
||||||
if name:
|
|
||||||
name = name.strip().lower()
|
|
||||||
|
|
||||||
if name == "posix":
|
|
||||||
return Platform.POSIX
|
|
||||||
|
|
||||||
if name == "auto":
|
|
||||||
name = platform.system().lower()
|
|
||||||
|
|
||||||
if name in ["linux"]:
|
|
||||||
return Platform.LINUX
|
|
||||||
|
|
||||||
if name and name.startswith("win"):
|
|
||||||
return Platform.WINDOWS
|
|
||||||
|
|
||||||
if name in ["mac", "macos", "darwin"]:
|
|
||||||
return Platform.MACOS
|
|
||||||
|
|
||||||
return Platform.UNIVERSAL
|
|
||||||
|
|
||||||
|
|
||||||
def findall_to_str(match: List[Any]) -> str:
|
|
||||||
return ", ".join([repr(text) for text in match])
|
|
|
@ -1,16 +0,0 @@
|
||||||
_NTFS_RESERVED_FILE_NAMES = (
|
|
||||||
"$Mft",
|
|
||||||
"$MftMirr",
|
|
||||||
"$LogFile",
|
|
||||||
"$Volume",
|
|
||||||
"$AttrDef",
|
|
||||||
"$Bitmap",
|
|
||||||
"$Boot",
|
|
||||||
"$BadClus",
|
|
||||||
"$Secure",
|
|
||||||
"$Upcase",
|
|
||||||
"$Extend",
|
|
||||||
"$Quota",
|
|
||||||
"$ObjId",
|
|
||||||
"$Reparse",
|
|
||||||
) # Only in root directory
|
|
|
@ -1,341 +0,0 @@
|
||||||
"""
|
|
||||||
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
|
|
||||||
"""
|
|
||||||
|
|
||||||
import itertools
|
|
||||||
import ntpath
|
|
||||||
import posixpath
|
|
||||||
import re
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional, Pattern, Tuple
|
|
||||||
|
|
||||||
from ._base import AbstractSanitizer, BaseFile, BaseValidator
|
|
||||||
from ._common import (
|
|
||||||
PathType,
|
|
||||||
Platform,
|
|
||||||
PlatformType,
|
|
||||||
findall_to_str,
|
|
||||||
is_pathlike_obj,
|
|
||||||
preprocess,
|
|
||||||
validate_pathtype,
|
|
||||||
)
|
|
||||||
from .error import ErrorReason, InvalidCharError, InvalidLengthError, ValidationError
|
|
||||||
|
|
||||||
|
|
||||||
_DEFAULT_MAX_FILENAME_LEN = 255
|
|
||||||
_RE_INVALID_FILENAME = re.compile(
|
|
||||||
"[{:s}]".format(re.escape(BaseFile._INVALID_FILENAME_CHARS)), re.UNICODE
|
|
||||||
)
|
|
||||||
_RE_INVALID_WIN_FILENAME = re.compile(
|
|
||||||
"[{:s}]".format(re.escape(BaseFile._INVALID_WIN_FILENAME_CHARS)), re.UNICODE
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class FileNameSanitizer(AbstractSanitizer):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
min_len: Optional[int] = 1,
|
|
||||||
max_len: Optional[int] = _DEFAULT_MAX_FILENAME_LEN,
|
|
||||||
platform: PlatformType = None,
|
|
||||||
check_reserved: bool = True,
|
|
||||||
) -> None:
|
|
||||||
super().__init__(
|
|
||||||
min_len=min_len,
|
|
||||||
max_len=max_len,
|
|
||||||
check_reserved=check_reserved,
|
|
||||||
platform_max_len=_DEFAULT_MAX_FILENAME_LEN,
|
|
||||||
platform=platform,
|
|
||||||
)
|
|
||||||
|
|
||||||
self._sanitize_regexp = self._get_sanitize_regexp()
|
|
||||||
self.__validator = FileNameValidator(
|
|
||||||
min_len=self.min_len,
|
|
||||||
max_len=self.max_len,
|
|
||||||
check_reserved=check_reserved,
|
|
||||||
platform=self.platform,
|
|
||||||
)
|
|
||||||
|
|
||||||
def sanitize(self, value: PathType, replacement_text: str = "") -> PathType:
|
|
||||||
try:
|
|
||||||
validate_pathtype(value, allow_whitespaces=True if not self._is_windows() else False)
|
|
||||||
except ValidationError as e:
|
|
||||||
if e.reason == ErrorReason.NULL_NAME:
|
|
||||||
return ""
|
|
||||||
raise
|
|
||||||
|
|
||||||
sanitized_filename = self._sanitize_regexp.sub(replacement_text, str(value))
|
|
||||||
sanitized_filename = sanitized_filename[: self.max_len]
|
|
||||||
|
|
||||||
try:
|
|
||||||
self.__validator.validate(sanitized_filename)
|
|
||||||
except ValidationError as e:
|
|
||||||
if e.reason == ErrorReason.RESERVED_NAME and e.reusable_name is False:
|
|
||||||
sanitized_filename = re.sub(
|
|
||||||
re.escape(e.reserved_name), "{}_".format(e.reserved_name), sanitized_filename
|
|
||||||
)
|
|
||||||
elif e.reason == ErrorReason.INVALID_CHARACTER:
|
|
||||||
if self.platform in [Platform.UNIVERSAL, Platform.WINDOWS]:
|
|
||||||
sanitized_filename = sanitized_filename.rstrip(" .")
|
|
||||||
|
|
||||||
if is_pathlike_obj(value):
|
|
||||||
return Path(sanitized_filename)
|
|
||||||
|
|
||||||
return sanitized_filename
|
|
||||||
|
|
||||||
def _get_sanitize_regexp(self) -> Pattern:
|
|
||||||
if self.platform in [Platform.UNIVERSAL, Platform.WINDOWS]:
|
|
||||||
return _RE_INVALID_WIN_FILENAME
|
|
||||||
|
|
||||||
return _RE_INVALID_FILENAME
|
|
||||||
|
|
||||||
|
|
||||||
class FileNameValidator(BaseValidator):
|
|
||||||
_WINDOWS_RESERVED_FILE_NAMES = ("CON", "PRN", "AUX", "CLOCK$", "NUL") + tuple(
|
|
||||||
"{:s}{:d}".format(name, num)
|
|
||||||
for name, num in itertools.product(("COM", "LPT"), range(1, 10))
|
|
||||||
)
|
|
||||||
_MACOS_RESERVED_FILE_NAMES = (":",)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def reserved_keywords(self) -> Tuple[str, ...]:
|
|
||||||
common_keywords = super().reserved_keywords
|
|
||||||
|
|
||||||
if self._is_universal():
|
|
||||||
return (
|
|
||||||
common_keywords
|
|
||||||
+ self._WINDOWS_RESERVED_FILE_NAMES
|
|
||||||
+ self._MACOS_RESERVED_FILE_NAMES
|
|
||||||
)
|
|
||||||
|
|
||||||
if self._is_windows():
|
|
||||||
return common_keywords + self._WINDOWS_RESERVED_FILE_NAMES
|
|
||||||
|
|
||||||
if self._is_posix() or self._is_macos():
|
|
||||||
return common_keywords + self._MACOS_RESERVED_FILE_NAMES
|
|
||||||
|
|
||||||
return common_keywords
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
min_len: Optional[int] = 1,
|
|
||||||
max_len: Optional[int] = _DEFAULT_MAX_FILENAME_LEN,
|
|
||||||
platform: PlatformType = None,
|
|
||||||
check_reserved: bool = True,
|
|
||||||
) -> None:
|
|
||||||
super().__init__(
|
|
||||||
min_len=min_len,
|
|
||||||
max_len=max_len,
|
|
||||||
check_reserved=check_reserved,
|
|
||||||
platform_max_len=_DEFAULT_MAX_FILENAME_LEN,
|
|
||||||
platform=platform,
|
|
||||||
)
|
|
||||||
|
|
||||||
def validate(self, value: PathType) -> None:
|
|
||||||
validate_pathtype(
|
|
||||||
value,
|
|
||||||
allow_whitespaces=False
|
|
||||||
if self.platform in [Platform.UNIVERSAL, Platform.WINDOWS]
|
|
||||||
else True,
|
|
||||||
)
|
|
||||||
|
|
||||||
unicode_filename = preprocess(value)
|
|
||||||
value_len = len(unicode_filename)
|
|
||||||
|
|
||||||
self.validate_abspath(unicode_filename)
|
|
||||||
|
|
||||||
if value_len > self.max_len:
|
|
||||||
raise InvalidLengthError(
|
|
||||||
"filename is too long: expected<={:d}, actual={:d}".format(self.max_len, value_len)
|
|
||||||
)
|
|
||||||
if value_len < self.min_len:
|
|
||||||
raise InvalidLengthError(
|
|
||||||
"filename is too short: expected>={:d}, actual={:d}".format(self.min_len, value_len)
|
|
||||||
)
|
|
||||||
|
|
||||||
self._validate_reserved_keywords(unicode_filename)
|
|
||||||
|
|
||||||
if self._is_universal() or self._is_windows():
|
|
||||||
self.__validate_win_filename(unicode_filename)
|
|
||||||
else:
|
|
||||||
self.__validate_unix_filename(unicode_filename)
|
|
||||||
|
|
||||||
def validate_abspath(self, value: str) -> None:
|
|
||||||
err = ValidationError(
|
|
||||||
description="found an absolute path ({}), expected a filename".format(value),
|
|
||||||
platform=self.platform,
|
|
||||||
reason=ErrorReason.FOUND_ABS_PATH,
|
|
||||||
)
|
|
||||||
|
|
||||||
if self._is_universal() or self._is_windows():
|
|
||||||
if ntpath.isabs(value):
|
|
||||||
raise err
|
|
||||||
|
|
||||||
if posixpath.isabs(value):
|
|
||||||
raise err
|
|
||||||
|
|
||||||
def __validate_unix_filename(self, unicode_filename: str) -> None:
|
|
||||||
match = _RE_INVALID_FILENAME.findall(unicode_filename)
|
|
||||||
if match:
|
|
||||||
raise InvalidCharError(
|
|
||||||
self._ERROR_MSG_TEMPLATE.format(
|
|
||||||
invalid=findall_to_str(match), value=repr(unicode_filename)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
def __validate_win_filename(self, unicode_filename: str) -> None:
|
|
||||||
match = _RE_INVALID_WIN_FILENAME.findall(unicode_filename)
|
|
||||||
if match:
|
|
||||||
raise InvalidCharError(
|
|
||||||
self._ERROR_MSG_TEMPLATE.format(
|
|
||||||
invalid=findall_to_str(match), value=repr(unicode_filename)
|
|
||||||
),
|
|
||||||
platform=Platform.WINDOWS,
|
|
||||||
)
|
|
||||||
|
|
||||||
if unicode_filename in (".", ".."):
|
|
||||||
return
|
|
||||||
|
|
||||||
if unicode_filename[-1] in (" ", "."):
|
|
||||||
raise InvalidCharError(
|
|
||||||
self._ERROR_MSG_TEMPLATE.format(
|
|
||||||
invalid=re.escape(unicode_filename[-1]), value=repr(unicode_filename)
|
|
||||||
),
|
|
||||||
platform=Platform.WINDOWS,
|
|
||||||
description="Do not end a file or directory name with a space or a period",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_filename(
|
|
||||||
filename: PathType,
|
|
||||||
platform: Optional[str] = None,
|
|
||||||
min_len: int = 1,
|
|
||||||
max_len: int = _DEFAULT_MAX_FILENAME_LEN,
|
|
||||||
check_reserved: bool = True,
|
|
||||||
) -> None:
|
|
||||||
"""Verifying whether the ``filename`` is a valid file name or not.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
filename:
|
|
||||||
Filename to validate.
|
|
||||||
platform:
|
|
||||||
Target platform name of the filename.
|
|
||||||
|
|
||||||
.. include:: platform.txt
|
|
||||||
min_len:
|
|
||||||
Minimum length of the ``filename``. The value must be greater or equal to one.
|
|
||||||
Defaults to ``1``.
|
|
||||||
max_len:
|
|
||||||
Maximum length of the ``filename``. The value must be lower than:
|
|
||||||
|
|
||||||
- ``Linux``: 4096
|
|
||||||
- ``macOS``: 1024
|
|
||||||
- ``Windows``: 260
|
|
||||||
- ``universal``: 260
|
|
||||||
|
|
||||||
Defaults to ``255``.
|
|
||||||
check_reserved:
|
|
||||||
If |True|, check reserved names of the ``platform``.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValidationError (ErrorReason.INVALID_LENGTH):
|
|
||||||
If the ``filename`` is longer than ``max_len`` characters.
|
|
||||||
ValidationError (ErrorReason.INVALID_CHARACTER):
|
|
||||||
If the ``filename`` includes invalid character(s) for a filename:
|
|
||||||
|invalid_filename_chars|.
|
|
||||||
The following characters are also invalid for Windows platform:
|
|
||||||
|invalid_win_filename_chars|.
|
|
||||||
ValidationError (ErrorReason.RESERVED_NAME):
|
|
||||||
If the ``filename`` equals reserved name by OS.
|
|
||||||
Windows reserved name is as follows:
|
|
||||||
``"CON"``, ``"PRN"``, ``"AUX"``, ``"NUL"``, ``"COM[1-9]"``, ``"LPT[1-9]"``.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
:ref:`example-validate-filename`
|
|
||||||
|
|
||||||
See Also:
|
|
||||||
`Naming Files, Paths, and Namespaces - Win32 apps | Microsoft Docs
|
|
||||||
<https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file>`__
|
|
||||||
"""
|
|
||||||
|
|
||||||
FileNameValidator(
|
|
||||||
platform=platform, min_len=min_len, max_len=max_len, check_reserved=check_reserved
|
|
||||||
).validate(filename)
|
|
||||||
|
|
||||||
|
|
||||||
def is_valid_filename(
|
|
||||||
filename: PathType,
|
|
||||||
platform: Optional[str] = None,
|
|
||||||
min_len: int = 1,
|
|
||||||
max_len: Optional[int] = None,
|
|
||||||
check_reserved: bool = True,
|
|
||||||
) -> bool:
|
|
||||||
"""Check whether the ``filename`` is a valid name or not.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
filename:
|
|
||||||
A filename to be checked.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
:ref:`example-is-valid-filename`
|
|
||||||
|
|
||||||
See Also:
|
|
||||||
:py:func:`.validate_filename()`
|
|
||||||
"""
|
|
||||||
|
|
||||||
return FileNameValidator(
|
|
||||||
platform=platform, min_len=min_len, max_len=max_len, check_reserved=check_reserved
|
|
||||||
).is_valid(filename)
|
|
||||||
|
|
||||||
|
|
||||||
def sanitize_filename(
|
|
||||||
filename: PathType,
|
|
||||||
replacement_text: str = "",
|
|
||||||
platform: Optional[str] = None,
|
|
||||||
max_len: Optional[int] = _DEFAULT_MAX_FILENAME_LEN,
|
|
||||||
check_reserved: bool = True,
|
|
||||||
) -> PathType:
|
|
||||||
"""Make a valid filename from a string.
|
|
||||||
|
|
||||||
To make a valid filename the function does:
|
|
||||||
|
|
||||||
- Replace invalid characters as file names included in the ``filename``
|
|
||||||
with the ``replacement_text``. Invalid characters are:
|
|
||||||
|
|
||||||
- unprintable characters
|
|
||||||
- |invalid_filename_chars|
|
|
||||||
- for Windows (or universal) only: |invalid_win_filename_chars|
|
|
||||||
|
|
||||||
- Append underscore (``"_"``) at the tail of the name if sanitized name
|
|
||||||
is one of the reserved names by operating systems
|
|
||||||
(only when ``check_reserved`` is |True|).
|
|
||||||
|
|
||||||
Args:
|
|
||||||
filename: Filename to sanitize.
|
|
||||||
replacement_text:
|
|
||||||
Replacement text for invalid characters. Defaults to ``""``.
|
|
||||||
platform:
|
|
||||||
Target platform name of the filename.
|
|
||||||
|
|
||||||
.. include:: platform.txt
|
|
||||||
max_len:
|
|
||||||
Maximum length of the ``filename`` length. Truncate the name length if
|
|
||||||
the ``filename`` length exceeds this value.
|
|
||||||
Defaults to ``255``.
|
|
||||||
check_reserved:
|
|
||||||
If |True|, sanitize reserved names of the ``platform``.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Same type as the ``filename`` (str or PathLike object):
|
|
||||||
Sanitized filename.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValueError:
|
|
||||||
If the ``filename`` is an invalid filename.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
:ref:`example-sanitize-filename`
|
|
||||||
"""
|
|
||||||
|
|
||||||
return FileNameSanitizer(
|
|
||||||
platform=platform, max_len=max_len, check_reserved=check_reserved
|
|
||||||
).sanitize(filename, replacement_text)
|
|
|
@ -1,427 +0,0 @@
|
||||||
"""
|
|
||||||
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
|
|
||||||
"""
|
|
||||||
|
|
||||||
import ntpath
|
|
||||||
import os.path
|
|
||||||
import posixpath
|
|
||||||
import re
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List, Optional, Pattern, Tuple # noqa
|
|
||||||
|
|
||||||
from ._base import AbstractSanitizer, BaseFile, BaseValidator
|
|
||||||
from ._common import (
|
|
||||||
PathType,
|
|
||||||
Platform,
|
|
||||||
PlatformType,
|
|
||||||
findall_to_str,
|
|
||||||
is_pathlike_obj,
|
|
||||||
preprocess,
|
|
||||||
validate_pathtype,
|
|
||||||
)
|
|
||||||
from ._const import _NTFS_RESERVED_FILE_NAMES
|
|
||||||
from ._filename import FileNameSanitizer, FileNameValidator
|
|
||||||
from .error import (
|
|
||||||
ErrorReason,
|
|
||||||
InvalidCharError,
|
|
||||||
InvalidLengthError,
|
|
||||||
ReservedNameError,
|
|
||||||
ValidationError,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
_RE_INVALID_PATH = re.compile("[{:s}]".format(re.escape(BaseFile._INVALID_PATH_CHARS)), re.UNICODE)
|
|
||||||
_RE_INVALID_WIN_PATH = re.compile(
|
|
||||||
"[{:s}]".format(re.escape(BaseFile._INVALID_WIN_PATH_CHARS)), re.UNICODE
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class FilePathSanitizer(AbstractSanitizer):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
min_len: Optional[int] = 1,
|
|
||||||
max_len: Optional[int] = None,
|
|
||||||
platform: PlatformType = None,
|
|
||||||
check_reserved: bool = True,
|
|
||||||
normalize: bool = True,
|
|
||||||
) -> None:
|
|
||||||
super().__init__(
|
|
||||||
min_len=min_len,
|
|
||||||
max_len=max_len,
|
|
||||||
check_reserved=check_reserved,
|
|
||||||
platform=platform,
|
|
||||||
)
|
|
||||||
|
|
||||||
self._sanitize_regexp = self._get_sanitize_regexp()
|
|
||||||
self.__fpath_validator = FilePathValidator(
|
|
||||||
min_len=self.min_len,
|
|
||||||
max_len=self.max_len,
|
|
||||||
check_reserved=check_reserved,
|
|
||||||
platform=self.platform,
|
|
||||||
)
|
|
||||||
self.__fname_sanitizer = FileNameSanitizer(
|
|
||||||
min_len=self.min_len,
|
|
||||||
max_len=self.max_len,
|
|
||||||
check_reserved=check_reserved,
|
|
||||||
platform=self.platform,
|
|
||||||
)
|
|
||||||
self.__normalize = normalize
|
|
||||||
|
|
||||||
if self._is_universal() or self._is_windows():
|
|
||||||
self.__split_drive = ntpath.splitdrive
|
|
||||||
else:
|
|
||||||
self.__split_drive = posixpath.splitdrive
|
|
||||||
|
|
||||||
def sanitize(self, value: PathType, replacement_text: str = "") -> PathType:
|
|
||||||
if not value:
|
|
||||||
return ""
|
|
||||||
|
|
||||||
self.__fpath_validator.validate_abspath(value)
|
|
||||||
|
|
||||||
unicode_filepath = preprocess(value)
|
|
||||||
|
|
||||||
if self.__normalize:
|
|
||||||
unicode_filepath = os.path.normpath(unicode_filepath)
|
|
||||||
|
|
||||||
drive, unicode_filepath = self.__split_drive(unicode_filepath)
|
|
||||||
sanitized_path = self._sanitize_regexp.sub(replacement_text, unicode_filepath)
|
|
||||||
if self._is_windows():
|
|
||||||
path_separator = "\\"
|
|
||||||
else:
|
|
||||||
path_separator = "/"
|
|
||||||
|
|
||||||
sanitized_entries = [] # type: List[str]
|
|
||||||
if drive:
|
|
||||||
sanitized_entries.append(drive)
|
|
||||||
for entry in sanitized_path.replace("\\", "/").split("/"):
|
|
||||||
if entry in _NTFS_RESERVED_FILE_NAMES:
|
|
||||||
sanitized_entries.append("{}_".format(entry))
|
|
||||||
continue
|
|
||||||
|
|
||||||
sanitized_entry = str(self.__fname_sanitizer.sanitize(entry))
|
|
||||||
if not sanitized_entry:
|
|
||||||
if not sanitized_entries:
|
|
||||||
sanitized_entries.append("")
|
|
||||||
continue
|
|
||||||
|
|
||||||
sanitized_entries.append(sanitized_entry)
|
|
||||||
|
|
||||||
sanitized_path = path_separator.join(sanitized_entries)
|
|
||||||
|
|
||||||
if is_pathlike_obj(value):
|
|
||||||
return Path(sanitized_path)
|
|
||||||
|
|
||||||
return sanitized_path
|
|
||||||
|
|
||||||
def _get_sanitize_regexp(self) -> Pattern:
|
|
||||||
if self.platform in [Platform.UNIVERSAL, Platform.WINDOWS]:
|
|
||||||
return _RE_INVALID_WIN_PATH
|
|
||||||
|
|
||||||
return _RE_INVALID_PATH
|
|
||||||
|
|
||||||
|
|
||||||
class FilePathValidator(BaseValidator):
|
|
||||||
_RE_NTFS_RESERVED = re.compile(
|
|
||||||
"|".join("^/{}$".format(re.escape(pattern)) for pattern in _NTFS_RESERVED_FILE_NAMES),
|
|
||||||
re.IGNORECASE,
|
|
||||||
)
|
|
||||||
_MACOS_RESERVED_FILE_PATHS = ("/", ":")
|
|
||||||
|
|
||||||
@property
|
|
||||||
def reserved_keywords(self) -> Tuple[str, ...]:
|
|
||||||
common_keywords = super().reserved_keywords
|
|
||||||
|
|
||||||
if any([self._is_universal(), self._is_posix(), self._is_macos()]):
|
|
||||||
return common_keywords + self._MACOS_RESERVED_FILE_PATHS
|
|
||||||
|
|
||||||
if self._is_linux():
|
|
||||||
return common_keywords + ("/",)
|
|
||||||
|
|
||||||
return common_keywords
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
min_len: Optional[int] = 1,
|
|
||||||
max_len: Optional[int] = None,
|
|
||||||
platform: PlatformType = None,
|
|
||||||
check_reserved: bool = True,
|
|
||||||
) -> None:
|
|
||||||
super().__init__(
|
|
||||||
min_len=min_len,
|
|
||||||
max_len=max_len,
|
|
||||||
check_reserved=check_reserved,
|
|
||||||
platform=platform,
|
|
||||||
)
|
|
||||||
|
|
||||||
self.__fname_validator = FileNameValidator(
|
|
||||||
min_len=min_len, max_len=max_len, check_reserved=check_reserved, platform=platform
|
|
||||||
)
|
|
||||||
|
|
||||||
if self._is_universal() or self._is_windows():
|
|
||||||
self.__split_drive = ntpath.splitdrive
|
|
||||||
else:
|
|
||||||
self.__split_drive = posixpath.splitdrive
|
|
||||||
|
|
||||||
def validate(self, value: PathType) -> None:
|
|
||||||
validate_pathtype(
|
|
||||||
value,
|
|
||||||
allow_whitespaces=False
|
|
||||||
if self.platform in [Platform.UNIVERSAL, Platform.WINDOWS]
|
|
||||||
else True,
|
|
||||||
)
|
|
||||||
self.validate_abspath(value)
|
|
||||||
|
|
||||||
_drive, value = self.__split_drive(str(value))
|
|
||||||
if not value:
|
|
||||||
return
|
|
||||||
|
|
||||||
filepath = os.path.normpath(value)
|
|
||||||
unicode_filepath = preprocess(filepath)
|
|
||||||
value_len = len(unicode_filepath)
|
|
||||||
|
|
||||||
if value_len > self.max_len:
|
|
||||||
raise InvalidLengthError(
|
|
||||||
"file path is too long: expected<={:d}, actual={:d}".format(self.max_len, value_len)
|
|
||||||
)
|
|
||||||
if value_len < self.min_len:
|
|
||||||
raise InvalidLengthError(
|
|
||||||
"file path is too short: expected>={:d}, actual={:d}".format(
|
|
||||||
self.min_len, value_len
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
self._validate_reserved_keywords(unicode_filepath)
|
|
||||||
unicode_filepath = unicode_filepath.replace("\\", "/")
|
|
||||||
for entry in unicode_filepath.split("/"):
|
|
||||||
if not entry or entry in (".", ".."):
|
|
||||||
continue
|
|
||||||
|
|
||||||
self.__fname_validator._validate_reserved_keywords(entry)
|
|
||||||
|
|
||||||
if self._is_universal() or self._is_windows():
|
|
||||||
self.__validate_win_filepath(unicode_filepath)
|
|
||||||
else:
|
|
||||||
self.__validate_unix_filepath(unicode_filepath)
|
|
||||||
|
|
||||||
def validate_abspath(self, value: PathType) -> None:
|
|
||||||
value = str(value)
|
|
||||||
is_posix_abs = posixpath.isabs(value)
|
|
||||||
is_nt_abs = ntpath.isabs(value)
|
|
||||||
err_object = ValidationError(
|
|
||||||
description=(
|
|
||||||
"an invalid absolute file path ({}) for the platform ({}).".format(
|
|
||||||
value, self.platform.value
|
|
||||||
)
|
|
||||||
+ " to avoid the error, specify an appropriate platform correspond"
|
|
||||||
+ " with the path format, or 'auto'."
|
|
||||||
),
|
|
||||||
platform=self.platform,
|
|
||||||
reason=ErrorReason.MALFORMED_ABS_PATH,
|
|
||||||
)
|
|
||||||
|
|
||||||
if any([self._is_windows() and is_nt_abs, self._is_linux() and is_posix_abs]):
|
|
||||||
return
|
|
||||||
|
|
||||||
if self._is_universal() and any([is_posix_abs, is_nt_abs]):
|
|
||||||
ValidationError(
|
|
||||||
description=(
|
|
||||||
"{}. expected a platform independent file path".format(
|
|
||||||
"POSIX absolute file path found"
|
|
||||||
if is_posix_abs
|
|
||||||
else "NT absolute file path found"
|
|
||||||
)
|
|
||||||
),
|
|
||||||
platform=self.platform,
|
|
||||||
reason=ErrorReason.MALFORMED_ABS_PATH,
|
|
||||||
)
|
|
||||||
|
|
||||||
if any([self._is_windows(), self._is_universal()]) and is_posix_abs:
|
|
||||||
raise err_object
|
|
||||||
|
|
||||||
drive, _tail = ntpath.splitdrive(value)
|
|
||||||
if not self._is_windows() and drive and is_nt_abs:
|
|
||||||
raise err_object
|
|
||||||
|
|
||||||
def __validate_unix_filepath(self, unicode_filepath: str) -> None:
|
|
||||||
match = _RE_INVALID_PATH.findall(unicode_filepath)
|
|
||||||
if match:
|
|
||||||
raise InvalidCharError(
|
|
||||||
self._ERROR_MSG_TEMPLATE.format(
|
|
||||||
invalid=findall_to_str(match), value=repr(unicode_filepath)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
def __validate_win_filepath(self, unicode_filepath: str) -> None:
|
|
||||||
match = _RE_INVALID_WIN_PATH.findall(unicode_filepath)
|
|
||||||
if match:
|
|
||||||
raise InvalidCharError(
|
|
||||||
self._ERROR_MSG_TEMPLATE.format(
|
|
||||||
invalid=findall_to_str(match), value=repr(unicode_filepath)
|
|
||||||
),
|
|
||||||
platform=Platform.WINDOWS,
|
|
||||||
)
|
|
||||||
|
|
||||||
_drive, value = self.__split_drive(unicode_filepath)
|
|
||||||
if value:
|
|
||||||
match_reserved = self._RE_NTFS_RESERVED.search(value)
|
|
||||||
if match_reserved:
|
|
||||||
reserved_name = match_reserved.group()
|
|
||||||
raise ReservedNameError(
|
|
||||||
"'{}' is a reserved name".format(reserved_name),
|
|
||||||
reusable_name=False,
|
|
||||||
reserved_name=reserved_name,
|
|
||||||
platform=self.platform,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_filepath(
    file_path: PathType,
    platform: Optional[str] = None,
    min_len: int = 1,
    max_len: Optional[int] = None,
    check_reserved: bool = True,
) -> None:
    """Verifying whether the ``file_path`` is a valid file path or not.

    Args:
        file_path:
            File path to validate.
        platform:
            Target platform name of the file path.

            .. include:: platform.txt
        min_len:
            Minimum length of the ``file_path``. The value must be greater or
            equal to one. Defaults to ``1``.
        max_len:
            Maximum length of the ``file_path``. If the value is |None|,
            it is automatically determined by the ``platform``:

            - ``Linux``: 4096
            - ``macOS``: 1024
            - ``Windows``: 260
            - ``universal``: 260
        check_reserved:
            If |True|, check reserved names of the ``platform``.

    Raises:
        ValidationError (ErrorReason.INVALID_CHARACTER):
            If the ``file_path`` includes invalid char(s):
            |invalid_file_path_chars|.
            The following characters are also invalid for Windows platform:
            |invalid_win_file_path_chars|
        ValidationError (ErrorReason.INVALID_LENGTH):
            If the ``file_path`` is longer than ``max_len`` characters.
        ValidationError:
            If ``file_path`` include invalid values.

    Example:
        :ref:`example-validate-file-path`

    See Also:
        `Naming Files, Paths, and Namespaces - Win32 apps | Microsoft Docs
        <https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file>`__
    """
    validator = FilePathValidator(
        platform=platform,
        min_len=min_len,
        max_len=max_len,
        check_reserved=check_reserved,
    )
    validator.validate(file_path)
||||||
|
|
||||||
def validate_file_path(file_path, platform=None, max_path_len=None):
    """Deprecated alias of :py:func:`.validate_filepath`.

    BUG FIX: ``max_path_len`` was previously forwarded positionally, landing
    in the ``min_len`` parameter of ``validate_filepath`` instead of
    ``max_len``. Keyword forwarding restores the intended mapping.
    """
    validate_filepath(file_path, platform=platform, max_len=max_path_len)
|
||||||
|
|
||||||
|
|
||||||
def is_valid_filepath(
    file_path: PathType,
    platform: Optional[str] = None,
    min_len: int = 1,
    max_len: Optional[int] = None,
    check_reserved: bool = True,
) -> bool:
    """Check whether the ``file_path`` is a valid name or not.

    Args:
        file_path:
            A filepath to be checked.

    Returns:
        |True| if ``file_path`` is a valid file path for the platform.

    Example:
        :ref:`example-is-valid-filepath`

    See Also:
        :py:func:`.validate_filepath()`
    """
    validator = FilePathValidator(
        platform=platform,
        min_len=min_len,
        max_len=max_len,
        check_reserved=check_reserved,
    )
    return validator.is_valid(file_path)
|
||||||
|
|
||||||
|
|
||||||
def sanitize_filepath(
    file_path: PathType,
    replacement_text: str = "",
    platform: Optional[str] = None,
    max_len: Optional[int] = None,
    check_reserved: bool = True,
    normalize: bool = True,
) -> PathType:
    """Make a valid file path from a string.

    To make a valid file path the function does:

    - replace invalid characters for a file path within the ``file_path``
      with the ``replacement_text``. Invalid characters are as follows:

      - unprintable characters
      - |invalid_file_path_chars|
      - for Windows (or universal) only: |invalid_win_file_path_chars|

    - Append underscore (``"_"``) at the tail of the name if sanitized name
      is one of the reserved names by operating systems
      (only when ``check_reserved`` is |True|).

    Args:
        file_path:
            File path to sanitize.
        replacement_text:
            Replacement text for invalid characters. Defaults to ``""``.
        platform:
            Target platform name of the file path.

            .. include:: platform.txt
        max_len:
            Maximum length of the ``file_path``. Truncate the name if the
            ``file_path`` length exceeds this value. If the value is |None|,
            ``max_len`` is automatically determined by the ``platform``:

            - ``Linux``: 4096
            - ``macOS``: 1024
            - ``Windows``: 260
            - ``universal``: 260
        check_reserved:
            If |True|, sanitize reserved names of the ``platform``.
        normalize:
            If |True|, normalize the file path.

    Returns:
        Same type as the argument (str or PathLike object):
            Sanitized filepath.

    Raises:
        ValueError:
            If the ``file_path`` is an invalid file path.

    Example:
        :ref:`example-sanitize-file-path`
    """
    sanitizer = FilePathSanitizer(
        platform=platform,
        max_len=max_len,
        check_reserved=check_reserved,
        normalize=normalize,
    )
    return sanitizer.sanitize(file_path, replacement_text)
|
||||||
|
|
||||||
|
|
||||||
def sanitize_file_path(file_path, replacement_text="", platform=None, max_path_len=None):
    """Deprecated alias of :py:func:`.sanitize_filepath`.

    BUG FIX: arguments were previously forwarded positionally, so ``platform``
    landed in ``replacement_text`` and ``max_path_len`` in ``platform``, while
    the caller's ``replacement_text`` was silently dropped. Keyword forwarding
    restores the intended mapping.
    """
    return sanitize_filepath(
        file_path,
        replacement_text=replacement_text,
        platform=platform,
        max_len=max_path_len,
    )
|
|
|
@ -1,45 +0,0 @@
|
||||||
"""
|
|
||||||
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
from ._common import preprocess, validate_pathtype
|
|
||||||
from .error import InvalidCharError
|
|
||||||
|
|
||||||
|
|
||||||
# Matches any character NOT permitted in an LTSV label: labels may contain
# only ASCII alphanumerics, "_", ".", and "-" (see http://ltsv.org/).
__RE_INVALID_LTSV_LABEL = re.compile("[^0-9A-Za-z_.-]", re.UNICODE)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_ltsv_label(label: str) -> None:
    """
    Verifying whether ``label`` is a valid
    `Labeled Tab-separated Values (LTSV) <http://ltsv.org/>`__ label or not.

    :param label: Label to validate.
    :raises pathvalidate.ValidationError:
        If invalid character(s) found in the ``label`` for a LTSV format label.
    """

    validate_pathtype(label, allow_whitespaces=False, error_msg="label is empty")

    invalid_chars = __RE_INVALID_LTSV_LABEL.findall(preprocess(label))
    if invalid_chars:
        raise InvalidCharError(
            "invalid character found for a LTSV format label: {}".format(invalid_chars)
        )
|
||||||
|
|
||||||
|
|
||||||
def sanitize_ltsv_label(label: str, replacement_text: str = "") -> str:
    """
    Replace every character invalid for an LTSV format label in ``label``.

    :param label: Input text.
    :param replacement_text: Text substituted for each invalid character.
    :return: A replacement string.
    :rtype: str
    """

    validate_pathtype(label, allow_whitespaces=False, error_msg="label is empty")

    normalized_label = preprocess(label)
    return __RE_INVALID_LTSV_LABEL.sub(replacement_text, normalized_label)
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue