Compare commits
172 Commits
v2.5.0-beta ... v2.5.5
Commit SHA1s (172):

9184ae4608 de64b5ddfa b3ffbbf3ea aa80fdf738 9ad95f51d4 0902a61341 55ffd54e5b e014bfa63e
687672e9c1 137889dc9c f24f4a4250 95fc108d57 95f48ba9f6 d80cf232c8 ab3ec875a3 668c9e6045
67b452a461 9b3bfd14db e00c8fb186 a0919e246d 003f684f8a 69d55c60c3 560094dcf6 4edd6ce911
f76bd2af8e 7747503fee 1e1a8ddfb0 9bcd18f1b6 50b6f9a8f2 b4ba88b3e5 ba9acd6e23 dd9513313b
288a1c86ab 8e28cb10fa 3d35a525d3 f7153d0f3b 4285b55c15 b54576f08f 6b4db681ff f582f781f3
9baecb0a41 91a18e1a92 acfbb0e96d c52292962d 6e53743716 873194b402 21dec5feb3 bee4106af0
bbb6e46515 570ebb4f73 d93204af4e 702f116db9 0c8607b3ec 3a2cc6efc7 1b37ff1655 769934c8a5
7f1a4ec34a 27438f7915 8651bef9c1 36324d10dc 0272c35047 70c0f912e2 59a6acc088 10b0726727
8f1360d7c2 e0e5ac9ecc c814f219a2 9095fc0c7a a675202537 b52ab4885b 43e26c9b56 703a7feed2
7b69ed4cec fcca7f969e ec34ea2116 3dc36c3b92 f0d4fd5523 7fe6c72fe2 d216d0f27f 43a7758acd
3043956dec 06665fdd06 beff5caaac 3859412b2c f7ec476fc0 b97d32671d 01c56ef280 b9422312f3
9a0f83c3e7 fbfedb2e62 4f8a462041 141d043a6a c1266fed12 4a4be9798d 172692ccca 50e7c0469f
44f74e3590 63656b73c2 40ecf56904 b4a10adec2 1698622d63 fa27271647 d837811c68 ad195f0969
4a8748e322 0f016c83ea 061ae44da4 a8b90bf100 eb3cd49bc4 416d869288 a116c26c25 cc4ec53dac
63164c7ff5 9815c014e8 69675151bf 99e395ddfa 7fe1e542df 938134081b 3fd2234a92 41843dc573
cc6bd528a5 2625ef5fb9 dbd2d28877 f70f814c70 6710e42134 78c5b45e43 e562ec96fa 9b5e01c319
0097532f4a 91935c9018 83df807f7e eb3db20340 6dab6194ea 356f64cac0 f77f289125 280257477a
660141cb16 cd8a899521 cb577c51b8 1c395ab10c 07d7170e49 88e23627fd 48f846da40 ff887d9948
617b0d6fd9 805d45bd33 fef428202f 40fd82febd 45f0001da5 c7a3e1e3bf 9dd8cc9e49 d252d4cd2d
bc1328040c 82919d3c1d 7c801c2f5e 9a932aea12 5696e75abe efb3f748c2 450b3865a8 970667adca
89307dad01 451feda86b 4d241fac48 4390f5cbc8 7f9d46eac3 d0f28883aa 48203e64a9 42b17ca495
d8080fe506 be910e24f7 ce6d70f6fd 827e05e4d7
```diff
@@ -7,3 +7,4 @@ package
 pylintrc
 *.md
 !CHANGELOG*.md
+start.bat
```
22  .github/workflows/publish-docker.yml (vendored)

```diff
@@ -7,6 +7,9 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
+      - name: Checkout Code
+        uses: actions/checkout@v2
+
       - name: Prepare
         id: prepare
         run: |
@@ -29,16 +32,24 @@ jobs:
 
       - name: Set up Docker Buildx
        id: buildx
-        uses: crazy-max/ghaction-docker-buildx@v1
+        uses: crazy-max/ghaction-docker-buildx@v3
        with:
-          version: latest
+          buildx-version: latest
 
-      - name: Checkout Code
-        uses: actions/checkout@v2.1.0
+      - name: Cache Docker Layers
+        id: cache
+        uses: actions/cache@v2
+        with:
+          path: /tmp/.buildx-cache
+          key: ${{ runner.os }}-buildx-${{ github.sha }}
+          restore-keys: |
+            ${{ runner.os }}-buildx-
 
       - name: Docker Buildx (no push)
        run: |
          docker buildx build \
+            --cache-from "type=local,src=/tmp/.buildx-cache" \
+            --cache-to "type=local,dest=/tmp/.buildx-cache" \
            --platform ${{ steps.prepare.outputs.docker_platforms }} \
            --output "type=image,push=false" \
            --build-arg "TAG=${{ steps.prepare.outputs.tag }}" \
@@ -59,6 +70,7 @@ jobs:
        if: success()
        run: |
          docker buildx build \
+            --cache-from "type=local,src=/tmp/.buildx-cache" \
            --platform ${{ steps.prepare.outputs.docker_platforms }} \
            --output "type=image,push=true" \
            --build-arg "TAG=${{ steps.prepare.outputs.tag }}" \
@@ -79,5 +91,5 @@ jobs:
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
-          job: ${{ github.workflow }}
+          title: ${{ github.workflow }}
          nofail: true
```
47  .github/workflows/publish-release.yml (vendored)

```diff
@@ -9,7 +9,7 @@ jobs:
    runs-on: windows-latest
    steps:
      - name: Checkout Code
-        uses: actions/checkout@v2.1.0
+        uses: actions/checkout@v2
 
      - name: Set Release Version
        id: get_version
@@ -28,13 +28,13 @@ jobs:
          echo $GITHUB_SHA > version.txt
 
      - name: Set Up Python
-        uses: actions/setup-python@v1.2.0
+        uses: actions/setup-python@v2
        with:
          python-version: 3.8
 
      - name: Cache Dependencies
        id: cache_dependencies
-        uses: actions/cache@v1
+        uses: actions/cache@v2
        with:
          path: ~\AppData\Local\pip\Cache
          key: ${{ runner.os }}-pip-${{ hashFiles('package/requirements-windows.txt') }}
@@ -50,17 +50,18 @@ jobs:
          pyinstaller -y ./package/Tautulli-windows.spec
 
      - name: Create Installer
-        uses: joncloud/makensis-action@v1
+        uses: joncloud/makensis-action@v1.2
        with:
          script-file: ./package/Tautulli.nsi
-          arguments: /DVERSION=${{ steps.get_version.outputs.VERSION_NSIS }} /DINSTALLER_NAME=..\Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}.exe
-          includeMorePlugins: package/nsis-plugins
+          arguments: /DVERSION=${{ steps.get_version.outputs.VERSION_NSIS }} /DINSTALLER_NAME=..\Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
+          include-more-plugins: true
+          include-custom-plugins-path: package/nsis-plugins
 
      - name: Upload Installer
-        uses: actions/upload-artifact@v1
+        uses: actions/upload-artifact@v2
        with:
          name: Tautulli-windows-installer
-          path: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}.exe
+          path: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
 
      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
@@ -68,14 +69,14 @@ jobs:
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
-          job: Build Windows Installer
+          title: Build Windows Installer
          nofail: true
 
  build-macos:
    runs-on: macos-latest
    steps:
      - name: Checkout Code
-        uses: actions/checkout@v2.1.0
+        uses: actions/checkout@v2
 
      - name: Set Release Version
        id: get_version
@@ -93,13 +94,13 @@ jobs:
          echo $GITHUB_SHA > version.txt
 
      - name: Set Up Python
-        uses: actions/setup-python@v1.2.0
+        uses: actions/setup-python@v2
        with:
          python-version: 3.8
 
      - name: Cache Dependencies
        id: cache_dependencies
-        uses: actions/cache@v1
+        uses: actions/cache@v2
        with:
          path: ~/Library/Caches/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('package/requirements-macos.txt') }}
@@ -116,13 +117,13 @@ jobs:
 
      - name: Create Installer
        run: |
-          sudo pkgbuild --install-location /Applications --version ${{ steps.get_version.outputs.VERSION }} --component ./dist/Tautulli.app --scripts ./package/macos-scripts Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}.pkg
+          sudo pkgbuild --install-location /Applications --version ${{ steps.get_version.outputs.VERSION }} --component ./dist/Tautulli.app --scripts ./package/macos-scripts Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
 
      - name: Upload Installer
-        uses: actions/upload-artifact@v1
+        uses: actions/upload-artifact@v2
        with:
          name: Tautulli-macos-installer
-          path: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}.pkg
+          path: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
 
      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
@@ -130,7 +131,7 @@ jobs:
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
-          job: Build MacOS Installer
+          title: Build MacOS Installer
          nofail: true
 
  release:
@@ -142,7 +143,7 @@ jobs:
        uses: technote-space/workflow-conclusion-action@v1
 
      - name: Checkout Code
-        uses: actions/checkout@v2.1.0
+        uses: actions/checkout@v2
 
      - name: Set Release Version
        id: get_version
@@ -151,13 +152,13 @@ jobs:
 
      - name: Download Windows Installer
        if: env.WORKFLOW_CONCLUSION == 'success'
-        uses: actions/download-artifact@v1
+        uses: actions/download-artifact@v2
        with:
          name: Tautulli-windows-installer
 
      - name: Download MacOS Installer
        if: env.WORKFLOW_CONCLUSION == 'success'
-        uses: actions/download-artifact@v1
+        uses: actions/download-artifact@v2
        with:
          name: Tautulli-macos-installer
 
@@ -187,8 +188,8 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: Tautulli-windows-installer/Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}.exe
-          asset_name: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}.exe
+          asset_path: ./Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
+          asset_name: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
          asset_content_type: application/vnd.microsoft.portable-executable
 
      - name: Upload MacOS Installer
@@ -198,6 +199,6 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}.pkg
-          asset_name: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}.pkg
+          asset_path: ./Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
+          asset_name: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
          asset_content_type: application/vnd.apple.installer+xml
```
76  API.md

````diff
@@ -1180,6 +1180,7 @@ Returns:
      "grandparent_thumb": "/library/metadata/1219/thumb/1462175063",
      "grandparent_title": "Game of Thrones",
      "guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
+     "guids": [],
      "labels": [],
      "last_viewed_at": "1462165717",
      "library_name": "TV Shows",
@@ -1901,6 +1902,7 @@ Returns:
      "grandparent_thumb": "/library/metadata/1219/thumb/1462175063",
      "grandparent_title": "Game of Thrones",
      "guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
+     "guids": [],
      "labels": [],
      "last_viewed_at": "1462165717",
      "library_name": "TV Shows",
````
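The new `guids` field shown in the hunks above corresponds to the changelog entry "Added external guids to get_metadata API command." A minimal sketch of reading it, assuming the standard Tautulli `/api/v2` endpoint, a valid API key, and the `requests` library; the host, port, key, and `rating_key` below are illustrative placeholders, and `get_metadata` is assumed to be the command these return fields belong to:

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # placeholder host/port
API_KEY = "your_api_key"                       # placeholder API key

# get_metadata is assumed here; rating_key 1219 is an illustrative item id.
params = {"apikey": API_KEY, "cmd": "get_metadata", "rating_key": 1219}
resp = requests.get(TAUTULLI_URL, params=params, timeout=10)
resp.raise_for_status()

metadata = resp.json()["response"]["data"]
# "guids" lists external ids (e.g. tvdb/tmdb/imdb) when Plex provides them.
print(metadata.get("guid"), metadata.get("guids", []))
```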
````diff
@@ -1989,6 +1991,33 @@ Returns:
 ```
 
 
+### get_server_info
+Get the PMS server information.
+
+```
+Required parameters:
+    None
+
+Optional parameters:
+    None
+
+Returns:
+    json:
+        {"pms_identifier": "08u2phnlkdshf890bhdlksghnljsahgleikjfg9t",
+         "pms_ip": "10.10.10.1",
+         "pms_is_remote": 0,
+         "pms_name": "Winterfell-Server",
+         "pms_platform": "Windows",
+         "pms_plexpass": 1,
+         "pms_port": 32400,
+         "pms_ssl": 0,
+         "pms_url": "http://10.10.10.1:32400",
+         "pms_url_manual": 0,
+         "pms_version": "1.20.0.3133-fede5bdc7"
+         }
+```
+
+
 ### get_server_list
 Get all your servers that are published to Plex.tv.
 
````
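A short sketch of calling the `get_server_info` command added above, assuming the standard `/api/v2` endpoint; the host, port, and API key are placeholders and `requests` is assumed to be available:

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # placeholder host/port
API_KEY = "your_api_key"                       # placeholder API key

resp = requests.get(
    TAUTULLI_URL,
    params={"apikey": API_KEY, "cmd": "get_server_info"},
    timeout=10,
)
resp.raise_for_status()

info = resp.json()["response"]["data"]
# Fields documented above: pms_name, pms_version, pms_url, pms_plexpass, ...
print("{pms_name} {pms_version} at {pms_url}".format(**info))
```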
````diff
@@ -2267,8 +2296,8 @@ Required parameters:
     user_id (str): The id of the Plex user
 
 Optional parameters:
-    order_column (str): "last_seen", "ip_address", "platform", "player",
-                        "last_played", "play_count"
+    order_column (str): "last_seen", "first_seen", "ip_address", "platform",
+                        "player", "last_played", "play_count"
     order_dir (str): "desc" or "asc"
     start (int): Row to start from, 0
     length (int): Number of items to return, 25
@@ -2286,6 +2315,7 @@ Returns:
      "ip_address": "xxx.xxx.xxx.xxx",
      "last_played": "Game of Thrones - The Red Woman",
      "last_seen": 1462591869,
+     "first_seen": 1583968210,
      "live": 0,
      "media_index": 1,
      "media_type": "episode",
````
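The hunks above add a `first_seen` sort column and return field to the user IP addresses command. The command name is not visible in this capture; `get_user_ips` is assumed below based on the fields and the changelog's "first streamed column to user IP addresses table" entry, and all other values are placeholders:

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # placeholder host/port
API_KEY = "your_api_key"                       # placeholder API key

params = {
    "apikey": API_KEY,
    "cmd": "get_user_ips",         # assumed command name (not shown in the hunk)
    "user_id": 133788,             # illustrative Plex user id
    "order_column": "first_seen",  # new sort column in this release
    "order_dir": "desc",
    "length": 25,
}
resp = requests.get(TAUTULLI_URL, params=params, timeout=10)
result = resp.json()["response"]["data"]
# Datatable-style payload assumed: the rows live under "data".
for row in result.get("data", []):
    print(row["ip_address"], row["last_seen"], row.get("first_seen"))
```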
````diff
@@ -2554,13 +2584,38 @@ Returns:
 ```
 
 
+### import_config
+Import a Tautulli config file.
+
+```
+Required parameters:
+    config_file (file): The config file to import (multipart/form-data)
+    or
+    config_path (str): The full path to the config file to import
+
+
+Optional parameters:
+    backup (bool): true or false whether to backup
+                   the current config before importing
+
+Returns:
+    json:
+        {"result": "success",
+         "message": "Config import has started. Check the logs to monitor any problems. "
+                    "Tautulli will restart automatically."
+         }
+```
+
+
 ### import_database
 Import a Tautulli, PlexWatch, or Plexivity database into Tautulli.
 
 ```
 Required parameters:
     app (str): "tautulli" or "plexwatch" or "plexivity"
-    database_path (str): The full path to the plexwatch database file
+    database_file (file): The database file to import (multipart/form-data)
+    or
+    database_path (str): The full path to the database file to import
     method (str): For Tautulli only, "merge" or "overwrite"
     table_name (str): For PlexWatch or Plexivity only, "processed" or "grouped"
````
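A hedged sketch of the two documented ways to call the new `import_config` command: uploading the file as multipart/form-data, or pointing Tautulli at a path on its own filesystem. The URL and API key are placeholders, and routing the multipart upload through `/api/v2` is an assumption (the settings UI posts to its own import_config endpoint):

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # placeholder host/port
API_KEY = "your_api_key"                       # placeholder API key

# Variant 1: upload the config file itself (multipart/form-data).
with open("config.ini", "rb") as fh:
    resp = requests.post(
        TAUTULLI_URL,
        params={"apikey": API_KEY, "cmd": "import_config", "backup": "true"},
        files={"config_file": fh},
        timeout=60,
    )
print(resp.json()["response"])

# Variant 2: import a config file that already exists on the Tautulli host.
resp = requests.get(
    TAUTULLI_URL,
    params={
        "apikey": API_KEY,
        "cmd": "import_config",
        "config_path": "/path/to/old/config.ini",  # placeholder path
        "backup": "true",
    },
    timeout=60,
)
print(resp.json()["response"])
```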
````diff
@@ -2572,7 +2627,10 @@ Optional parameters:
                                  of seconds for a stream to import
 
 Returns:
-    None
+    json:
+        {"result": "success",
+         "message": "Database import has started. Check the logs to monitor any problems."
+         }
 ```
 
 
````
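Similarly, a sketch of `import_database` for a PlexWatch database using the documented path-based parameters; the values are placeholders and the response shape follows the JSON documented above:

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # placeholder host/port
API_KEY = "your_api_key"                       # placeholder API key

params = {
    "apikey": API_KEY,
    "cmd": "import_database",
    "app": "plexwatch",                        # "tautulli", "plexwatch", or "plexivity"
    "database_path": "/path/to/plexwatch.db",  # placeholder path on the Tautulli host
    "table_name": "processed",                 # PlexWatch/Plexivity only
}
resp = requests.get(TAUTULLI_URL, params=params, timeout=60)
# The command now returns a result/message instead of an empty response.
print(resp.json()["response"])
```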
````diff
@@ -2668,14 +2726,18 @@ Registers the Tautulli Android App for notifications.
 
 ```
 Required parameters:
-    device_name (str): The device name of the Tautulli Android App
-    device_id (str): The OneSignal device id of the Tautulli Android App
+    device_id (str): The unique device identifier for the mobile device
+    device_name (str): The device name of the mobile device
 
 Optional parameters:
+    friendly_name (str): A friendly name to identify the mobile device
+    onesignal_id (str): The OneSignal id for the mobile device
 
 Returns:
-    None
+    json:
+        {"pms_name": "Winterfell-Server",
+         "server_id": "ds48g4r354a8v9byrrtr697g3g79w"
+         }
 ```
````
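This endpoint is normally called by the Tautulli Android app itself; the sketch below only illustrates the re-documented parameters (device_id/device_name now required, friendly_name/onesignal_id optional) and the new JSON return. The command name `register_device` is assumed for this section, all values are placeholders, and per the v2.5.2 changelog the endpoint applies extra validation when registering:

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # placeholder host/port
API_KEY = "your_api_key"                       # placeholder API key

params = {
    "apikey": API_KEY,
    "cmd": "register_device",                  # assumed command name
    "device_id": "unique-device-identifier",   # placeholder values
    "device_name": "Pixel 4",
    "friendly_name": "My phone",               # optional
    "onesignal_id": "onesignal-player-id",     # optional
}
resp = requests.get(TAUTULLI_URL, params=params, timeout=10)
data = resp.json()["response"]["data"]
# Now returns the linked server's name and id instead of an empty response.
print(data["pms_name"], data["server_id"])
```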
83  CHANGELOG.md

```diff
@@ -1,16 +1,93 @@
 # Changelog
 
-# v2.5.0-beta (2020-05-31)
+## v2.5.5 (2020-09-06)
+
+* Activity:
+  * Fix: Filter out TV show background theme music sessions.
+* Notifications:
+  * New: Check Plex external guids for notification metadata provider links.
+* UI:
+  * Fix: Incorrect sorting for user/library recently played items.
+* API:
+  * Fix: get_synced_items API command returning error with empty result.
+  * Fix: Download API commands not returning the file.
+  * Fix: get_logs API command encoding error.
+  * Fix: get_user_player_stats API command returning error instead of empty result.
+  * New: Added get_server_info API command.
+  * New: Added external guids to get_metadata API command.
+  * New: Added support for multi-column sorting for datatable API commands.
+  * Change: get_activity API command returns thumbnail override for clips.
+  * Change: get_libraries_table API command returns custom library artwork.
+* Other:
+  * Fix: Tautulli failed to run with a stale pid file.
+  * New: Added scheduled task to optimize the Tautulli database.
+  * Change: Update plexapi to 3.6.0.
+  * Change: Update some libraries for Python 3 compatibility.
+
+
+## v2.5.4 (2020-07-31)
+
+* Monitoring:
+  * Change: Monitoring remote access changed to use websockets. Refer to Tautulli/Tautulli-Issues#251 for details.
+* Notifications:
+  * Fix: Uploading images to Cloudinary failed for titles with non-ASCII characters on Python 2.
+  * New: Added plex_id notification parameter.
+  * Remove: Running .exe files directly using script notifications is no longer supported.
+  * Remove: php, perl, and ruby prefix overrides for script notifications are no longer supported.
+  * Change: Stricter checking of file extensions for script notifications.
+  * Change: Fallback to The Movie Database lookup using title and year.
+  * Change: Fallback to TVmaze lookup using title.
+* UI:
+  * New: Added ability to import a previous Tautulli configuration file in the settings.
+  * New: Added a browse button for settings which require a folder or file input.
+  * New: Added first streamed column to user IP addresses table. (Thanks @dotsam)
+  * New: Added The Movie Database rating image to media page.
+  * Change: Different icon to represent direct stream in the history tables.
+* API:
+  * New: Updated API docs for importing a database and configuration file.
+
+
+## v2.5.3 (2020-07-10)
+
+* History:
+  * Fix: Unable to delete more than 1000 history entries at the same time.
+* Notifications:
+  * Change: Python script notifications run using the same Python interpreter as Tautulli.
+* Newsletters:
+  * Fix: Unable to view newsletters with special characters.
+* Other:
+  * Fix: Tautulli failing to start after enabling HTTPS when installed using the Windows / macOS installers.
+  * Fix: Startup script not working on macOS.
+  * Fix: Unable to hide dock icon on macOS with the pkg install. Refer to the FAQ regarding the Python rocket dock icon.
+  * Change: Added path to Python interpreter in system startup (daemon) scripts.
+  * Change: Added Python version to Google analytics.
+
+
+## v2.5.2 (2020-07-01)
+
+* Announcements:
+  * Tautulli now supports Python 3!
+  * Python 2 is still supported for the time being, but it is recommended to upgrade to Python 3.
+* Notifications:
+  * Fix: Error uploading images to Cloudinary on Python 2.
+  * Fix: Testing browser notifications alert not disappearing.
+  * Change: Default recently added notification delay set to 300 seconds.
+* UI:
+  * Fix: MacOS menu bar icon causing Tautulli to fail to start.
+  * Fix: Unable to login to Tautulli on Python 2.
+  * New: Windows and MacOS setting to enable Tautulli to start automatically when you login.
+  * New: Added system tray icon for MacOS.
+  * New: Added menu bar icon for MacOS.
+  * New: Ability to import a Tautulli database in the settings.
+  * New: Added Tautulli news area on the settings page.
+  * New: Added platform icon for LG devices.
+  * Remove: Ability to login to Tautulli using a Plex username and password has been removed. Login using a Plex.tv account is only supported via OAuth.
+* Mobile App:
+  * Fix: Improved API security and validation when registering the Android app.
+* Docker:
+  * Fix: Docker container not respecting the PUID and PGID environment variables.
+* Other:
+  * Fix: Error creating self-signed certificates on Python 3.
+  * Fix: Tautulli login session cookie not set on the HTTP root path.
+  * New: Windows and MacOS app installers to install Tautulli without needing Python installed.
```
Dockerfile

```diff
@@ -16,7 +16,7 @@ RUN \
 
 COPY . /app
 
-CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
+ENTRYPOINT [ "./start.sh" ]
 
 VOLUME /config
 EXPOSE 8181
```
22  Tautulli.py

```diff
@@ -131,8 +131,7 @@ def main():
 
     if args.daemon:
         if sys.platform == 'win32':
-            sys.stderr.write(
-                "Daemonizing not supported under Windows, starting normally\n")
+            logger.warn("Daemonizing not supported under Windows, starting normally")
        else:
            plexpy.DAEMON = True
            plexpy.QUIET = True
@@ -150,11 +149,13 @@ def main():
        try:
            with open(plexpy.PIDFILE, 'r') as fp:
                pid = int(fp.read())
-                os.kill(pid, 0)
        except IOError as e:
            raise SystemExit("Unable to read PID file: %s", e)
 
+        try:
+            os.kill(pid, 0)
        except OSError:
-            logger.warn("PID file '%s' already exists, but PID %d is " \
+            logger.warn("PID file '%s' already exists, but PID %d is "
                        "not running. Ignoring PID file." %
                        (plexpy.PIDFILE, pid))
        else:
@@ -257,17 +258,18 @@ def main():
                             plexpy.HTTP_ROOT)
 
    if common.PLATFORM == 'Darwin' and plexpy.CONFIG.SYS_TRAY_ICON:
-        try:
-            import AppKit
-        except ImportError:
+        if not macos.HAS_PYOBJC:
            logger.warn("The pyobjc module is missing. Install this "
-                        "module to enable the system tray icon.")
+                        "module to enable the MacOS menu bar icon.")
            plexpy.CONFIG.SYS_TRAY_ICON = False
 
    if plexpy.CONFIG.SYS_TRAY_ICON:
-        # MacOS system tray icon must be run on the main thread and is blocking
+        # MacOS menu bar icon must be run on the main thread and is blocking
        # Start the rest of Tautulli on a new thread
-        threading.Thread(target=wait).start()
+        thread = threading.Thread(target=wait)
+        thread.daemon = True
+        thread.start()
 
        plexpy.MAC_SYS_TRAY_ICON = macos.MacOSSystemTray()
        plexpy.MAC_SYS_TRAY_ICON.start()
    else:
```
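The second Tautulli.py hunk above splits reading the PID file and probing the process into separate try blocks, which is what fixes the "failed to run with a stale pid file" changelog item: a read error still aborts, but a dead PID now only logs a warning. A self-contained sketch of the same pattern, with illustrative names:

```python
import os

def read_running_pid(pidfile):
    """Return the PID recorded in pidfile if that process is alive, else None."""
    # Reading the file is kept separate from probing the process so that a
    # stale (but readable) PID file no longer raises and aborts startup.
    try:
        with open(pidfile, "r") as fp:
            pid = int(fp.read())
    except IOError as e:
        raise SystemExit("Unable to read PID file: %s" % e)

    try:
        # Signal 0 checks for existence/permissions without sending a signal.
        os.kill(pid, 0)
    except OSError:
        return None  # process gone: the PID file is stale and can be ignored
    return pid
```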
```diff
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 # Display information
 echo "This script will remove *.pyc files. These files are generated by Python, but they can cause conflicts after an upgrade. It's safe to remove them, because they will be regenerated."
```

```diff
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 # Parameter check
 if [ -z "$1" ]; then
```
@@ -5,7 +5,7 @@
|
||||
<h4 class="modal-title">Import ${app} Database</h4>
|
||||
</div>
|
||||
<div class="modal-body" id="modal-text">
|
||||
<form id="import_database" enctype="multipart/form-data" method="post" name="import_database">
|
||||
<form id="import_database_form" enctype="multipart/form-data" method="post" name="import_database_form">
|
||||
<input type="hidden" id="import_app" name="import_app" value="${app.lower()}" />
|
||||
% if app in ('PlexWatch', 'Plexivity'):
|
||||
<p class="help-block">
|
||||
@@ -28,11 +28,11 @@
|
||||
<span class="btn btn-form">Upload</span>
|
||||
<input type="file" style="display: none;" id="import_database_file" name="import_database_file" required>
|
||||
</label>
|
||||
<input id="import_database_file_name" type="text" class="form-control" disabled>
|
||||
<input id="import_database_file_name" type="text" class="form-control" placeholder="tautulli.db" disabled>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Upload the ${app} database you wish to import.</p>
|
||||
<p class="help-block">Upload the ${app} database file you wish to import.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="import_database_path">Option 2: Browse for a Database File</label>
|
||||
@@ -40,13 +40,13 @@
|
||||
<div class="col-xs-12">
|
||||
<div class="input-group">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="import_database_path_browse">Browse</button>
|
||||
<button class="btn btn-form" type="button" id="import_database_path_browse" data-toggle="browse" data-description="Database File" data-filter=".db" data-target="#import_database_path">Browse</button>
|
||||
</span>
|
||||
<input type="text" class="form-control" id="import_database_path" name="import_database_path" value="" required disabled>
|
||||
<input type="text" class="form-control" id="import_database_path" name="import_database_path" value="" placeholder="tautulli.db" required disabled>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Browse for the ${app} database you wish to import.</p>
|
||||
<p class="help-block">Browse for the ${app} database file you wish to import.</p>
|
||||
</div>
|
||||
% if app == 'Tautulli':
|
||||
<div class="form-group">
|
||||
@@ -64,7 +64,6 @@
|
||||
<li><strong>Merge</strong> will add all history and remove any duplicates from the imported database into the current database.</li>
|
||||
<li><strong>Overwrite</strong> will replace all history in the current database with the imported database.</li>
|
||||
</ul>
|
||||
<p class="help-block">Note: Libraries, users, notification agents, newsletter agents, and registered mobile devices will also be imported</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -72,6 +71,15 @@
|
||||
</label>
|
||||
<p class="help-block">Automatically create a backup of the current database before importing.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label>Import Notes</label>
|
||||
<p class="help-block">The following data will also be imported:</p>
|
||||
<ul class="help-block" style="padding-inline-start: 15px;">
|
||||
<li>Libraries and Users</li>
|
||||
<li>Notification / Newsletter Agents</li>
|
||||
<li>Registered Mobile Devices</li>
|
||||
</ul>
|
||||
</div>
|
||||
% else:
|
||||
<div class="form-group">
|
||||
<label for="import_table_name">Table Name</label>
|
||||
@@ -106,19 +114,9 @@
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
$('#import_database_path_browse').click(function () {
|
||||
$('#browse-path-type').text('Databse File');
|
||||
$('#browse-path-modal').modal('show');
|
||||
browsePath(null, null, '.db');
|
||||
});
|
||||
$('#select-browse-file').click(function () {
|
||||
$('#browse-path-modal').modal('hide');
|
||||
$("#import_database_path").val($('#browse-path').val());
|
||||
});
|
||||
|
||||
$('#import_database_file').change(function() {
|
||||
$("#import_database_file").change(function() {
|
||||
if ($(this)[0].files[0]) {
|
||||
$('#import_database_file_name').val($(this)[0].files[0].name);
|
||||
$("#import_database_file_name").val($(this)[0].files[0].name);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -126,7 +124,7 @@
|
||||
$(this).prop('disabled', true);
|
||||
|
||||
var app = $("#import_app").val();
|
||||
var database_file = $('#import_database_file')[0].files[0];
|
||||
var database_file = $("#import_database_file")[0].files[0];
|
||||
var database_path = $("#import_database_path").val();
|
||||
var method = $("#import_method").val();
|
||||
var backup = $("#import_backup_db").is(':checked');
|
||||
|
@@ -230,20 +230,12 @@ ${next.modalIncludes()}
|
||||
</div>
|
||||
</div>
|
||||
<ul id="donation_type" class="nav nav-pills" role="tablist" style="display: flex; justify-content: center; margin: 10px 0;">
|
||||
<li class="active"><a href="#patreon-donation" role="tab" data-toggle="tab">Patreon</a></li>
|
||||
<li><a href="#github-donation" role="tab" data-toggle="tab">GitHub</a></li>
|
||||
<li class="active"><a href="#github-donation" role="tab" data-toggle="tab">GitHub</a></li>
|
||||
<li><a href="#patreon-donation" role="tab" data-toggle="tab">Patreon</a></li>
|
||||
<li><a href="#paypal-donation" role="tab" data-toggle="tab">PayPal</a></li>
|
||||
</ul>
|
||||
<div class="tab-content">
|
||||
<div role="tabpanel" class="tab-pane active" id="patreon-donation" style="text-align: center">
|
||||
<p>
|
||||
Click the button below to continue to Patreon.
|
||||
</p>
|
||||
<a href="${anon_url('https://www.patreon.com/join/tautulli')}" target="_blank">
|
||||
<img src="images/become_a_patron_button.png" alt="Become a Patron" height="40">
|
||||
</a>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="github-donation" style="text-align: center">
|
||||
<div role="tabpanel" class="tab-pane active" id="github-donation" style="text-align: center">
|
||||
<p>
|
||||
Click the button below to continue to GitHub.
|
||||
</p>
|
||||
@@ -251,6 +243,14 @@ ${next.modalIncludes()}
|
||||
<i class="fa fa-heart fa-sm" style="color: #ea4aaa;"></i> Sponsor
|
||||
</a>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="patreon-donation" style="text-align: center">
|
||||
<p>
|
||||
Click the button below to continue to Patreon.
|
||||
</p>
|
||||
<a href="${anon_url('https://www.patreon.com/join/tautulli')}" target="_blank">
|
||||
<img src="images/become_a_patron_button.png" alt="Become a Patron" height="40">
|
||||
</a>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="paypal-donation" style="text-align: center">
|
||||
<p>
|
||||
Click the button below to continue to PayPal.
|
||||
@@ -296,9 +296,7 @@ ${next.modalIncludes()}
|
||||
<script src="${http_root}js/ipaddr.min.js"></script>
|
||||
<script src="${http_root}js/script.js${cache_param}"></script>
|
||||
<script src="${http_root}js/jquery.tripleclick.min.js"></script>
|
||||
% if _session['user_group'] == 'admin' and BROWSER_NOTIFIERS:
|
||||
<script src="${http_root}js/ajaxNotifications.js"></script>
|
||||
% endif
|
||||
<script>
|
||||
% if _session['user_group'] == 'admin':
|
||||
$('body').on('click', '#updateDismiss', function() {
|
||||
@@ -423,6 +421,10 @@ ${next.modalIncludes()}
|
||||
$(document).on('hidden.bs.modal', '.modal', function () {
|
||||
$('.modal:visible').length && $(document.body).addClass('modal-open');
|
||||
});
|
||||
|
||||
% if _session['user_group'] == 'admin' and BROWSER_NOTIFIERS:
|
||||
check_notifications();
|
||||
% endif
|
||||
});
|
||||
|
||||
% if _session['user_group'] != 'admin':
|
||||
|
138
data/interfaces/default/config_import.html
Normal file
138
data/interfaces/default/config_import.html
Normal file
@@ -0,0 +1,138 @@
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">${title}</h4>
|
||||
</div>
|
||||
<div class="modal-body" id="modal-text">
|
||||
<form id="import_config_form" enctype="multipart/form-data" method="post" name="import_config_form">
|
||||
<div class="form-group">
|
||||
<label for="import_config_file">Option 1: Upload a Configuration File</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-12">
|
||||
<div class="input-group">
|
||||
<label for="import_config_file" class="input-group-btn">
|
||||
<span class="btn btn-form">Upload</span>
|
||||
<input type="file" style="display: none;" id="import_config_file" name="import_config_file" required>
|
||||
</label>
|
||||
<input id="import_config_file_name" type="text" class="form-control" placeholder="config.ini" disabled>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Upload the Tautulli configuration file you wish to import.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="import_config_path">Option 2: Browse for a Configuration File</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-12">
|
||||
<div class="input-group">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="import_config_path_browse" data-toggle="browse" data-description="Configuration File" data-filter=".ini" data-target="#import_config_path">Browse</button>
|
||||
</span>
|
||||
<input type="text" class="form-control" id="import_config_path" name="import_config_path" value="" placeholder="config.ini" required disabled>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Browse for the Tautulli configuration file you wish to import.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="import_backup_config" id="import_backup_config" value="1" checked> Backup Current Configuration
|
||||
</label>
|
||||
<p class="help-block">Automatically create a backup of the current configuration before importing.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label>Import Notes</label>
|
||||
<p class="help-block">The following settings will <em>not</em> be imported:</p>
|
||||
<ul class="help-block" style="padding-inline-start: 15px;">
|
||||
<li>Git Path, Log / Backup / Cache Directory, Plex Logs Folder</li>
|
||||
<li>Custom Newsletter Templates Folder, Newsletter Output Directory</li>
|
||||
<li>HTTP Host / Port / Root / Username / Password</li>
|
||||
<li>Enable HTTPS, HTTPS Certificate / Certificate Chain / Key</li>
|
||||
</ul>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<div>
|
||||
<span id="status-message" style="padding-right: 25px;"></span>
|
||||
<input type="button" id="import_config" class="btn btn-bright" value="Import">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
$("#import_config_file").change(function() {
|
||||
if ($(this)[0].files[0]) {
|
||||
$("#import_config_file_name").val($(this)[0].files[0].name);
|
||||
}
|
||||
});
|
||||
|
||||
$("#import_config").click(function() {
|
||||
$(this).prop('disabled', true);
|
||||
|
||||
var config_file = $("#import_config_file")[0].files[0];
|
||||
var config_path = $("#import_config_path").val();
|
||||
var backup = $("#import_backup_config").is(':checked');
|
||||
|
||||
var content_type;
|
||||
var process_data;
|
||||
var data;
|
||||
|
||||
if (config_file) {
|
||||
content_type = false;
|
||||
process_data = false;
|
||||
data = new FormData();
|
||||
data.append('config_file', config_file);
|
||||
data.append('backup', backup);
|
||||
} else {
|
||||
content_type = 'application/x-www-form-urlencoded; charset=UTF-8';
|
||||
process_data = true;
|
||||
data = {
|
||||
config_path: config_path,
|
||||
backup: backup
|
||||
}
|
||||
}
|
||||
|
||||
if (config_file) {
|
||||
$("#status-message").html('<i class="fa fa-fw fa-spin fa-refresh"></i> Uploading config file...');
|
||||
} else {
|
||||
$("#status-message").html('<i class="fa fa-fw fa-spin fa-refresh"></i>');
|
||||
}
|
||||
|
||||
$.ajax({
|
||||
url: 'import_config',
|
||||
type: 'POST',
|
||||
data: data,
|
||||
cache: false,
|
||||
async: true,
|
||||
contentType: content_type,
|
||||
processData: process_data,
|
||||
success: function(data) {
|
||||
var msg;
|
||||
if (data.result === 'success') {
|
||||
msg = "<i class='fa fa-check'></i> " + data.message;
|
||||
window.location.href = 'restart_import_config';
|
||||
} else {
|
||||
msg = "<i class='fa fa-exclamation-triangle'></i> " + data.message;
|
||||
}
|
||||
$("#status-message").html(msg);
|
||||
$("#import_config_file").val(null);
|
||||
$("#import_config_file_name").val('');
|
||||
$("#import_config_path").val('');
|
||||
},
|
||||
error: function (xhr) {
|
||||
var msg = "<i class='fa fa-exclamation-triangle'></i> Error (" + xhr.status + "): ";
|
||||
if (xhr.status === 413) {
|
||||
msg += "file is too large to upload"
|
||||
} else {
|
||||
msg += 'try again'
|
||||
}
|
||||
$("#status-message").html(msg);
|
||||
},
|
||||
complete: function(xhr) {
|
||||
$("#import_config").prop('disabled', false);
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
@@ -1,6 +1,6 @@
|
||||
body {
|
||||
font-family: 'Open Sans', Arial, sans-serif;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
margin-top: 50px;
|
||||
overflow: hidden;
|
||||
}
|
||||
@@ -36,7 +36,7 @@ select.input-sm {
|
||||
select[multiple] {
|
||||
height: 125px;
|
||||
margin: 5px 0 5px 0;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
border: 0px solid #444;
|
||||
background: #555;
|
||||
padding: 2px 2px;
|
||||
@@ -48,7 +48,7 @@ select[multiple]:focus {
|
||||
outline: 0;
|
||||
outline: thin dotted \9;
|
||||
color: #555;
|
||||
background-color: #fff;
|
||||
background-color: #eee;
|
||||
transition: background-color .3s;
|
||||
}
|
||||
select[multiple]:focus::-webkit-scrollbar-thumb {
|
||||
@@ -63,7 +63,7 @@ select[multiple] option {
|
||||
select.form-control,
|
||||
div.form-control .selectize-input {
|
||||
margin: 5px 0 5px 0;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
border: 0px solid #444;
|
||||
background: #555;
|
||||
padding: 6px 12px;
|
||||
@@ -76,7 +76,7 @@ select.form-control {
|
||||
}
|
||||
.react-selectize.root-node .react-selectize-control,
|
||||
.selectize-control.form-control .selectize-input {
|
||||
color: #fff !important;
|
||||
color: #eee !important;
|
||||
border: 0px solid #444 !important;
|
||||
background: #555 !important;
|
||||
padding: 1px 2px;
|
||||
@@ -123,15 +123,15 @@ select.form-control {
|
||||
cursor: pointer;
|
||||
}
|
||||
.react-selectize.root-node .react-selectize-control .react-selectize-placeholder {
|
||||
color: #fff !important;
|
||||
color: #eee !important;
|
||||
}
|
||||
.react-selectize.root-node .react-selectize-control .react-selectize-toggle-button path {
|
||||
fill: #fff !important;
|
||||
fill: #eee !important;
|
||||
}
|
||||
.react-selectize.root-node .simple-value,
|
||||
.selectize-control.multi .selectize-input > div {
|
||||
background: #444444 !important;
|
||||
color: #ffffff !important;
|
||||
background: #444 !important;
|
||||
color: #eee !important;
|
||||
padding-bottom: 2px !important;
|
||||
transition: background-color .3s;
|
||||
}
|
||||
@@ -156,7 +156,7 @@ select.form-control:focus,
|
||||
outline: 0;
|
||||
outline: thin dotted \9;
|
||||
color: #555 !important;
|
||||
background-color: #fff !important;
|
||||
background-color: #eee !important;
|
||||
transition: background-color .3s;
|
||||
}
|
||||
.react-selectize.root-node.open .simple-value,
|
||||
@@ -219,7 +219,7 @@ select.form-control:focus,
|
||||
}
|
||||
select.form-control option {
|
||||
color: #555;
|
||||
background-color: #fff;
|
||||
background-color: #eee;
|
||||
}
|
||||
img {
|
||||
-webkit-box-sizing: content-box;
|
||||
@@ -278,13 +278,13 @@ object {
|
||||
}
|
||||
.dropdown-menu > li > a:hover,
|
||||
.dropdown-menu > li > a:focus {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #2f2f2f;
|
||||
}
|
||||
.dropdown-menu > .active > a,
|
||||
.dropdown-menu > .active > a:hover,
|
||||
.dropdown-menu > .active > a:focus {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #2f2f2f;
|
||||
}
|
||||
.dropdown-menu > .disabled > a,
|
||||
@@ -327,14 +327,14 @@ object {
|
||||
background-color: #3B3B3B;
|
||||
}
|
||||
.btn-dark:hover {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #333;
|
||||
border-color: #444;
|
||||
}
|
||||
.btn-dark:active,
|
||||
.btn-dark.active,
|
||||
.open > .dropdown-toggle.btn-dark {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #333;
|
||||
border-color: #444;
|
||||
}
|
||||
@@ -347,7 +347,7 @@ object {
|
||||
.btn-dark:active.focus,
|
||||
.btn-dark.active.focus,
|
||||
.open > .dropdown-toggle.btn-dark.focus {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #333;
|
||||
}
|
||||
.btn-dark:active,
|
||||
@@ -387,24 +387,24 @@ fieldset[disabled] .btn-dark.active {
|
||||
background-color: #3B3B3B;
|
||||
}
|
||||
.btn-bright {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #cc7b19;
|
||||
box-shadow: inset 0 1px 0 #e7993b;
|
||||
}
|
||||
.btn-bright:focus,
|
||||
.btn-bright.focus {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #eb8600;
|
||||
}
|
||||
.btn-bright:hover {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #e59029;
|
||||
box-shadow: inset 0 1px 0 #ebac60;
|
||||
}
|
||||
.btn-bright:active,
|
||||
.btn-bright.active,
|
||||
.open > .dropdown-toggle.btn-bright {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #cc7b19;
|
||||
box-shadow: inset 0 1px 0 #e7993b;
|
||||
}
|
||||
@@ -417,7 +417,7 @@ fieldset[disabled] .btn-dark.active {
|
||||
.btn-bright:active.focus,
|
||||
.btn-bright.active.focus,
|
||||
.open > .dropdown-toggle.btn-bright.focus {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #cc7b19;
|
||||
box-shadow: inset 0 1px 0 #e7993b;
|
||||
}
|
||||
@@ -448,7 +448,7 @@ fieldset[disabled] .btn-bright.active {
|
||||
border-color: #b56d16;
|
||||
}
|
||||
.btn-bright .badge {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #cc7b19;
|
||||
box-shadow: inset 0 1px 0 #e7993b;
|
||||
}
|
||||
@@ -459,17 +459,17 @@ fieldset[disabled] .btn-bright.active {
|
||||
float: right;
|
||||
}
|
||||
.btn-danger.btn-edit:hover {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #c9302c;
|
||||
border-color: #ac2925;
|
||||
}
|
||||
.btn-danger.btn-edit.active {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #c9302c;
|
||||
border-color: #ac2925;
|
||||
}
|
||||
.btn-danger.btn-edit.active:hover {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #ac2925;
|
||||
border-color: #761c19;
|
||||
}
|
||||
@@ -512,7 +512,7 @@ fieldset[disabled] .btn-bright.active {
|
||||
background-color: #222222;
|
||||
}
|
||||
.modal-body table {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.modal-body li {
|
||||
margin-top: 7px;
|
||||
@@ -526,7 +526,7 @@ fieldset[disabled] .btn-bright.active {
|
||||
color: #E5A00D;
|
||||
}
|
||||
.modal-body i.fa {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.modal-body td:hover a .fa,
|
||||
.modal-body a:focus i.fa {
|
||||
@@ -560,7 +560,7 @@ input[type="tel"],
|
||||
input[type="color"],
|
||||
.uneditable-input {
|
||||
margin: 5px 0 5px 0;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
border: 0px solid #444;
|
||||
background: #555;
|
||||
height: 32px;
|
||||
@@ -572,7 +572,7 @@ input[type="color"],
|
||||
textarea.form-control {
|
||||
height: initial;
|
||||
margin: 5px 0 5px 0;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
border: 0px solid #444;
|
||||
background: #555;
|
||||
padding: 6px 12px;
|
||||
@@ -584,7 +584,7 @@ textarea.form-control {
|
||||
textarea.form-control:focus {
|
||||
outline: 0;
|
||||
color: #555;
|
||||
background-color: #fff;
|
||||
background-color: #eee;
|
||||
transition: background-color .3s;
|
||||
}
|
||||
.pagination > li > a,
|
||||
@@ -594,7 +594,7 @@ textarea.form-control:focus {
|
||||
padding: 6px 12px;
|
||||
margin-left: -1px;
|
||||
line-height: 1.42857143;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
text-decoration: none;
|
||||
background-color: #262626;
|
||||
border: 1px solid #444444;
|
||||
@@ -613,7 +613,7 @@ textarea.form-control:focus {
|
||||
.pagination > .active > a:focus,
|
||||
.pagination > .active > span:focus {
|
||||
z-index: 2;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
cursor: default;
|
||||
background-color: #cc7b19;
|
||||
border-color: #444444;
|
||||
@@ -632,7 +632,7 @@ textarea.form-control:focus {
|
||||
.nav-pills > li.active > a,
|
||||
.nav-pills > li.active > a:hover,
|
||||
.nav-pills > li.active > a:focus {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #cc7b19;
|
||||
}
|
||||
.nav-pills > li > a {
|
||||
@@ -666,11 +666,11 @@ textarea.form-control:focus {
|
||||
-webkit-appearance:none;
|
||||
}
|
||||
.btn-form:hover {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #333;
|
||||
}
|
||||
.btn-form:focus {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.form-control-feedback {
|
||||
color: #E5A00D;
|
||||
@@ -682,7 +682,7 @@ fieldset[disabled] .form-control {
|
||||
background-color: #555;
|
||||
}
|
||||
.form-control[readonly]:focus {
|
||||
background-color: #fff;
|
||||
background-color: #eee;
|
||||
}
|
||||
.poster {
|
||||
position: relative;
|
||||
@@ -1071,7 +1071,7 @@ a:hover .dashboard-activity-cover {
|
||||
font-size: 13px;
|
||||
font-weight: bold;
|
||||
line-height: 25px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.dashboard-activity-metadata-play_state-icon {
|
||||
flex-basis: 25px;
|
||||
@@ -1534,7 +1534,7 @@ a:hover .dashboard-recent-media-cover {
|
||||
}
|
||||
.dashboard-recent-media-metacontainer h3 {
|
||||
padding: 5px 3px 0 3px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
@@ -1647,12 +1647,12 @@ a:hover .dashboard-recent-media-cover {
|
||||
color: #f9be03;
|
||||
}
|
||||
.summary-content-title h1 a:hover {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.summary-content-title h2 {
|
||||
margin-top: 0;
|
||||
margin-bottom: 10px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
font-size: 28px;
|
||||
line-height: 40px;
|
||||
float: left;
|
||||
@@ -1806,7 +1806,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
|
||||
line-height: 24px;
|
||||
}
|
||||
.summary-content-details-tag strong {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
margin-left: 2px;
|
||||
margin-right: 10px;
|
||||
}
|
||||
@@ -1826,7 +1826,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
|
||||
}
|
||||
.summary-content-summary {
|
||||
overflow: hidden;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
float: left;
|
||||
position: relative;
|
||||
clear: both;
|
||||
@@ -1860,7 +1860,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
|
||||
display: block;
|
||||
font-size: 12px;
|
||||
line-height: 18px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.summary-content-genres {
|
||||
margin-top: 13px;
|
||||
@@ -1879,7 +1879,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
|
||||
display: block;
|
||||
font-size: 12px;
|
||||
line-height: 18px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.summary-content-writers {
|
||||
margin-top: 13px;
|
||||
@@ -1898,7 +1898,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
|
||||
display: block;
|
||||
font-size: 12px;
|
||||
line-height: 18px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.star-rating {
|
||||
display: inline-block;
|
||||
@@ -1951,7 +1951,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
|
||||
position: relative;
|
||||
margin: 0;
|
||||
line-height: 22px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
font-size: 16px;
|
||||
text-align: center;
|
||||
text-transform: uppercase;
|
||||
@@ -2047,7 +2047,7 @@ a:hover .item-children-poster {
|
||||
.item-children-instance-text-wrapper h3 {
|
||||
width: 100%;
|
||||
padding: 5px 3px 0 3px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
@@ -2148,7 +2148,7 @@ span.settings-warning {
|
||||
padding-left: 10px;
|
||||
}
|
||||
#menu_link_show_advanced_settings.active {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #cc7b19;
|
||||
}
|
||||
.advanced-setting {
|
||||
@@ -2161,7 +2161,7 @@ div.advanced-setting {
|
||||
li.advanced-setting {
|
||||
border-left: 1px solid #cc7b19;
|
||||
}
|
||||
.docker-setting {
|
||||
.setting-message {
|
||||
color: #cc7b19;
|
||||
margin-left: 10px;
|
||||
}
|
||||
@@ -2183,7 +2183,7 @@ li.advanced-setting {
|
||||
}
|
||||
.user-info-username {
|
||||
font-size: 24px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
padding-top: 27px;
|
||||
padding-left: 105px;
|
||||
}
|
||||
@@ -2249,7 +2249,7 @@ li.advanced-setting {
|
||||
left: 0px;
|
||||
}
|
||||
.user-overview-stats-instance h3 strong{
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.user-overview-stats-instance h3 {
|
||||
font-size: 30px;
|
||||
@@ -2262,7 +2262,7 @@ li.advanced-setting {
|
||||
float: left;
|
||||
}
|
||||
.user-overview-stats-instance h4 {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
margin-bottom: 25px;
|
||||
}
|
||||
.user-overview-stats-instance h1 {
|
||||
@@ -2302,7 +2302,7 @@ li.advanced-setting {
|
||||
.user-player-instance-name {
|
||||
float: left;
|
||||
padding-top: 14px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
@@ -2312,6 +2312,7 @@ li.advanced-setting {
|
||||
width: 140px;
|
||||
margin-left: 10px;
|
||||
margin-bottom: 10px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.user-player-instance-playcount h3 {
|
||||
font-size: 30px;
|
||||
@@ -2440,7 +2441,7 @@ a .library-user-instance-box:hover {
|
||||
overflow: hidden;
|
||||
}
|
||||
.home-platforms-instance-name {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
@@ -2627,7 +2628,7 @@ a .library-user-instance-box:hover {
|
||||
}
|
||||
.home-platforms-instance-list-name {
|
||||
float: left;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
@@ -3042,7 +3043,7 @@ a .home-platforms-list-cover-face:hover
|
||||
}
|
||||
.submenu a:hover {
|
||||
background: #f9be03;
|
||||
color: #FFF;
|
||||
color: #eee;
|
||||
}
|
||||
.ajaxMsg {
|
||||
background-color: rgba(255,255,255,0.075);
|
||||
@@ -3101,21 +3102,21 @@ div.dataTables_info {
|
||||
white-space: normal !important;
|
||||
}
|
||||
.tooltip.top .tooltip-arrow {
|
||||
border-top-color: #fff;
|
||||
border-top-color: #eee;
|
||||
}
|
||||
.tooltip.right .tooltip-arrow {
|
||||
border-right-color: #fff;
|
||||
border-right-color: #eee;
|
||||
}
|
||||
.tooltip.bottom .tooltip-arrow {
|
||||
border-bottom-color: #fff;
|
||||
border-bottom-color: #eee;
|
||||
}
|
||||
.tooltip.left .tooltip-arrow {
|
||||
border-left-color: #fff;
|
||||
border-left-color: #eee;
|
||||
}
|
||||
.tooltip-inner {
|
||||
max-width: 250px;
|
||||
color: #000;
|
||||
background: #fff;
|
||||
background: #eee;
|
||||
border: 0;
|
||||
font-weight: bold;
|
||||
border-radius: 2px;
|
||||
@@ -3207,7 +3208,7 @@ div.dataTables_info {
|
||||
}
|
||||
.edit-user-toggles > input[type='checkbox']:checked + label,
|
||||
.edit-library-toggles > input[type='checkbox']:checked + label {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
cursor: pointer;
|
||||
}
|
||||
.edit-user-name > input[type='text'] {
|
||||
@@ -3511,13 +3512,13 @@ pre::-webkit-scrollbar-thumb {
|
||||
width: 225px;
|
||||
}
|
||||
.config-scheduler-table th {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
a.no-highlight {
|
||||
color: #777;
|
||||
}
|
||||
a.no-highlight:hover {
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.top-line {
|
||||
border-top: 1px dotted #777;
|
||||
@@ -3525,7 +3526,7 @@ a.no-highlight:hover {
|
||||
}
|
||||
.help-bold {
|
||||
font-weight: bold;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
}
|
||||
.save-button {
|
||||
margin-top: 15px;
|
||||
@@ -3670,7 +3671,7 @@ a.no-highlight:hover {
|
||||
margin: 0 2px;
|
||||
padding: 2px 5px;
|
||||
font-size: 13px;
|
||||
color: #fff;
|
||||
color: #eee;
|
||||
background-color: #555;
|
||||
border: 0px solid #444;
|
||||
border-radius: 3px;
|
||||
@@ -3688,7 +3689,7 @@ a.no-highlight:hover {
|
||||
-webkit-transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
-moz-transition: all .1s cubic-bezier(.4,0,1,1);
-o-transition: all .1s cubic-bezier(.4,0,1,1);
text-shadow: -1px -1px 0 #fff, 1px -1px 0 #fff, -1px 1px 0 #fff, 1px 1px 0 #fff;
text-shadow: -1px -1px 0 #eee, 1px -1px 0 #eee, -1px 1px 0 #eee, 1px 1px 0 #eee;
}
.overlay-refresh-image.left {
left: 10px;
@@ -3702,7 +3703,7 @@ a.no-highlight:hover {
cursor: pointer;
}
.overlay-refresh-image.info-art:hover {
color: #fff;
color: #eee;
text-shadow: none;
}
a:hover .overlay-refresh-image {
@@ -3757,7 +3758,7 @@ a:hover .overlay-refresh-image:hover {
#newsletter-config-modal .nav-tabs > li.active > a,
#newsletter-config-modal .nav-tabs > li.active > a:hover,
#newsletter-config-modal .nav-tabs > li.active > a:focus {
color: #fff;
color: #eee;
background: #222;
}
#notifier-config-modal .nav-tabs > li.active > a,
@@ -3873,6 +3874,10 @@ a:hover .overlay-refresh-image:hover {
background-color: #31afe1;
background-image: url(../images/platforms/kodi.svg);
}
.platform-lg {
background-color: #a50034;
background-image: url(../images/platforms/lg.svg);
}
.platform-linux {
background-color: #1793d0;
background-image: url(../images/platforms/linux.svg);
@@ -3974,6 +3979,9 @@ a:hover .overlay-refresh-image:hover {
.platform-kodi-rgba {
background-color: rgba(49, 175, 225, 0.40);
}
.platform-lg-rgba {
background-color: rgba(165, 0, 52, 0.40);
}
.platform-linux-rgba {
background-color: rgba(23, 147, 208, 0.40);
}
@@ -4061,6 +4069,11 @@ a:hover .overlay-refresh-image:hover {
width: 62px !important;
background-image: url(../images/rating/imdb.svg);
}
.rating-themoviedb {
width: 72px !important;
background-image: url(../images/rating/themoviedb.svg);
background-size: auto 16px !important;
}
.rating-rottentomatos-ripe {
background-image: url(../images/rating/tomato-ripe.svg);
}
@@ -4104,7 +4117,7 @@ a:hover .overlay-refresh-image:hover {
flex-shrink: 0;
}
#info-modal .stream-info-item .sub-value {
color: #fff;
color: #eee;
font-weight: bold;
margin-left: 10px;
text-align: left;
@@ -4127,7 +4140,7 @@ a:hover .overlay-refresh-image:hover {
.stream-info th:first-child {
width: 125px;
height: 30px;
color: #fff;
color: #eee;
font-size: 12px;
text-align: right;
text-transform: uppercase;
@@ -4250,7 +4263,7 @@ a[data-tab-destination] {
transform: translate(-50%, -50%);
}
.iframe-button {
color: #fff;
color: #eee;
border-radius: 20px;
text-align: center;
cursor: pointer;
@@ -4267,7 +4280,7 @@ a[data-tab-destination] {
}
.iframe-button:hover,
.iframe-button:focus {
color: #fff;
color: #eee;
box-shadow: rgba(0, 0, 0, 0.1) 0px 0px 0px 99999px inset, rgba(0, 0, 0, 0.2) 0px 1px 5px 0px, rgba(0, 0, 0, 0.14) 0px 2px 2px 0px, rgba(0, 0, 0, 0.12) 0px 3px 1px -2px;
}
.iframe-button:active {
@@ -4332,7 +4345,7 @@ a[data-tab-destination] {
display: inline !important;
background: none !important;
padding: 0 !important;
color: #fff;
color: #eee;
}
.news-body a:hover {
color: #f9be03;
@@ -118,9 +118,7 @@ DOCUMENTATION :: END
<div id="poster-${sk}" class="dashboard-activity-cover" style="background-image: url(${page('pms_image_proxy', data['parent_thumb'], data['parent_rating_key'], 300, 300, fallback='cover', refresh=True)});"></div>
</a>
% elif data['media_type'] in ('photo', 'clip'):
% if data['extra_type']:
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(${page('pms_image_proxy', data['art'].replace('/art', '/thumb') or data['thumb'], data['rating_key'], 300, 450, fallback='poster', refresh=True)});"></div>
% elif data['parent_thumb']:
% if data['parent_thumb']:
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(${page('pms_image_proxy', data['parent_thumb'], data['parent_rating_key'], 300, 450, fallback='poster', refresh=True)});"></div>
% else:
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(${page('pms_image_proxy', data['thumb'], data['rating_key'], 300, 450, fallback='poster', refresh=True)});"></div>
7
data/interfaces/default/images/platforms/lg.svg
Normal file
@@ -0,0 +1,7 @@
<!-- Generated by IcoMoon.io -->
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="0 0 64 64">
<title>lg</title>
<path fill="#fff" d="M30.203 31.797c0 8.176-6.654 14.832-14.835 14.82-7.927-0.011-14.818-6.28-14.812-14.838 0.005-8.282 6.541-14.82 14.841-14.803 8.618 0.017 14.807 6.969 14.806 14.822zM26.577 32.388c-0.087 4.433-3.485 9.518-9.37 10.487-6.122 1.008-11.584-2.989-12.814-8.656-0.632-2.912-0.221-5.696 1.362-8.228 2.347-3.754 5.815-5.502 10.222-5.453 0-0.387 0-0.761 0-1.134-4.114-0.281-9.226 1.824-11.763 6.923-2.454 4.932-1.296 10.953 2.811 14.672 4.153 3.762 10.224 4.309 14.953 1.326 2.328-1.468 3.999-3.496 4.997-6.067 0.628-1.617 0.882-3.296 0.813-5.032-2.967 0-5.909 0-8.864 0 0 0.39 0 0.768 0 1.162 2.558-0 5.097-0 7.652-0zM15.991 37.112c0-0.129 0-0.221 0-0.313 0-3.731 0-7.463 0-11.194 0-0.060-0.004-0.119 0-0.179 0.009-0.118-0.038-0.166-0.16-0.163-0.278 0.006-0.556 0.012-0.833-0.002-0.178-0.008-0.237 0.042-0.237 0.23 0.005 4.194 0.005 8.389-0 12.583-0 0.198 0.065 0.239 0.249 0.237 1.224-0.007 2.448-0.004 3.672-0.004 0.072 0 0.143 0 0.244 0 0-0.343-0.008-0.665 0.003-0.987 0.006-0.166-0.050-0.214-0.214-0.212-0.82 0.007-1.641 0.003-2.461 0.003-0.078 0-0.155 0-0.263 0zM12.434 27.068c0.003-0.987-0.799-1.798-1.785-1.805s-1.799 0.796-1.805 1.782c-0.006 0.985 0.799 1.8 1.783 1.805 0.985 0.004 1.804-0.803 1.807-1.783z"></path>
<path fill="#fff" d="M63.467 30.606c0 2.864 0 5.707 0 8.571-1.242 0-2.479 0-3.742 0 0-0.468 0-0.933 0-1.433-0.203 0.226-0.366 0.432-0.553 0.612-0.683 0.656-1.518 1-2.441 1.136-1.187 0.174-2.348 0.075-3.462-0.4-1.234-0.526-2.145-1.407-2.8-2.565-0.599-1.058-0.906-2.207-1.035-3.409-0.148-1.367-0.103-2.723 0.28-4.051 0.797-2.764 2.635-4.391 5.453-4.899 1.534-0.277 3.058-0.208 4.54 0.311 1.243 0.436 2.298 1.139 3.011 2.276 0.431 0.688 0.584 1.467 0.687 2.258 0.013 0.097 0.028 0.195 0.046 0.318-0.064 0.003-0.126 0.010-0.188 0.010-1.389 0.001-2.779-0.002-4.169 0.003-0.151 0.001-0.215-0.034-0.245-0.197-0.229-1.234-1.281-1.773-2.308-1.679-1.182 0.108-1.823 0.859-2.22 1.888-0.211 0.547-0.315 1.12-0.352 1.703-0.066 1.061-0.039 2.117 0.31 3.138 0.211 0.618 0.523 1.173 1.050 1.579 1.371 1.055 3.326 0.436 3.877-1.228 0.090-0.274 0.157-0.557 0.246-0.875-0.112 0-0.182 0-0.251 0-0.794 0-1.588-0.005-2.382 0.004-0.168 0.002-0.213-0.053-0.212-0.214 0.006-0.887 0.005-1.774 0.001-2.66-0.001-0.139 0.019-0.215 0.192-0.215 2.17 0.006 4.341 0.004 6.511 0.004 0.045 0 0.090 0.008 0.157 0.013z"></path>
<path fill="#fff" d="M48.501 35.522c0 1.233 0 2.44 0 3.661-3.613 0-7.216 0-10.834 0 0-4.923 0-9.841 0-14.77 1.44 0 2.872 0 4.331 0 0 0.092 0 0.175 0 0.259 0 3.526 0 7.053 0 10.579 0 0.271 0 0.271 0.267 0.271 1.985 0 3.97 0 5.954 0 0.086 0 0.171 0 0.281 0z"></path>
</svg>
1
data/interfaces/default/images/rating/themoviedb.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 190.24 81.52"><defs><linearGradient id="a" y1="40.76" x2="190.24" y2="40.76" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#90cea1"/><stop offset=".56" stop-color="#3cbec9"/><stop offset="1" stop-color="#00b3e5"/></linearGradient></defs><g data-name="Layer 2"><path d="M105.67 36.06h66.9a17.67 17.67 0 0017.67-17.66A17.67 17.67 0 00172.57.73h-66.9A17.67 17.67 0 0088 18.4a17.67 17.67 0 0017.67 17.66zm-88 45h76.9a17.67 17.67 0 0017.67-17.66 17.67 17.67 0 00-17.67-17.67h-76.9A17.67 17.67 0 000 63.4a17.67 17.67 0 0017.67 17.66zm-7.26-45.64h7.8V6.92h10.1V0h-28v6.9h10.1zm28.1 0h7.8V8.25h.1l9 27.15h6l9.3-27.15h.1V35.4h7.8V0H66.76l-8.2 23.1h-.1L50.31 0h-11.8zm113.92 20.25a15.07 15.07 0 00-4.52-5.52 18.57 18.57 0 00-6.68-3.08 33.54 33.54 0 00-8.07-1h-11.7v35.4h12.75a24.58 24.58 0 007.55-1.15 19.34 19.34 0 006.35-3.32 16.27 16.27 0 004.37-5.5 16.91 16.91 0 001.63-7.58 18.5 18.5 0 00-1.68-8.25zM145 68.6a8.8 8.8 0 01-2.64 3.4 10.7 10.7 0 01-4 1.82 21.57 21.57 0 01-5 .55h-4.05v-21h4.6a17 17 0 014.67.63 11.66 11.66 0 013.88 1.87A9.14 9.14 0 01145 59a9.87 9.87 0 011 4.52 11.89 11.89 0 01-1 5.08zm44.63-.13a8 8 0 00-1.58-2.62 8.38 8.38 0 00-2.42-1.85 10.31 10.31 0 00-3.17-1v-.1a9.22 9.22 0 004.42-2.82 7.43 7.43 0 001.68-5 8.42 8.42 0 00-1.15-4.65 8.09 8.09 0 00-3-2.72 12.56 12.56 0 00-4.18-1.3 32.84 32.84 0 00-4.62-.33h-13.2v35.4h14.5a22.41 22.41 0 004.72-.5 13.53 13.53 0 004.28-1.65 9.42 9.42 0 003.1-3 8.52 8.52 0 001.2-4.68 9.39 9.39 0 00-.55-3.18zm-19.42-15.75h5.3a10 10 0 011.85.18 6.18 6.18 0 011.7.57 3.39 3.39 0 011.22 1.13 3.22 3.22 0 01.48 1.82 3.63 3.63 0 01-.43 1.8 3.4 3.4 0 01-1.12 1.2 4.92 4.92 0 01-1.58.65 7.51 7.51 0 01-1.77.2h-5.65zm11.72 20a3.9 3.9 0 01-1.22 1.3 4.64 4.64 0 01-1.68.7 8.18 8.18 0 01-1.82.2h-7v-8h5.9a15.35 15.35 0 012 .15 8.47 8.47 0 012.05.55 4 4 0 011.57 1.18 3.11 3.11 0 01.63 2 3.71 3.71 0 01-.43 1.92z" fill="url(#a)" data-name="Layer 1"/></g></svg>
@@ -179,10 +179,10 @@
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
<h4 class="modal-title">Terminate Session</h4>
<h4 class="modal-title">Terminate Stream</h4>
</div>
<div class="modal-body" style="text-align: center;">
<p>Are you sure you want to terminate this session?</p>
<p>Are you sure you want to terminate this stream?</p>
<p>
<strong>
<span id="terminate-user"></span><br />
@@ -275,6 +275,11 @@ DOCUMENTATION :: END
<span class="rating-image rating-imdb"><strong>${data['rating']}</strong></span>
</div>
% endif
% if data['rating_image'].startswith('themoviedb://'):
<div class="critic-rating hidden-xs hidden-sm" title="${data['rating']}">
<span class="rating-image rating-themoviedb"><strong>${get_percent(data['rating'], 10)}%</strong></span>
</div>
% endif
% if data['audience_rating_image'].startswith('rottentomatoes://'):
<div class="critic-rating hidden-xs hidden-sm" title="${data['audience_rating']}">
<span class="rating-image rating-rottentomatos-${data['audience_rating_image'].rsplit('.')[-1]}"><strong>${get_percent(data['audience_rating'], 10)}%</strong></span>
@@ -36,7 +36,3 @@ function check_notifications() {
check_notifications();
}, 5000);
}

$(document).ready(function () {
check_notifications();
});
@@ -141,7 +141,7 @@ history_table_options = {
|
||||
if (rowData['transcode_decision'] === 'transcode') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'copy') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-stream fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'direct play') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
|
||||
}
|
||||
@@ -184,7 +184,9 @@ history_table_options = {
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="' + page('pms_image_proxy', rowData['thumb'], rowData['rating_key'], 300, 300, null, null, null, 'cover') + '" data-height="80" data-width="80">' + cellData + parent_info + '</span>';
|
||||
$(td).html('<div class="history-title"><a href="' + page('info', rowData['rating_key'], rowData['guid'], history, rowData['live']) + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'clip') {
|
||||
$(td).html(cellData);
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Clip"><i class="fa fa-video-camera fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="' + page('pms_image_proxy', rowData['thumb'], rowData['rating_key'], 300, 450, null, null, null, fallback) + '" data-height="120" data-width="80">' + cellData + parent_info + '</span>';
|
||||
$(td).html('<div class="history-title"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></div>');
|
||||
} else {
|
||||
$(td).html('<a href="' + page('info', rowData['rating_key']) + '">' + cellData + '</a>');
|
||||
}
|
||||
|
@@ -83,7 +83,7 @@ history_table_modal_options = {
|
||||
if (rowData['transcode_decision'] === 'transcode') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'copy') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-stream fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'direct play') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
|
||||
}
|
||||
|
@@ -1,3 +1,25 @@
|
||||
var date_format = 'YYYY-MM-DD';
|
||||
var time_format = 'hh:mm a';
|
||||
|
||||
$.ajax({
|
||||
url: 'get_date_formats',
|
||||
type: 'GET',
|
||||
success: function(data) {
|
||||
date_format = data.date_format;
|
||||
time_format = data.time_format;
|
||||
}
|
||||
});
|
||||
|
||||
var seenRender = function (data, type, full) {
|
||||
return moment(data, "X").fromNow();
|
||||
};
|
||||
|
||||
var seenCreatedCell = function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null) {
|
||||
$(td).attr('title', moment(cellData, "X").format(date_format + ' ' + time_format));
|
||||
}
|
||||
};
|
||||
|
||||
user_ip_table_options = {
|
||||
"destroy": true,
|
||||
"language": {
|
||||
@@ -21,16 +43,24 @@ user_ip_table_options = {
|
||||
"columnDefs": [
|
||||
{
|
||||
"targets": [0],
|
||||
"data":"last_seen",
|
||||
"render": function ( data, type, full ) {
|
||||
return moment(data, "X").fromNow();
|
||||
},
|
||||
"data": "last_seen",
|
||||
"render": seenRender,
|
||||
"createdCell": seenCreatedCell,
|
||||
"searchable": false,
|
||||
"width": "15%",
|
||||
"width": "12%",
|
||||
"className": "no-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [1],
|
||||
"data": "first_seen",
|
||||
"render": seenRender,
|
||||
"createdCell": seenCreatedCell,
|
||||
"searchable": false,
|
||||
"width": "12%",
|
||||
"className": "no-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [2],
|
||||
"data": "ip_address",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData) {
|
||||
@@ -44,22 +74,22 @@ user_ip_table_options = {
|
||||
$(td).html('n/a');
|
||||
}
|
||||
},
|
||||
"width": "15%",
|
||||
"width": "12%",
|
||||
"className": "no-wrap modal-control-ip"
|
||||
},
|
||||
{
|
||||
"targets": [2],
|
||||
"targets": [3],
|
||||
"data": "platform",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "15%",
|
||||
"width": "12%",
|
||||
"className": "no-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [3],
|
||||
"targets": [4],
|
||||
"data": "player",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
@@ -67,18 +97,18 @@ user_ip_table_options = {
|
||||
if (rowData['transcode_decision'] === 'transcode') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'copy') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-stream fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'direct play') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
|
||||
}
|
||||
$(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + ' ' + cellData + '</div></a></div>');
|
||||
}
|
||||
},
|
||||
"width": "15%",
|
||||
"width": "12%",
|
||||
"className": "no-wrap modal-control"
|
||||
},
|
||||
{
|
||||
"targets": [4],
|
||||
"targets": [5],
|
||||
"data": "last_played",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
@@ -119,7 +149,7 @@ user_ip_table_options = {
|
||||
"className": "datatable-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [5],
|
||||
"targets": [6],
|
||||
"data": "play_count",
|
||||
"searchable": false,
|
||||
"width": "10%",
|
||||
|
@@ -142,7 +142,7 @@ users_list_table_options = {
|
||||
if (rowData['transcode_decision'] === 'transcode') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'copy') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-stream fa-fw"></i></span>';
|
||||
} else if (rowData['transcode_decision'] === 'direct play') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
|
||||
}
|
||||
|
@@ -38,7 +38,7 @@
<th align="left" id="count">Total Movies / TV Shows / Artists</th>
<th align="left" id="parent_count">Total Seasons / Albums</th>
<th align="left" id="child_count">Total Episodes / Tracks</th>
<th align="left" id="last_accessed">Last Accessed</th>
<th align="left" id="last_accessed">Last Streamed</th>
<th align="left" id="last_played">Last Played</th>
<th align="left" id="total_plays">Total Plays</th>
<th align="left" id="total_duration">Total Played Duration</th>
@@ -24,7 +24,7 @@

<!-- ICONS -->
<!-- Android -->
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.0.5" crossorigin="use-credentials>
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.0.5" crossorigin="use-credentials">
<meta name="theme-color" content="#282a2d">
<!-- Apple -->
<link rel="apple-touch-icon" sizes="180x180" href="${http_root}images/favicon/apple-touch-icon.png?v=2.0.5">
@@ -33,7 +33,7 @@
|
||||
<label for="friendly_name">OneSignal Device ID</label>
|
||||
<div class="row">
|
||||
<div class="col-md-8">
|
||||
<input type="text" class="form-control" id="device_id" value="${device['device_id']}" size="30" readonly>
|
||||
<input type="text" class="form-control" id="onesignal_id" value="${device['onesignal_id'] or ''}" size="30" readonly>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Your OneSignal device ID for notifications.</p>
|
||||
|
@@ -13,7 +13,11 @@ DOCUMENTATION :: END
|
||||
% for device in sorted(devices_list, key=lambda k: k['device_name']):
|
||||
<li class="mobile-device pointer" data-id="${device['id']}" data-name="${device['device_name']}">
|
||||
<span>
|
||||
<span class="toggle-left"><i class="fa fa-lg fa-fw fa-mobile"></i></span>
|
||||
% if device['official']:
|
||||
<span class="toggle-left"><i class="fa fa-lg fa-fw fa-mobile"></i></span>
|
||||
% else:
|
||||
<span class="toggle-left officail-tooltip" data-toggle="tooltip" data-placement="top" title="Unofficial or Unknown App"><i class="fa fa-lg fa-fw fa-exclamation-triangle"></i></span>
|
||||
% endif
|
||||
${device['friendly_name'] or device['device_name']} <span class="friendly_name">(${device['id']})</span>
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-fw fa-cog"></i></span>
|
||||
<span class="toggle-right friendly_name" id="device-last_seen-${device['id']}">
|
||||
@@ -117,6 +121,7 @@ DOCUMENTATION :: END
|
||||
});
|
||||
|
||||
$('#api_qr_address').change(function () {
|
||||
this.value = $.trim(this.value);
|
||||
var url = $(this).val();
|
||||
checkQRAddress(url);
|
||||
|
||||
@@ -138,4 +143,6 @@ DOCUMENTATION :: END
|
||||
}
|
||||
verifiedDevice = true;
|
||||
})
|
||||
|
||||
$('.officail-tooltip').tooltip();
|
||||
</script>
|
@@ -49,7 +49,16 @@
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
% if notifier['agent_name'] == 'scripts' and item['name'] == 'scripts_script_folder':
|
||||
<div class="input-group">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="${item['name']}_browse" data-toggle="browse" data-filter=".folderonly" data-target="#${item['name']}">Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
% else:
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
% endif
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
@@ -853,10 +862,7 @@
|
||||
PNotify.prototype.options.hide = true;
|
||||
PNotify.prototype.options.delay = $('#browser_auto_hide_delay').val() * 1000;
|
||||
}
|
||||
var notification = new PNotify({
|
||||
title: $('#test_subject').val(),
|
||||
text: $('#test_body').val()
|
||||
});
|
||||
displayPNotify($('#test_subject').val(), $('#test_body').val());
|
||||
showMsg('<i class="fa fa-check"></i> Notification sent.', false, true, 5000);
|
||||
}
|
||||
}
|
||||
|
@@ -32,7 +32,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data != None:
|
||||
<%
|
||||
from plexpy.helpers import page
|
||||
from plexpy.helpers import cast_to_int, page
|
||||
%>
|
||||
% if data:
|
||||
<div class="dashboard-recent-media-row">
|
||||
@@ -87,7 +87,7 @@ DOCUMENTATION :: END
|
||||
<a href="${page('info', item['rating_key'])}" title="${item['title']}">${item['title']}</a>
|
||||
</h3>
|
||||
<h3 class="text-muted">
|
||||
${item['child_count']} Seasons
|
||||
${item['child_count']} Season${'s' if cast_to_int(item['child_count']) > 1 else ''}
|
||||
</h3>
|
||||
<h3 class="text-muted"> </h3>
|
||||
</div>
|
||||
@@ -151,7 +151,7 @@ DOCUMENTATION :: END
|
||||
<a href="${page('info', item['rating_key'])}" title="Episode ${item['media_index']}">E${item['media_index']}</a>
|
||||
</h3>
|
||||
</div>
|
||||
% elif item['media_type'] == 'album':
|
||||
% elif item['media_type'] == 'album':
|
||||
<a href="${page('info', item['rating_key'])}" title="${item['parent_title']}">
|
||||
<div class="dashboard-recent-media-cover">
|
||||
<div class="dashboard-recent-media-cover-face" style="background-image: url(${page('pms_image_proxy', item['thumb'], item['rating_key'], 300, 300, fallback='cover')});">
|
||||
@@ -177,7 +177,7 @@ DOCUMENTATION :: END
|
||||
</h3>
|
||||
<h3 class="text-muted"> </h3>
|
||||
</div>
|
||||
% endif
|
||||
% endif
|
||||
</li>
|
||||
</div>
|
||||
% endfor
|
||||
|
@@ -28,7 +28,7 @@ DOCUMENTATION :: END
</tr>
</thead>
<tbody>
% for job in common.SCHEDULER_LIST:
% for job, job_type in common.SCHEDULER_LIST.items():
% if job in scheduled_jobs:
<%
sched_job = plexpy.SCHED.get_job(job)
@@ -41,12 +41,12 @@ DOCUMENTATION :: END
<td>${helpers.format_timedelta_Hms(sched_job.next_run_time - now)}</td>
<td>${sched_job.next_run_time.astimezone(plexpy.SYS_TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')}</td>
</tr>
% elif job in ('Check for server response', 'Check for active sessions', 'Check for recently added items') and plexpy.WS_CONNECTED:
% elif job_type == 'websocket' and plexpy.WS_CONNECTED:
<tr>
% if job == 'Check for active sessions':
<td><a class="queue-modal-link" href="#" data-queue="active sessions">${job}</a></td>
<td><a class="queue-modal-link no-highlight" href="#" data-queue="active sessions">${job}</a></td>
% elif job == 'Check for recently added items':
<td><a class="queue-modal-link" href="#" data-queue="recently added">${job}</a></td>
<td><a class="queue-modal-link no-highlight" href="#" data-queue="recently added">${job}</a></td>
% else:
<td>${job}</td>
% endif
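The scheduler template above now receives both a job name and a job type from SCHEDULER_LIST, so websocket-driven jobs are detected by type instead of a hard-coded list of names. A rough sketch of the dict shape the template assumes (presumably defined in Tautulli's common module; only the 'websocket' type string is taken from the template, the other entries are illustrative):

```python
# Hypothetical sketch of the mapping iterated by the template above;
# the job names and the 'standard' type value are made up for illustration.
SCHEDULER_LIST = {
    'Check GitHub for updates': 'standard',
    'Check for server response': 'websocket',
    'Check for active sessions': 'websocket',
    'Check for recently added items': 'websocket',
}

for job, job_type in SCHEDULER_LIST.items():
    if job_type == 'websocket':
        # These rows are only rendered while the websocket is connected.
        print('{} is driven by the websocket'.format(job))
```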
@@ -8,7 +8,7 @@
|
||||
from plexpy.helpers import anon_url, checked
|
||||
|
||||
docker_setting = 'disabled' if plexpy.DOCKER else ''
|
||||
docker_msg = '<span class="docker-setting small">(Controlled by Docker Container)</span>' if plexpy.DOCKER else ''
|
||||
docker_msg = '<span class="setting-message small">(Controlled by Docker Container)</span>' if plexpy.DOCKER else ''
|
||||
|
||||
available_notification_agents = sorted(notifiers.available_notification_agents(), key=lambda k: k['label'].lower())
|
||||
available_newsletter_agents = sorted(newsletters.available_newsletter_agents(), key=lambda k: k['label'].lower())
|
||||
@@ -458,11 +458,20 @@
|
||||
|
||||
<p class="help-block">Note: Web interface changes require a restart.</p>
|
||||
% if common.PLATFORM in ('Windows', 'Darwin'):
|
||||
<%
|
||||
tray = {'Windows': 'System Tray', 'Darwin': 'Menu Bar'}
|
||||
tray_disabled = tray_disabled_msg = ''
|
||||
if common.PLATFORM == 'Darwin':
|
||||
from plexpy.macos import HAS_PYOBJC
|
||||
if not HAS_PYOBJC:
|
||||
tray_disabled = 'disabled'
|
||||
tray_disabled_msg = '<span class="setting-message small">(Missing pyobjc module)</span>'
|
||||
%>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" class="http-settings" name="sys_tray_icon" id="sys_tray_icon" value="1" ${config['sys_tray_icon']}> Enable System Tray Icon
|
||||
<input type="checkbox" class="http-settings" name="sys_tray_icon" id="sys_tray_icon" value="1" ${config['sys_tray_icon']} ${tray_disabled}> Enable ${tray[common.PLATFORM]} Icon ${tray_disabled_msg | n}
|
||||
</label>
|
||||
<p class="help-block">Show Tautulli shortcut in the system tray.</p>
|
||||
<p class="help-block">Show Tautulli shortcut in the ${tray[common.PLATFORM].lower()}.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -559,29 +568,44 @@
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="https_cert">HTTPS Certificate</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control http-settings" id="https_cert" name="https_cert" value="${config['https_cert']}">
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control http-settings" id="https_cert" name="https_cert" value="${config['https_cert']}">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="https_cert_browse" data-toggle="browse" data-filter=".pem" data-target="#https_cert">Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">The location of the SSL certificate.</p>
|
||||
<p class="help-block">The location of the SSL certificate in PEM format.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="https_cert_chain">HTTPS Certificate Chain</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control http-settings" id="https_cert_chain" name="https_cert_chain" value="${config['https_cert_chain']}">
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control http-settings" id="https_cert_chain" name="https_cert_chain" value="${config['https_cert_chain']}">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="https_cert_chain_browse" data-toggle="browse" data-filter=".pem" data-target="#https_cert_chain">Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">The location of the SSL certificate chain.</p>
|
||||
<p class="help-block">The location of the SSL certificate chain in PEM format.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="https_key">HTTPS Key</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control http-settings" id="https_key" name="https_key" value="${config['https_key']}">
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control http-settings" id="https_key" name="https_key" value="${config['https_key']}">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="https_key_browse" data-toggle="browse" data-filter=".pem" data-target="#https_key">Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">The location of the SSL key.</p>
|
||||
<p class="help-block">The location of the SSL key in PEM format.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -766,7 +790,6 @@
|
||||
<label>
|
||||
<input type="checkbox" class="pms-settings" id="pms_url_manual" name="pms_url_manual" value="1" ${config['pms_url_manual']}> Manual Connection
|
||||
</label>
|
||||
<span id="cloudManualConnection" style="display: none; color: #eb8600; padding-left: 10px;"> Not available for Plex Cloud servers.</span>
|
||||
<p class="help-block">Use the user defined connection details. Do not retrieve the server connection URL automatically.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
@@ -792,10 +815,15 @@
|
||||
<input type="checkbox" name="server_changed" id="server_changed" value="1" style="display: none;">
|
||||
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="pms_logs_folder">Logs Folder</label>
|
||||
<label for="pms_logs_folder">Plex Logs Folder</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="pms_logs_folder" name="pms_logs_folder" value="${config['pms_logs_folder']}" size="30" data-parsley-trigger="change" data-parsley-pattern="^[^\~\%]" data-parsley-errors-container="#pms_logs_folder_error" data-parsley-error-message="Shortcuts are not recognized.">
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control" id="pms_logs_folder" name="pms_logs_folder" value="${config['pms_logs_folder']}" size="30" data-parsley-trigger="change" data-parsley-pattern="^[^\~\%]" data-parsley-errors-container="#pms_logs_folder_error" data-parsley-error-message="Shortcuts are not recognized.">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="pms_logs_folder_browse" data-toggle="browse" data-filter=".folderonly" data-target="#pms_logs_folder">Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div id="pms_logs_folder_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
@@ -823,7 +851,6 @@
|
||||
<label>
|
||||
<input type="checkbox" id="monitor_pms_updates" name="monitor_pms_updates" value="1" ${config['monitor_pms_updates']}> Monitor Plex Updates
|
||||
</label>
|
||||
<span id="cloudMonitorUpdates" style="display: none; color: #eb8600; padding-left: 10px;"> Not available for Plex Cloud servers.</span>
|
||||
<p class="help-block">Enable to have Tautulli check if updates are available for the Plex Media Server.</p>
|
||||
</div>
|
||||
<div id="pms_update_options">
|
||||
@@ -857,36 +884,6 @@
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="monitor_remote_access" name="monitor_remote_access" value="1" ${config['monitor_remote_access']}> Monitor Plex Remote Access
|
||||
</label>
|
||||
<span id="cloudMonitorRemoteAccess" style="display: none; color: #eb8600; padding-left: 10px;"> Not available for Plex Cloud servers.</span>
|
||||
<span id="remoteAccessCheck" class="settings-warning"></span>
|
||||
<p class="help-block">Enable to have Tautulli check if remote access to the Plex Media Server goes down.</p>
|
||||
</div>
|
||||
<div id="monitor_remote_access_options">
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="remote_access_ping_interval">Remote Access Ping Interval</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="remote_access_ping_interval" name="remote_access_ping_interval" value="${config['remote_access_ping_interval']}" size="5" data-parsley-min="60" data-parsley-trigger="change" data-parsley-errors-container="#remote_access_ping_interval_error" required>
|
||||
</div>
|
||||
<div id="remote_access_ping_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The interval (in seconds) Tautulli will ping the Plex Media Server for the remote access status. Minimum 60.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="remote_access_ping_threshold">Remote Access Ping Threshold</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="remote_access_ping_threshold" name="remote_access_ping_threshold" value="${config['remote_access_ping_threshold']}" size="5" data-parsley-min="1" data-parsley-trigger="change" data-parsley-errors-container="#remote_access_ping_threshold_error" required>
|
||||
</div>
|
||||
<div id="remote_access_ping_threshold_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The number of consecutive remote access status failures to consider remote access as down. Minimum 1.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="refresh_users_interval">Users List Refresh Interval</label>
|
||||
@@ -1047,7 +1044,7 @@
|
||||
</div>
|
||||
<div id="notify_recently_added_delay_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">Set the delay (in seconds) to wait for consecutive recently added items to group together and to allow metadata to be processed before sending the notification. Minimum 60 seconds.</p>
|
||||
<p class="help-block">Set the delay (in seconds) to wait for consecutive recently added items to group together and to allow metadata to be processed before sending the recently added notification. Minimum 60 seconds, default 300.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label>Flush Recently Added</label>
|
||||
@@ -1125,10 +1122,15 @@
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Custom Newsletter Templates Folder</label>
|
||||
<label for="newsletter_custom_dir">Custom Newsletter Templates Folder</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_custom_dir" name="newsletter_custom_dir" value="${config['newsletter_custom_dir']}">
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control" id="newsletter_custom_dir" name="newsletter_custom_dir" value="${config['newsletter_custom_dir']}">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="newsletter_custom_dir_browse" data-toggle="browse" data-filter=".folderonly" data-target="#newsletter_custom_dir">Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter the full path to your custom newsletter templates folder. Leave blank for default.</p>
|
||||
@@ -1136,8 +1138,13 @@
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Newsletter Output Directory</label> ${docker_msg | n}
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_dir" name="newsletter_dir" value="${config['newsletter_dir']}" ${docker_setting}>
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control" id="newsletter_dir" name="newsletter_dir" value="${config['newsletter_dir']}" ${docker_setting}>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="newsletter_dir_browse" data-toggle="browse" data-filter=".folderonly" data-target="#newsletter_dir" ${docker_setting}>Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Enter the full path to where newsletter files will be saved.</p>
|
||||
@@ -1325,14 +1332,32 @@
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-import_backups">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Database Import</h3>
|
||||
<h3>Import</h3>
|
||||
</div>
|
||||
|
||||
<p class="help-block">Click a button below to import an existing database from the selected app.</p>
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="tautulli">Tautulli</button>
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="plexwatch">PlexWatch</button>
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="plexivity">Plexivity</button>
|
||||
<div class="form-group">
|
||||
<label for="database_import">Database Import</label>
|
||||
<p class="help-block">Click a button below to import an existing database from the selected app.</p>
|
||||
<div class="row">
|
||||
<div class="col-md-9">
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="tautulli">Tautulli</button>
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="plexwatch">PlexWatch</button>
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="plexivity">Plexivity</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="config_import">Configuration Import</label>
|
||||
<p class="help-block">Click the button below to import a previous Tautulli configuration.</p>
|
||||
<div class="row">
|
||||
<div class="col-md-9">
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form toggle-config-import-modal" type="button" data-target="#config-import-modal" data-toggle="modal">Tautulli</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
@@ -1370,8 +1395,13 @@
|
||||
<div class="form-group">
|
||||
<label for="log_dir">Log Directory</label> ${docker_msg | n}
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control directory-settings" id="log_dir" name="log_dir" value="${config['log_dir']}" ${docker_setting}>
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control directory-settings" id="log_dir" name="log_dir" value="${config['log_dir']}" ${docker_setting}>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="log_dir_browse" data-toggle="browse" data-filter=".folderonly" data-target="#log_dir" ${docker_setting}>Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form" type="button" id="clear_logs">Clear Logs</button>
|
||||
</div>
|
||||
@@ -1381,8 +1411,13 @@
|
||||
<div class="form-group">
|
||||
<label for="backup_dir">Backup Directory</label> ${docker_msg | n}
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control directory-settings" id="backup_dir" name="backup_dir" value="${config['backup_dir']}" ${docker_setting}>
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control directory-settings" id="backup_dir" name="backup_dir" value="${config['backup_dir']}" ${docker_setting}>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="backup_dir_browse" data-toggle="browse" data-filter=".folderonly" data-target="#backup_dir" ${docker_setting}>Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form" type="button" id="backup_config">Backup Config</button>
|
||||
<button class="btn btn-form" type="button" id="backup_database">Backup Database</button>
|
||||
@@ -1393,8 +1428,13 @@
|
||||
<div class="form-group">
|
||||
<label for="cache_dir">Cache Directory</label> ${docker_msg | n}
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control directory-settings" id="cache_dir" name="cache_dir" value="${config['cache_dir']}" ${docker_setting}>
|
||||
<div class="col-md-7">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control directory-settings" id="cache_dir" name="cache_dir" value="${config['cache_dir']}" ${docker_setting}>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="cache_dir_browse" data-toggle="browse" data-filter=".folderonly" data-target="#cache_dir" ${docker_setting}>Browse</button>
|
||||
</span>
|
||||
</div>
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form" type="button" id="clear_cache">Clear All Cache</button>
|
||||
<button class="btn btn-form" type="button" id="clear_image_cache">Clear Image Cache</button>
|
||||
@@ -1426,6 +1466,7 @@
|
||||
<label>Registered Devices</label>
|
||||
<p class="help-block">Register a new device using a QR code, or configure an existing device by clicking the settings icon on the right.</p>
|
||||
<p id="app_api_msg" style="color: #eb8600;">Warning: The API must be enabled under <a data-tab-destination="web_interface" data-target="api_enabled">Web Interface</a> to use the app.</p>
|
||||
<br />
|
||||
<div class="row">
|
||||
<div id="plexpy-mobile-devices-table" class="col-md-12">
|
||||
<div class='text-muted'><i class="fa fa-refresh fa-spin"></i> Loading registered devices...</div>
|
||||
@@ -1522,6 +1563,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<div id="app-import-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="app-import-modal"></div>
|
||||
<div id="config-import-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="config-import-modal"></div>
|
||||
<div id="add-notifier-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="add-notifier-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
@@ -1861,7 +1903,10 @@ Rating: {rating}/10 --> Rating: /10
|
||||
<label>Instructions</label>
|
||||
<p class="help-block">
|
||||
Scan the QR code below with the Tautulli Android app to automatically register it with the server (make sure the Tautulli Address below is correct)
|
||||
or manually enter the connection info and device token into the app settings.
|
||||
or manually enter the connection info and device token into the app settings. This window will automatically close once device registration is successful.
|
||||
</p>
|
||||
<p class="help-block">
|
||||
Note: OneSignal.com must not be blocked (e.g. in Pi-hole) for device registration.
|
||||
</p>
|
||||
<label>QR Code</label>
|
||||
<pre id="api_qr_code" style="text-align: center"></pre>
|
||||
@@ -1873,7 +1918,7 @@ Rating: {rating}/10 --> Rating: /10
|
||||
</p>
|
||||
<p class="help-block" id="api_qr_private" style="display: none;">
|
||||
Note: This is a private IP address. Tautulli will not be reachable outside of your home network.
|
||||
Access Tautulli via an externally address or manually enter the address above to generate the QR code for remote access.
|
||||
Access Tautulli via an external address or manually enter the address above to generate the QR code for remote access.
|
||||
</p>
|
||||
<p class="help-block" id="api_qr_https" style="display: none;">
|
||||
Note: This URL is not secure. Requests between the app and the server will not be encrypted.
|
||||
@@ -2027,6 +2072,22 @@ Rating: {rating}/10 --> Rating: /10
|
||||
});
|
||||
}
|
||||
|
||||
$("#browse-path-modal").on('hidden.bs.modal', function() {
|
||||
$("#select-browse-file").unbind('click');
|
||||
});
|
||||
|
||||
function openBrowsePath(key, path, filter_ext, file_description, select_target) {
|
||||
$("#browse-path-type").text(file_description);
|
||||
$("#browse-path-modal").modal('show');
|
||||
|
||||
$("#select-browse-file").click(function () {
|
||||
$("#browse-path-modal").modal('hide');
|
||||
$(select_target).val($("#browse-path").val()).change();
|
||||
});
|
||||
|
||||
browsePath(key, path, filter_ext);
|
||||
}
|
||||
|
||||
function browsePath(key, path, filter_ext) {
|
||||
$("#browse-path-status-message").html('<i class="fa fa-fw fa-spin fa-refresh"></i>');
|
||||
getBrowsePath(key, path, filter_ext).then(function (data) {
|
||||
@@ -2145,7 +2206,6 @@ $(document).ready(function() {
|
||||
initConfigCheckbox('#https_create_cert');
|
||||
initConfigCheckbox('#check_github');
|
||||
initConfigCheckbox('#monitor_pms_updates');
|
||||
initConfigCheckbox('#monitor_remote_access');
|
||||
initConfigCheckbox('#newsletter_self_hosted');
|
||||
|
||||
$('#menu_link_shutdown').click(function() {
|
||||
@@ -2391,7 +2451,6 @@ $(document).ready(function() {
|
||||
$('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);
|
||||
$('#pms_url_manual').prop('checked', false);
|
||||
$('#pms_url').val('Please verify your server above to retrieve the URL');
|
||||
PMSCloudCheck();
|
||||
},
|
||||
onDropdownOpen: function() {
|
||||
this.clear();
|
||||
@@ -2422,38 +2481,6 @@ $(document).ready(function() {
|
||||
}
|
||||
getServerOptions();
|
||||
|
||||
function PMSCloudCheck() {
|
||||
if ($('#pms_is_cloud').val() === "1") {
|
||||
$('#pms_port').val(443).prop('readonly', true);
|
||||
$('#pms_is_remote_checkbox').prop('checked', true).prop('disabled', true);
|
||||
$('#pms_is_remote').val(1);
|
||||
$('#pms_ssl_checkbox').prop('checked', true).prop('disabled', true);
|
||||
$('#pms_ssl').val(1);
|
||||
$('#pms_url_manual').prop('checked', false).prop('disabled', true);
|
||||
$('#monitor_pms_updates').prop('checked', false).prop('disabled', true);
|
||||
$('#pms_update_options').hide();
|
||||
$('#monitor_remote_access').prop('checked', false).prop('disabled', true);
|
||||
$('#cloudManualConnection').show();
|
||||
$('#cloudMonitorUpdates').show();
|
||||
$('#cloudMonitorRemoteAccess').show();
|
||||
$('#remoteAccessCheck').hide();
|
||||
} else {
|
||||
$('#pms_port').prop('readonly', false);
|
||||
$('#pms_is_remote_checkbox').prop('disabled', false);
|
||||
$('#pms_is_remote').val($('#pms_is_remote_checkbox').is(':checked') ? 1 : 0);
|
||||
$('#pms_ssl_checkbox').prop('disabled', false);
|
||||
$('#pms_ssl').val($('#pms_ssl_checkbox').is(':checked') ? 1 : 0);
|
||||
$('#pms_url_manual').prop('disabled', false);
|
||||
$('#monitor_pms_updates').prop('disabled', false);
|
||||
$('#monitor_remote_access').prop('disabled', false);
|
||||
$('#cloudManualConnection').hide();
|
||||
$('#cloudMonitorUpdates').hide();
|
||||
$('#cloudMonitorRemoteAccess').hide();
|
||||
remoteAccessEnabledCheck()
|
||||
}
|
||||
}
|
||||
PMSCloudCheck();
|
||||
|
||||
function verifyServer(_callback) {
|
||||
var pms_ip = $("#pms_ip").val();
|
||||
var pms_port = $("#pms_port").val();
|
||||
@@ -2541,9 +2568,7 @@ $(document).ready(function() {
|
||||
$("#token_verify").html('<i class="fa fa-refresh fa-spin"></i>').fadeIn('fast');
|
||||
}
|
||||
function OAuthSuccessCallback(authToken) {
|
||||
var x_plex_headers = getPlexHeaders();
|
||||
$("#pms_token").val(authToken);
|
||||
$("#pms_uuid").val(x_plex_headers['X-Plex-Client-Identifier']);
|
||||
$("#token_verify").html('<i class="fa fa-check"></i>').fadeIn('fast');
|
||||
getServerOptions(authToken);
|
||||
}
|
||||
@@ -2568,25 +2593,22 @@ $(document).ready(function() {
|
||||
});
|
||||
});
|
||||
|
||||
// Load config import modal
|
||||
$(".toggle-config-import-modal").click(function() {
|
||||
$.ajax({
|
||||
url: 'import_config_tool',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#config-import-modal").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
pms_version = false;
|
||||
pms_logs_debug = false;
|
||||
pms_logs = false;
|
||||
|
||||
function remoteAccessEnabledCheck() {
|
||||
$.ajax({
|
||||
url: 'get_server_pref',
|
||||
data: { pref: 'PublishServerOnPlexOnlineKey' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
if (data === 'false' || data === '0') {
|
||||
$("#remoteAccessCheck").html("Remote access must be enabled on your Plex Server. <a target='_blank' href='${anon_url('https://support.plex.tv/hc/en-us/articles/200484543-Enabling-Remote-Access-for-a-Server')}'>Click here</a> for help.");
|
||||
$("#monitor_remote_access").attr("checked", false).attr("disabled", true);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
remoteAccessEnabledCheck();
|
||||
|
||||
// Sortable home_sections
|
||||
function set_home_sections() {
|
||||
var home_sections = [];
|
||||
@@ -3039,6 +3061,14 @@ $(document).ready(function() {
|
||||
tautulli_news.html('<p class="help-block"><i class="fa fa-exclamation-triangle"></i> Failed to retrieve news.</p>')
|
||||
}
|
||||
});
|
||||
|
||||
$("body").on('click', '[data-toggle=browse]', function () {
|
||||
var filter = $(this).data('filter');
|
||||
var target = $(this).data('target');
|
||||
var path = $(target).val();
|
||||
var description = $(this).data('description') || $("label[for='" + target.replace('#', '') + "']").text();
|
||||
openBrowsePath(null, path, filter, description, target);
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</%def>
|
||||
|
@@ -22,10 +22,10 @@
<div class="modal-body" id="modal-text">
<div align="center">
% if message == "Shutting Down":
<h3><i class="fa fa-refresh fa-spin"></i> Tautulli is ${message}.</h3>
<h3><i class="fa fa-refresh fa-spin"></i> Tautulli is ${message.lower()}</h3>
<br />
% else:
<h3><i class="fa fa-refresh fa-spin"></i> Tautulli is ${message}.</h3>
<h3><i class="fa fa-refresh fa-spin"></i> Tautulli is ${message.lower()}</h3>
<br />
<h4>Restart in <span class="countdown"></span></h4>
% endif
@@ -284,7 +284,8 @@ DOCUMENTATION :: END
<table class="display user_ip_table" id="user_ip_table-UID-${data['user_id']}" width="100%">
<thead>
<tr>
<th align="left" id="last_seen">Last Seen</th>
<th align="left" id="last_seen">Last Streamed</th>
<th align="left" id="first_seen">First Streamed</th>
<th align="left" id="ip_address">IP Address</th>
<th align="left" id="platform">Last Platform</th>
<th align="left" id="player">Last Player</th>
@@ -27,7 +27,7 @@ DOCUMENTATION :: END
<div id="user-player-image-${a['result_id']}">
<div class="user-player-instance-box svg-icon platform-${a['platform_name']}"></div>
</div>
<div class="user-player-instance-name">
<div class="user-player-instance-name" title="${a['player_name']}">
${a['player_name']}
</div>
<div class="user-player-instance-playcount">
@@ -34,7 +34,7 @@
<th align="left" id="edit_row">Edit</th>
<th align="right" id="avatar"></th>
<th align="left" id="friendly_name">User</th>
<th align="left" id="last_seen">Last Seen</th>
<th align="left" id="last_seen">Last Streamed</th>
<th align="left" id="last_known_ip">Last Known IP</th>
<th align="left" id="last_platform">Last Platform</th>
<th align="left" id="last_player">Last Player</th>
@@ -38,6 +38,7 @@ load_rc_config ${name}
status_cmd="${name}_status"
stop_cmd="${name}_stop"

command_interpreter="python"
command="${tautulli_dir}/Tautulli.py"
command_args="--daemon --pidfile ${tautulli_pid} --quiet --nolaunch ${tautulli_flags}"

@@ -28,14 +28,16 @@
# Ubuntu/Debian: sudo addgroup tautulli && sudo adduser --system --no-create-home tautulli --ingroup tautulli
# CentOS/Fedora: sudo adduser --system --no-create-home tautulli
# 2. Give the user ownership of the Tautulli directory:
# sudo chown tautulli:tautulli -R /opt/Tautulli
# sudo chown -R tautulli:tautulli /opt/Tautulli
#
# - Adjust ExecStart= to point to:
# 1. Your Tautulli executable
# 1. Your Python interpreter (get the path with "command -v python3")
# - Default: /usr/bin/python3
# 2. Your Tautulli executable
# - Default: /opt/Tautulli/Tautulli.py
# 2. Your config file (recommended is to put it somewhere in /etc)
# 3. Your config file (recommended is to put it somewhere in /etc)
# - Default: --config /opt/Tautulli/config.ini
# 3. Your datadir (recommended is to NOT put it in your Tautulli exec dir)
# 4. Your datadir (recommended is to NOT put it in your Tautulli exec dir)
# - Default: --datadir /opt/Tautulli
#
# - Adjust User= and Group= to the user/group you want Tautulli to run as.
@@ -50,7 +52,7 @@ Wants=network-online.target
After=network-online.target

[Service]
ExecStart=/opt/Tautulli/Tautulli.py --config /opt/Tautulli/config.ini --datadir /opt/Tautulli --quiet --daemon --nolaunch
ExecStart=/usr/bin/python3 /opt/Tautulli/Tautulli.py --config /opt/Tautulli/config.ini --datadir /opt/Tautulli --quiet --daemon --nolaunch
GuessMainPID=no
Type=forking
User=tautulli
@@ -100,7 +100,7 @@ def createSelfSignedCertificate(issuerName, issuerKey, serial, notBefore, notAft
cert.set_pubkey(issuerKey)

if altNames:
cert.add_extensions([crypto.X509Extension("subjectAltName", False, altNames)])
cert.add_extensions([crypto.X509Extension(b"subjectAltName", False, altNames)])

cert.sign(issuerKey, digest)
return cert
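The certgen.py change above only wraps the extension name in b"" so that pyOpenSSL accepts it on Python 3, where X509Extension expects bytes for both the name and the value. A minimal, self-contained sketch of the same call (the hostname, IP, and key size below are illustrative, not taken from Tautulli):

```python
# Minimal sketch, assuming pyOpenSSL is installed; names are made up.
from OpenSSL import crypto

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)

cert = crypto.X509()
cert.get_subject().CN = "example.local"
cert.set_pubkey(key)

# On Python 3 the extension name and value must be bytes, hence the
# b"subjectAltName" change in the diff above.
alt_names = b"DNS:example.local, IP:127.0.0.1"
cert.add_extensions([crypto.X509Extension(b"subjectAltName", False, alt_names)])

cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(365 * 24 * 60 * 60)
cert.set_issuer(cert.get_subject())
cert.sign(key, "sha256")
```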
237
lib/ipaddr.py
@@ -22,9 +22,14 @@ and networks.

"""

__version__ = '2.1.11'
__version__ = '2.2.0'

import struct
import sys

if sys.version_info > (3,):
long = int
xrange = range

IPV4LENGTH = 32
IPV6LENGTH = 128
@@ -156,16 +161,19 @@ def _find_address_range(addresses):
addresses: a list of IPv4 or IPv6 addresses.

Returns:
A tuple containing the first and last IP addresses in the sequence.
A tuple containing the first and last IP addresses in the sequence,
and the index of the last IP address in the sequence.

"""
first = last = addresses[0]
last_index = 0
for ip in addresses[1:]:
if ip._ip == last._ip + 1:
last = ip
last_index += 1
else:
break
return (first, last)
return (first, last, last_index)

def _get_prefix_length(number1, number2, bits):
"""Get the number of leading bits that are same for two numbers.
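_find_address_range now also returns the index of the last address in the consecutive run, so callers no longer have to search the list again to find where the run ended. A small sketch of the new return value (the addresses are arbitrary examples; the function is module-private):

```python
import ipaddr

# Three consecutive addresses followed by a gap: the run is
# 10.0.0.0-10.0.0.2 and its last element sits at index 2.
ips = [ipaddr.IPv4Address('10.0.0.0'),
       ipaddr.IPv4Address('10.0.0.1'),
       ipaddr.IPv4Address('10.0.0.2'),
       ipaddr.IPv4Address('10.0.0.9')]

first, last, last_index = ipaddr._find_address_range(ips)
assert (str(first), str(last), last_index) == ('10.0.0.0', '10.0.0.2', 2)
```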
@@ -358,8 +366,8 @@ def collapse_address_list(addresses):
nets = sorted(set(nets))

while i < len(ips):
(first, last) = _find_address_range(ips[i:])
i = ips.index(last) + 1
(first, last, last_index) = _find_address_range(ips[i:])
i += last_index + 1
addrs.extend(summarize_address_range(first, last))

return _collapse_address_list_recursive(sorted(
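In collapse_address_list the returned index replaces the linear ips.index(last) lookup, which avoids re-scanning the list on every iteration without changing the result. A quick check of the unchanged public behaviour (addresses chosen arbitrarily):

```python
import ipaddr

# Four consecutive host addresses exercise the rewritten loop and still
# collapse into a single /30, exactly as before the change.
addrs = [ipaddr.IPv4Address('192.0.2.%d' % i) for i in range(4)]
print(ipaddr.collapse_address_list(addrs))   # [IPv4Network('192.0.2.0/30')]
```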
@@ -876,6 +884,26 @@ class _BaseNet(_IPAddrBase):
else:
raise NetmaskValueError('Bit pattern does not match /1*0*/')

def _prefix_from_prefix_int(self, prefixlen):
"""Validate and return a prefix length integer.

Args:
prefixlen: An integer containing the prefix length.

Returns:
The input, possibly converted from long to int.

Raises:
NetmaskValueError: If the input is not an integer, or out of range.
"""
if not isinstance(prefixlen, (int, long)):
raise NetmaskValueError('%r is not an integer' % prefixlen)
prefixlen = int(prefixlen)
if not (0 <= prefixlen <= self._max_prefixlen):
raise NetmaskValueError('%d is not a valid prefix length' %
prefixlen)
return prefixlen

def _prefix_from_prefix_string(self, prefixlen_str):
"""Turn a prefix length string into an integer.

@@ -893,12 +921,10 @@ class _BaseNet(_IPAddrBase):
if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
raise ValueError
prefixlen = int(prefixlen_str)
if not (0 <= prefixlen <= self._max_prefixlen):
raise ValueError
except ValueError:
raise NetmaskValueError('%s is not a valid prefix length' %
prefixlen_str)
return prefixlen
return self._prefix_from_prefix_int(prefixlen)

def _prefix_from_ip_string(self, ip_str):
"""Turn a netmask/hostmask string into a prefix length.
@@ -1239,6 +1265,11 @@ class IPv4Address(_BaseV4, _BaseIP):
"""
_BaseV4.__init__(self, address)

# Efficient copy constructor.
if isinstance(address, IPv4Address):
self._ip = address._ip
return

# Efficient constructor from integer.
if isinstance(address, (int, long)):
self._ip = address
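The remaining hunks extend the IPv4Network constructor so that, besides strings and integers, it accepts an existing IPv4Address and an (ip, prefixlen) 2-tuple validated by the new _prefix_from_prefix_int helper. A short usage sketch of the equivalent forms described in the updated docstring (behaviour inferred from the diff, not run against this exact revision):

```python
import ipaddr

# All of these describe the same /24 once the new constructor forms land.
a = ipaddr.IPv4Network('192.0.2.0/24')
b = ipaddr.IPv4Network(('192.0.2.0', 24))                      # (ip, prefixlen) tuple
c = ipaddr.IPv4Network((ipaddr.IPv4Address('192.0.2.0'), 24))  # IPv4Address + prefixlen
assert a == b == c

# A bad prefix length in the tuple form is rejected by the new helper.
try:
    ipaddr.IPv4Network(('192.0.2.0', 33))
except ipaddr.NetmaskValueError as exc:
    print(exc)   # 33 is not a valid prefix length
```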
@@ -1279,29 +1310,32 @@ class IPv4Network(_BaseV4, _BaseNet):
|
||||
"""Instantiate a new IPv4 network object.
|
||||
|
||||
Args:
|
||||
address: A string or integer representing the IP [& network].
|
||||
'192.168.1.1/24'
|
||||
'192.168.1.1/255.255.255.0'
|
||||
'192.168.1.1/0.0.0.255'
|
||||
are all functionally the same in IPv4. Similarly,
|
||||
'192.168.1.1'
|
||||
'192.168.1.1/255.255.255.255'
|
||||
'192.168.1.1/32'
|
||||
are also functionaly equivalent. That is to say, failing to
|
||||
provide a subnetmask will create an object with a mask of /32.
|
||||
address: The IPv4 network as a string, 2-tuple, or any format
|
||||
supported by the IPv4Address constructor.
|
||||
|
||||
If the mask (portion after the / in the argument) is given in
|
||||
dotted quad form, it is treated as a netmask if it starts with a
|
||||
non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
|
||||
starts with a zero field (e.g. 0.255.255.255 == /8), with the
|
||||
single exception of an all-zero mask which is treated as a
|
||||
netmask == /0. If no mask is given, a default of /32 is used.
|
||||
Strings typically use CIDR format, such as '192.0.2.0/24'.
|
||||
If a dotted-quad is provided after the '/', it is treated as
|
||||
a netmask if it starts with a nonzero bit (e.g. 255.0.0.0 == /8)
|
||||
or a hostmask if it starts with a zero bit
|
||||
(e.g. /0.0.0.255 == /8), with the single exception of an all-zero
|
||||
mask which is treated as /0.
|
||||
|
||||
Additionally, an integer can be passed, so
|
||||
IPv4Network('192.168.1.1') == IPv4Network(3232235777).
|
||||
or, more generally
|
||||
IPv4Network(int(IPv4Network('192.168.1.1'))) ==
|
||||
IPv4Network('192.168.1.1')
|
||||
The 2-tuple format consists of an (ip, prefixlen), where ip is any
|
||||
format recognized by the IPv4Address constructor, and prefixlen is
|
||||
an integer from 0 through 32.
|
||||
|
||||
A plain IPv4 address (in any format) will be forwarded to the
|
||||
IPv4Address constructor, with an implied prefixlen of 32.
|
||||
|
||||
For example, the following inputs are equivalent:
|
||||
IPv4Network('192.0.2.1/32')
|
||||
IPv4Network('192.0.2.1/255.255.255.255')
|
||||
IPv4Network('192.0.2.1')
|
||||
IPv4Network(0xc0000201)
|
||||
IPv4Network(IPv4Address('192.0.2.1'))
|
||||
IPv4Network(('192.0.2.1', 32))
|
||||
IPv4Network((0xc0000201, 32))
|
||||
IPv4Network((IPv4Address('192.0.2.1'), 32))
|
||||
|
||||
strict: A boolean. If true, ensure that we have been passed
|
||||
A true network address, eg, 192.168.1.0/24 and not an
|
||||
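A short sketch of the constructor forms the rewritten docstring declares equivalent, assuming the patched module is importable as ipaddr; IPv6Network accepts the same (ip, prefixlen) 2-tuple form:

    import ipaddr

    a = ipaddr.IPv4Network('192.0.2.1/32')
    b = ipaddr.IPv4Network(('192.0.2.1', 32))               # new 2-tuple form
    c = ipaddr.IPv4Network(0xc0000201)                       # plain integer, implied /32
    d = ipaddr.IPv4Network(ipaddr.IPv4Address('192.0.2.1'))  # plain address, implied /32
    assert a == b == c == d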
@@ -1318,41 +1352,51 @@ class IPv4Network(_BaseV4, _BaseNet):
|
||||
_BaseNet.__init__(self, address)
|
||||
_BaseV4.__init__(self, address)
|
||||
|
||||
# Constructing from an integer or packed bytes.
|
||||
if isinstance(address, (int, long, Bytes)):
|
||||
# Constructing from a single IP address.
|
||||
if isinstance(address, (int, long, Bytes, IPv4Address)):
|
||||
self.ip = IPv4Address(address)
|
||||
self._ip = self.ip._ip
|
||||
self._prefixlen = self._max_prefixlen
|
||||
self.netmask = IPv4Address(self._ALL_ONES)
|
||||
return
|
||||
|
||||
# Assume input argument to be string or any object representation
|
||||
# which converts into a formatted IP prefix string.
|
||||
addr = str(address).split('/')
|
||||
|
||||
if len(addr) > 2:
|
||||
raise AddressValueError(address)
|
||||
|
||||
self._ip = self._ip_int_from_string(addr[0])
|
||||
self.ip = IPv4Address(self._ip)
|
||||
|
||||
if len(addr) == 2:
|
||||
# Constructing from an (ip, prefixlen) tuple.
|
||||
if isinstance(address, tuple):
|
||||
try:
|
||||
# Check for a netmask in prefix length form.
|
||||
self._prefixlen = self._prefix_from_prefix_string(addr[1])
|
||||
except NetmaskValueError:
|
||||
# Check for a netmask or hostmask in dotted-quad form.
|
||||
# This may raise NetmaskValueError.
|
||||
self._prefixlen = self._prefix_from_ip_string(addr[1])
|
||||
ip, prefixlen = address
|
||||
except ValueError:
|
||||
raise AddressValueError(address)
|
||||
self.ip = IPv4Address(ip)
|
||||
self._ip = self.ip._ip
|
||||
self._prefixlen = self._prefix_from_prefix_int(prefixlen)
|
||||
|
||||
else:
|
||||
self._prefixlen = self._max_prefixlen
|
||||
# Assume input argument to be string or any object representation
|
||||
# which converts into a formatted IP prefix string.
|
||||
addr = str(address).split('/')
|
||||
|
||||
if len(addr) > 2:
|
||||
raise AddressValueError(address)
|
||||
|
||||
self._ip = self._ip_int_from_string(addr[0])
|
||||
self.ip = IPv4Address(self._ip)
|
||||
|
||||
if len(addr) == 2:
|
||||
try:
|
||||
# Check for a netmask in prefix length form.
|
||||
self._prefixlen = self._prefix_from_prefix_string(addr[1])
|
||||
except NetmaskValueError:
|
||||
# Check for a netmask or hostmask in dotted-quad form.
|
||||
# This may raise NetmaskValueError.
|
||||
self._prefixlen = self._prefix_from_ip_string(addr[1])
|
||||
else:
|
||||
self._prefixlen = self._max_prefixlen
|
||||
|
||||
self.netmask = IPv4Address(self._ip_int_from_prefix(self._prefixlen))
|
||||
|
||||
if strict:
|
||||
if self.ip != self.network:
|
||||
raise ValueError('%s has host bits set' %
|
||||
self.ip)
|
||||
raise ValueError('%s has host bits set' % self.ip)
|
||||
if self._prefixlen == (self._max_prefixlen - 1):
|
||||
self.iterhosts = self.__iter__
|
||||
|
||||
@@ -1447,7 +1491,7 @@ class _BaseV6(object):
|
||||
|
||||
try:
|
||||
# Now, parse the hextets into a 128-bit integer.
|
||||
ip_int = 0L
|
||||
ip_int = long(0)
|
||||
for i in xrange(parts_hi):
|
||||
ip_int <<= 16
|
||||
ip_int |= self._parse_hextet(parts[i])
|
||||
@@ -1752,6 +1796,11 @@ class IPv6Address(_BaseV6, _BaseIP):
|
||||
"""
|
||||
_BaseV6.__init__(self, address)
|
||||
|
||||
# Efficient copy constructor.
|
||||
if isinstance(address, IPv6Address):
|
||||
self._ip = address._ip
|
||||
return
|
||||
|
||||
# Efficient constructor from integer.
|
||||
if isinstance(address, (int, long)):
|
||||
self._ip = address
|
||||
@@ -1771,9 +1820,6 @@ class IPv6Address(_BaseV6, _BaseIP):
|
||||
# Assume input argument to be string or any object representation
|
||||
# which converts into a formatted IP string.
|
||||
addr_str = str(address)
|
||||
if not addr_str:
|
||||
raise AddressValueError('')
|
||||
|
||||
self._ip = self._ip_int_from_string(addr_str)
|
||||
|
||||
|
||||
@@ -1793,28 +1839,34 @@ class IPv6Network(_BaseV6, _BaseNet):
|
||||
|
||||
|
||||
def __init__(self, address, strict=False):
|
||||
"""Instantiate a new IPv6 Network object.
|
||||
"""Instantiate a new IPv6 network object.
|
||||
|
||||
Args:
|
||||
address: A string or integer representing the IPv6 network or the IP
|
||||
and prefix/netmask.
|
||||
'2001:4860::/128'
|
||||
'2001:4860:0000:0000:0000:0000:0000:0000/128'
|
||||
'2001:4860::'
|
||||
are all functionally the same in IPv6. That is to say,
|
||||
failing to provide a subnetmask will create an object with
|
||||
a mask of /128.
|
||||
address: The IPv6 network as a string, 2-tuple, or any format
|
||||
supported by the IPv6Address constructor.
|
||||
|
||||
Additionally, an integer can be passed, so
|
||||
IPv6Network('2001:4860::') ==
|
||||
IPv6Network(42541956101370907050197289607612071936L).
|
||||
or, more generally
|
||||
IPv6Network(IPv6Network('2001:4860::')._ip) ==
|
||||
IPv6Network('2001:4860::')
|
||||
Strings should be in CIDR format, such as '2001:db8::/32'.
|
||||
|
||||
The 2-tuple format consists of an (ip, prefixlen), where ip is any
|
||||
format recognized by the IPv6Address constructor, and prefixlen is
|
||||
an integer from 0 through 128.
|
||||
|
||||
A plain IPv6 address (in any format) will be forwarded to the
|
||||
IPv6Address constructor, with an implied prefixlen of 128.
|
||||
|
||||
For example, the following inputs are equivalent:
|
||||
IPv6Network('2001:db8::/128')
|
||||
IPv6Network('2001:db8:0:0:0:0:0:0/128')
|
||||
IPv6Network('2001:db8::')
|
||||
IPv6Network(0x20010db8 << 96)
|
||||
IPv6Network(IPv6Address('2001:db8::'))
|
||||
IPv6Network(('2001:db8::', 128))
|
||||
IPv6Network((0x20010db8 << 96, 128))
|
||||
IPv6Network((IPv6Address('2001:db8::'), 128))
|
||||
|
||||
strict: A boolean. If true, ensure that we have been passed
|
||||
A true network address, eg, 192.168.1.0/24 and not an
|
||||
IP address on a network, eg, 192.168.1.1/24.
|
||||
A true network address, eg, 2001:db8::/32 and not an
|
||||
IP address on a network, eg, 2001:db8::1/32.
|
||||
|
||||
Raises:
|
||||
AddressValueError: If address isn't a valid IPv6 address.
|
||||
@@ -1827,29 +1879,40 @@ class IPv6Network(_BaseV6, _BaseNet):
|
||||
_BaseNet.__init__(self, address)
|
||||
_BaseV6.__init__(self, address)
|
||||
|
||||
# Constructing from an integer or packed bytes.
|
||||
if isinstance(address, (int, long, Bytes)):
|
||||
# Constructing from a single IP address.
|
||||
if isinstance(address, (int, long, Bytes, IPv6Address)):
|
||||
self.ip = IPv6Address(address)
|
||||
self._ip = self.ip._ip
|
||||
self._prefixlen = self._max_prefixlen
|
||||
self.netmask = IPv6Address(self._ALL_ONES)
|
||||
return
|
||||
|
||||
# Assume input argument to be string or any object representation
|
||||
# which converts into a formatted IP prefix string.
|
||||
addr = str(address).split('/')
|
||||
# Constructing from an (ip, prefixlen) tuple.
|
||||
if isinstance(address, tuple):
|
||||
try:
|
||||
ip, prefixlen = address
|
||||
except ValueError:
|
||||
raise AddressValueError(address)
|
||||
self.ip = IPv6Address(ip)
|
||||
self._ip = self.ip._ip
|
||||
self._prefixlen = self._prefix_from_prefix_int(prefixlen)
|
||||
|
||||
if len(addr) > 2:
|
||||
raise AddressValueError(address)
|
||||
|
||||
self._ip = self._ip_int_from_string(addr[0])
|
||||
self.ip = IPv6Address(self._ip)
|
||||
|
||||
if len(addr) == 2:
|
||||
# This may raise NetmaskValueError
|
||||
self._prefixlen = self._prefix_from_prefix_string(addr[1])
|
||||
else:
|
||||
self._prefixlen = self._max_prefixlen
|
||||
# Assume input argument to be string or any object representation
|
||||
# which converts into a formatted IP prefix string.
|
||||
addr = str(address).split('/')
|
||||
|
||||
if len(addr) > 2:
|
||||
raise AddressValueError(address)
|
||||
|
||||
self._ip = self._ip_int_from_string(addr[0])
|
||||
self.ip = IPv6Address(self._ip)
|
||||
|
||||
if len(addr) == 2:
|
||||
# This may raise NetmaskValueError
|
||||
self._prefixlen = self._prefix_from_prefix_string(addr[1])
|
||||
else:
|
||||
self._prefixlen = self._max_prefixlen
|
||||
|
||||
self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen))
|
||||
|
||||
|
@@ -1,4 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
import mock.mock as _mock
|
||||
from mock.mock import *
|
||||
__all__ = _mock.__all__
|
lib/mock/mock.py: 2619 lines changed (file diff suppressed because it is too large)
@@ -129,5 +129,5 @@ if __name__ == '__main__':
|
||||
|
||||
return app_path, 'App registered'
|
||||
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
return None, 'Error creating App %s. %s' % (app_path, e)
|
@@ -3,9 +3,10 @@ import logging
|
||||
import os
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from platform import uname
|
||||
from uuid import getnode
|
||||
|
||||
from plexapi.config import PlexConfig, reset_base_headers
|
||||
from plexapi.utils import SecretsFilter
|
||||
from uuid import getnode
|
||||
|
||||
# Load User Defined Config
|
||||
DEFAULT_CONFIG_PATH = os.path.expanduser('~/.config/plexapi/config.ini')
|
||||
@@ -14,7 +15,7 @@ CONFIG = PlexConfig(CONFIG_PATH)
|
||||
|
||||
# PlexAPI Settings
|
||||
PROJECT = 'PlexAPI'
|
||||
VERSION = '3.3.0'
|
||||
VERSION = '3.6.0'
|
||||
TIMEOUT = CONFIG.get('plexapi.timeout', 30, int)
|
||||
X_PLEX_CONTAINER_SIZE = CONFIG.get('plexapi.container_size', 100, int)
|
||||
X_PLEX_ENABLE_FAST_CONNECT = CONFIG.get('plexapi.enable_fast_connect', False, bool)
|
||||
|
@@ -1,14 +1,14 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import json
|
||||
import threading
|
||||
import websocket
|
||||
|
||||
from plexapi import log
|
||||
|
||||
|
||||
class AlertListener(threading.Thread):
|
||||
""" Creates a websocket connection to the PlexServer to optionally recieve alert notifications.
|
||||
""" Creates a websocket connection to the PlexServer to optionally receive alert notifications.
|
||||
These often include messages from Plex about media scans as well as updates to currently running
|
||||
Transcode Sessions. This class implements threading.Thread, therfore to start monitoring
|
||||
Transcode Sessions. This class implements threading.Thread, therefore to start monitoring
|
||||
alerts you must call .start() on the object once it's created. When calling
|
||||
`PlexServer.startAlertListener()`, the thread will be started for you.
|
||||
|
||||
@@ -26,9 +26,9 @@ class AlertListener(threading.Thread):
|
||||
|
||||
Parameters:
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer this listener is connected to.
|
||||
callback (func): Callback function to call on recieved messages. The callback function
|
||||
callback (func): Callback function to call on received messages. The callback function
|
||||
will be sent a single argument 'data' which will contain a dictionary of data
|
||||
recieved from the server. :samp:`def my_callback(data): ...`
|
||||
received from the server. :samp:`def my_callback(data): ...`
|
||||
"""
|
||||
key = '/:/websockets/notifications'
|
||||
|
||||
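The callback described above is usually wired up through PlexServer.startAlertListener(), which starts the thread for you. A hedged sketch; the server address and token are placeholders:

    import time
    from plexapi.server import PlexServer

    def on_alert(data):
        # data is the decoded NotificationContainer dictionary from the websocket message
        print(data.get('type'))

    plex = PlexServer('http://127.0.0.1:32400', token='<token>')
    listener = plex.startAlertListener(callback=on_alert)
    time.sleep(30)      # let some alerts arrive
    listener.stop()     # a stopped listener cannot be restarted directly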
@@ -40,6 +40,11 @@ class AlertListener(threading.Thread):
|
||||
self._ws = None
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
import websocket
|
||||
except ImportError:
|
||||
log.warning("Can't use the AlertListener without websocket")
|
||||
return
|
||||
# create the websocket connection
|
||||
url = self._server.url(self.key, includeToken=True).replace('http', 'ws')
|
||||
log.info('Starting AlertListener: %s', url)
|
||||
@@ -48,15 +53,21 @@ class AlertListener(threading.Thread):
|
||||
self._ws.run_forever()
|
||||
|
||||
def stop(self):
|
||||
""" Stop the AlertListener thread. Once the notifier is stopped, it cannot be diractly
|
||||
""" Stop the AlertListener thread. Once the notifier is stopped, it cannot be directly
|
||||
started again. You must call :func:`plexapi.server.PlexServer.startAlertListener()`
|
||||
from a PlexServer instance.
|
||||
"""
|
||||
log.info('Stopping AlertListener.')
|
||||
self._ws.close()
|
||||
|
||||
def _onMessage(self, ws, message):
|
||||
""" Called when websocket message is recieved. """
|
||||
def _onMessage(self, *args):
|
||||
""" Called when websocket message is received.
|
||||
In earlier releases, websocket-client returned a tuple of two parameters: a websocket.app.WebSocketApp
|
||||
object and the message as a STR. Current releases appear to only return the message.
|
||||
We are assuming the last argument in the tuple is the message.
|
||||
This is to support compatibility with current and previous releases of websocket-client.
|
||||
"""
|
||||
message = args[-1]
|
||||
try:
|
||||
data = json.loads(message)['NotificationContainer']
|
||||
log.debug('Alert: %s %s %s', *data)
|
||||
@@ -65,6 +76,12 @@ class AlertListener(threading.Thread):
|
||||
except Exception as err: # pragma: no cover
|
||||
log.error('AlertListener Msg Error: %s', err)
|
||||
|
||||
def _onError(self, ws, err): # pragma: no cover
|
||||
""" Called when websocket error is recieved. """
|
||||
def _onError(self, *args): # pragma: no cover
|
||||
""" Called when websocket error is received.
|
||||
In earlier releases, websocket-client returned a tuple of two parameters: a websocket.app.WebSocketApp
|
||||
object and the error. Current releases appear to only return the error.
|
||||
We are assuming the last argument in the tuple is the message.
|
||||
This is to support compatibility with current and previous releases of websocket-client.
|
||||
"""
|
||||
err = args[-1]
|
||||
log.error('AlertListener Error: %s' % err)
|
||||
|
@@ -284,15 +284,15 @@ class Track(Audio, Playable):
|
||||
art (str): Track artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
chapterSource (TYPE): Unknown
|
||||
duration (int): Length of this album in seconds.
|
||||
grandparentArt (str): Artist artowrk.
|
||||
grandparentKey (str): Artist API URL.
|
||||
grandparentRatingKey (str): Unique key identifying artist.
|
||||
grandparentThumb (str): URL to artist thumbnail image.
|
||||
grandparentTitle (str): Name of the artist for this track.
|
||||
grandparentArt (str): Album artist artwork.
|
||||
grandparentKey (str): Album artist API URL.
|
||||
grandparentRatingKey (str): Unique key identifying album artist.
|
||||
grandparentThumb (str): URL to album artist thumbnail image.
|
||||
grandparentTitle (str): Name of the album artist for this track.
|
||||
guid (str): Unknown (unique ID).
|
||||
media (list): List of :class:`~plexapi.media.Media` objects for this track.
|
||||
moods (list): List of :class:`~plexapi.media.Mood` objects for this track.
|
||||
originalTitle (str): Original track title (if translated).
|
||||
originalTitle (str): Track artist.
|
||||
parentIndex (int): Album index.
|
||||
parentKey (str): Album API URL.
|
||||
parentRatingKey (int): Unique key identifying album.
|
||||
|
@@ -132,6 +132,8 @@ class PlexObject(object):
|
||||
* __regex: Value matches the specified regular expression.
|
||||
* __startswith: Value starts with specified arg.
|
||||
"""
|
||||
if ekey is None:
|
||||
raise BadRequest('ekey was not provided')
|
||||
if isinstance(ekey, int):
|
||||
ekey = '/library/metadata/%s' % ekey
|
||||
for elem in self._server.query(ekey):
|
||||
@@ -140,13 +142,27 @@ class PlexObject(object):
|
||||
clsname = cls.__name__ if cls else 'None'
|
||||
raise NotFound('Unable to find elem: cls=%s, attrs=%s' % (clsname, kwargs))
|
||||
|
||||
def fetchItems(self, ekey, cls=None, **kwargs):
|
||||
def fetchItems(self, ekey, cls=None, container_start=None, container_size=None, **kwargs):
|
||||
""" Load the specified key to find and build all items with the specified tag
|
||||
and attrs. See :func:`~plexapi.base.PlexObject.fetchItem` for more details
|
||||
on how this is used.
|
||||
|
||||
Parameters:
|
||||
container_start (None, int): offset to get a subset of the data
|
||||
container_size (None, int): How many items in data
|
||||
|
||||
"""
|
||||
data = self._server.query(ekey)
|
||||
url_kw = {}
|
||||
if container_start is not None:
|
||||
url_kw["X-Plex-Container-Start"] = container_start
|
||||
if container_size is not None:
|
||||
url_kw["X-Plex-Container-Size"] = container_size
|
||||
|
||||
if ekey is None:
|
||||
raise BadRequest('ekey was not provided')
|
||||
data = self._server.query(ekey, params=url_kw)
|
||||
items = self.findItems(data, cls, ekey, **kwargs)
|
||||
|
||||
librarySectionID = data.attrib.get('librarySectionID')
|
||||
if librarySectionID:
|
||||
for item in items:
|
||||
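A hedged usage sketch of the new pagination arguments; the server address, token and section key are placeholders, not values taken from this changeset:

    from plexapi.server import PlexServer

    plex = PlexServer('http://127.0.0.1:32400', token='<token>')
    first_page = plex.library.fetchItems('/library/sections/1/all',
                                         container_start=0, container_size=50)
    # Both values are forwarded as the X-Plex-Container-Start / X-Plex-Container-Size
    # query parameters on the single request that fetchItems() issues.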
@@ -421,6 +437,141 @@ class PlexPartialObject(PlexObject):
|
||||
'havnt allowed items to be deleted' % self.key)
|
||||
raise
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
""" Get Play History for a media item.
|
||||
Parameters:
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from.
|
||||
"""
|
||||
return self._server.history(maxresults=maxresults, mindate=mindate, ratingKey=self.ratingKey)
|
||||
|
||||
def posters(self):
|
||||
""" Returns list of available poster objects. :class:`~plexapi.media.Poster`. """
|
||||
|
||||
return self.fetchItems('%s/posters' % self.key)
|
||||
|
||||
def uploadPoster(self, url=None, filepath=None):
|
||||
""" Upload poster from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
|
||||
if url:
|
||||
key = '%s/posters?url=%s' % (self.key, quote_plus(url))
|
||||
self._server.query(key, method=self._server._session.post)
|
||||
elif filepath:
|
||||
key = '%s/posters?' % self.key
|
||||
data = open(filepath, 'rb').read()
|
||||
self._server.query(key, method=self._server._session.post, data=data)
|
||||
|
||||
def setPoster(self, poster):
|
||||
""" Set . :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
|
||||
poster.select()
|
||||
|
||||
def arts(self):
|
||||
""" Returns list of available art objects. :class:`~plexapi.media.Poster`. """
|
||||
|
||||
return self.fetchItems('%s/arts' % self.key)
|
||||
|
||||
def uploadArt(self, url=None, filepath=None):
|
||||
""" Upload art from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
|
||||
if url:
|
||||
key = '/library/metadata/%s/arts?url=%s' % (self.ratingKey, quote_plus(url))
|
||||
self._server.query(key, method=self._server._session.post)
|
||||
elif filepath:
|
||||
key = '/library/metadata/%s/arts?' % self.ratingKey
|
||||
data = open(filepath, 'rb').read()
|
||||
self._server.query(key, method=self._server._session.post, data=data)
|
||||
|
||||
def setArt(self, art):
|
||||
""" Set :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
|
||||
art.select()
|
||||
|
||||
def unmatch(self):
|
||||
""" Unmatches metadata match from object. """
|
||||
key = '/library/metadata/%s/unmatch' % self.ratingKey
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def matches(self, agent=None, title=None, year=None, language=None):
|
||||
""" Return list of (:class:`~plexapi.media.SearchResult`) metadata matches.
|
||||
|
||||
Parameters:
|
||||
agent (str): Agent name to be used (imdb, thetvdb, themoviedb, etc.)
|
||||
title (str): Title of item to search for
|
||||
year (str): Year of item to search in
|
||||
language (str) : Language of item to search in
|
||||
|
||||
Examples:
|
||||
1. video.matches()
|
||||
2. video.matches(title="something", year=2020)
|
||||
3. video.matches(title="something")
|
||||
4. video.matches(year=2020)
|
||||
5. video.matches(title="something", year="")
|
||||
6. video.matches(title="", year=2020)
|
||||
7. video.matches(title="", year="")
|
||||
|
||||
1. The default behaviour in Plex Web = no params in plexapi
|
||||
2. Both title and year specified by user
|
||||
3. Year automatically filled in
|
||||
4. Title automatically filled in
|
||||
5. Explicitly searches for title with blank year
|
||||
6. Explicitly searches for blank title with year
|
||||
7. I don't know what the user is thinking... return the same result as 1
|
||||
|
||||
For 2 to 7, the agent and language is automatically filled in
|
||||
"""
|
||||
key = '/library/metadata/%s/matches' % self.ratingKey
|
||||
params = {'manual': 1}
|
||||
|
||||
if agent and not any([title, year, language]):
|
||||
params['language'] = self.section().language
|
||||
params['agent'] = utils.getAgentIdentifier(self.section(), agent)
|
||||
else:
|
||||
if any(x is not None for x in [agent, title, year, language]):
|
||||
if title is None:
|
||||
params['title'] = self.title
|
||||
else:
|
||||
params['title'] = title
|
||||
|
||||
if year is None:
|
||||
params['year'] = self.year
|
||||
else:
|
||||
params['year'] = year
|
||||
|
||||
params['language'] = language or self.section().language
|
||||
|
||||
if agent is None:
|
||||
params['agent'] = self.section().agent
|
||||
else:
|
||||
params['agent'] = utils.getAgentIdentifier(self.section(), agent)
|
||||
|
||||
key = key + '?' + urlencode(params)
|
||||
data = self._server.query(key, method=self._server._session.get)
|
||||
return self.findItems(data, initpath=key)
|
||||
|
||||
def fixMatch(self, searchResult=None, auto=False, agent=None):
|
||||
""" Use match result to update show metadata.
|
||||
|
||||
Parameters:
|
||||
auto (bool): True uses first match from matches
|
||||
False allows user to provide the match
|
||||
searchResult (:class:`~plexapi.media.SearchResult`): Search result from
|
||||
~plexapi.base.matches()
|
||||
agent (str): Agent name to be used (imdb, thetvdb, themoviedb, etc.)
|
||||
"""
|
||||
key = '/library/metadata/%s/match' % self.ratingKey
|
||||
if auto:
|
||||
autoMatch = self.matches(agent=agent)
|
||||
if autoMatch:
|
||||
searchResult = autoMatch[0]
|
||||
else:
|
||||
raise NotFound('No matches found using this agent: (%s:%s)' % (agent, autoMatch))
|
||||
elif not searchResult:
|
||||
raise NotFound('fixMatch() requires either auto=True or '
|
||||
'searchResult=:class:`~plexapi.media.SearchResult`.')
|
||||
|
||||
params = {'guid': searchResult.guid,
|
||||
'name': searchResult.name}
|
||||
|
||||
data = key + '?' + urlencode(params)
|
||||
self._server.query(data, method=self._server._session.put)
|
||||
|
||||
# The photo tag cant be built atm. TODO
|
||||
# def arts(self):
|
||||
# part = '%s/arts' % self.key
|
||||
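The matching helpers added in this hunk can be combined as below; a hedged sketch in which the server details, section name and movie title are hypothetical:

    from plexapi.server import PlexServer

    plex = PlexServer('http://127.0.0.1:32400', token='<token>')
    movie = plex.library.section('Movies').get('Example Movie')

    results = movie.matches(title='Example Movie', year=2020)
    if results:
        movie.fixMatch(searchResult=results[0])
    # or let the first match for a given agent be applied automatically:
    movie.fixMatch(auto=True, agent='imdb')
    # posters()/uploadPoster()/setPoster() and the arts() variants follow the same pattern.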
@@ -509,6 +660,14 @@ class Playable(object):
|
||||
key = '%s/split' % self.key
|
||||
return self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def merge(self, ratingKeys):
|
||||
"""Merge duplicate items."""
|
||||
if not isinstance(ratingKeys, list):
|
||||
ratingKeys = str(ratingKeys).split(",")
|
||||
|
||||
key = '%s/merge?ids=%s' % (self.key, ','.join(ratingKeys))
|
||||
return self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def unmatch(self):
|
||||
"""Unmatch a media file."""
|
||||
key = '%s/unmatch' % self.key
|
||||
@@ -573,7 +732,7 @@ class Playable(object):
|
||||
time, state)
|
||||
self._server.query(key)
|
||||
self.reload()
|
||||
|
||||
|
||||
def updateTimeline(self, time, state='stopped', duration=None):
|
||||
""" Set the timeline progress for this video.
|
||||
|
||||
|
@@ -1,15 +1,13 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import time
|
||||
import requests
|
||||
|
||||
from requests.status_codes import _codes as codes
|
||||
from plexapi import BASE_HEADERS, CONFIG, TIMEOUT
|
||||
from plexapi import log, logfilter, utils
|
||||
import requests
|
||||
from plexapi import BASE_HEADERS, CONFIG, TIMEOUT, log, logfilter, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.compat import ElementTree
|
||||
from plexapi.exceptions import BadRequest, Unsupported
|
||||
from plexapi.exceptions import BadRequest, NotFound, Unauthorized, Unsupported
|
||||
from plexapi.playqueue import PlayQueue
|
||||
|
||||
from requests.status_codes import _codes as codes
|
||||
|
||||
DEFAULT_MTYPE = 'video'
|
||||
|
||||
@@ -159,11 +157,16 @@ class PlexClient(PlexObject):
|
||||
log.debug('%s %s', method.__name__.upper(), url)
|
||||
headers = self._headers(**headers or {})
|
||||
response = method(url, headers=headers, timeout=timeout, **kwargs)
|
||||
if response.status_code not in (200, 201):
|
||||
if response.status_code not in (200, 201, 204):
|
||||
codename = codes.get(response.status_code)[0]
|
||||
errtext = response.text.replace('\n', ' ')
|
||||
log.warning('BadRequest (%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
raise BadRequest('(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext))
|
||||
message = '(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext)
|
||||
if response.status_code == 401:
|
||||
raise Unauthorized(message)
|
||||
elif response.status_code == 404:
|
||||
raise NotFound(message)
|
||||
else:
|
||||
raise BadRequest(message)
|
||||
data = response.text.encode('utf8')
|
||||
return ElementTree.fromstring(data) if data.strip() else None
|
||||
|
||||
@@ -204,10 +207,13 @@ class PlexClient(PlexObject):
|
||||
return query(key, headers=headers)
|
||||
except ElementTree.ParseError:
|
||||
# Workaround for players which don't return valid XML on successful commands
|
||||
# - Plexamp: `b'OK'`
|
||||
# - Plexamp, Plex for Android: `b'OK'`
|
||||
# - Plex for Samsung: `b'<?xml version="1.0"?><Response code="200" status="OK">'`
|
||||
if self.product in (
|
||||
'Plexamp',
|
||||
'Plex for Android (TV)',
|
||||
'Plex for Android (Mobile)',
|
||||
'Plex for Samsung',
|
||||
):
|
||||
return
|
||||
raise
|
||||
@@ -300,6 +306,8 @@ class PlexClient(PlexObject):
|
||||
'address': server_url[1].strip('/'),
|
||||
'port': server_url[-1],
|
||||
'key': media.key,
|
||||
'protocol': server_url[0],
|
||||
'token': media._server.createToken()
|
||||
}, **params))
|
||||
|
||||
# -------------------
|
||||
@@ -465,6 +473,18 @@ class PlexClient(PlexObject):
|
||||
server_url = media._server._baseurl.split(':')
|
||||
server_port = server_url[-1].strip('/')
|
||||
|
||||
if hasattr(media, "playlistType"):
|
||||
mediatype = media.playlistType
|
||||
else:
|
||||
if isinstance(media, PlayQueue):
|
||||
mediatype = media.items[0].listType
|
||||
else:
|
||||
mediatype = media.listType
|
||||
|
||||
# mediatype must be in ["video", "music", "photo"]
|
||||
if mediatype == "audio":
|
||||
mediatype = "music"
|
||||
|
||||
if self.product != 'OpenPHT':
|
||||
try:
|
||||
self.sendCommand('timeline/subscribe', port=server_port, protocol='http')
|
||||
@@ -481,7 +501,8 @@ class PlexClient(PlexObject):
|
||||
'port': server_port,
|
||||
'offset': offset,
|
||||
'key': media.key,
|
||||
'token': media._server._token,
|
||||
'token': media._server.createToken(),
|
||||
'type': mediatype,
|
||||
'containerKey': '/playQueues/%s?window=100&own=1' % playqueue.playQueueID,
|
||||
}, **params))
|
||||
|
||||
@@ -527,9 +548,9 @@ class PlexClient(PlexObject):
|
||||
|
||||
# -------------------
|
||||
# Timeline Commands
|
||||
def timeline(self):
|
||||
def timeline(self, wait=1):
|
||||
""" Poll the current timeline and return the XML response. """
|
||||
return self.sendCommand('timeline/poll', wait=1)
|
||||
return self.sendCommand('timeline/poll', wait=wait)
|
||||
|
||||
def isPlayingMedia(self, includePaused=False):
|
||||
""" Returns True if any media is currently playing.
|
||||
@@ -538,7 +559,7 @@ class PlexClient(PlexObject):
|
||||
includePaused (bool): Set True to treat currently paused items
|
||||
as playing (optional; default True).
|
||||
"""
|
||||
for mediatype in self.timeline():
|
||||
for mediatype in self.timeline(wait=0):
|
||||
if mediatype.get('state') == 'playing':
|
||||
return True
|
||||
if includePaused and mediatype.get('state') == 'paused':
|
||||
|
@@ -25,9 +25,9 @@ except ImportError:
|
||||
from urllib import quote
|
||||
|
||||
try:
|
||||
from urllib.parse import quote_plus
|
||||
from urllib.parse import quote_plus, quote
|
||||
except ImportError:
|
||||
from urllib import quote_plus
|
||||
from urllib import quote_plus, quote
|
||||
|
||||
try:
|
||||
from urllib.parse import unquote
|
||||
@@ -44,11 +44,6 @@ try:
|
||||
except ImportError:
|
||||
from xml.etree import ElementTree
|
||||
|
||||
try:
|
||||
from unittest.mock import patch, MagicMock
|
||||
except ImportError:
|
||||
from mock import patch, MagicMock
|
||||
|
||||
|
||||
def makedirs(name, mode=0o777, exist_ok=False):
|
||||
""" Mimicks os.makedirs() from Python 3. """
|
||||
|
@@ -26,6 +26,6 @@ class Unsupported(PlexApiException):
|
||||
pass
|
||||
|
||||
|
||||
class Unauthorized(PlexApiException):
|
||||
""" Invalid username or password. """
|
||||
class Unauthorized(BadRequest):
|
||||
""" Invalid username/password or token. """
|
||||
pass
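Because Unauthorized now subclasses BadRequest, existing handlers that catch BadRequest keep working, while new code can catch the specific types that PlexClient.query() raises for 401 and 404. A hedged sketch; the client address and token are placeholders, and which exception a given call surfaces depends on the query() path it goes through:

    from plexapi.client import PlexClient
    from plexapi.exceptions import BadRequest, NotFound, Unauthorized

    try:
        client = PlexClient(baseurl='http://127.0.0.1:32500', token='<token>')
        client.query('/player/timeline/poll?wait=0')
    except Unauthorized:
        print('401: invalid or expired token')           # catch before BadRequest
    except NotFound:
        print('404: endpoint not known by this player')
    except BadRequest as err:
        print('other non-(200, 201, 204) response: %s' % err)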
|
||||
|
lib/plexapi/gdm.py: 148 lines (new file)
@@ -0,0 +1,148 @@
|
||||
"""
|
||||
Support for discovery using GDM (Good Day Mate), multicast protocol by Plex.
|
||||
|
||||
# Licensed Apache 2.0
|
||||
# From https://github.com/home-assistant/netdisco/netdisco/gdm.py
|
||||
|
||||
Inspired by:
|
||||
hippojay's plexGDM: https://github.com/hippojay/script.plexbmc.helper/resources/lib/plexgdm.py
|
||||
iBaa's PlexConnect: https://github.com/iBaa/PlexConnect/PlexAPI.py
|
||||
"""
|
||||
import socket
|
||||
import struct
|
||||
|
||||
|
||||
class GDM:
|
||||
"""Base class to discover GDM services."""
|
||||
|
||||
def __init__(self):
|
||||
self.entries = []
|
||||
self.last_scan = None
|
||||
|
||||
def scan(self, scan_for_clients=False):
|
||||
"""Scan the network."""
|
||||
self.update(scan_for_clients)
|
||||
|
||||
def all(self):
|
||||
"""Return all found entries.
|
||||
|
||||
Will scan for entries if not scanned recently.
|
||||
"""
|
||||
self.scan()
|
||||
return list(self.entries)
|
||||
|
||||
def find_by_content_type(self, value):
|
||||
"""Return a list of entries that match the content_type."""
|
||||
self.scan()
|
||||
return [entry for entry in self.entries
|
||||
if value in entry['data']['Content_Type']]
|
||||
|
||||
def find_by_data(self, values):
|
||||
"""Return a list of entries that match the search parameters."""
|
||||
self.scan()
|
||||
return [entry for entry in self.entries
|
||||
if all(item in entry['data'].items()
|
||||
for item in values.items())]
|
||||
|
||||
def update(self, scan_for_clients):
|
||||
"""Scan for new GDM services.
|
||||
|
||||
Examples of the dict list assigned to self.entries by this function:
|
||||
|
||||
Server:
|
||||
|
||||
[{'data': {
|
||||
'Content-Type': 'plex/media-server',
|
||||
'Host': '53f4b5b6023d41182fe88a99b0e714ba.plex.direct',
|
||||
'Name': 'myfirstplexserver',
|
||||
'Port': '32400',
|
||||
'Resource-Identifier': '646ab0aa8a01c543e94ba975f6fd6efadc36b7',
|
||||
'Updated-At': '1585769946',
|
||||
'Version': '1.18.8.2527-740d4c206',
|
||||
},
|
||||
'from': ('10.10.10.100', 32414)}]
|
||||
|
||||
Clients:
|
||||
|
||||
[{'data': {'Content-Type': 'plex/media-player',
|
||||
'Device-Class': 'stb',
|
||||
'Name': 'plexamp',
|
||||
'Port': '36000',
|
||||
'Product': 'Plexamp',
|
||||
'Protocol': 'plex',
|
||||
'Protocol-Capabilities': 'timeline,playback,playqueues,playqueues-creation',
|
||||
'Protocol-Version': '1',
|
||||
'Resource-Identifier': 'b6e57a3f-e0f8-494f-8884-f4b58501467e',
|
||||
'Version': '1.1.0',
|
||||
},
|
||||
'from': ('10.10.10.101', 32412)}]
|
||||
"""
|
||||
|
||||
gdm_msg = 'M-SEARCH * HTTP/1.0'.encode('ascii')
|
||||
gdm_timeout = 1
|
||||
|
||||
self.entries = []
|
||||
known_responses = []
|
||||
|
||||
# setup socket for discovery -> multicast message
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
sock.settimeout(gdm_timeout)
|
||||
|
||||
# Set the time-to-live for messages for local network
|
||||
sock.setsockopt(socket.IPPROTO_IP,
|
||||
socket.IP_MULTICAST_TTL,
|
||||
struct.pack("B", gdm_timeout))
|
||||
|
||||
if scan_for_clients:
|
||||
# setup socket for broadcast to Plex clients
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
|
||||
gdm_ip = '255.255.255.255'
|
||||
gdm_port = 32412
|
||||
else:
|
||||
# setup socket for multicast to Plex server(s)
|
||||
gdm_ip = '239.0.0.250'
|
||||
gdm_port = 32414
|
||||
|
||||
try:
|
||||
# Send data to the multicast group
|
||||
sock.sendto(gdm_msg, (gdm_ip, gdm_port))
|
||||
|
||||
# Look for responses from all recipients
|
||||
while True:
|
||||
try:
|
||||
bdata, host = sock.recvfrom(1024)
|
||||
data = bdata.decode('utf-8')
|
||||
if '200 OK' in data.splitlines()[0]:
|
||||
ddata = {k: v.strip() for (k, v) in (
|
||||
line.split(':') for line in
|
||||
data.splitlines() if ':' in line)}
|
||||
identifier = ddata.get('Resource-Identifier')
|
||||
if identifier and identifier in known_responses:
|
||||
continue
|
||||
known_responses.append(identifier)
|
||||
self.entries.append({'data': ddata,
|
||||
'from': host})
|
||||
except socket.timeout:
|
||||
break
|
||||
finally:
|
||||
sock.close()
|
||||
|
||||
|
||||
def main():
|
||||
"""Test GDM discovery."""
|
||||
from pprint import pprint
|
||||
|
||||
gdm = GDM()
|
||||
|
||||
pprint("Scanning GDM for servers...")
|
||||
gdm.scan()
|
||||
pprint(gdm.entries)
|
||||
|
||||
pprint("Scanning GDM for clients...")
|
||||
gdm.scan(scan_for_clients=True)
|
||||
pprint(gdm.entries)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@@ -1,9 +1,10 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import X_PLEX_CONTAINER_SIZE, log, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.compat import unquote, urlencode, quote_plus
|
||||
from plexapi.media import MediaTag
|
||||
from plexapi.compat import quote, quote_plus, unquote, urlencode
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.media import MediaTag
|
||||
from plexapi.settings import Setting
|
||||
|
||||
|
||||
class Library(PlexObject):
|
||||
@@ -294,6 +295,17 @@ class Library(PlexObject):
|
||||
part += urlencode(kwargs)
|
||||
return self._server.query(part, method=self._server._session.post)
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
""" Get Play History for all library Sections for the owner.
|
||||
Parameters:
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from.
|
||||
"""
|
||||
hist = []
|
||||
for section in self.sections():
|
||||
hist.extend(section.history(maxresults=maxresults, mindate=mindate))
|
||||
return hist
|
||||
|
||||
|
||||
class LibrarySection(PlexObject):
|
||||
""" Base class for a single library section.
|
||||
@@ -320,6 +332,8 @@ class LibrarySection(PlexObject):
|
||||
type (str): Type of content section represents (movie, artist, photo, show).
|
||||
updatedAt (datetime): Datetime this library section was last updated.
|
||||
uuid (str): Unique id for this section (32258d7c-3e6c-4ac5-98ad-bad7a3b78c63)
|
||||
totalSize (int): Total number of item in the library
|
||||
|
||||
"""
|
||||
ALLOWED_FILTERS = ()
|
||||
ALLOWED_SORT = ()
|
||||
@@ -343,6 +357,51 @@ class LibrarySection(PlexObject):
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.uuid = data.attrib.get('uuid')
|
||||
# Private attrs as we don't want a reload.
|
||||
self._total_size = None
|
||||
|
||||
def fetchItems(self, ekey, cls=None, container_start=None, container_size=None, **kwargs):
|
||||
""" Load the specified key to find and build all items with the specified tag
|
||||
and attrs. See :func:`~plexapi.base.PlexObject.fetchItem` for more details
|
||||
on how this is used.
|
||||
|
||||
Parameters:
|
||||
container_start (None, int): offset to get a subset of the data
|
||||
container_size (None, int): How many items in data
|
||||
|
||||
"""
|
||||
url_kw = {}
|
||||
if container_start is not None:
|
||||
url_kw["X-Plex-Container-Start"] = container_start
|
||||
if container_size is not None:
|
||||
url_kw["X-Plex-Container-Size"] = container_size
|
||||
|
||||
if ekey is None:
|
||||
raise BadRequest('ekey was not provided')
|
||||
data = self._server.query(ekey, params=url_kw)
|
||||
|
||||
if '/all' in ekey:
|
||||
# totalSize is only included in the xml response
|
||||
# if container size is used.
|
||||
total_size = data.attrib.get("totalSize") or data.attrib.get("size")
|
||||
self._total_size = utils.cast(int, total_size)
|
||||
|
||||
items = self.findItems(data, cls, ekey, **kwargs)
|
||||
|
||||
librarySectionID = data.attrib.get('librarySectionID')
|
||||
if librarySectionID:
|
||||
for item in items:
|
||||
item.librarySectionID = librarySectionID
|
||||
return items
|
||||
|
||||
@property
|
||||
def totalSize(self):
|
||||
if self._total_size is None:
|
||||
part = '/library/sections/%s/all?X-Plex-Container-Start=0&X-Plex-Container-Size=1' % self.key
|
||||
data = self._server.query(part)
|
||||
self._total_size = int(data.attrib.get("totalSize"))
|
||||
|
||||
return self._total_size
|
||||
|
||||
def delete(self):
|
||||
""" Delete a library section. """
|
||||
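A short sketch of the cached totalSize property added above; the server details and section name are hypothetical:

    from plexapi.server import PlexServer

    plex = PlexServer('http://127.0.0.1:32400', token='<token>')
    movies = plex.library.section('Movies')
    print(movies.totalSize)   # one request asking for a single item, then cached
    # Any fetchItems()/search() call that hits the section's '/all' endpoint refreshes the cache.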
@@ -354,13 +413,18 @@ class LibrarySection(PlexObject):
|
||||
log.error(msg)
|
||||
raise
|
||||
|
||||
def edit(self, **kwargs):
|
||||
def reload(self, key=None):
|
||||
return self._server.library.section(self.title)
|
||||
|
||||
def edit(self, agent=None, **kwargs):
|
||||
""" Edit a library (Note: agent is required). See :class:`~plexapi.library.Library` for example usage.
|
||||
|
||||
Parameters:
|
||||
kwargs (dict): Dict of settings to edit.
|
||||
"""
|
||||
part = '/library/sections/%s?%s' % (self.key, urlencode(kwargs))
|
||||
if not agent:
|
||||
agent = self.agent
|
||||
part = '/library/sections/%s?agent=%s&%s' % (self.key, agent, urlencode(kwargs))
|
||||
self._server.query(part, method=self._server._session.put)
|
||||
|
||||
# Reload this way since self.key doesn't have a full path, but is simply an id.
|
||||
@@ -374,7 +438,7 @@ class LibrarySection(PlexObject):
|
||||
Parameters:
|
||||
title (str): Title of the item to return.
|
||||
"""
|
||||
key = '/library/sections/%s/all' % self.key
|
||||
key = '/library/sections/%s/all?title=%s' % (self.key, quote(title, safe=''))
|
||||
return self.fetchItem(key, title__iexact=title)
|
||||
|
||||
def all(self, sort=None, **kwargs):
|
||||
@@ -390,6 +454,17 @@ class LibrarySection(PlexObject):
|
||||
key = '/library/sections/%s/all%s' % (self.key, sortStr)
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def agents(self):
|
||||
""" Returns a list of available `:class:`~plexapi.media.Agent` for this library section.
|
||||
"""
|
||||
return self._server.agents(utils.searchType(self.type))
|
||||
|
||||
def settings(self):
|
||||
""" Returns a list of all library settings. """
|
||||
key = '/library/sections/%s/prefs' % self.key
|
||||
data = self._server.query(key)
|
||||
return self.findItems(data, cls=Setting)
|
||||
|
||||
def onDeck(self):
|
||||
""" Returns a list of media items on deck from this library section. """
|
||||
key = '/library/sections/%s/onDeck' % self.key
|
||||
@@ -464,9 +539,9 @@ class LibrarySection(PlexObject):
|
||||
key = '/library/sections/%s/%s%s' % (self.key, category, utils.joinArgs(args))
|
||||
return self.fetchItems(key, cls=FilterChoice)
|
||||
|
||||
def search(self, title=None, sort=None, maxresults=999999, libtype=None, **kwargs):
|
||||
""" Search the library. If there are many results, they will be fetched from the server
|
||||
in batches of X_PLEX_CONTAINER_SIZE amounts. If you're only looking for the first <num>
|
||||
def search(self, title=None, sort=None, maxresults=None,
|
||||
libtype=None, container_start=0, container_size=X_PLEX_CONTAINER_SIZE, **kwargs):
|
||||
""" Search the library. The http requests will be batched in container_size. If you're only looking for the first <num>
|
||||
results, it would be wise to set the maxresults option to that amount so this functions
|
||||
doesn't iterate over all results on the server.
|
||||
|
||||
@@ -477,6 +552,8 @@ class LibrarySection(PlexObject):
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
libtype (str): Filter results to a specific libtype (movie, show, episode, artist,
|
||||
album, track; optional).
|
||||
container_start (int): default 0
|
||||
container_size (int): default X_PLEX_CONTAINER_SIZE in your config file.
|
||||
**kwargs (dict): Any of the available filters for the current library section. Partial string
|
||||
matches allowed. Multiple matches OR together. Negative filtering also possible, just add an
|
||||
exclamation mark to the end of filter name, e.g. `resolution!=1x1`.
|
||||
@@ -508,15 +585,37 @@ class LibrarySection(PlexObject):
|
||||
args['sort'] = self._cleanSearchSort(sort)
|
||||
if libtype is not None:
|
||||
args['type'] = utils.searchType(libtype)
|
||||
# iterate over the results
|
||||
results, subresults = [], '_init'
|
||||
args['X-Plex-Container-Start'] = 0
|
||||
args['X-Plex-Container-Size'] = min(X_PLEX_CONTAINER_SIZE, maxresults)
|
||||
while subresults and maxresults > len(results):
|
||||
|
||||
results = []
|
||||
subresults = []
|
||||
offset = container_start
|
||||
|
||||
if maxresults is not None:
|
||||
container_size = min(container_size, maxresults)
|
||||
while True:
|
||||
key = '/library/sections/%s/all%s' % (self.key, utils.joinArgs(args))
|
||||
subresults = self.fetchItems(key)
|
||||
results += subresults[:maxresults - len(results)]
|
||||
args['X-Plex-Container-Start'] += args['X-Plex-Container-Size']
|
||||
subresults = self.fetchItems(key, container_start=container_start,
|
||||
container_size=container_size)
|
||||
if not len(subresults):
|
||||
if offset > self.totalSize:
|
||||
log.info("container_start is higher then the number of items in the library")
|
||||
break
|
||||
|
||||
results.extend(subresults)
|
||||
|
||||
# self.totalSize is not used as a condition in the while loop as
|
||||
# this requires an additional http request.
|
||||
# self.totalSize is updated from .fetchItems
|
||||
wanted_number_of_items = self.totalSize - offset
|
||||
if maxresults is not None:
|
||||
wanted_number_of_items = min(maxresults, wanted_number_of_items)
|
||||
container_size = min(container_size, maxresults - len(results))
|
||||
|
||||
if wanted_number_of_items <= len(results):
|
||||
break
|
||||
|
||||
container_start += container_size
|
||||
|
||||
return results
|
||||
|
||||
def _cleanSearchFilter(self, category, value, libtype=None):
|
||||
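A hedged sketch of the reworked batching in search(); the server details, section name and title filter are hypothetical:

    from plexapi.server import PlexServer

    plex = PlexServer('http://127.0.0.1:32400', token='<token>')
    movies = plex.library.section('Movies')
    # Fetch at most 10 items, requesting them from the server in batches of 5.
    results = movies.search(title='the', maxresults=10, container_size=5)
    # With maxresults=None (the new default) the loop keeps paging until the
    # section's totalSize has been reached.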
@@ -543,7 +642,7 @@ class LibrarySection(PlexObject):
|
||||
matches = [k for t, k in lookup.items() if item in t]
|
||||
if matches: map(result.add, matches); continue
|
||||
# nothing matched; use raw item value
|
||||
log.warning('Filter value not listed, using raw item value: %s' % item)
|
||||
log.debug('Filter value not listed, using raw item value: %s' % item)
|
||||
result.add(item)
|
||||
return ','.join(result)
|
||||
|
||||
@@ -633,6 +732,14 @@ class LibrarySection(PlexObject):
|
||||
|
||||
return myplex.sync(client=client, clientId=clientId, sync_item=sync_item)
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
""" Get Play History for this library Section for the owner.
|
||||
Parameters:
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from.
|
||||
"""
|
||||
return self._server.history(maxresults=maxresults, mindate=mindate, librarySectionID=self.key, accountID=1)
|
||||
|
||||
|
||||
class MovieSection(LibrarySection):
|
||||
""" Represents a :class:`~plexapi.library.LibrarySection` section containing movies.
|
||||
@@ -869,7 +976,7 @@ class PhotoSection(LibrarySection):
|
||||
TYPE (str): 'photo'
|
||||
"""
|
||||
ALLOWED_FILTERS = ('all', 'iso', 'make', 'lens', 'aperture', 'exposure', 'device', 'resolution', 'place',
|
||||
'originallyAvailableAt', 'addedAt', 'title', 'userRating')
|
||||
'originallyAvailableAt', 'addedAt', 'title', 'userRating', 'tag', 'year')
|
||||
ALLOWED_SORT = ('addedAt',)
|
||||
TAG = 'Directory'
|
||||
TYPE = 'photo'
|
||||
@@ -968,6 +1075,7 @@ class Hub(PlexObject):
|
||||
self.size = utils.cast(int, data.attrib.get('size'))
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.key = data.attrib.get('key')
|
||||
self.items = self.findItems(data)
|
||||
|
||||
def __len__(self):
|
||||
@@ -979,9 +1087,11 @@ class Collections(PlexObject):
|
||||
|
||||
TAG = 'Directory'
|
||||
TYPE = 'collection'
|
||||
_include = "?includeExternalMedia=1&includePreferences=1"
|
||||
|
||||
def _loadData(self, data):
|
||||
self.ratingKey = utils.cast(int, data.attrib.get('ratingKey'))
|
||||
self._details_key = "/library/metadata/%s%s" % (self.ratingKey, self._include)
|
||||
self.key = data.attrib.get('key')
|
||||
self.type = data.attrib.get('type')
|
||||
self.title = data.attrib.get('title')
|
||||
@@ -1051,5 +1161,43 @@ class Collections(PlexObject):
|
||||
part = '/library/metadata/%s/prefs?collectionSort=%s' % (self.ratingKey, key)
|
||||
return self._server.query(part, method=self._server._session.put)
|
||||
|
||||
def posters(self):
|
||||
""" Returns list of available poster objects. :class:`~plexapi.media.Poster`. """
|
||||
|
||||
return self.fetchItems('/library/metadata/%s/posters' % self.ratingKey)
|
||||
|
||||
def uploadPoster(self, url=None, filepath=None):
|
||||
""" Upload poster from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
|
||||
if url:
|
||||
key = '/library/metadata/%s/posters?url=%s' % (self.ratingKey, quote_plus(url))
|
||||
self._server.query(key, method=self._server._session.post)
|
||||
elif filepath:
|
||||
key = '/library/metadata/%s/posters?' % self.ratingKey
|
||||
data = open(filepath, 'rb').read()
|
||||
self._server.query(key, method=self._server._session.post, data=data)
|
||||
|
||||
def setPoster(self, poster):
|
||||
""" Set . :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
|
||||
poster.select()
|
||||
|
||||
def arts(self):
|
||||
""" Returns list of available art objects. :class:`~plexapi.media.Poster`. """
|
||||
|
||||
return self.fetchItems('/library/metadata/%s/arts' % self.ratingKey)
|
||||
|
||||
def uploadArt(self, url=None, filepath=None):
|
||||
""" Upload art from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
|
||||
if url:
|
||||
key = '/library/metadata/%s/arts?url=%s' % (self.ratingKey, quote_plus(url))
|
||||
self._server.query(key, method=self._server._session.post)
|
||||
elif filepath:
|
||||
key = '/library/metadata/%s/arts?' % self.ratingKey
|
||||
data = open(filepath, 'rb').read()
|
||||
self._server.query(key, method=self._server._session.post, data=data)
|
||||
|
||||
def setArt(self, art):
|
||||
""" Set :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
|
||||
art.select()
|
||||
|
||||
# def edit(self, **kwargs):
|
||||
# TODO
|
||||
|
@@ -1,5 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import log, utils
|
||||
|
||||
import xml
|
||||
|
||||
from plexapi import compat, log, settings, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.exceptions import BadRequest
|
||||
from plexapi.utils import cast
|
||||
@@ -143,7 +146,7 @@ class MediaPart(PlexObject):
|
||||
|
||||
def setDefaultSubtitleStream(self, stream):
|
||||
""" Set the default :class:`~plexapi.media.SubtitleStream` for this MediaPart.
|
||||
|
||||
|
||||
Parameters:
|
||||
stream (:class:`~plexapi.media.SubtitleStream`): SubtitleStream to set as default.
|
||||
"""
|
||||
@@ -349,6 +352,118 @@ class TranscodeSession(PlexObject):
|
||||
self.width = cast(int, data.attrib.get('width'))
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class TranscodeJob(PlexObject):
|
||||
""" Represents an Optimizing job.
|
||||
TranscodeJobs are the process for optimizing conversions.
|
||||
Active or paused optimization items. Usually one item at a time."""
|
||||
TAG = 'TranscodeJob'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.generatorID = data.attrib.get('generatorID')
|
||||
self.key = data.attrib.get('key')
|
||||
self.progress = data.attrib.get('progress')
|
||||
self.ratingKey = data.attrib.get('ratingKey')
|
||||
self.size = data.attrib.get('size')
|
||||
self.targetTagID = data.attrib.get('targetTagID')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Optimized(PlexObject):
|
||||
""" Represents a Optimized item.
|
||||
Optimized items are optimized and queued conversion items."""
|
||||
TAG = 'Item'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.id = data.attrib.get('id')
|
||||
self.composite = data.attrib.get('composite')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.target = data.attrib.get('target')
|
||||
self.targetTagID = data.attrib.get('targetTagID')
|
||||
|
||||
def remove(self):
|
||||
""" Remove an Optimized item"""
|
||||
key = '%s/%s' % (self._initpath, self.id)
|
||||
self._server.query(key, method=self._server._session.delete)
|
||||
|
||||
def rename(self, title):
|
||||
""" Rename an Optimized item"""
|
||||
key = '%s/%s?Item[title]=%s' % (self._initpath, self.id, title)
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def reprocess(self, ratingKey):
|
||||
""" Reprocess a removed Conversion item that is still a listed Optimize item"""
|
||||
key = '%s/%s/%s/enable' % (self._initpath, self.id, ratingKey)
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Conversion(PlexObject):
|
||||
""" Represents a Conversion item.
|
||||
Conversions are items queued for optimization or being actively optimized."""
|
||||
TAG = 'Video'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.addedAt = data.attrib.get('addedAt')
|
||||
self.art = data.attrib.get('art')
|
||||
self.chapterSource = data.attrib.get('chapterSource')
|
||||
self.contentRating = data.attrib.get('contentRating')
|
||||
self.duration = data.attrib.get('duration')
|
||||
self.generatorID = data.attrib.get('generatorID')
|
||||
self.generatorType = data.attrib.get('generatorType')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.key = data.attrib.get('key')
|
||||
self.lastViewedAt = data.attrib.get('lastViewedAt')
|
||||
self.librarySectionID = data.attrib.get('librarySectionID')
|
||||
self.librarySectionKey = data.attrib.get('librarySectionKey')
|
||||
self.librarySectionTitle = data.attrib.get('librarySectionTitle')
|
||||
self.originallyAvailableAt = data.attrib.get('originallyAvailableAt')
|
||||
self.playQueueItemID = data.attrib.get('playQueueItemID')
|
||||
self.playlistID = data.attrib.get('playlistID')
|
||||
self.primaryExtraKey = data.attrib.get('primaryExtraKey')
|
||||
self.rating = data.attrib.get('rating')
|
||||
self.ratingKey = data.attrib.get('ratingKey')
|
||||
self.studio = data.attrib.get('studio')
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.tagline = data.attrib.get('tagline')
|
||||
self.target = data.attrib.get('target')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = data.attrib.get('updatedAt')
|
||||
self.userID = data.attrib.get('userID')
|
||||
self.username = data.attrib.get('username')
|
||||
self.viewOffset = data.attrib.get('viewOffset')
|
||||
self.year = data.attrib.get('year')
|
||||
|
||||
def remove(self):
|
||||
""" Remove Conversion from queue """
|
||||
key = '/playlists/%s/items/%s/%s/disable' % (self.playlistID, self.generatorID, self.ratingKey)
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def move(self, after):
|
||||
""" Move Conversion items position in queue
|
||||
after (int): Place item after specified playQueueItemID. '-1' is the active conversion.
|
||||
|
||||
Example:
|
||||
Move 5th conversion Item to active conversion
|
||||
conversions[4].move('-1')
|
||||
|
||||
Move 4th conversion Item to 3rd in conversion queue
|
||||
conversions[3].move(conversions[1].playQueueItemID)
|
||||
"""
|
||||
|
||||
key = '%s/items/%s/move?after=%s' % (self._initpath, self.playQueueItemID, after)
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
|
||||
class MediaTag(PlexObject):
|
||||
""" Base class for media tags used for filtering and searching your library
|
||||
items or navigating the metadata of media items in your library. Tags are
|
||||
@@ -419,6 +534,25 @@ class Label(MediaTag):
|
||||
FILTER = 'label'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Tag(MediaTag):
|
||||
""" Represents a single tag media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'tag'
|
||||
FILTER (str): 'tag'
|
||||
"""
|
||||
TAG = 'Tag'
|
||||
FILTER = 'tag'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.id = cast(int, data.attrib.get('id', 0))
|
||||
self.filter = data.attrib.get('filter')
|
||||
self.tag = data.attrib.get('tag')
|
||||
self.title = self.tag
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Country(MediaTag):
|
||||
""" Represents a single Country media tag.
|
||||
@@ -483,6 +617,14 @@ class Poster(PlexObject):
|
||||
self.selected = data.attrib.get('selected')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
|
||||
def select(self):
|
||||
key = self._initpath[:-1]
|
||||
data = '%s?url=%s' % (key, compat.quote_plus(self.ratingKey))
|
||||
try:
|
||||
self._server.query(data, method=self._server._session.put)
|
||||
except xml.etree.ElementTree.ParseError:
|
||||
pass
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Producer(MediaTag):
|
||||
@@ -565,3 +707,74 @@ class Field(PlexObject):
|
||||
self._data = data
|
||||
self.name = data.attrib.get('name')
|
||||
self.locked = cast(bool, data.attrib.get('locked'))
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class SearchResult(PlexObject):
|
||||
""" Represents a single SearchResult.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'SearchResult'
|
||||
"""
|
||||
TAG = 'SearchResult'
|
||||
|
||||
def __repr__(self):
|
||||
name = self._clean(self.firstAttr('name'))
|
||||
score = self._clean(self.firstAttr('score'))
|
||||
return '<%s>' % ':'.join([p for p in [self.__class__.__name__, name, score] if p])
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.lifespanEnded = data.attrib.get('lifespanEnded')
|
||||
self.name = data.attrib.get('name')
|
||||
self.score = cast(int, data.attrib.get('score'))
|
||||
self.year = data.attrib.get('year')
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Agent(PlexObject):
|
||||
""" Represents a single Agent.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Agent'
|
||||
"""
|
||||
TAG = 'Agent'
|
||||
|
||||
def __repr__(self):
|
||||
uid = self._clean(self.firstAttr('shortIdentifier'))
|
||||
return '<%s>' % ':'.join([p for p in [self.__class__.__name__, uid] if p])
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.hasAttribution = data.attrib.get('hasAttribution')
|
||||
self.hasPrefs = data.attrib.get('hasPrefs')
|
||||
self.identifier = data.attrib.get('identifier')
|
||||
self.primary = data.attrib.get('primary')
|
||||
self.shortIdentifier = self.identifier.rsplit('.', 1)[1]
|
||||
if 'mediaType' in self._initpath:
|
||||
self.name = data.attrib.get('name')
|
||||
self.languageCode = []
|
||||
for code in data:
|
||||
self.languageCode += [code.attrib.get('code')]
|
||||
else:
|
||||
self.mediaTypes = [AgentMediaType(server=self._server, data=d) for d in data]
|
||||
|
||||
def _settings(self):
|
||||
key = '/:/plugins/%s/prefs' % self.identifier
|
||||
data = self._server.query(key)
|
||||
return self.findItems(data, cls=settings.Setting)
|
||||
|
||||
|
||||
class AgentMediaType(Agent):
|
||||
|
||||
def __repr__(self):
|
||||
uid = self._clean(self.firstAttr('name'))
|
||||
return '<%s>' % ':'.join([p for p in [self.__class__.__name__, uid] if p])
|
||||
|
||||
def _loadData(self, data):
|
||||
self.mediaType = cast(int, data.attrib.get('mediaType'))
|
||||
self.name = data.attrib.get('name')
|
||||
self.languageCode = []
|
||||
for code in data:
|
||||
self.languageCode += [code.attrib.get('code')]
|
||||
|
@@ -1,18 +1,21 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import copy
|
||||
import requests
|
||||
import threading
|
||||
import time
|
||||
from requests.status_codes import _codes as codes
|
||||
from plexapi import BASE_HEADERS, CONFIG, TIMEOUT, X_PLEX_IDENTIFIER, X_PLEX_ENABLE_FAST_CONNECT
|
||||
from plexapi import log, logfilter, utils
|
||||
|
||||
import requests
|
||||
from plexapi import (BASE_HEADERS, CONFIG, TIMEOUT, X_PLEX_ENABLE_FAST_CONNECT,
|
||||
X_PLEX_IDENTIFIER, log, logfilter, utils)
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
|
||||
from plexapi.client import PlexClient
|
||||
from plexapi.compat import ElementTree
|
||||
from plexapi.library import LibrarySection
|
||||
from plexapi.server import PlexServer
|
||||
from plexapi.sync import SyncList, SyncItem
|
||||
from plexapi.sonos import PlexSonosClient
|
||||
from plexapi.sync import SyncItem, SyncList
|
||||
from plexapi.utils import joinArgs
|
||||
from requests.status_codes import _codes as codes
|
||||
|
||||
|
||||
class MyPlexAccount(PlexObject):
|
||||
@@ -73,6 +76,12 @@ class MyPlexAccount(PlexObject):
|
||||
REQUESTS = 'https://plex.tv/api/invites/requests' # get
|
||||
SIGNIN = 'https://plex.tv/users/sign_in.xml' # get with auth
|
||||
WEBHOOKS = 'https://plex.tv/api/v2/user/webhooks' # get, post with data
|
||||
# Hub sections
|
||||
VOD = 'https://vod.provider.plex.tv/' # get
|
||||
WEBSHOWS = 'https://webshows.provider.plex.tv/' # get
|
||||
NEWS = 'https://news.provider.plex.tv/' # get
|
||||
PODCASTS = 'https://podcasts.provider.plex.tv/' # get
|
||||
MUSIC = 'https://music.provider.plex.tv/' # get
|
||||
# Key may someday switch to the following url. For now the current value works.
|
||||
# https://plex.tv/api/v2/user?X-Plex-Token={token}&X-Plex-Client-Identifier={clientId}
|
||||
key = 'https://plex.tv/users/account'
|
||||
@@ -80,6 +89,8 @@ class MyPlexAccount(PlexObject):
|
||||
def __init__(self, username=None, password=None, token=None, session=None, timeout=None):
|
||||
self._token = token
|
||||
self._session = session or requests.Session()
|
||||
self._sonos_cache = []
|
||||
self._sonos_cache_timestamp = 0
|
||||
data, initpath = self._signin(username, password, timeout)
|
||||
super(MyPlexAccount, self).__init__(self, data, initpath)
|
||||
|
||||
@@ -175,7 +186,13 @@ class MyPlexAccount(PlexObject):
|
||||
if response.status_code not in (200, 201, 204): # pragma: no cover
|
||||
codename = codes.get(response.status_code)[0]
|
||||
errtext = response.text.replace('\n', ' ')
|
||||
raise BadRequest('(%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
message = '(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext)
|
||||
if response.status_code == 401:
|
||||
raise Unauthorized(message)
|
||||
elif response.status_code == 404:
|
||||
raise NotFound(message)
|
||||
else:
|
||||
raise BadRequest(message)
|
||||
data = response.text.encode('utf8')
|
||||
return ElementTree.fromstring(data) if data.strip() else None
|
||||
|
||||
@@ -195,6 +212,24 @@ class MyPlexAccount(PlexObject):
|
||||
data = self.query(MyPlexResource.key)
|
||||
return [MyPlexResource(self, elem) for elem in data]
|
||||
|
||||
def sonos_speakers(self):
|
||||
if 'companions_sonos' not in self.subscriptionFeatures:
|
||||
return []
|
||||
|
||||
t = time.time()
|
||||
if t - self._sonos_cache_timestamp > 60:
|
||||
self._sonos_cache_timestamp = t
|
||||
data = self.query('https://sonos.plex.tv/resources')
|
||||
self._sonos_cache = [PlexSonosClient(self, elem) for elem in data]
|
||||
|
||||
return self._sonos_cache
|
||||
|
||||
def sonos_speaker(self, name):
|
||||
return [x for x in self.sonos_speakers() if x.title == name][0]
|
||||
|
||||
def sonos_speaker_by_id(self, identifier):
|
||||
return [x for x in self.sonos_speakers() if x.machineIdentifier == identifier][0]
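A rough sketch of how the Sonos helpers above might be used; the token and the speaker name 'Kitchen' are placeholders, and the account must have the companions_sonos subscription feature:

from plexapi.myplex import MyPlexAccount

account = MyPlexAccount(token='<token>')  # placeholder token
# sonos_speakers() is cached for 60 seconds, so repeated calls are cheap
for speaker in account.sonos_speakers():
    print(speaker.title, speaker.lanIP)

# Look up a single speaker by name; raises IndexError if nothing matches
kitchen = account.sonos_speaker('Kitchen')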
|
||||
|
||||
def inviteFriend(self, user, server, sections=None, allowSync=False, allowCameraUpload=False,
|
||||
allowChannels=False, filterMovies=None, filterTelevision=None, filterMusic=None):
|
||||
""" Share library content with the specified user.
|
||||
@@ -384,8 +419,8 @@ class MyPlexAccount(PlexObject):
|
||||
params = {'server_id': machineId, 'shared_server': {'library_section_ids': sectionIds}}
|
||||
url = self.FRIENDSERVERS.format(machineId=machineId, serverId=serverId)
|
||||
else:
|
||||
params = {'server_id': machineId, 'shared_server': {'library_section_ids': sectionIds,
|
||||
'invited_id': user.id}}
|
||||
params = {'server_id': machineId,
|
||||
'shared_server': {'library_section_ids': sectionIds, 'invited_id': user.id}}
|
||||
url = self.FRIENDINVITE.format(machineId=machineId)
|
||||
# Remove share sections, add shares to user without shares, or update shares
|
||||
if not user_servers or sectionIds:
|
||||
@@ -429,7 +464,7 @@ class MyPlexAccount(PlexObject):
|
||||
return user
|
||||
|
||||
elif (user.username and user.email and user.id and username.lower() in
|
||||
(user.username.lower(), user.email.lower(), str(user.id))):
|
||||
(user.username.lower(), user.email.lower(), str(user.id))):
|
||||
return user
|
||||
|
||||
raise NotFound('Unable to find user %s' % username)
|
||||
@@ -600,6 +635,54 @@ class MyPlexAccount(PlexObject):
|
||||
raise BadRequest('(%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
return response.json()['token']
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
""" Get Play History for all library sections on all servers for the owner.
|
||||
Parameters:
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from.
|
||||
"""
|
||||
servers = [x for x in self.resources() if x.provides == 'server' and x.owned]
|
||||
hist = []
|
||||
for server in servers:
|
||||
conn = server.connect()
|
||||
hist.extend(conn.history(maxresults=maxresults, mindate=mindate, accountID=1))
|
||||
return hist
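For illustration only, a sketch using the new account-level history() to pull the last week of plays across all owned servers; the one-week window and the token are arbitrary choices:

from datetime import datetime, timedelta
from plexapi.myplex import MyPlexAccount

account = MyPlexAccount(token='<token>')  # placeholder token
lastweek = datetime.now() - timedelta(days=7)
# Aggregates play history from every owned server the account can reach
for entry in account.history(maxresults=50, mindate=lastweek):
    print(entry)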
|
||||
|
||||
def videoOnDemand(self):
|
||||
""" Returns a list of VOD Hub items :class:`~plexapi.library.Hub`
|
||||
"""
|
||||
req = requests.get(self.VOD + 'hubs/', headers={'X-Plex-Token': self._token})
|
||||
elem = ElementTree.fromstring(req.text)
|
||||
return self.findItems(elem)
|
||||
|
||||
def webShows(self):
|
||||
""" Returns a list of Webshow Hub items :class:`~plexapi.library.Hub`
|
||||
"""
|
||||
req = requests.get(self.WEBSHOWS + 'hubs/', headers={'X-Plex-Token': self._token})
|
||||
elem = ElementTree.fromstring(req.text)
|
||||
return self.findItems(elem)
|
||||
|
||||
def news(self):
|
||||
""" Returns a list of News Hub items :class:`~plexapi.library.Hub`
|
||||
"""
|
||||
req = requests.get(self.NEWS + 'hubs/sections/all', headers={'X-Plex-Token': self._token})
|
||||
elem = ElementTree.fromstring(req.text)
|
||||
return self.findItems(elem)
|
||||
|
||||
def podcasts(self):
|
||||
""" Returns a list of Podcasts Hub items :class:`~plexapi.library.Hub`
|
||||
"""
|
||||
req = requests.get(self.PODCASTS + 'hubs/', headers={'X-Plex-Token': self._token})
|
||||
elem = ElementTree.fromstring(req.text)
|
||||
return self.findItems(elem)
|
||||
|
||||
def tidal(self):
|
||||
""" Returns a list of tidal Hub items :class:`~plexapi.library.Hub`
|
||||
"""
|
||||
req = requests.get(self.MUSIC + 'hubs/', headers={'X-Plex-Token': self._token})
|
||||
elem = ElementTree.fromstring(req.text)
|
||||
return self.findItems(elem)
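The hub helpers above all follow the same pattern; a minimal sketch, assuming a signed-in account:

account = MyPlexAccount(token='<token>')  # placeholder token
for hub in account.webShows():            # same idea for news(), podcasts(), tidal(), videoOnDemand()
    print(hub.title)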
|
||||
|
||||
|
||||
class MyPlexUser(PlexObject):
|
||||
""" This object represents non-signed in users such as friends and linked
|
||||
@@ -654,6 +737,8 @@ class MyPlexUser(PlexObject):
|
||||
self.title = data.attrib.get('title', '')
|
||||
self.username = data.attrib.get('username', '')
|
||||
self.servers = self.findItems(data, MyPlexServerShare)
|
||||
for server in self.servers:
|
||||
server.accountID = self.id
|
||||
|
||||
def get_token(self, machineIdentifier):
|
||||
try:
|
||||
@@ -663,6 +748,29 @@ class MyPlexUser(PlexObject):
|
||||
except Exception:
|
||||
log.exception('Failed to get access token for %s' % self.title)
|
||||
|
||||
def server(self, name):
|
||||
""" Returns the :class:`~plexapi.myplex.MyPlexServerShare` that matches the name specified.
|
||||
|
||||
Parameters:
|
||||
name (str): Name of the server to return.
|
||||
"""
|
||||
for server in self.servers:
|
||||
if name.lower() == server.name.lower():
|
||||
return server
|
||||
|
||||
raise NotFound('Unable to find server %s' % name)
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
""" Get all Play History for a user in all shared servers.
|
||||
Parameters:
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from.
|
||||
"""
|
||||
hist = []
|
||||
for server in self.servers:
|
||||
hist.extend(server.history(maxresults=maxresults, mindate=mindate))
|
||||
return hist
|
||||
|
||||
|
||||
class Section(PlexObject):
|
||||
""" This refers to a shared section. The raw xml for the data presented here
|
||||
@@ -689,6 +797,16 @@ class Section(PlexObject):
|
||||
self.type = data.attrib.get('type')
|
||||
self.shared = utils.cast(bool, data.attrib.get('shared'))
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
""" Get all Play History for a user for this section in this shared server.
|
||||
Parameters:
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from.
|
||||
"""
|
||||
server = self._server._server.resource(self._server.name).connect()
|
||||
return server.history(maxresults=maxresults, mindate=mindate,
|
||||
accountID=self._server.accountID, librarySectionID=self.sectionKey)
|
||||
|
||||
|
||||
class MyPlexServerShare(PlexObject):
|
||||
""" Represents a single user's server reference. Used for library sharing.
|
||||
@@ -711,6 +829,7 @@ class MyPlexServerShare(PlexObject):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.id = utils.cast(int, data.attrib.get('id'))
|
||||
self.accountID = utils.cast(int, data.attrib.get('accountID'))
|
||||
self.serverId = utils.cast(int, data.attrib.get('serverId'))
|
||||
self.machineIdentifier = data.attrib.get('machineIdentifier')
|
||||
self.name = data.attrib.get('name')
|
||||
@@ -720,7 +839,21 @@ class MyPlexServerShare(PlexObject):
|
||||
self.owned = utils.cast(bool, data.attrib.get('owned'))
|
||||
self.pending = utils.cast(bool, data.attrib.get('pending'))
|
||||
|
||||
def section(self, name):
|
||||
""" Returns the :class:`~plexapi.myplex.Section` that matches the name specified.
|
||||
|
||||
Parameters:
|
||||
name (str): Name of the section to return.
|
||||
"""
|
||||
for section in self.sections():
|
||||
if name.lower() == section.title.lower():
|
||||
return section
|
||||
|
||||
raise NotFound('Unable to find section %s' % name)
|
||||
|
||||
def sections(self):
|
||||
""" Returns a list of all :class:`~plexapi.myplex.Section` objects shared with this user.
|
||||
"""
|
||||
url = MyPlexAccount.FRIENDSERVERS.format(machineId=self.machineIdentifier, serverId=self.id)
|
||||
data = self._server.query(url)
|
||||
sections = []
|
||||
@@ -731,6 +864,15 @@ class MyPlexServerShare(PlexObject):
|
||||
|
||||
return sections
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
""" Get all Play History for a user in this shared server.
|
||||
Parameters:
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from.
|
||||
"""
|
||||
server = self._server.resource(self.name).connect()
|
||||
return server.history(maxresults=maxresults, mindate=mindate, accountID=self.accountID)
|
||||
|
||||
|
||||
class MyPlexResource(PlexObject):
|
||||
""" This object represents resources connected to your Plex server that can provide
|
||||
@@ -932,6 +1074,186 @@ class MyPlexDevice(PlexObject):
|
||||
return self._server.syncItems(client=self)
|
||||
|
||||
|
||||
class MyPlexPinLogin(object):
|
||||
"""
|
||||
MyPlex PIN login class which supports getting the four character PIN which the user must
|
||||
enter on https://plex.tv/link to authenticate the client and provide an access token to
|
||||
create a :class:`~plexapi.myplex.MyPlexAccount` instance.
|
||||
This helper class supports a polling, threaded and callback approach.
|
||||
|
||||
- The polling approach expects the developer to periodically check if the PIN login was
|
||||
successful using :func:`plexapi.myplex.MyPlexPinLogin.checkLogin`.
|
||||
- The threaded approach expects the developer to call
|
||||
:func:`plexapi.myplex.MyPlexPinLogin.run` and then at a later time call
|
||||
:func:`plexapi.myplex.MyPlexPinLogin.waitForLogin` to wait for and check the result.
|
||||
- The callback approach is an extension of the threaded approach and expects the developer
|
||||
to pass the `callback` parameter to the call to :func:`plexapi.myplex.MyPlexPinLogin.run`.
|
||||
The callback will be called when the thread waiting for the PIN login to succeed either
|
||||
finishes or expires. The parameter passed to the callback is the received authentication
|
||||
token or `None` if the login expired.
|
||||
|
||||
Parameters:
|
||||
session (requests.Session, optional): Use your own session object if you want to
|
||||
cache the http responses from PMS
|
||||
requestTimeout (int): timeout in seconds on initial connect to plex.tv (default config.TIMEOUT).
|
||||
|
||||
Attributes:
|
||||
PINS (str): 'https://plex.tv/pins.xml'
|
||||
CHECKPINS (str): 'https://plex.tv/pins/{pinid}.xml'
|
||||
POLLINTERVAL (int): 1
|
||||
finished (bool): Whether the pin login has finished or not.
|
||||
expired (bool): Whether the pin login has expired or not.
|
||||
token (str): Token retrieved through the pin login.
|
||||
pin (str): Pin to use for the login on https://plex.tv/link.
|
||||
"""
|
||||
PINS = 'https://plex.tv/pins.xml' # get
|
||||
CHECKPINS = 'https://plex.tv/pins/{pinid}.xml' # get
|
||||
POLLINTERVAL = 1
|
||||
|
||||
def __init__(self, session=None, requestTimeout=None):
|
||||
super(MyPlexPinLogin, self).__init__()
|
||||
self._session = session or requests.Session()
|
||||
self._requestTimeout = requestTimeout or TIMEOUT
|
||||
|
||||
self._loginTimeout = None
|
||||
self._callback = None
|
||||
self._thread = None
|
||||
self._abort = False
|
||||
self._id = None
|
||||
|
||||
self.finished = False
|
||||
self.expired = False
|
||||
self.token = None
|
||||
self.pin = self._getPin()
|
||||
|
||||
def run(self, callback=None, timeout=None):
|
||||
""" Starts the thread which monitors the PIN login state.
|
||||
Parameters:
|
||||
callback (Callable[str]): Callback called with the received authentication token (optional).
|
||||
timeout (int): Timeout in seconds waiting for the PIN login to succeed (optional).
|
||||
|
||||
Raises:
|
||||
:class:`RuntimeError`: if the thread is already running.
|
||||
:class:`RuntimeError`: if the PIN login for the current PIN has expired.
|
||||
"""
|
||||
if self._thread and not self._abort:
|
||||
raise RuntimeError('MyPlexPinLogin thread is already running')
|
||||
if self.expired:
|
||||
raise RuntimeError('MyPlexPinLogin has expired')
|
||||
|
||||
self._loginTimeout = timeout
|
||||
self._callback = callback
|
||||
self._abort = False
|
||||
self.finished = False
|
||||
self._thread = threading.Thread(target=self._pollLogin, name='plexapi.myplex.MyPlexPinLogin')
|
||||
self._thread.start()
|
||||
|
||||
def waitForLogin(self):
|
||||
""" Waits for the PIN login to succeed or expire.
|
||||
Parameters:
|
||||
callback (Callable[str]): Callback called with the received authentication token (optional).
|
||||
timeout (int): Timeout in seconds waiting for the PIN login to succeed (optional).
|
||||
|
||||
Returns:
|
||||
`True` if the PIN login succeeded or `False` otherwise.
|
||||
"""
|
||||
if not self._thread or self._abort:
|
||||
return False
|
||||
|
||||
self._thread.join()
|
||||
if self.expired or not self.token:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def stop(self):
|
||||
""" Stops the thread monitoring the PIN login state. """
|
||||
if not self._thread or self._abort:
|
||||
return
|
||||
|
||||
self._abort = True
|
||||
self._thread.join()
|
||||
|
||||
def checkLogin(self):
|
||||
""" Returns `True` if the PIN login has succeeded. """
|
||||
if self._thread:
|
||||
return False
|
||||
|
||||
try:
|
||||
return self._checkLogin()
|
||||
except Exception:
|
||||
self.expired = True
|
||||
self.finished = True
|
||||
|
||||
return False
|
||||
|
||||
def _getPin(self):
|
||||
if self.pin:
|
||||
return self.pin
|
||||
|
||||
url = self.PINS
|
||||
response = self._query(url, self._session.post)
|
||||
if not response:
|
||||
return None
|
||||
|
||||
self._id = response.find('id').text
|
||||
self.pin = response.find('code').text
|
||||
|
||||
return self.pin
|
||||
|
||||
def _checkLogin(self):
|
||||
if not self._id:
|
||||
return False
|
||||
|
||||
if self.token:
|
||||
return True
|
||||
|
||||
url = self.CHECKPINS.format(pinid=self._id)
|
||||
response = self._query(url)
|
||||
if not response:
|
||||
return False
|
||||
|
||||
token = response.find('auth_token').text
|
||||
if not token:
|
||||
return False
|
||||
|
||||
self.token = token
|
||||
self.finished = True
|
||||
return True
|
||||
|
||||
def _pollLogin(self):
|
||||
try:
|
||||
start = time.time()
|
||||
while not self._abort and (not self._loginTimeout or (time.time() - start) < self._loginTimeout):
|
||||
try:
|
||||
result = self._checkLogin()
|
||||
except Exception:
|
||||
self.expired = True
|
||||
break
|
||||
|
||||
if result:
|
||||
break
|
||||
|
||||
time.sleep(self.POLLINTERVAL)
|
||||
|
||||
if self.token and self._callback:
|
||||
self._callback(self.token)
|
||||
finally:
|
||||
self.finished = True
|
||||
|
||||
def _query(self, url, method=None):
|
||||
method = method or self._session.get
|
||||
log.debug('%s %s', method.__name__.upper(), url)
|
||||
headers = BASE_HEADERS.copy()
|
||||
response = method(url, headers=headers, timeout=self._requestTimeout)
|
||||
if not response.ok: # pragma: no cover
|
||||
codename = codes.get(response.status_code)[0]
|
||||
errtext = response.text.replace('\n', ' ')
|
||||
raise BadRequest('(%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
data = response.text.encode('utf8')
|
||||
return ElementTree.fromstring(data) if data.strip() else None
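Putting the pieces together, a hedged sketch of the threaded PIN-login flow described in the class docstring; the timeout value is arbitrary:

from plexapi.myplex import MyPlexAccount, MyPlexPinLogin

pinlogin = MyPlexPinLogin()
print('Enter this PIN at https://plex.tv/link :', pinlogin.pin)

pinlogin.run(timeout=120)        # polls plex.tv in a background thread
if pinlogin.waitForLogin():      # blocks until success or expiry
    account = MyPlexAccount(token=pinlogin.token)
else:
    print('PIN login expired or was aborted')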
|
||||
|
||||
|
||||
def _connect(cls, url, token, timeout, results, i, job_is_done_event=None):
|
||||
""" Connects to the specified cls with url and token. Stores the connection
|
||||
information to results[i] in a threadsafe way.
|
||||
|
@@ -117,6 +117,7 @@ class Photo(PlexPartialObject):
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.year = utils.cast(int, data.attrib.get('year'))
|
||||
self.media = self.findItems(data, media.Media)
|
||||
self.tag = self.findItems(data, media.Tag)
|
||||
|
||||
def photoalbum(self):
|
||||
""" Return this photo's :class:`~plexapi.photo.Photoalbum`. """
|
||||
|
@@ -268,3 +268,41 @@ class Playlist(PlexPartialObject, Playable):
|
||||
raise Unsupported('Unsupported playlist content')
|
||||
|
||||
return myplex.sync(sync_item, client=client, clientId=clientId)
|
||||
|
||||
def posters(self):
|
||||
""" Returns list of available poster objects. :class:`~plexapi.media.Poster`. """
|
||||
|
||||
return self.fetchItems('/library/metadata/%s/posters' % self.ratingKey)
|
||||
|
||||
def uploadPoster(self, url=None, filepath=None):
|
||||
""" Upload poster from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
|
||||
if url:
|
||||
key = '/library/metadata/%s/posters?url=%s' % (self.ratingKey, quote_plus(url))
|
||||
self._server.query(key, method=self._server._session.post)
|
||||
elif filepath:
|
||||
key = '/library/metadata/%s/posters?' % self.ratingKey
|
||||
data = open(filepath, 'rb').read()
|
||||
self._server.query(key, method=self._server._session.post, data=data)
|
||||
|
||||
def setPoster(self, poster):
|
||||
""" Set . :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
|
||||
poster.select()
|
||||
|
||||
def arts(self):
|
||||
""" Returns list of available art objects. :class:`~plexapi.media.Poster`. """
|
||||
|
||||
return self.fetchItems('/library/metadata/%s/arts' % self.ratingKey)
|
||||
|
||||
def uploadArt(self, url=None, filepath=None):
|
||||
""" Upload art from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
|
||||
if url:
|
||||
key = '/library/metadata/%s/arts?url=%s' % (self.ratingKey, quote_plus(url))
|
||||
self._server.query(key, method=self._server._session.post)
|
||||
elif filepath:
|
||||
key = '/library/metadata/%s/arts?' % self.ratingKey
|
||||
data = open(filepath, 'rb').read()
|
||||
self._server.query(key, method=self._server._session.post, data=data)
|
||||
|
||||
def setArt(self, art):
|
||||
""" Set :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
|
||||
art.select()
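A short sketch of the new playlist artwork helpers, assuming a connected PlexServer instance named plex; the playlist title and URL are placeholders:

playlist = plex.playlist('Party Mix')
playlist.uploadPoster(url='https://example.com/poster.jpg')

# Or pick one of the posters Plex already has on file
posters = playlist.posters()
if posters:
    playlist.setPoster(posters[0])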
|
||||
|
@@ -7,12 +7,13 @@ from plexapi.alert import AlertListener
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.client import PlexClient
|
||||
from plexapi.compat import ElementTree, urlencode
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
|
||||
from plexapi.library import Library, Hub
|
||||
from plexapi.settings import Settings
|
||||
from plexapi.playlist import Playlist
|
||||
from plexapi.playqueue import PlayQueue
|
||||
from plexapi.utils import cast
|
||||
from plexapi.media import Optimized, Conversion
|
||||
|
||||
# Need these imports to populate utils.PLEXOBJECTS
|
||||
from plexapi import (audio as _audio, video as _video, # noqa: F401
|
||||
@@ -183,8 +184,18 @@ class PlexServer(PlexObject):
|
||||
data = self.query(Account.key)
|
||||
return Account(self, data)
|
||||
|
||||
def agents(self, mediaType=None):
|
||||
""" Returns the `:class:`~plexapi.media.Agent` objects this server has available. """
|
||||
key = '/system/agents'
|
||||
if mediaType:
|
||||
key += '?mediaType=%s' % mediaType
|
||||
return self.fetchItems(key)
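A minimal sketch of the new agents() helper, assuming a connected PlexServer instance named plex; the mediaType value 1 (movies in Plex's search types) is just an example:

for agent in plex.agents():
    print(agent.shortIdentifier)

movie_agents = plex.agents(mediaType=1)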
|
||||
|
||||
def createToken(self, type='delegation', scope='all'):
|
||||
"""Create a temp access token for the server."""
|
||||
if not self._token:
|
||||
# Handle unclaimed servers
|
||||
return None
|
||||
q = self.query('/security/token?type=%s&scope=%s' % (type, scope))
|
||||
return q.attrib.get('token')
|
||||
|
||||
@@ -322,7 +333,7 @@ class PlexServer(PlexObject):
|
||||
# figure out what method this is..
|
||||
return self.query(part, method=self._session.put)
|
||||
|
||||
def history(self, maxresults=9999999, mindate=None):
|
||||
def history(self, maxresults=9999999, mindate=None, ratingKey=None, accountID=None, librarySectionID=None):
|
||||
""" Returns a list of media items from watched history. If there are many results, they will
|
||||
be fetched from the server in batches of X_PLEX_CONTAINER_SIZE amounts. If you're only
|
||||
looking for the first <num> results, it would be wise to set the maxresults option to that
|
||||
@@ -332,9 +343,18 @@ class PlexServer(PlexObject):
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
mindate (datetime): Min datetime to return results from. This really helps speed
|
||||
up the result listing. For example: datetime.now() - timedelta(days=7)
|
||||
ratingKey (int/str) Request history for a specific ratingKey item.
|
||||
accountID (int/str) Request history for a specific account ID.
|
||||
librarySectionID (int/str) Request history for a specific library section ID.
|
||||
"""
|
||||
results, subresults = [], '_init'
|
||||
args = {'sort': 'viewedAt:desc'}
|
||||
if ratingKey:
|
||||
args['metadataItemID'] = ratingKey
|
||||
if accountID:
|
||||
args['accountID'] = accountID
|
||||
if librarySectionID:
|
||||
args['librarySectionID'] = librarySectionID
|
||||
if mindate:
|
||||
args['viewedAt>'] = int(mindate.timestamp())
|
||||
args['X-Plex-Container-Start'] = 0
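To make the new history filters concrete, a hedged sketch assuming a connected PlexServer instance named plex; the account and section IDs are invented:

from datetime import datetime, timedelta

lastmonth = datetime.now() - timedelta(days=30)
# History for account 1 in library section 2 only, newest first
entries = plex.history(maxresults=100, mindate=lastmonth, accountID=1, librarySectionID=2)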
|
||||
@@ -363,6 +383,36 @@ class PlexServer(PlexObject):
|
||||
"""
|
||||
return self.fetchItem('/playlists', title=title)
|
||||
|
||||
def optimizedItems(self, removeAll=None):
|
||||
""" Returns list of all :class:`~plexapi.media.Optimized` objects connected to server. """
|
||||
if removeAll is True:
|
||||
key = '/playlists/generators?type=42'
|
||||
self.query(key, method=self._server._session.delete)
|
||||
else:
|
||||
backgroundProcessing = self.fetchItem('/playlists?type=42')
|
||||
return self.fetchItems('%s/items' % backgroundProcessing.key, cls=Optimized)
|
||||
|
||||
def optimizedItem(self, optimizedID):
|
||||
""" Returns single queued optimized item :class:`~plexapi.media.Video` object.
|
||||
Allows for using optimized item ID to connect back to source item.
|
||||
"""
|
||||
|
||||
backgroundProcessing = self.fetchItem('/playlists?type=42')
|
||||
return self.fetchItem('%s/items/%s/items' % (backgroundProcessing.key, optimizedID))
|
||||
|
||||
def conversions(self, pause=None):
|
||||
""" Returns list of all :class:`~plexapi.media.Conversion` objects connected to server. """
|
||||
if pause is True:
|
||||
self.query('/:/prefs?BackgroundQueueIdlePaused=1', method=self._server._session.put)
|
||||
elif pause is False:
|
||||
self.query('/:/prefs?BackgroundQueueIdlePaused=0', method=self._server._session.put)
|
||||
else:
|
||||
return self.fetchItems('/playQueues/1', cls=Conversion)
|
||||
|
||||
def currentBackgroundProcess(self):
|
||||
""" Returns list of all :class:`~plexapi.media.TranscodeJob` objects running or paused on server. """
|
||||
return self.fetchItems('/status/sessions/background')
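A brief sketch of how the optimization and conversion helpers above might be used on a connected server named plex:

for item in plex.optimizedItems():     # items queued for optimization
    print(item.title)

for conversion in plex.conversions():  # the active conversion queue
    print(conversion.title)

plex.conversions(pause=True)           # pause background conversions; pause=False resumes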
|
||||
|
||||
def query(self, key, method=None, headers=None, timeout=None, **kwargs):
|
||||
""" Main method used to handle HTTPS requests to the Plex server. This method helps
|
||||
by encoding the response to utf-8 and parsing the returned XML into and
|
||||
@@ -377,8 +427,13 @@ class PlexServer(PlexObject):
|
||||
if response.status_code not in (200, 201):
|
||||
codename = codes.get(response.status_code)[0]
|
||||
errtext = response.text.replace('\n', ' ')
|
||||
log.warning('BadRequest (%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
raise BadRequest('(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext))
|
||||
message = '(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext)
|
||||
if response.status_code == 401:
|
||||
raise Unauthorized(message)
|
||||
elif response.status_code == 404:
|
||||
raise NotFound(message)
|
||||
else:
|
||||
raise BadRequest(message)
|
||||
data = response.text.encode('utf8')
|
||||
return ElementTree.fromstring(data) if data.strip() else None
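With the more specific exceptions raised above, callers can tell auth failures apart from missing items; a small sketch (the section key is made up):

from plexapi.exceptions import BadRequest, NotFound, Unauthorized

try:
    plex.query('/library/sections/999')
except Unauthorized:
    print('Token is missing or invalid')
except NotFound:
    print('No such endpoint or item')
except BadRequest as err:
    print('Other HTTP error: %s' % err)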
|
||||
|
||||
@@ -472,6 +527,25 @@ class PlexServer(PlexObject):
|
||||
self.refreshSynclist()
|
||||
self.refreshContent()
|
||||
|
||||
def _allowMediaDeletion(self, toggle=False):
|
||||
""" Toggle allowMediaDeletion.
|
||||
Parameters:
|
||||
toggle (bool): True enables Media Deletion
|
||||
False or None disable Media Deletion (Default)
|
||||
"""
|
||||
if self.allowMediaDeletion and toggle is False:
|
||||
log.debug('Plex is currently allowed to delete media. Toggling off.')
|
||||
elif self.allowMediaDeletion and toggle is True:
|
||||
log.debug('Plex is currently allowed to delete media. Toggle set to allow, exiting.')
|
||||
raise BadRequest('Plex is currently allowed to delete media. Toggle set to allow, exiting.')
|
||||
elif self.allowMediaDeletion is None and toggle is True:
|
||||
log.debug('Plex is currently not allowed to delete media. Toggle set to allow.')
|
||||
else:
|
||||
log.debug('Plex is currently not allowed to delete media. Toggle set to not allow, exiting.')
|
||||
raise BadRequest('Plex is currently not allowed to delete media. Toggle set to not allow, exiting.')
|
||||
value = 1 if toggle is True else 0
|
||||
return self.query('/:/prefs?allowMediaDeletion=%s' % value, self._session.put)
|
||||
|
||||
|
||||
class Account(PlexObject):
|
||||
""" Contains the locally cached MyPlex account information. The properties provided don't
|
||||
|
@@ -124,8 +124,8 @@ class Setting(PlexObject):
|
||||
self.enumValues = self._getEnumValues(data)
|
||||
|
||||
def _cast(self, value):
|
||||
""" Cast the specifief value to the type of this setting. """
|
||||
if self.type != 'text':
|
||||
""" Cast the specific value to the type of this setting. """
|
||||
if self.type != 'enum':
|
||||
value = utils.cast(self.TYPES.get(self.type)['cast'], value)
|
||||
return value
|
||||
|
||||
|
116
lib/plexapi/sonos.py
Normal file
@@ -0,0 +1,116 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
from plexapi import CONFIG, X_PLEX_IDENTIFIER
|
||||
from plexapi.client import PlexClient
|
||||
from plexapi.exceptions import BadRequest
|
||||
from plexapi.playqueue import PlayQueue
|
||||
|
||||
|
||||
class PlexSonosClient(PlexClient):
|
||||
""" Class for interacting with a Sonos speaker via the Plex API. This class
|
||||
makes requests to an external Plex API which then forwards the
|
||||
Sonos-specific commands back to your Plex server & Sonos speakers. Use
|
||||
of this feature requires an active Plex Pass subscription and Sonos
|
||||
speakers linked to your Plex account. It also requires remote access to
|
||||
be working properly.
|
||||
|
||||
More details on the Sonos integration are available here:
|
||||
https://support.plex.tv/articles/218237558-requirements-for-using-plex-for-sonos/
|
||||
|
||||
The Sonos API emulates the Plex player control API closely:
|
||||
https://github.com/plexinc/plex-media-player/wiki/Remote-control-API
|
||||
|
||||
Parameters:
|
||||
account (:class:`~plexapi.myplex.PlexAccount`): PlexAccount instance this
|
||||
Sonos speaker is associated with.
|
||||
data (ElementTree): Response from Plex Sonos API used to build this client.
|
||||
|
||||
Attributes:
|
||||
deviceClass (str): "speaker"
|
||||
lanIP (str): Local IP address of speaker.
|
||||
machineIdentifier (str): Unique ID for this device.
|
||||
platform (str): "Sonos"
|
||||
platformVersion (str): Build version of Sonos speaker firmware.
|
||||
product (str): "Sonos"
|
||||
protocol (str): "plex"
|
||||
protocolCapabilities (list<str>): List of client capabilities (timeline, playback,
|
||||
playqueues, provider-playback)
|
||||
server (:class:`~plexapi.server.PlexServer`): Server this client is connected to.
|
||||
session (:class:`~requests.Session`): Session object used for connection.
|
||||
title (str): Name of this Sonos speaker.
|
||||
token (str): X-Plex-Token used for authentication
|
||||
_baseurl (str): Address of public Plex Sonos API endpoint.
|
||||
_commandId (int): Counter for commands sent to Plex API.
|
||||
_token (str): Token associated with linked Plex account.
|
||||
_session (obj): Requests session object used to access this client.
|
||||
"""
|
||||
|
||||
def __init__(self, account, data):
|
||||
self._data = data
|
||||
self.deviceClass = data.attrib.get("deviceClass")
|
||||
self.machineIdentifier = data.attrib.get("machineIdentifier")
|
||||
self.product = data.attrib.get("product")
|
||||
self.platform = data.attrib.get("platform")
|
||||
self.platformVersion = data.attrib.get("platformVersion")
|
||||
self.protocol = data.attrib.get("protocol")
|
||||
self.protocolCapabilities = data.attrib.get("protocolCapabilities")
|
||||
self.lanIP = data.attrib.get("lanIP")
|
||||
self.title = data.attrib.get("title")
|
||||
self._baseurl = "https://sonos.plex.tv"
|
||||
self._commandId = 0
|
||||
self._token = account._token
|
||||
self._session = account._session or requests.Session()
|
||||
|
||||
# Dummy values for PlexClient inheritance
|
||||
self._last_call = 0
|
||||
self._proxyThroughServer = False
|
||||
self._showSecrets = CONFIG.get("log.show_secrets", "").lower() == "true"
|
||||
|
||||
def playMedia(self, media, offset=0, **params):
|
||||
|
||||
if hasattr(media, "playlistType"):
|
||||
mediatype = media.playlistType
|
||||
else:
|
||||
if isinstance(media, PlayQueue):
|
||||
mediatype = media.items[0].listType
|
||||
else:
|
||||
mediatype = media.listType
|
||||
|
||||
if mediatype == "audio":
|
||||
mediatype = "music"
|
||||
else:
|
||||
raise BadRequest("Sonos currently only supports music for playback")
|
||||
|
||||
server_protocol, server_address, server_port = media._server._baseurl.split(":")
|
||||
server_address = server_address.strip("/")
|
||||
server_port = server_port.strip("/")
|
||||
|
||||
playqueue = (
|
||||
media
|
||||
if isinstance(media, PlayQueue)
|
||||
else media._server.createPlayQueue(media)
|
||||
)
|
||||
self.sendCommand(
|
||||
"playback/playMedia",
|
||||
**dict(
|
||||
{
|
||||
"type": "music",
|
||||
"providerIdentifier": "com.plexapp.plugins.library",
|
||||
"containerKey": "/playQueues/{}?own=1".format(
|
||||
playqueue.playQueueID
|
||||
),
|
||||
"key": media.key,
|
||||
"offset": offset,
|
||||
"machineIdentifier": media._server.machineIdentifier,
|
||||
"protocol": server_protocol,
|
||||
"address": server_address,
|
||||
"port": server_port,
|
||||
"token": media._server.createToken(),
|
||||
"commandID": self._nextCommandId(),
|
||||
"X-Plex-Client-Identifier": X_PLEX_IDENTIFIER,
|
||||
"X-Plex-Token": media._server._token,
|
||||
"X-Plex-Target-Client-Identifier": self.machineIdentifier,
|
||||
},
|
||||
**params
|
||||
)
|
||||
)
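End to end, playing music on a Sonos speaker might look roughly like this; the token, server, speaker and album names are placeholders:

from plexapi.myplex import MyPlexAccount

account = MyPlexAccount(token='<token>')
plex = account.resource('MyServer').connect()
speaker = account.sonos_speaker('Kitchen')

album = plex.library.section('Music').get('An Example Album')
speaker.playMedia(album)   # only music is accepted by the Sonos endpoint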
|
@@ -2,16 +2,21 @@
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from getpass import getpass
|
||||
from threading import Thread, Event
|
||||
from tqdm import tqdm
|
||||
from threading import Event, Thread
|
||||
|
||||
import requests
|
||||
from plexapi import compat
|
||||
from plexapi.exceptions import NotFound
|
||||
|
||||
try:
|
||||
from tqdm import tqdm
|
||||
except ImportError:
|
||||
tqdm = None
|
||||
|
||||
log = logging.getLogger('plexapi')
|
||||
|
||||
# Search Types - Plex uses these to filter specific media types when searching.
|
||||
@@ -59,7 +64,7 @@ def registerPlexObject(cls):
|
||||
|
||||
def cast(func, value):
|
||||
""" Cast the specified value to the specified type (returned by func). Currently this
|
||||
only support int, float, bool. Should be extended if needed.
|
||||
only support str, int, float, bool. Should be extended if needed.
|
||||
|
||||
Parameters:
|
||||
func (func): Callback function used to cast the value to a type (int, bool, float).
|
||||
@@ -67,7 +72,13 @@ def cast(func, value):
|
||||
"""
|
||||
if value is not None:
|
||||
if func == bool:
|
||||
return bool(int(value))
|
||||
if value in (1, True, "1", "true"):
|
||||
return True
|
||||
elif value in (0, False, "0", "false"):
|
||||
return False
|
||||
else:
|
||||
raise ValueError(value)
|
||||
|
||||
elif func in (int, float):
|
||||
try:
|
||||
return func(value)
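A few illustrative calls showing the stricter bool handling, with behaviour inferred from the branch above:

from plexapi.utils import cast

cast(bool, '1')      # True
cast(bool, 'false')  # False
cast(int, '42')      # 42
cast(bool, 'yes')    # now raises ValueError instead of guessing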
|
||||
@@ -89,7 +100,7 @@ def joinArgs(args):
|
||||
arglist = []
|
||||
for key in sorted(args, key=lambda x: x.lower()):
|
||||
value = compat.ustr(args[key])
|
||||
arglist.append('%s=%s' % (key, compat.quote(value)))
|
||||
arglist.append('%s=%s' % (key, compat.quote(value, safe='')))
|
||||
return '?%s' % '&'.join(arglist)
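The safe='' change means reserved characters such as '/' are now percent-encoded as well; a quick sketch of the resulting query string:

from plexapi.utils import joinArgs

print(joinArgs({'title': 'My Movie', 'path': '/media/movies'}))
# ?path=%2Fmedia%2Fmovies&title=My%20Movie  (keys sorted case-insensitively)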
|
||||
|
||||
|
||||
@@ -287,17 +298,17 @@ def download(url, token, filename=None, savepath=None, session=None, chunksize=4
|
||||
|
||||
# save the file to disk
|
||||
log.info('Downloading: %s', fullpath)
|
||||
if showstatus: # pragma: no cover
|
||||
if showstatus and tqdm: # pragma: no cover
|
||||
total = int(response.headers.get('content-length', 0))
|
||||
bar = tqdm(unit='B', unit_scale=True, total=total, desc=filename)
|
||||
|
||||
with open(fullpath, 'wb') as handle:
|
||||
for chunk in response.iter_content(chunk_size=chunksize):
|
||||
handle.write(chunk)
|
||||
if showstatus:
|
||||
if showstatus and tqdm:
|
||||
bar.update(len(chunk))
|
||||
|
||||
if showstatus: # pragma: no cover
|
||||
if showstatus and tqdm: # pragma: no cover
|
||||
bar.close()
|
||||
# check we want to unzip the contents
|
||||
if fullpath.endswith('zip') and unpack:
|
||||
@@ -375,3 +386,15 @@ def choose(msg, items, attr): # pragma: no cover
|
||||
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
|
||||
|
||||
def getAgentIdentifier(section, agent):
|
||||
""" Return the full agent identifier from a short identifier, name, or confirm full identifier. """
|
||||
agents = []
|
||||
for ag in section.agents():
|
||||
identifiers = [ag.identifier, ag.shortIdentifier, ag.name]
|
||||
if agent in identifiers:
|
||||
return ag.identifier
|
||||
agents += identifiers
|
||||
raise NotFound('Couldnt find "%s" in agents list (%s)' %
|
||||
(agent, ', '.join(agents)))
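A minimal sketch of resolving an agent identifier for a section, assuming a connected PlexServer instance named plex and a 'Movies' section:

from plexapi.utils import getAgentIdentifier

section = plex.library.section('Movies')
identifier = getAgentIdentifier(section, 'imdb')   # e.g. 'com.plexapp.agents.imdb'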
|
||||
|
@@ -2,7 +2,8 @@
|
||||
from plexapi import media, utils
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.base import Playable, PlexPartialObject
|
||||
from plexapi.compat import quote_plus
|
||||
from plexapi.compat import quote_plus, urlencode
|
||||
import os
|
||||
|
||||
|
||||
class Video(PlexPartialObject):
|
||||
@@ -89,10 +90,112 @@ class Video(PlexPartialObject):
|
||||
""" Returns str, default title for a new syncItem. """
|
||||
return self.title
|
||||
|
||||
def posters(self):
|
||||
""" Returns list of available poster objects. :class:`~plexapi.media.Poster`:"""
|
||||
def subtitleStreams(self):
|
||||
""" Returns a list of :class:`~plexapi.media.SubtitleStream` objects for all MediaParts. """
|
||||
streams = []
|
||||
|
||||
return self.fetchItems('%s/posters' % self.key, cls=media.Poster)
|
||||
parts = self.iterParts()
|
||||
for part in parts:
|
||||
streams += part.subtitleStreams()
|
||||
return streams
|
||||
|
||||
def uploadSubtitles(self, filepath):
|
||||
""" Upload Subtitle file for video. """
|
||||
url = '%s/subtitles' % self.key
|
||||
filename = os.path.basename(filepath)
|
||||
subFormat = os.path.splitext(filepath)[1][1:]
|
||||
with open(filepath, 'rb') as subfile:
|
||||
params = {'title': filename,
|
||||
'format': subFormat
|
||||
}
|
||||
headers = {'Accept': 'text/plain, */*'}
|
||||
self._server.query(url, self._server._session.post, data=subfile, params=params, headers=headers)
|
||||
|
||||
def removeSubtitles(self, streamID=None, streamTitle=None):
|
||||
""" Remove Subtitle from movie's subtitles listing.
|
||||
|
||||
Note: If the subtitle file is located inside the video directory it will be deleted.
|
||||
Files outside of the video directory are not affected.
|
||||
"""
|
||||
for stream in self.subtitleStreams():
|
||||
if streamID == stream.id or streamTitle == stream.title:
|
||||
self._server.query(stream.key, self._server._session.delete)
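A hedged sketch of the new subtitle helpers, assuming a connected server named plex; the movie title and file path are invented:

movie = plex.library.section('Movies').get('Big Buck Bunny')
movie.uploadSubtitles('/tmp/big_buck_bunny.en.srt')

# Later, remove it again by matching the uploaded title
movie.removeSubtitles(streamTitle='big_buck_bunny.en.srt')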
|
||||
|
||||
def optimize(self, title=None, target="", targetTagID=None, locationID=-1, policyScope='all',
|
||||
policyValue="", policyUnwatched=0, videoQuality=None, deviceProfile=None):
|
||||
""" Optimize item
|
||||
|
||||
locationID (int): -1 in folder with original items
|
||||
2 library path
|
||||
|
||||
target (str): custom quality name.
|
||||
if none provided use "Custom: {deviceProfile}"
|
||||
|
||||
targetTagID (int): Default quality settings
|
||||
1 Mobile
|
||||
2 TV
|
||||
3 Original Quality
|
||||
|
||||
deviceProfile (str): Android, IOS, Universal TV, Universal Mobile, Windows Phone,
|
||||
Windows, Xbox One
|
||||
|
||||
Example:
|
||||
Optimize for Mobile
|
||||
item.optimize(targetTagID="Mobile") or item.optimize(targetTagID=1")
|
||||
Optimize for Android 10 MBPS 1080p
|
||||
item.optimize(deviceProfile="Android", videoQuality=10)
|
||||
Optimize for IOS Original Quality
|
||||
item.optimize(deviceProfile="IOS", videoQuality=-1)
|
||||
|
||||
* see sync.py VIDEO_QUALITIES for additional information for using videoQuality
|
||||
"""
|
||||
tagValues = [1, 2, 3]
|
||||
tagKeys = ["Mobile", "TV", "Original Quality"]
|
||||
tagIDs = tagKeys + tagValues
|
||||
|
||||
if targetTagID not in tagIDs and (deviceProfile is None or videoQuality is None):
|
||||
raise BadRequest('Unexpected or missing quality profile.')
|
||||
|
||||
if isinstance(targetTagID, str):
|
||||
tagIndex = tagKeys.index(targetTagID)
|
||||
targetTagID = tagValues[tagIndex]
|
||||
|
||||
if title is None:
|
||||
title = self.title
|
||||
|
||||
backgroundProcessing = self.fetchItem('/playlists?type=42')
|
||||
key = '%s/items?' % backgroundProcessing.key
|
||||
params = {
|
||||
'Item[type]': 42,
|
||||
'Item[target]': target,
|
||||
'Item[targetTagID]': targetTagID if targetTagID else '',
|
||||
'Item[locationID]': locationID,
|
||||
'Item[Policy][scope]': policyScope,
|
||||
'Item[Policy][value]': policyValue,
|
||||
'Item[Policy][unwatched]': policyUnwatched
|
||||
}
|
||||
|
||||
if deviceProfile:
|
||||
params['Item[Device][profile]'] = deviceProfile
|
||||
|
||||
if videoQuality:
|
||||
from plexapi.sync import MediaSettings
|
||||
mediaSettings = MediaSettings.createVideo(videoQuality)
|
||||
params['Item[MediaSettings][videoQuality]'] = mediaSettings.videoQuality
|
||||
params['Item[MediaSettings][videoResolution]'] = mediaSettings.videoResolution
|
||||
params['Item[MediaSettings][maxVideoBitrate]'] = mediaSettings.maxVideoBitrate
|
||||
params['Item[MediaSettings][audioBoost]'] = ''
|
||||
params['Item[MediaSettings][subtitleSize]'] = ''
|
||||
params['Item[MediaSettings][musicBitrate]'] = ''
|
||||
params['Item[MediaSettings][photoQuality]'] = ''
|
||||
|
||||
titleParam = {'Item[title]': title}
|
||||
section = self._server.library.sectionByID(self.librarySectionID)
|
||||
params['Item[Location][uri]'] = 'library://' + section.uuid + '/item/' + \
|
||||
quote_plus(self.key + '?includeExternalMedia=1')
|
||||
|
||||
data = key + urlencode(params) + '&' + urlencode(titleParam)
|
||||
return self._server.query(data, method=self._server._session.put)
|
||||
|
||||
def sync(self, videoQuality, client=None, clientId=None, limit=None, unwatched=False, title=None):
|
||||
""" Add current video (movie, tv-show, season or episode) as sync item for specified device.
|
||||
@@ -224,14 +327,6 @@ class Movie(Playable, Video):
|
||||
"""
|
||||
return [part.file for part in self.iterParts() if part]
|
||||
|
||||
def subtitleStreams(self):
|
||||
""" Returns a list of :class:`~plexapi.media.SubtitleStream` objects for all MediaParts. """
|
||||
streams = []
|
||||
for elem in self.media:
|
||||
for part in elem.parts:
|
||||
streams += part.subtitleStreams()
|
||||
return streams
|
||||
|
||||
def _prettyfilename(self):
|
||||
# This is just for compat.
|
||||
return self.title
|
||||
@@ -257,7 +352,7 @@ class Movie(Playable, Video):
|
||||
else:
|
||||
self._server.url('%s?download=1' % location.key)
|
||||
filepath = utils.download(url, self._server._token, filename=name,
|
||||
savepath=savepath, session=self._server._session)
|
||||
savepath=savepath, session=self._server._session)
|
||||
if filepath:
|
||||
filepaths.append(filepath)
|
||||
return filepaths
|
||||
@@ -481,7 +576,7 @@ class Season(Video):
|
||||
|
||||
def show(self):
|
||||
""" Return this seasons :func:`~plexapi.video.Show`.. """
|
||||
return self.fetchItem(self.parentKey)
|
||||
return self.fetchItem(int(self.parentRatingKey))
|
||||
|
||||
def watched(self):
|
||||
""" Returns list of watched :class:`~plexapi.video.Episode` objects. """
|
||||
@@ -622,8 +717,33 @@ class Episode(Playable, Video):
|
||||
|
||||
def show(self):
|
||||
"""" Return this episodes :func:`~plexapi.video.Show`.. """
|
||||
return self.fetchItem(self.grandparentKey)
|
||||
return self.fetchItem(int(self.grandparentRatingKey))
|
||||
|
||||
def _defaultSyncTitle(self):
|
||||
""" Returns str, default title for a new syncItem. """
|
||||
return '%s - %s - (%s) %s' % (self.grandparentTitle, self.parentTitle, self.seasonEpisode, self.title)
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Clip(Playable, Video):
|
||||
""" Represents a single Clip."""
|
||||
|
||||
TAG = 'Video'
|
||||
TYPE = 'clip'
|
||||
METADATA_TYPE = 'clip'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.addedAt = data.attrib.get('addedAt')
|
||||
self.duration = data.attrib.get('duration')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.key = data.attrib.get('key')
|
||||
self.originallyAvailableAt = data.attrib.get('originallyAvailableAt')
|
||||
self.ratingKey = data.attrib.get('ratingKey')
|
||||
self.skipDetails = utils.cast(int, data.attrib.get('skipDetails'))
|
||||
self.subtype = data.attrib.get('subtype')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.thumbAspectRatio = data.attrib.get('thumbAspectRatio')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.year = data.attrib.get('year')
|
||||
|
@@ -7,7 +7,7 @@ coverage reports. There's a third convenient decorator (`timecall`) that
|
||||
measures the duration of function execution without the extra profiling
|
||||
overhead.
|
||||
|
||||
Usage example (Python 2.4 or newer)::
|
||||
Usage example::
|
||||
|
||||
from profilehooks import profile, coverage
|
||||
|
||||
@@ -16,20 +16,18 @@ Usage example (Python 2.4 or newer)::
|
||||
if n < 2: return 1
|
||||
else: return n * fn(n-1)
|
||||
|
||||
print fn(42)
|
||||
print(fn(42))
|
||||
|
||||
Usage example (Python 2.3 or older)::
|
||||
Or without imports, with some hack
|
||||
|
||||
from profilehooks import profile, coverage
|
||||
$ python -m profilehooks yourmodule
|
||||
|
||||
@profile # or @coverage
|
||||
def fn(n):
|
||||
if n < 2: return 1
|
||||
else: return n * fn(n-1)
|
||||
|
||||
# Now wrap that function in a decorator
|
||||
fn = profile(fn) # or coverage(fn)
|
||||
|
||||
print fn(42)
|
||||
print(fn(42))
|
||||
|
||||
Reports for all thusly decorated functions will be printed to sys.stdout
|
||||
on program termination. You can alternatively request for immediate
|
||||
@@ -42,7 +40,7 @@ instead of a detailed (but costly) profile.
|
||||
Caveats
|
||||
|
||||
A thread on python-dev convinced me that hotshot produces bogus numbers.
|
||||
See http://mail.python.org/pipermail/python-dev/2005-November/058264.html
|
||||
See https://mail.python.org/pipermail/python-dev/2005-November/058264.html
|
||||
|
||||
I don't know what will happen if a decorated function will try to call
|
||||
another decorated function. All decorators probably need to explicitly
|
||||
@@ -62,7 +60,7 @@ Caveats
|
||||
executed. For this reason coverage analysis now uses trace.py which is
|
||||
slower, but more accurate.
|
||||
|
||||
Copyright (c) 2004--2008 Marius Gedminas <marius@pov.lt>
|
||||
Copyright (c) 2004--2020 Marius Gedminas <marius@gedmin.as>
|
||||
Copyright (c) 2007 Hanno Schlichting
|
||||
Copyright (c) 2008 Florian Schulze
|
||||
|
||||
@@ -88,24 +86,30 @@ Released under the MIT licence since December 2006:
|
||||
|
||||
(Previously it was distributed under the GNU General Public Licence.)
|
||||
"""
|
||||
# $Id: profilehooks.py 29 2010-08-13 16:29:20Z mg $
|
||||
from __future__ import print_function
|
||||
|
||||
__author__ = "Marius Gedminas (marius@gedmin.as)"
|
||||
__copyright__ = "Copyright 2004-2009 Marius Gedminas"
|
||||
__author__ = "Marius Gedminas <marius@gedmin.as>"
|
||||
__copyright__ = "Copyright 2004-2020 Marius Gedminas and contributors"
|
||||
__license__ = "MIT"
|
||||
__version__ = "1.4"
|
||||
__date__ = "2009-03-31"
|
||||
|
||||
__version__ = '1.12.0'
|
||||
__date__ = "2020-08-20"
|
||||
|
||||
import atexit
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
import sys
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
# For profiling
|
||||
from profile import Profile
|
||||
import pstats
|
||||
|
||||
# For timecall
|
||||
import timeit
|
||||
|
||||
# For hotshot profiling (inaccurate!)
|
||||
try:
|
||||
import hotshot
|
||||
@@ -115,6 +119,9 @@ except ImportError:
|
||||
|
||||
# For trace.py coverage
|
||||
import trace
|
||||
import dis
|
||||
import token
|
||||
import tokenize
|
||||
|
||||
# For hotshot coverage (inaccurate!; uses undocumented APIs; might break)
|
||||
if hotshot is not None:
|
||||
@@ -127,24 +134,55 @@ try:
|
||||
except ImportError:
|
||||
cProfile = None
|
||||
|
||||
# For timecall
|
||||
import time
|
||||
|
||||
|
||||
# registry of available profilers
|
||||
AVAILABLE_PROFILERS = {}
|
||||
|
||||
__all__ = ['coverage', 'coverage_with_hotshot', 'profile', 'timecall']
|
||||
|
||||
|
||||
# Use tokenize.open() on Python >= 3.2, fall back to open() on Python 2
|
||||
tokenize_open = getattr(tokenize, 'open', open)
|
||||
|
||||
|
||||
def _unwrap(fn):
|
||||
# inspect.unwrap() doesn't exist on Python 2
|
||||
if not hasattr(fn, '__wrapped__'):
|
||||
return fn
|
||||
else:
|
||||
# intentionally using recursion here instead of a while loop to
|
||||
# make cycles fail with a recursion error instead of looping forever.
|
||||
return _unwrap(fn.__wrapped__)
|
||||
|
||||
|
||||
def _identify(fn):
|
||||
fn = _unwrap(fn)
|
||||
funcname = fn.__name__
|
||||
filename = fn.__code__.co_filename
|
||||
lineno = fn.__code__.co_firstlineno
|
||||
return (funcname, filename, lineno)
|
||||
|
||||
|
||||
def _is_file_like(o):
|
||||
return hasattr(o, 'write')
|
||||
|
||||
|
||||
def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False,
|
||||
sort=None, entries=40,
|
||||
profiler=('cProfile', 'profile', 'hotshot')):
|
||||
profiler=('cProfile', 'profile', 'hotshot'),
|
||||
stdout=True):
|
||||
"""Mark `fn` for profiling.
|
||||
|
||||
If `skip` is > 0, first `skip` calls to `fn` will not be profiled.
|
||||
|
||||
If `stdout` is not file-like and truthy, output will be printed to
|
||||
sys.stdout. If it is a file-like object, output will be printed to it
|
||||
instead. `stdout` must be writable in text mode (as opposed to binary)
|
||||
if it is file-like.
|
||||
|
||||
If `immediate` is False, profiling results will be printed to
|
||||
sys.stdout on program termination. Otherwise results will be printed
|
||||
after each call.
|
||||
self.stdout on program termination. Otherwise results will be printed
|
||||
after each call. (If you don't want this, set stdout=False and specify a
|
||||
`filename` to store profile data.)
|
||||
|
||||
If `dirs` is False only the name of the file will be printed.
|
||||
Otherwise the full path is used.
|
||||
@@ -170,7 +208,8 @@ def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False,
|
||||
'profile', 'hotshot').
|
||||
|
||||
If `filename` is specified, the profile stats will be stored in the
|
||||
named file. You can load them pstats.Stats(filename).
|
||||
named file. You can load them with pstats.Stats(filename) or use a
|
||||
visualization tool like RunSnakeRun.
|
||||
|
||||
Usage::
|
||||
|
||||
@@ -192,12 +231,12 @@ def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False,
|
||||
...
|
||||
|
||||
"""
|
||||
if fn is None: # @profile() syntax -- we are a decorator maker
|
||||
if fn is None: # @profile() syntax -- we are a decorator maker
|
||||
def decorator(fn):
|
||||
return profile(fn, skip=skip, filename=filename,
|
||||
immediate=immediate, dirs=dirs,
|
||||
sort=sort, entries=entries,
|
||||
profiler=profiler)
|
||||
profiler=profiler, stdout=stdout)
|
||||
return decorator
|
||||
# @profile syntax -- we are a decorator.
|
||||
if isinstance(profiler, str):
|
||||
@@ -208,20 +247,16 @@ def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False,
|
||||
break
|
||||
else:
|
||||
raise ValueError('only these profilers are available: %s'
|
||||
% ', '.join(AVAILABLE_PROFILERS))
|
||||
% ', '.join(sorted(AVAILABLE_PROFILERS)))
|
||||
fp = profiler_class(fn, skip=skip, filename=filename,
|
||||
immediate=immediate, dirs=dirs,
|
||||
sort=sort, entries=entries)
|
||||
# fp = HotShotFuncProfile(fn, skip=skip, filename=filename, ...)
|
||||
# or HotShotFuncProfile
|
||||
sort=sort, entries=entries, stdout=stdout)
|
||||
# We cannot return fp or fp.__call__ directly as that would break method
|
||||
# definitions, instead we need to return a plain function.
|
||||
|
||||
@functools.wraps(fn)
|
||||
def new_fn(*args, **kw):
|
||||
return fp(*args, **kw)
|
||||
new_fn.__doc__ = fn.__doc__
|
||||
new_fn.__name__ = fn.__name__
|
||||
new_fn.__dict__ = fn.__dict__
|
||||
new_fn.__module__ = fn.__module__
|
||||
return new_fn
|
||||
|
||||
|
||||
@@ -244,15 +279,13 @@ def coverage(fn):
|
||||
...
|
||||
|
||||
"""
|
||||
fp = TraceFuncCoverage(fn) # or HotShotFuncCoverage
|
||||
fp = TraceFuncCoverage(fn) # or HotShotFuncCoverage
|
||||
# We cannot return fp or fp.__call__ directly as that would break method
|
||||
# definitions, instead we need to return a plain function.
|
||||
|
||||
@functools.wraps(fn)
|
||||
def new_fn(*args, **kw):
|
||||
return fp(*args, **kw)
|
||||
new_fn.__doc__ = fn.__doc__
|
||||
new_fn.__name__ = fn.__name__
|
||||
new_fn.__dict__ = fn.__dict__
|
||||
new_fn.__module__ = fn.__module__
|
||||
return new_fn
|
||||
|
||||
|
||||
@@ -268,12 +301,10 @@ def coverage_with_hotshot(fn):
|
||||
fp = HotShotFuncCoverage(fn)
|
||||
# We cannot return fp or fp.__call__ directly as that would break method
|
||||
# definitions, instead we need to return a plain function.
|
||||
|
||||
@functools.wraps(fn)
|
||||
def new_fn(*args, **kw):
|
||||
return fp(*args, **kw)
|
||||
new_fn.__doc__ = fn.__doc__
|
||||
new_fn.__name__ = fn.__name__
|
||||
new_fn.__dict__ = fn.__dict__
|
||||
new_fn.__module__ = fn.__module__
|
||||
return new_fn
|
||||
|
||||
|
||||
@@ -286,7 +317,7 @@ class FuncProfile(object):
|
||||
Profile = Profile
|
||||
|
||||
def __init__(self, fn, skip=0, filename=None, immediate=False, dirs=False,
|
||||
sort=None, entries=40):
|
||||
sort=None, entries=40, stdout=True):
|
||||
"""Creates a profiler for a function.
|
||||
|
||||
Every profiler has its own log file (the name of which is derived
|
||||
@@ -298,14 +329,21 @@ class FuncProfile(object):
|
||||
self.fn = fn
|
||||
self.skip = skip
|
||||
self.filename = filename
|
||||
self.immediate = immediate
|
||||
self._immediate = immediate
|
||||
self.stdout = stdout
|
||||
self._stdout_is_fp = self.stdout and _is_file_like(self.stdout)
|
||||
self.dirs = dirs
|
||||
self.sort = sort or ('cumulative', 'time', 'calls')
|
||||
if isinstance(self.sort, str):
|
||||
self.sort = (self.sort, )
|
||||
self.entries = entries
|
||||
self.reset_stats()
|
||||
atexit.register(self.atexit)
|
||||
if not self.immediate:
|
||||
atexit.register(self.atexit)
|
||||
|
||||
@property
|
||||
def immediate(self):
|
||||
return self._immediate
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
"""Profile a singe call to the function."""
|
||||
@@ -332,40 +370,45 @@ class FuncProfile(object):
|
||||
|
||||
def print_stats(self):
|
||||
"""Print profile information to sys.stdout."""
|
||||
funcname = self.fn.__name__
|
||||
filename = self.fn.func_code.co_filename
|
||||
lineno = self.fn.func_code.co_firstlineno
|
||||
print
|
||||
print "*** PROFILER RESULTS ***"
|
||||
print "%s (%s:%s)" % (funcname, filename, lineno)
|
||||
print "function called %d times" % self.ncalls,
|
||||
if self.skipped:
|
||||
print "(%d calls not profiled)" % self.skipped
|
||||
else:
|
||||
print
|
||||
print
|
||||
stats = self.stats
|
||||
if self.filename:
|
||||
stats.dump_stats(self.filename)
|
||||
if not self.dirs:
|
||||
stats.strip_dirs()
|
||||
stats.sort_stats(*self.sort)
|
||||
stats.print_stats(self.entries)
|
||||
if self.stdout:
|
||||
funcname, filename, lineno = _identify(self.fn)
|
||||
print_f = print
|
||||
if self._stdout_is_fp:
|
||||
print_f = functools.partial(print, file=self.stdout)
|
||||
|
||||
print_f("")
|
||||
print_f("*** PROFILER RESULTS ***")
|
||||
print_f("%s (%s:%s)" % (funcname, filename, lineno))
|
||||
if self.skipped:
|
||||
skipped = " (%d calls not profiled)" % self.skipped
|
||||
else:
|
||||
skipped = ""
|
||||
print_f("function called %d times%s" % (self.ncalls, skipped))
|
||||
print_f("")
|
||||
if not self.dirs:
|
||||
stats.strip_dirs()
|
||||
stats.sort_stats(*self.sort)
|
||||
stats.print_stats(self.entries)
|
||||
|
||||
def reset_stats(self):
|
||||
"""Reset accumulated profiler statistics."""
|
||||
# send stats printing to specified stdout if it's file-like
|
||||
stream = self.stdout if self._stdout_is_fp else sys.stdout
|
||||
|
||||
# Note: not using self.Profile, since pstats.Stats() fails then
|
||||
self.stats = pstats.Stats(Profile())
|
||||
self.stats = pstats.Stats(Profile(), stream=stream)
|
||||
self.ncalls = 0
|
||||
self.skipped = 0
|
||||
|
||||
def atexit(self):
|
||||
"""Stop profiling and print profile information to sys.stdout.
|
||||
"""Stop profiling and print profile information to sys.stdout or self.stdout.
|
||||
|
||||
This function is registered as an atexit hook.
|
||||
"""
|
||||
if not self.immediate:
|
||||
self.print_stats()
|
||||
self.print_stats()
|
||||
|
||||
|
||||
AVAILABLE_PROFILERS['profile'] = FuncProfile
|
||||
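The new stdout handling above works because pstats.Stats() accepts a stream argument, so the report can be sent to any file-like object instead of sys.stdout. A small self-contained sketch of that mechanism using only the standard library; the profiled function and the StringIO buffer are placeholders:
```
import cProfile
import io
import pstats


def busy():
    return sum(i * i for i in range(100000))


profiler = cProfile.Profile()
profiler.enable()
busy()
profiler.disable()

buf = io.StringIO()                           # stand-in for a user-supplied stream
stats = pstats.Stats(profiler, stream=buf)
stats.strip_dirs().sort_stats('cumulative', 'time', 'calls').print_stats(10)

print(buf.getvalue())                         # the captured report
```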
@@ -383,13 +426,14 @@ if cProfile is not None:
|
||||
|
||||
if hotshot is not None:
|
||||
|
||||
class HotShotFuncProfile(object):
|
||||
class HotShotFuncProfile(FuncProfile):
|
||||
"""Profiler for a function (uses hotshot)."""
|
||||
|
||||
# This flag is shared between all instances
|
||||
in_profiler = False
|
||||
|
||||
def __init__(self, fn, skip=0, filename=None):
|
||||
def __init__(self, fn, skip=0, filename=None, immediate=False,
|
||||
dirs=False, sort=None, entries=40, stdout=True):
|
||||
"""Creates a profiler for a function.
|
||||
|
||||
Every profiler has its own log file (the name of which is derived
|
||||
@@ -401,17 +445,13 @@ if hotshot is not None:
|
||||
The log file is not removed and remains there to clutter the
|
||||
current working directory.
|
||||
"""
|
||||
self.fn = fn
|
||||
self.filename = filename
|
||||
if self.filename:
|
||||
if filename:
|
||||
self.logfilename = filename + ".raw"
|
||||
else:
|
||||
self.logfilename = fn.__name__ + ".prof"
|
||||
self.profiler = hotshot.Profile(self.logfilename)
|
||||
self.ncalls = 0
|
||||
self.skip = skip
|
||||
self.skipped = 0
|
||||
atexit.register(self.atexit)
|
||||
self.logfilename = "%s.%d.prof" % (fn.__name__, os.getpid())
|
||||
super(HotShotFuncProfile, self).__init__(
|
||||
fn, skip=skip, filename=filename, immediate=immediate,
|
||||
dirs=dirs, sort=sort, entries=entries, stdout=stdout)
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
"""Profile a singe call to the function."""
|
||||
@@ -423,43 +463,32 @@ if hotshot is not None:
|
||||
if HotShotFuncProfile.in_profiler:
|
||||
# handle recursive calls
|
||||
return self.fn(*args, **kw)
|
||||
if self.profiler is None:
|
||||
self.profiler = hotshot.Profile(self.logfilename)
|
||||
try:
|
||||
HotShotFuncProfile.in_profiler = True
|
||||
return self.profiler.runcall(self.fn, *args, **kw)
|
||||
finally:
|
||||
HotShotFuncProfile.in_profiler = False
|
||||
if self.immediate:
|
||||
self.print_stats()
|
||||
self.reset_stats()
|
||||
|
||||
def atexit(self):
|
||||
"""Stop profiling and print profile information to sys.stderr.
|
||||
|
||||
This function is registered as an atexit hook.
|
||||
"""
|
||||
self.profiler.close()
|
||||
funcname = self.fn.__name__
|
||||
filename = self.fn.func_code.co_filename
|
||||
lineno = self.fn.func_code.co_firstlineno
|
||||
print
|
||||
print "*** PROFILER RESULTS ***"
|
||||
print "%s (%s:%s)" % (funcname, filename, lineno)
|
||||
print "function called %d times" % self.ncalls,
|
||||
if self.skipped:
|
||||
print "(%d calls not profiled)" % self.skipped
|
||||
def print_stats(self):
|
||||
if self.profiler is None:
|
||||
self.stats = pstats.Stats(Profile())
|
||||
else:
|
||||
print
|
||||
print
|
||||
stats = hotshot.stats.load(self.logfilename)
|
||||
# hotshot.stats.load takes ages, and the .prof file eats megabytes, but
|
||||
# a saved stats object is small and fast
|
||||
if self.filename:
|
||||
stats.dump_stats(self.filename)
|
||||
# it is best to save before strip_dirs
|
||||
stats.strip_dirs()
|
||||
stats.sort_stats('cumulative', 'time', 'calls')
|
||||
stats.print_stats(40)
|
||||
self.profiler.close()
|
||||
self.stats = hotshot.stats.load(self.logfilename)
|
||||
super(HotShotFuncProfile, self).print_stats()
|
||||
|
||||
def reset_stats(self):
|
||||
self.profiler = None
|
||||
self.ncalls = 0
|
||||
self.skipped = 0
|
||||
|
||||
AVAILABLE_PROFILERS['hotshot'] = HotShotFuncProfile
|
||||
|
||||
|
||||
class HotShotFuncCoverage:
|
||||
"""Coverage analysis for a function (uses _hotshot).
|
||||
|
||||
@@ -482,7 +511,7 @@ if hotshot is not None:
|
||||
current working directory.
|
||||
"""
|
||||
self.fn = fn
|
||||
self.logfilename = fn.__name__ + ".cprof"
|
||||
self.logfilename = "%s.%d.cprof" % (fn.__name__, os.getpid())
|
||||
self.profiler = _hotshot.coverage(self.logfilename)
|
||||
self.ncalls = 0
|
||||
atexit.register(self.atexit)
|
||||
@@ -490,7 +519,11 @@ if hotshot is not None:
|
||||
def __call__(self, *args, **kw):
|
||||
"""Profile a singe call to the function."""
|
||||
self.ncalls += 1
|
||||
return self.profiler.runcall(self.fn, args, kw)
|
||||
old_trace = sys.gettrace()
|
||||
try:
|
||||
return self.profiler.runcall(self.fn, args, kw)
|
||||
finally: # pragma: nocover
|
||||
sys.settrace(old_trace)
|
||||
|
||||
def atexit(self):
|
||||
"""Stop profiling and print profile information to sys.stderr.
|
||||
@@ -498,14 +531,12 @@ if hotshot is not None:
|
||||
This function is registered as an atexit hook.
|
||||
"""
|
||||
self.profiler.close()
|
||||
funcname = self.fn.__name__
|
||||
filename = self.fn.func_code.co_filename
|
||||
lineno = self.fn.func_code.co_firstlineno
|
||||
print
|
||||
print "*** COVERAGE RESULTS ***"
|
||||
print "%s (%s:%s)" % (funcname, filename, lineno)
|
||||
print "function called %d times" % self.ncalls
|
||||
print
|
||||
funcname, filename, lineno = _identify(self.fn)
|
||||
print("")
|
||||
print("*** COVERAGE RESULTS ***")
|
||||
print("%s (%s:%s)" % (funcname, filename, lineno))
|
||||
print("function called %d times" % self.ncalls)
|
||||
print("")
|
||||
fs = FuncSource(self.fn)
|
||||
reader = hotshot.log.LogReader(self.logfilename)
|
||||
for what, (filename, lineno, funcname), tdelta in reader:
|
||||
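Several hunks replace the Python 2 only fn.func_code lookups with a shared _identify() helper whose definition is outside this excerpt. A plausible sketch of such a helper, offered as an assumption rather than the actual implementation:
```
import inspect


def _identify(fn):
    """Return (funcname, filename, lineno); a sketch, not the real helper."""
    fn = inspect.unwrap(fn)                   # follow functools.wraps() chains
    code = fn.__code__                        # Python 3 spelling of fn.func_code
    return fn.__name__, code.co_filename, code.co_firstlineno


def sample():
    pass


print(_identify(sample))                      # ('sample', '<this file>', <line of def>)
```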
@@ -514,15 +545,19 @@ if hotshot is not None:
|
||||
if what == hotshot.log.LINE:
|
||||
fs.mark(lineno)
|
||||
if what == hotshot.log.ENTER:
|
||||
# hotshot gives us the line number of the function definition
|
||||
# and never gives us a LINE event for the first statement in
|
||||
# a function, so if we didn't perform this mapping, the first
|
||||
# statement would be marked as never executed
|
||||
# hotshot gives us the line number of the function
|
||||
# definition and never gives us a LINE event for the first
|
||||
# statement in a function, so if we didn't perform this
|
||||
# mapping, the first statement would be marked as never
|
||||
# executed
|
||||
if lineno == fs.firstlineno:
|
||||
lineno = fs.firstcodelineno
|
||||
fs.mark(lineno)
|
||||
reader.close()
|
||||
print fs
|
||||
print(fs)
|
||||
never_executed = fs.count_never_executed()
|
||||
if never_executed:
|
||||
print("%d lines were not executed." % never_executed)
|
||||
|
||||
|
||||
class TraceFuncCoverage:
|
||||
@@ -552,19 +587,21 @@ class TraceFuncCoverage:
|
||||
current working directory.
|
||||
"""
|
||||
self.fn = fn
|
||||
self.logfilename = fn.__name__ + ".cprof"
|
||||
self.logfilename = "%s.%d.cprof" % (fn.__name__, os.getpid())
|
||||
self.ncalls = 0
|
||||
atexit.register(self.atexit)
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
"""Profile a singe call to the function."""
|
||||
self.ncalls += 1
|
||||
if TraceFuncCoverage.tracing:
|
||||
if TraceFuncCoverage.tracing: # pragma: nocover
|
||||
return self.fn(*args, **kw)
|
||||
old_trace = sys.gettrace()
|
||||
try:
|
||||
TraceFuncCoverage.tracing = True
|
||||
return self.tracer.runfunc(self.fn, *args, **kw)
|
||||
finally:
|
||||
finally: # pragma: nocover
|
||||
sys.settrace(old_trace)
|
||||
TraceFuncCoverage.tracing = False
|
||||
|
||||
def atexit(self):
|
||||
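TraceFuncCoverage.__call__ now saves the active trace function with sys.gettrace() and restores it after trace.Trace.runfunc(), so a tracer that is already installed (debugger, coverage tool) is not clobbered. A standalone sketch of that save/restore pattern with illustrative values:
```
import sys
import trace


def work(n):
    return sum(range(n))


old_trace = sys.gettrace()                    # whatever tracer is currently active
tracer = trace.Trace(count=True, trace=False)
try:
    result = tracer.runfunc(work, 1000)
finally:
    sys.settrace(old_trace)                   # put the previous tracer back

print(result)                                 # 499500
counts = tracer.results().counts              # {(filename, lineno): hits, ...}
print(sorted(counts.items())[:3])
```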
@@ -572,23 +609,21 @@ class TraceFuncCoverage:
|
||||
|
||||
This function is registered as an atexit hook.
|
||||
"""
|
||||
funcname = self.fn.__name__
|
||||
filename = self.fn.func_code.co_filename
|
||||
lineno = self.fn.func_code.co_firstlineno
|
||||
print
|
||||
print "*** COVERAGE RESULTS ***"
|
||||
print "%s (%s:%s)" % (funcname, filename, lineno)
|
||||
print "function called %d times" % self.ncalls
|
||||
print
|
||||
funcname, filename, lineno = _identify(self.fn)
|
||||
print("")
|
||||
print("*** COVERAGE RESULTS ***")
|
||||
print("%s (%s:%s)" % (funcname, filename, lineno))
|
||||
print("function called %d times" % self.ncalls)
|
||||
print("")
|
||||
fs = FuncSource(self.fn)
|
||||
for (filename, lineno), count in self.tracer.counts.items():
|
||||
if filename != fs.filename:
|
||||
continue
|
||||
fs.mark(lineno, count)
|
||||
print fs
|
||||
print(fs)
|
||||
never_executed = fs.count_never_executed()
|
||||
if never_executed:
|
||||
print "%d lines were not executed." % never_executed
|
||||
print("%d lines were not executed." % never_executed)
|
||||
|
||||
|
||||
class FuncSource:
|
||||
@@ -599,22 +634,47 @@ class FuncSource:
|
||||
def __init__(self, fn):
|
||||
self.fn = fn
|
||||
self.filename = inspect.getsourcefile(fn)
|
||||
self.source, self.firstlineno = inspect.getsourcelines(fn)
|
||||
self.sourcelines = {}
|
||||
self.firstcodelineno = self.firstlineno
|
||||
self.find_source_lines()
|
||||
self.source = []
|
||||
self.firstlineno = self.firstcodelineno = 0
|
||||
try:
|
||||
self.source, self.firstlineno = inspect.getsourcelines(fn)
|
||||
self.firstcodelineno = self.firstlineno
|
||||
self.find_source_lines()
|
||||
except IOError:
|
||||
self.filename = None
|
||||
|
||||
def find_source_lines(self):
|
||||
"""Mark all executable source lines in fn as executed 0 times."""
|
||||
strs = trace.find_strings(self.filename)
|
||||
lines = trace.find_lines_from_code(self.fn.func_code, strs)
|
||||
self.firstcodelineno = sys.maxint
|
||||
if self.filename is None:
|
||||
return
|
||||
strs = self._find_docstrings(self.filename)
|
||||
lines = {
|
||||
ln
|
||||
for off, ln in dis.findlinestarts(_unwrap(self.fn).__code__)
|
||||
if ln not in strs
|
||||
}
|
||||
for lineno in lines:
|
||||
self.firstcodelineno = min(self.firstcodelineno, lineno)
|
||||
self.sourcelines.setdefault(lineno, 0)
|
||||
if self.firstcodelineno == sys.maxint:
|
||||
if lines:
|
||||
self.firstcodelineno = min(lines)
|
||||
else: # pragma: nocover
|
||||
# This branch cannot be reached, I'm just being paranoid.
|
||||
self.firstcodelineno = self.firstlineno
|
||||
|
||||
def _find_docstrings(self, filename):
    # A replacement for trace.find_strings() which was deprecated in
    # Python 3.2 and removed in 3.6.
    strs = set()
    prev = token.INDENT  # so module docstring is detected as docstring
    with tokenize_open(filename) as f:
        tokens = tokenize.generate_tokens(f.readline)
        for ttype, tstr, start, end, line in tokens:
            if ttype == token.STRING and prev == token.INDENT:
                strs.update(range(start[0], end[0] + 1))
            prev = ttype
    return strs
|
||||
|
||||
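find_source_lines() now derives executable lines from dis.findlinestarts() and filters out docstring lines found by the tokenizer, as in _find_docstrings() above. A self-contained sketch of the same idea; the helper names and the sample function are illustrative, not profilehooks internals:
```
import dis
import io
import token
import tokenize


def docstring_lines(source):
    """Line numbers occupied by docstrings (STRING tokens right after an INDENT)."""
    strs = set()
    prev = token.INDENT  # so a module docstring is caught as well
    for ttype, _, start, end, _ in tokenize.generate_tokens(io.StringIO(source).readline):
        if ttype == token.STRING and prev == token.INDENT:
            strs.update(range(start[0], end[0] + 1))
        prev = ttype
    return strs


def executable_lines(fn):
    """Executable line numbers of a function, as reported by its bytecode."""
    return {lineno for _, lineno in dis.findlinestarts(fn.__code__) if lineno}


def example():
    """Docstring, never marked as executed."""
    total = 0
    for i in range(3):
        total += i
    return total


print(sorted(docstring_lines('"""Module docstring."""\nx = 1\n')))  # -> [1]
print(sorted(executable_lines(example)))  # line numbers of the statements above
```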
def mark(self, lineno, count=1):
|
||||
"""Mark a given source line as executed count times.
|
||||
|
||||
@@ -635,6 +695,8 @@ class FuncSource:
|
||||
|
||||
def __str__(self):
|
||||
"""Return annotated source code for the function."""
|
||||
if self.filename is None:
|
||||
return "cannot show coverage data since co_filename is None"
|
||||
lines = []
|
||||
lineno = self.firstlineno
|
||||
for line in self.source:
|
||||
@@ -642,7 +704,10 @@ class FuncSource:
|
||||
if counter is None:
|
||||
prefix = ' ' * 7
|
||||
elif counter == 0:
|
||||
if self.blank_rx.match(line):
|
||||
if self.blank_rx.match(line): # pragma: nocover
|
||||
# This is a workaround for an ancient bug I can't
|
||||
# reproduce, perhaps because it was fixed, or perhaps
|
||||
# because I can't remember all the details.
|
||||
prefix = ' ' * 7
|
||||
else:
|
||||
prefix = '>' * 6 + ' '
|
||||
@@ -653,7 +718,10 @@ class FuncSource:
|
||||
return ''.join(lines)
|
||||
|
||||
|
||||
def timecall(fn=None, immediate=True, timer=time.time):
|
||||
def timecall(
|
||||
fn=None, immediate=True, timer=None,
|
||||
log_name=None, log_level=logging.DEBUG,
|
||||
):
|
||||
"""Wrap `fn` and print its execution time.
|
||||
|
||||
Example::
|
||||
@@ -665,36 +733,56 @@ def timecall(fn=None, immediate=True, timer=time.time):
|
||||
somefunc(2, 3)
|
||||
|
||||
will print the time taken by somefunc on every call. If you want just
|
||||
a summary at program termination, use
|
||||
a summary at program termination, use ::
|
||||
|
||||
@timecall(immediate=False)
|
||||
|
||||
You can also choose a timing method other than the default ``time.time()``,
|
||||
e.g.:
|
||||
You can also choose a timing method other than the default
|
||||
``timeit.default_timer()``, e.g.::
|
||||
|
||||
@timecall(timer=time.clock)
|
||||
|
||||
You can also log the output to a logger by specifying the name and level
|
||||
of the logger to use, eg:
|
||||
|
||||
@timecall(immediate=True,
|
||||
log_name='profile_log',
|
||||
log_level=logging.DEBUG)
|
||||
|
||||
"""
|
||||
if fn is None: # @timecall() syntax -- we are a decorator maker
|
||||
if fn is None: # @timecall() syntax -- we are a decorator maker
|
||||
def decorator(fn):
|
||||
return timecall(fn, immediate=immediate, timer=timer)
|
||||
return timecall(
|
||||
fn, immediate=immediate, timer=timer,
|
||||
log_name=log_name, log_level=log_level,
|
||||
)
|
||||
return decorator
|
||||
# @timecall syntax -- we are a decorator.
|
||||
fp = FuncTimer(fn, immediate=immediate, timer=timer)
|
||||
if timer is None:
|
||||
timer = timeit.default_timer
|
||||
fp = FuncTimer(
|
||||
fn, immediate=immediate, timer=timer,
|
||||
log_name=log_name, log_level=log_level,
|
||||
)
|
||||
# We cannot return fp or fp.__call__ directly as that would break method
|
||||
# definitions, instead we need to return a plain function.
|
||||
|
||||
@functools.wraps(fn)
|
||||
def new_fn(*args, **kw):
|
||||
return fp(*args, **kw)
|
||||
new_fn.__doc__ = fn.__doc__
|
||||
new_fn.__name__ = fn.__name__
|
||||
new_fn.__dict__ = fn.__dict__
|
||||
new_fn.__module__ = fn.__module__
|
||||
return new_fn
|
||||
|
||||
|
||||
class FuncTimer(object):
|
||||
|
||||
def __init__(self, fn, immediate, timer):
|
||||
def __init__(
|
||||
self, fn, immediate, timer,
|
||||
log_name=None, log_level=logging.DEBUG,
|
||||
):
|
||||
self.logger = None
|
||||
if log_name:
|
||||
self.logger = logging.getLogger(log_name)
|
||||
self.log_level = log_level
|
||||
self.fn = fn
|
||||
self.ncalls = 0
|
||||
self.totaltime = 0
|
||||
@@ -708,25 +796,57 @@ class FuncTimer(object):
|
||||
fn = self.fn
|
||||
timer = self.timer
|
||||
self.ncalls += 1
|
||||
start = timer()
|
||||
try:
|
||||
start = timer()
|
||||
return fn(*args, **kw)
|
||||
finally:
|
||||
duration = timer() - start
|
||||
self.totaltime += duration
|
||||
if self.immediate:
|
||||
funcname = fn.__name__
|
||||
filename = fn.func_code.co_filename
|
||||
lineno = fn.func_code.co_firstlineno
|
||||
print >> sys.stderr, "\n %s (%s:%s):\n %.3f seconds\n" % (
|
||||
funcname, filename, lineno, duration)
|
||||
funcname, filename, lineno = _identify(fn)
|
||||
message = "%s (%s:%s):\n %.3f seconds\n\n" % (
|
||||
funcname, filename, lineno, duration,
|
||||
)
|
||||
if self.logger:
|
||||
self.logger.log(self.log_level, message)
|
||||
else:
|
||||
sys.stderr.write("\n " + message)
|
||||
sys.stderr.flush()
|
||||
|
||||
def atexit(self):
|
||||
if not self.ncalls:
|
||||
return
|
||||
funcname = self.fn.__name__
|
||||
filename = self.fn.func_code.co_filename
|
||||
lineno = self.fn.func_code.co_firstlineno
|
||||
print ("\n %s (%s:%s):\n"
|
||||
" %d calls, %.3f seconds (%.3f seconds per call)\n" % (
|
||||
funcname, filename, lineno, self.ncalls,
|
||||
self.totaltime, self.totaltime / self.ncalls))
|
||||
funcname, filename, lineno = _identify(self.fn)
|
||||
message = "\n %s (%s:%s):\n"\
|
||||
" %d calls, %.3f seconds (%.3f seconds per call)\n" % (
|
||||
funcname, filename, lineno, self.ncalls,
|
||||
self.totaltime, self.totaltime / self.ncalls)
|
||||
if self.logger:
|
||||
self.logger.log(self.log_level, message)
|
||||
else:
|
||||
print(message)
|
||||
|
||||
|
||||
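A short usage sketch of the logging options added to timecall() above, matching the example in its docstring; it assumes this updated profilehooks module is importable:
```
import logging
import time

from profilehooks import timecall

logging.basicConfig(level=logging.DEBUG)


@timecall(immediate=True, log_name='profile_log', log_level=logging.DEBUG)
def slow_step():
    time.sleep(0.05)


slow_step()   # the timing line goes to the 'profile_log' logger instead of stderr
```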
if __name__ == '__main__':
|
||||
|
||||
local = dict((name, globals()[name]) for name in __all__)
|
||||
message = """********
|
||||
Injected `profilehooks`
|
||||
--------
|
||||
{}
|
||||
********
|
||||
""".format("\n".join(local.keys()))
|
||||
|
||||
def interact_():
|
||||
from code import interact
|
||||
interact(message, local=local)
|
||||
|
||||
def run_():
|
||||
from runpy import run_module
|
||||
print(message)
|
||||
run_module(sys.argv[1], init_globals=local)
|
||||
|
||||
if len(sys.argv) == 1:
|
||||
interact_()
|
||||
else:
|
||||
run_()
|
||||
|
@@ -16,10 +16,10 @@ analysis = Analysis(
|
||||
('../CHANGELOG.md', '.'),
|
||||
('../LICENSE', '.'),
|
||||
('../version.txt', '.'),
|
||||
('../lib/ipwhois/data', 'data')
|
||||
('../lib/ipwhois/data', 'ipwhois/data')
|
||||
],
|
||||
excludes=['FixTk', 'tcl', 'tk', '_tkinter', 'tkinter', 'Tkinter'],
|
||||
hiddenimports=['Foundation', 'AppKit'],
|
||||
hiddenimports=['pkg_resources.py2_warn', 'Foundation', 'AppKit', 'cheroot.ssl', 'cheroot.ssl.builtin'],
|
||||
cipher=block_cipher
|
||||
)
|
||||
pyz = PYZ(
|
||||
@@ -47,5 +47,9 @@ app = BUNDLE(
|
||||
name='Tautulli.app',
|
||||
icon='../data/interfaces/default/images/logo-circle.icns',
|
||||
bundle_identifier='com.Tautulli.Tautulli',
|
||||
version=VERSION
|
||||
version=VERSION,
|
||||
info_plist={
|
||||
'LSBackgroundOnly': True,
|
||||
'LSUIElement': True
|
||||
}
|
||||
)
|
||||
|
@@ -16,6 +16,7 @@ analysis = Analysis(
|
||||
('..\\lib\\ipwhois\\data', 'data')
|
||||
],
|
||||
excludes=['FixTk', 'tcl', 'tk', '_tkinter', 'tkinter', 'Tkinter'],
|
||||
hiddenimports=['pkg_resources.py2_warn', 'cheroot.ssl', 'cheroot.ssl.builtin'],
|
||||
cipher=block_cipher,
|
||||
)
|
||||
pyz = PYZ(
|
||||
|
@@ -3,7 +3,7 @@
|
||||
dialogText=`osascript -e 'set dialogText to button returned of (display dialog "Installation complete. Start Tautulli?" buttons {"Start", "Close"})'`;
|
||||
if [[ $dialogText == 'Start' ]]
|
||||
then
|
||||
open /Applications/Tautulli.app
|
||||
open /Applications/Tautulli.app
|
||||
else
|
||||
exit 0;
|
||||
fi
|
||||
|
@@ -1,4 +1,4 @@
|
||||
pyinstaller
|
||||
pyinstaller==3.6
|
||||
pyopenssl
|
||||
pycryptodomex
|
||||
pyobjc
|
||||
pyobjc-framework-Cocoa
|
@@ -1,4 +1,4 @@
|
||||
pyinstaller
|
||||
pyinstaller==3.6
|
||||
pyopenssl
|
||||
pycryptodomex
|
||||
pywin32
|
@@ -37,8 +37,7 @@ from apscheduler.triggers.interval import IntervalTrigger
|
||||
from UniversalAnalytics import Tracker
|
||||
import pytz
|
||||
|
||||
PYTHON_VERSION = sys.version_info[:3]
|
||||
PYTHON2 = PYTHON_VERSION[0] == 2
|
||||
PYTHON2 = sys.version_info[0] == 2
|
||||
|
||||
if PYTHON2:
|
||||
import activity_handler
|
||||
@@ -137,6 +136,7 @@ DEV = False
|
||||
WEBSOCKET = None
|
||||
WS_CONNECTED = False
|
||||
PLEX_SERVER_UP = None
|
||||
PLEX_REMOTE_ACCESS_UP = None
|
||||
|
||||
TRACKER = None
|
||||
|
||||
@@ -160,7 +160,11 @@ def initialize(config_file):
|
||||
global UMASK
|
||||
global _UPDATE
|
||||
|
||||
CONFIG = config.Config(config_file)
|
||||
try:
|
||||
CONFIG = config.Config(config_file)
|
||||
except:
|
||||
raise SystemExit("Unable to initialize Tautulli due to a corrupted config file. Exiting...")
|
||||
|
||||
CONFIG_FILE = config_file
|
||||
|
||||
assert CONFIG is not None
|
||||
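The startup change above wraps config.Config() in a try/except and aborts with SystemExit when the config file cannot be read; config.py further down also starts importing ParseError from configobj. A hedged sketch of the same guard written directly against configobj, standing in for the Config() internals that are not shown here:
```
from configobj import ConfigObj, ParseError


def load_config(path):
    try:
        return ConfigObj(path, encoding='utf-8')
    except ParseError:
        raise SystemExit(
            "Unable to initialize Tautulli due to a corrupted config file. Exiting...")


# config = load_config('config.ini')
```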
@@ -435,6 +439,8 @@ def initialize_scheduler():
|
||||
|
||||
backup_hours = CONFIG.BACKUP_INTERVAL if 1 <= CONFIG.BACKUP_INTERVAL <= 24 else 6
|
||||
|
||||
schedule_job(database.optimize, 'Optimize Tautulli database',
|
||||
hours=24, minutes=0, seconds=0)
|
||||
schedule_job(database.make_backup, 'Backup Tautulli database',
|
||||
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
|
||||
schedule_job(config.make_backup, 'Backup Tautulli config',
|
||||
@@ -444,10 +450,6 @@ def initialize_scheduler():
|
||||
schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
||||
hours=12 * (not bool(CONFIG.PMS_URL_MANUAL)), minutes=0, seconds=0)
|
||||
|
||||
pms_remote_access_seconds = CONFIG.REMOTE_ACCESS_PING_INTERVAL if 60 <= CONFIG.REMOTE_ACCESS_PING_INTERVAL else 60
|
||||
|
||||
schedule_job(activity_pinger.check_server_access, 'Check for Plex remote access',
|
||||
hours=0, minutes=0, seconds=pms_remote_access_seconds * bool(CONFIG.MONITOR_REMOTE_ACCESS))
|
||||
schedule_job(activity_pinger.check_server_updates, 'Check for Plex updates',
|
||||
hours=pms_update_check_hours * bool(CONFIG.MONITOR_PMS_UPDATES), minutes=0, seconds=0)
|
||||
|
||||
@@ -470,8 +472,6 @@ def initialize_scheduler():
|
||||
schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
|
||||
schedule_job(activity_pinger.check_server_access, 'Check for Plex remote access',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
schedule_job(activity_pinger.check_server_updates, 'Check for Plex updates',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
|
||||
@@ -745,7 +745,7 @@ def dbcheck():
|
||||
c_db.execute(
|
||||
'CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||
'device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, friendly_name TEXT, '
|
||||
'last_seen INTEGER)'
|
||||
'onesignal_id TEXT, last_seen INTEGER, official INTEGER DEFAULT 0)'
|
||||
)
|
||||
|
||||
# tvmaze_lookup table :: This table keeps record of the TVmaze lookups
|
||||
@@ -2001,6 +2001,28 @@ def dbcheck():
|
||||
'ALTER TABLE mobile_devices ADD COLUMN last_seen INTEGER'
|
||||
)
|
||||
|
||||
# Upgrade mobile_devices table from earlier versions
|
||||
try:
|
||||
c_db.execute('SELECT official FROM mobile_devices')
|
||||
except sqlite3.OperationalError:
|
||||
logger.debug("Altering database. Updating database table mobile_devices.")
|
||||
c_db.execute(
|
||||
'ALTER TABLE mobile_devices ADD COLUMN official INTEGER DEFAULT 0'
|
||||
)
|
||||
# Update official mobile device flag
|
||||
for device_id, in c_db.execute('SELECT device_id FROM mobile_devices').fetchall():
|
||||
c_db.execute('UPDATE mobile_devices SET official = ? WHERE device_id = ?',
|
||||
[mobile_app.validate_device_id(device_id), device_id])
|
||||
|
||||
# Upgrade mobile_devices table from earlier versions
|
||||
try:
|
||||
c_db.execute('SELECT onesignal_id FROM mobile_devices')
|
||||
except sqlite3.OperationalError:
|
||||
logger.debug("Altering database. Updating database table mobile_devices.")
|
||||
c_db.execute(
|
||||
'ALTER TABLE mobile_devices ADD COLUMN onesignal_id TEXT'
|
||||
)
|
||||
|
||||
# Upgrade notifiers table from earlier versions
|
||||
try:
|
||||
c_db.execute('SELECT custom_conditions FROM notifiers')
|
||||
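The mobile_devices upgrades above all follow the same probe-then-alter pattern: try to SELECT the new column and add it only when sqlite3 raises OperationalError. A standalone sketch against an in-memory database; the table and column contents are illustrative:
```
import sqlite3

con = sqlite3.connect(':memory:')
c_db = con.cursor()
c_db.execute('CREATE TABLE mobile_devices (id INTEGER PRIMARY KEY, device_id TEXT)')

try:
    c_db.execute('SELECT onesignal_id FROM mobile_devices')
except sqlite3.OperationalError:
    # Column missing on an old database, so add it.
    c_db.execute('ALTER TABLE mobile_devices ADD COLUMN onesignal_id TEXT')

print([row[1] for row in c_db.execute('PRAGMA table_info(mobile_devices)')])
# -> ['id', 'device_id', 'onesignal_id']
```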
@@ -2149,10 +2171,7 @@ def dbcheck():
|
||||
|
||||
|
||||
def upgrade():
|
||||
if CONFIG.UPDATE_NOTIFIERS_DB:
|
||||
notifiers.upgrade_config_to_db()
|
||||
if CONFIG.UPDATE_LIBRARIES_DB_NOTIFY:
|
||||
libraries.update_libraries_db_notify()
|
||||
return
|
||||
|
||||
|
||||
def shutdown(restart=False, update=False, checkout=False, reset=False):
|
||||
@@ -2198,11 +2217,6 @@ def shutdown(restart=False, update=False, checkout=False, reset=False):
|
||||
logger.info("Removing pidfile %s", PIDFILE)
|
||||
os.remove(PIDFILE)
|
||||
|
||||
if WIN_SYS_TRAY_ICON:
|
||||
WIN_SYS_TRAY_ICON.shutdown()
|
||||
elif MAC_SYS_TRAY_ICON:
|
||||
MAC_SYS_TRAY_ICON.shutdown()
|
||||
|
||||
if restart:
|
||||
logger.info("Tautulli is restarting...")
|
||||
|
||||
@@ -2225,7 +2239,7 @@ def shutdown(restart=False, update=False, checkout=False, reset=False):
|
||||
# https://bugs.python.org/issue19066
|
||||
if NOFORK:
|
||||
pass
|
||||
elif common.PLATFORM == 'Windows':
|
||||
elif common.PLATFORM in ('Windows', 'Darwin'):
|
||||
subprocess.Popen(args, cwd=os.getcwd())
|
||||
else:
|
||||
os.execv(exe, args)
|
||||
@@ -2235,6 +2249,11 @@ def shutdown(restart=False, update=False, checkout=False, reset=False):
|
||||
|
||||
logger.shutdown()
|
||||
|
||||
if WIN_SYS_TRAY_ICON:
|
||||
WIN_SYS_TRAY_ICON.shutdown()
|
||||
elif MAC_SYS_TRAY_ICON:
|
||||
MAC_SYS_TRAY_ICON.shutdown()
|
||||
|
||||
os._exit(0)
|
||||
|
||||
|
||||
@@ -2251,6 +2270,7 @@ def initialize_tracker():
|
||||
'appInstallerId': CONFIG.GIT_BRANCH,
|
||||
'dimension1': '{} {}'.format(common.PLATFORM, common.PLATFORM_RELEASE), # App Platform
|
||||
'dimension2': common.PLATFORM_LINUX_DISTRO, # Linux Distro
|
||||
'dimension3': common.PYTHON_VERSION,
|
||||
'userLanguage': SYS_LANGUAGE,
|
||||
'documentEncoding': SYS_ENCODING,
|
||||
'noninteractive': True
|
||||
|
@@ -505,6 +505,55 @@ class TimelineHandler(object):
|
||||
schedule_callback('rating_key-{}'.format(rating_key), remove_job=True)
|
||||
|
||||
|
||||
class ReachabilityHandler(object):
|
||||
|
||||
def __init__(self, data):
|
||||
self.data = data
|
||||
|
||||
def is_reachable(self):
|
||||
if 'reachability' in self.data:
|
||||
return self.data['reachability']
|
||||
return False
|
||||
|
||||
def remote_access_enabled(self):
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
pref = pms_connect.get_server_pref(pref='PublishServerOnPlexOnlineKey')
|
||||
return helpers.bool_true(pref)
|
||||
|
||||
def process(self):
|
||||
# Check if remote access is enabled
|
||||
if not self.remote_access_enabled():
|
||||
return
|
||||
|
||||
# Do nothing if remote access is still up and hasn't changed
|
||||
if self.is_reachable() and plexpy.PLEX_REMOTE_ACCESS_UP:
|
||||
return
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
server_response = pms_connect.get_server_response()
|
||||
|
||||
if server_response:
|
||||
# Waiting for port mapping
|
||||
if server_response['mapping_state'] == 'waiting':
|
||||
logger.warn("Tautulli Monitor :: Remote access waiting for port mapping.")
|
||||
|
||||
elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
|
||||
logger.warn("Tautulli Monitor :: Remote access failed: %s" % server_response['reason'])
|
||||
logger.info("Tautulli Monitor :: Plex remote access is down.")
|
||||
|
||||
plexpy.PLEX_REMOTE_ACCESS_UP = False
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
|
||||
|
||||
elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
|
||||
logger.info("Tautulli Monitor :: Plex remote access is back up.")
|
||||
|
||||
plexpy.PLEX_REMOTE_ACCESS_UP = True
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
|
||||
|
||||
elif plexpy.PLEX_REMOTE_ACCESS_UP is None:
|
||||
plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
|
||||
|
||||
|
||||
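ReachabilityHandler keeps plexpy.PLEX_REMOTE_ACCESS_UP as a tri-state flag (None until the first check, then True or False) and only queues notifications on transitions. A condensed, standalone sketch of that state handling; it simplifies the real process() method and omits the port-mapping branch:
```
def update_remote_access(previous, reachable, reason=''):
    """Return (new_state, notify_action or None) for one reachability update."""
    if previous is None:                      # first update after startup: just record it
        return reachable, None
    if not reachable and previous is not False and reason:
        return False, 'on_extdown'
    if reachable and previous is False:
        return True, 'on_extup'
    return previous, None                     # no transition, no notification


print(update_remote_access(None, True))           # (True, None)
print(update_remote_access(True, False, 'down'))  # (False, 'on_extdown')
print(update_remote_access(False, True))          # (True, 'on_extup')
```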
def del_keys(key):
|
||||
if isinstance(key, set):
|
||||
for child_key in key:
|
||||
|
@@ -216,75 +216,6 @@ def check_active_sessions(ws_request=False):
|
||||
logger.debug("Tautulli Monitor :: Unable to read session list.")
|
||||
|
||||
|
||||
def check_recently_added():
|
||||
|
||||
with monitor_lock:
|
||||
# add delay to allow for metadata processing
|
||||
delay = plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY
|
||||
time_threshold = helpers.timestamp() - delay
|
||||
time_interval = plexpy.CONFIG.MONITORING_INTERVAL
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
recently_added_list = pms_connect.get_recently_added_details(count='10')
|
||||
|
||||
library_data = libraries.Libraries()
|
||||
if recently_added_list:
|
||||
recently_added = recently_added_list['recently_added']
|
||||
|
||||
for item in recently_added:
|
||||
library_details = library_data.get_details(section_id=item['section_id'])
|
||||
|
||||
if not library_details['do_notify_created']:
|
||||
continue
|
||||
|
||||
metadata = []
|
||||
|
||||
if 0 < time_threshold - int(item['added_at']) <= time_interval:
|
||||
if item['media_type'] == 'movie':
|
||||
metadata = pms_connect.get_metadata_details(item['rating_key'])
|
||||
if metadata:
|
||||
metadata = [metadata]
|
||||
else:
|
||||
logger.error("Tautulli Monitor :: Unable to retrieve metadata for rating_key %s" \
|
||||
% str(item['rating_key']))
|
||||
|
||||
else:
|
||||
metadata = pms_connect.get_metadata_children_details(item['rating_key'])
|
||||
if not metadata:
|
||||
logger.error("Tautulli Monitor :: Unable to retrieve children metadata for rating_key %s" \
|
||||
% str(item['rating_key']))
|
||||
|
||||
if metadata:
|
||||
|
||||
if not plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED:
|
||||
for item in metadata:
|
||||
|
||||
library_details = library_data.get_details(section_id=item['section_id'])
|
||||
|
||||
if 0 < time_threshold - int(item['added_at']) <= time_interval:
|
||||
logger.debug("Tautulli Monitor :: Library item %s added to Plex." % str(item['rating_key']))
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'timeline_data': item.copy(), 'notify_action': 'on_created'})
|
||||
|
||||
else:
|
||||
item = max(metadata, key=lambda x:x['added_at'])
|
||||
|
||||
if 0 < time_threshold - int(item['added_at']) <= time_interval:
|
||||
if item['media_type'] == 'episode' or item['media_type'] == 'track':
|
||||
metadata = pms_connect.get_metadata_details(item['grandparent_rating_key'])
|
||||
|
||||
if metadata:
|
||||
item = metadata
|
||||
else:
|
||||
logger.error("Tautulli Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
|
||||
% str(item['rating_key']))
|
||||
|
||||
logger.debug("Tautulli Monitor :: Library item %s added to Plex." % str(item['rating_key']))
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
plexpy.NOTIFY_QUEUE.put({'timeline_data': item.copy(), 'notify_action': 'on_created'})
|
||||
|
||||
|
||||
def connect_server(log=True, startup=False):
|
||||
if plexpy.CONFIG.PMS_IS_CLOUD:
|
||||
if log:
|
||||
@@ -318,47 +249,6 @@ def connect_server(log=True, startup=False):
|
||||
logger.error("Websocket :: Unable to open connection: %s." % e)
|
||||
|
||||
|
||||
def check_server_access():
|
||||
with monitor_lock:
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
server_response = pms_connect.get_server_response()
|
||||
|
||||
global ext_ping_count
|
||||
global ext_ping_error
|
||||
|
||||
# Check for remote access
|
||||
if server_response:
|
||||
log = (server_response['mapping_error'] != ext_ping_error)
|
||||
|
||||
if server_response['reason']:
|
||||
ext_ping_count += 1
|
||||
ext_ping_error = server_response['mapping_error']
|
||||
if log:
|
||||
logger.warn("Tautulli Monitor :: Remote access failed: %s, ping attempt %s."
|
||||
% (server_response['reason'], str(ext_ping_count)))
|
||||
|
||||
# Waiting for port mapping
|
||||
elif server_response['mapping_state'] == 'waiting':
|
||||
ext_ping_error = server_response['mapping_error']
|
||||
if log:
|
||||
logger.warn("Tautulli Monitor :: Remote access waiting for port mapping, ping attempt %s."
|
||||
% str(ext_ping_count))
|
||||
|
||||
# Reset external ping counter
|
||||
else:
|
||||
if ext_ping_count >= plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
|
||||
logger.info("Tautulli Monitor :: Plex remote access is back up.")
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
|
||||
|
||||
ext_ping_count = 0
|
||||
ext_ping_error = None
|
||||
|
||||
if ext_ping_count == plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
|
||||
logger.info("Tautulli Monitor: Plex remote access is down.")
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
|
||||
|
||||
|
||||
def check_server_updates():
|
||||
|
||||
with monitor_lock:
|
||||
|
@@ -20,6 +20,9 @@ from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
|
||||
from hashing_passwords import check_hash
|
||||
from io import open
|
||||
|
||||
import hashlib
|
||||
import inspect
|
||||
import json
|
||||
@@ -136,7 +139,11 @@ class API2(object):
|
||||
self._api_app = True
|
||||
|
||||
if plexpy.CONFIG.API_ENABLED and not self._api_msg or self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
|
||||
if self._api_apikey == plexpy.CONFIG.API_KEY or (self._api_app and self._api_apikey == mobile_app.get_temp_device_token()):
|
||||
if not self._api_app and self._api_apikey == plexpy.CONFIG.API_KEY:
|
||||
self._api_authenticated = True
|
||||
|
||||
elif self._api_app and self._api_apikey == mobile_app.get_temp_device_token() and \
|
||||
self._api_cmd == 'register_device':
|
||||
self._api_authenticated = True
|
||||
|
||||
elif self._api_app and mobile_app.get_mobile_device_by_token(self._api_apikey):
|
||||
@@ -201,7 +208,7 @@ class API2(object):
|
||||
logger.api_debug("Tautulli APIv2 :: Filtering log using regex '%s'" % regex)
|
||||
reg = re.compile(regex, flags=re.I)
|
||||
|
||||
with open(logfile, 'r') as f:
|
||||
with open(logfile, 'r', encoding='utf-8') as f:
|
||||
for line in f.readlines():
|
||||
temp_loglevel_and_time = None
|
||||
|
||||
@@ -217,7 +224,7 @@ class API2(object):
|
||||
except IndexError:
|
||||
# We assume this is a traceback
|
||||
tl = (len(templog) - 1)
|
||||
templog[tl]['msg'] += helpers.sanitize(str(line.replace('\n', ''), 'utf-8'))
|
||||
templog[tl]['msg'] += helpers.sanitize(line.replace('\n', ''))
|
||||
continue
|
||||
|
||||
if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
|
||||
@@ -225,7 +232,7 @@ class API2(object):
|
||||
d = {
|
||||
'time': temp_loglevel_and_time[0],
|
||||
'loglevel': loglvl,
|
||||
'msg': helpers.sanitize(str(msg.replace('\n', ''), 'utf-8')),
|
||||
'msg': helpers.sanitize(msg.replace('\n', '')),
|
||||
'thread': thread
|
||||
}
|
||||
templog.append(d)
|
||||
@@ -387,42 +394,60 @@ class API2(object):
|
||||
|
||||
return data
|
||||
|
||||
def register_device(self, device_id='', device_name='', friendly_name='', **kwargs):
|
||||
def register_device(self, device_id='', device_name='', friendly_name='', onesignal_id=None, **kwargs):
|
||||
""" Registers the Tautulli Android App for notifications.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
device_name (str): The device name of the Tautulli Android App
|
||||
device_id (str): The OneSignal device id of the Tautulli Android App
|
||||
device_id (str): The unique device identifier for the mobile device
|
||||
device_name (str): The device name of the mobile device
|
||||
|
||||
Optional parameters:
|
||||
friendly_name (str): A friendly name to identify the mobile device
|
||||
onesignal_id (str): The OneSignal id for the mobile device
|
||||
|
||||
Returns:
|
||||
None
|
||||
json:
|
||||
{"pms_name": "Winterfell-Server",
|
||||
"server_id": "ds48g4r354a8v9byrrtr697g3g79w"
|
||||
}
|
||||
```
|
||||
"""
|
||||
if not device_id:
|
||||
self._api_msg = 'Device registartion failed: no device id provided.'
|
||||
self._api_msg = 'Device registration failed: no device id provided.'
|
||||
self._api_result_type = 'error'
|
||||
return
|
||||
|
||||
elif not device_name:
|
||||
self._api_msg = 'Device registartion failed: no device name provided.'
|
||||
self._api_msg = 'Device registration failed: no device name provided.'
|
||||
self._api_result_type = 'error'
|
||||
return
|
||||
|
||||
## TODO: Temporary for backwards compatibility, assume device_id is onesignal_id
|
||||
if device_id and onesignal_id is None:
|
||||
onesignal_id = device_id
|
||||
|
||||
result = mobile_app.add_mobile_device(device_id=device_id,
|
||||
device_name=device_name,
|
||||
device_token=self._api_apikey,
|
||||
friendly_name=friendly_name)
|
||||
friendly_name=friendly_name,
|
||||
onesignal_id=onesignal_id)
|
||||
|
||||
if result:
|
||||
self._api_msg = 'Device registration successful.'
|
||||
self._api_result_type = 'success'
|
||||
|
||||
mobile_app.set_temp_device_token(None)
|
||||
|
||||
data = {
|
||||
"pms_name": plexpy.CONFIG.PMS_NAME,
|
||||
"server_id": plexpy.CONFIG.PMS_UUID
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
else:
|
||||
self._api_msg = 'Device registartion failed: database error.'
|
||||
self._api_msg = 'Device registration failed: database error.'
|
||||
self._api_result_type = 'error'
|
||||
|
||||
return
|
||||
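A hypothetical client-side call for the register_device command documented above. The /api/v2 endpoint with apikey and cmd query parameters follows Tautulli's APIv2 convention; the host, key and identifiers are placeholders, and the requests library is assumed to be available:
```
import requests

TAUTULLI_URL = 'http://localhost:8181'        # assumption: default port
API_KEY = 'your-device-or-api-key'            # placeholder

resp = requests.get(
    TAUTULLI_URL + '/api/v2',
    params={
        'apikey': API_KEY,
        'cmd': 'register_device',
        'device_id': 'unique-device-identifier',
        'device_name': 'Pixel 4',
        'friendly_name': 'My phone',
        'onesignal_id': 'onesignal-player-id',   # optional per the docstring
    },
    timeout=10,
)
print(resp.json())   # expected data: {"pms_name": ..., "server_id": ...}
```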
@@ -587,9 +612,17 @@ General optional parameters:
|
||||
```
|
||||
"""
|
||||
data = None
|
||||
apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
|
||||
apikey = hashlib.sha224(str(random.getrandbits(256)).encode('utf-8')).hexdigest()[0:32]
|
||||
if plexpy.CONFIG.HTTP_USERNAME and plexpy.CONFIG.HTTP_PASSWORD:
|
||||
if username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
|
||||
authenticated = False
|
||||
if plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == plexpy.CONFIG.HTTP_USERNAME and check_hash(password, plexpy.CONFIG.HTTP_PASSWORD):
|
||||
authenticated = True
|
||||
elif not plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
|
||||
authenticated = True
|
||||
|
||||
if authenticated:
|
||||
if plexpy.CONFIG.API_KEY:
|
||||
data = plexpy.CONFIG.API_KEY
|
||||
else:
|
||||
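Two things change in get_apikey(): the random key material is encoded before hashing, because hashlib on Python 3 only accepts bytes, and the password check now honours HTTP_HASHED_PASSWORD via check_hash(). A minimal sketch of the first point:
```
import hashlib
import random

bits = str(random.getrandbits(256))

# hashlib.sha224(bits) raises TypeError on Python 3 because str is not bytes,
# so the value is encoded first, exactly as in the hunk above:
apikey = hashlib.sha224(bits.encode('utf-8')).hexdigest()[0:32]
print(apikey)
```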
@@ -624,7 +657,7 @@ General optional parameters:
|
||||
return out['response']['data']
|
||||
|
||||
elif self._api_cmd and self._api_cmd.startswith('download_'):
|
||||
return
|
||||
return out['response']['data']
|
||||
|
||||
elif self._api_cmd == 'pms_image_proxy':
|
||||
if 'return_hash' not in self._api_kwargs:
|
||||
@@ -682,6 +715,17 @@ General optional parameters:
|
||||
def _api_run(self, *args, **kwargs):
|
||||
""" handles the stuff from the handler """
|
||||
|
||||
# Make sure the device ID is not shown in the logs
|
||||
if kwargs.get('cmd') == 'register_device':
|
||||
if kwargs.get('device_id'):
|
||||
logger._BLACKLIST_WORDS.add(kwargs['device_id'])
|
||||
if kwargs.get('onesignal_id'):
|
||||
logger._BLACKLIST_WORDS.add(kwargs['onesignal_id'])
|
||||
|
||||
elif kwargs.get('cmd') == 'get_apikey':
|
||||
if kwargs.get('password'):
|
||||
logger._BLACKLIST_WORDS.add(kwargs['password'])
|
||||
|
||||
result = {}
|
||||
logger.api_debug('Tautulli APIv2 :: API called with kwargs: %s' % kwargs)
|
||||
|
||||
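The _api_run() change adds device ids, OneSignal ids and passwords to logger._BLACKLIST_WORDS so they are masked in the logs. Tautulli's logger module is not shown here; the sketch below illustrates the general idea with a plain logging.Filter, and every name in it is a placeholder:
```
import logging

BLACKLIST_WORDS = set()


class RedactingFilter(logging.Filter):
    def filter(self, record):
        msg = record.getMessage()
        for word in BLACKLIST_WORDS:
            if word and word in msg:
                msg = msg.replace(word, '*' * 8)
        record.msg, record.args = msg, ()
        return True


logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('apiv2-demo')
log.addFilter(RedactingFilter())

BLACKLIST_WORDS.add('super-secret-device-id')
log.debug('API called with kwargs: %s', {'device_id': 'super-secret-device-id'})
# -> API called with kwargs: {'device_id': '********'}
```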
|
@@ -35,6 +35,7 @@ PLATFORM_RELEASE = platform.release()
|
||||
PLATFORM_VERSION = platform.version()
|
||||
PLATFORM_LINUX_DISTRO = ' '.join(x for x in distro.linux_distribution() if x)
|
||||
PLATFORM_DEVICE_NAME = platform.node()
|
||||
PYTHON_VERSION = platform.python_version()
|
||||
BRANCH = version.PLEXPY_BRANCH
|
||||
RELEASE = version.PLEXPY_RELEASE_VERSION
|
||||
|
||||
@@ -104,6 +105,7 @@ PLATFORM_NAMES = {
|
||||
'nexus': 'android',
|
||||
'macos': 'macos',
|
||||
'microsoft edge': 'msedge',
|
||||
'netcast': 'lg',
|
||||
'opera': 'opera',
|
||||
'osx': 'macos',
|
||||
'playstation': 'playstation',
|
||||
@@ -119,6 +121,7 @@ PLATFORM_NAMES = {
|
||||
'tizen': 'samsung',
|
||||
'tvos': 'atv',
|
||||
'vizio': 'opera',
|
||||
'webos': 'lg',
|
||||
'wiiu': 'wiiu',
|
||||
'windows': 'windows',
|
||||
'windows phone': 'wp',
|
||||
@@ -214,18 +217,20 @@ EXTRA_TYPES = {
|
||||
}
|
||||
|
||||
SCHEDULER_LIST = [
|
||||
'Check GitHub for updates',
|
||||
'Check for server response',
|
||||
'Check for active sessions',
|
||||
'Check for recently added items',
|
||||
'Check for Plex updates',
|
||||
'Check for Plex remote access',
|
||||
'Refresh users list',
|
||||
'Refresh libraries list',
|
||||
'Refresh Plex server URLs',
|
||||
'Backup Tautulli database',
|
||||
'Backup Tautulli config'
|
||||
('Check GitHub for updates', 'websocket'),
|
||||
('Check for server response', 'websocket'),
|
||||
('Check for active sessions', 'websocket'),
|
||||
('Check for recently added items', 'websocket'),
|
||||
('Check for server remote access', 'websocket'),
|
||||
('Check for Plex updates', 'scheduled'),
|
||||
('Refresh users list', 'scheduled'),
|
||||
('Refresh libraries list', 'scheduled'),
|
||||
('Refresh Plex server URLs', 'scheduled'),
|
||||
('Optimize Tautulli database', 'scheduled'),
|
||||
('Backup Tautulli database', 'scheduled'),
|
||||
('Backup Tautulli config', 'scheduled')
|
||||
]
|
||||
SCHEDULER_LIST = OrderedDict(SCHEDULER_LIST)
|
||||
|
||||
DATE_TIME_FORMATS = [
|
||||
{
|
||||
@@ -494,6 +499,7 @@ NOTIFICATION_PARAMETERS = [
|
||||
{'name': 'Audience Rating', 'type': 'int', 'value': 'audience_rating', 'description': 'The audience rating (%) for the item.', 'help_text': 'Ratings source must be Rotten Tomatoes for the Plex Movie agent'},
|
||||
{'name': 'Duration', 'type': 'int', 'value': 'duration', 'description': 'The duration (in minutes) for the item.'},
|
||||
{'name': 'Poster URL', 'type': 'str', 'value': 'poster_url', 'description': 'A URL for the movie, TV show, or album poster.'},
|
||||
{'name': 'Plex ID', 'type': 'str', 'value': 'plex_id', 'description': 'The Plex ID for the item.', 'example': 'e.g. 5d7769a9594b2b001e6a6b7e'},
|
||||
{'name': 'Plex URL', 'type': 'str', 'value': 'plex_url', 'description': 'The Plex URL to your server for the item.'},
|
||||
{'name': 'IMDB ID', 'type': 'str', 'value': 'imdb_id', 'description': 'The IMDB ID for the movie.', 'example': 'e.g. tt2488496'},
|
||||
{'name': 'IMDB URL', 'type': 'str', 'value': 'imdb_url', 'description': 'The IMDB URL for the movie.'},
|
||||
@@ -524,7 +530,7 @@ NOTIFICATION_PARAMETERS = [
|
||||
{'name': 'Video Full Resolution', 'type': 'str', 'value': 'video_full_resolution', 'description': 'The video resolution of the original media with scan type.'},
|
||||
{'name': 'Video Ref Frames', 'type': 'int', 'value': 'video_ref_frames', 'description': 'The video reference frames of the original media.'},
|
||||
{'name': 'Video Resolution', 'type': 'str', 'value': 'video_resolution', 'description': 'The video resolution of the original media.'},
|
||||
{'name': 'Video Scan Tpye', 'type': 'str', 'value': 'video_scan_type', 'description': 'The video scan type of the original media.'},
|
||||
{'name': 'Video Scan Type', 'type': 'str', 'value': 'video_scan_type', 'description': 'The video scan type of the original media.'},
|
||||
{'name': 'Video Height', 'type': 'int', 'value': 'video_height', 'description': 'The video height of the original media.'},
|
||||
{'name': 'Video Width', 'type': 'int', 'value': 'video_width', 'description': 'The video width of the original media.'},
|
||||
{'name': 'Video Language', 'type': 'str', 'value': 'video_language', 'description': 'The video language of the original media.'},
|
||||
|
plexpy/config.py (634)
@@ -17,18 +17,20 @@ from __future__ import unicode_literals
|
||||
from future.builtins import object
|
||||
from future.builtins import str
|
||||
|
||||
import arrow
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import time
|
||||
import threading
|
||||
|
||||
from configobj import ConfigObj
|
||||
from configobj import ConfigObj, ParseError
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import helpers
|
||||
import logger
|
||||
else:
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
|
||||
|
||||
@@ -41,15 +43,12 @@ def bool_int(value):
|
||||
value = 0
|
||||
return int(bool(value))
|
||||
|
||||
|
||||
FILENAME = "config.ini"
|
||||
|
||||
_CONFIG_DEFINITIONS = {
|
||||
'ALLOW_GUEST_ACCESS': (int, 'General', 0),
|
||||
'DATE_FORMAT': (str, 'General', 'YYYY-MM-DD'),
|
||||
'GROUPING_GLOBAL_HISTORY': (int, 'PlexWatch', 0),
|
||||
'GROUPING_USER_HISTORY': (int, 'PlexWatch', 0),
|
||||
'GROUPING_CHARTS': (int, 'PlexWatch', 0),
|
||||
'PLEXWATCH_DATABASE': (str, 'PlexWatch', ''),
|
||||
'PMS_IDENTIFIER': (str, 'PMS', ''),
|
||||
'PMS_IP': (str, 'PMS', '127.0.0.1'),
|
||||
'PMS_IS_CLOUD': (int, 'PMS', 0),
|
||||
@@ -75,44 +74,10 @@ _CONFIG_DEFINITIONS = {
|
||||
'PMS_UPDATE_CHECK_INTERVAL': (int, 'Advanced', 24),
|
||||
'PMS_WEB_URL': (str, 'PMS', 'https://app.plex.tv/desktop'),
|
||||
'TIME_FORMAT': (str, 'General', 'HH:mm'),
|
||||
'ADD_LIVE_TV_LIBRARY': (int, 'Advanced', 1),
|
||||
'ANON_REDIRECT': (str, 'General', 'http://www.nullrefer.com/?'),
|
||||
'ANON_REDIRECT': (str, 'General', 'https://www.nullrefer.com/?'),
|
||||
'API_ENABLED': (int, 'General', 1),
|
||||
'API_KEY': (str, 'General', ''),
|
||||
'API_SQL': (int, 'General', 0),
|
||||
'BOXCAR_ENABLED': (int, 'Boxcar', 0),
|
||||
'BOXCAR_TOKEN': (str, 'Boxcar', ''),
|
||||
'BOXCAR_SOUND': (str, 'Boxcar', ''),
|
||||
'BOXCAR_ON_PLAY': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_STOP': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_PAUSE': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_RESUME': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_BUFFER': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_WATCHED': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_CREATED': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_EXTDOWN': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_INTDOWN': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_EXTUP': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_INTUP': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_PMSUPDATE': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_CONCURRENT': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_NEWDEVICE': (int, 'Boxcar', 0),
|
||||
'BROWSER_ENABLED': (int, 'Browser', 0),
|
||||
'BROWSER_AUTO_HIDE_DELAY': (int, 'Browser', 5),
|
||||
'BROWSER_ON_PLAY': (int, 'Browser', 0),
|
||||
'BROWSER_ON_STOP': (int, 'Browser', 0),
|
||||
'BROWSER_ON_PAUSE': (int, 'Browser', 0),
|
||||
'BROWSER_ON_RESUME': (int, 'Browser', 0),
|
||||
'BROWSER_ON_BUFFER': (int, 'Browser', 0),
|
||||
'BROWSER_ON_WATCHED': (int, 'Browser', 0),
|
||||
'BROWSER_ON_CREATED': (int, 'Browser', 0),
|
||||
'BROWSER_ON_EXTDOWN': (int, 'Browser', 0),
|
||||
'BROWSER_ON_INTDOWN': (int, 'Browser', 0),
|
||||
'BROWSER_ON_EXTUP': (int, 'Browser', 0),
|
||||
'BROWSER_ON_INTUP': (int, 'Browser', 0),
|
||||
'BROWSER_ON_PMSUPDATE': (int, 'Browser', 0),
|
||||
'BROWSER_ON_CONCURRENT': (int, 'Browser', 0),
|
||||
'BROWSER_ON_NEWDEVICE': (int, 'Browser', 0),
|
||||
'BUFFER_THRESHOLD': (int, 'Monitoring', 10),
|
||||
'BUFFER_WAIT': (int, 'Monitoring', 900),
|
||||
'BACKUP_DAYS': (int, 'General', 3),
|
||||
@@ -130,56 +95,7 @@ _CONFIG_DEFINITIONS = {
|
||||
'CLOUDINARY_API_SECRET': (str, 'Cloudinary', ''),
|
||||
'CONFIG_VERSION': (int, 'Advanced', 0),
|
||||
'DO_NOT_OVERRIDE_GIT_BRANCH': (int, 'General', 0),
|
||||
'EMAIL_ENABLED': (int, 'Email', 0),
|
||||
'EMAIL_FROM_NAME': (str, 'Email', 'Tautulli'),
|
||||
'EMAIL_FROM': (str, 'Email', ''),
|
||||
'EMAIL_TO': (str, 'Email', ''),
|
||||
'EMAIL_CC': (str, 'Email', ''),
|
||||
'EMAIL_BCC': (str, 'Email', ''),
|
||||
'EMAIL_SMTP_SERVER': (str, 'Email', ''),
|
||||
'EMAIL_SMTP_USER': (str, 'Email', ''),
|
||||
'EMAIL_SMTP_PASSWORD': (str, 'Email', ''),
|
||||
'EMAIL_SMTP_PORT': (int, 'Email', 25),
|
||||
'EMAIL_TLS': (int, 'Email', 0),
|
||||
'EMAIL_HTML_SUPPORT': (int, 'Email', 1),
|
||||
'EMAIL_ON_PLAY': (int, 'Email', 0),
|
||||
'EMAIL_ON_STOP': (int, 'Email', 0),
|
||||
'EMAIL_ON_PAUSE': (int, 'Email', 0),
|
||||
'EMAIL_ON_RESUME': (int, 'Email', 0),
|
||||
'EMAIL_ON_BUFFER': (int, 'Email', 0),
|
||||
'EMAIL_ON_WATCHED': (int, 'Email', 0),
|
||||
'EMAIL_ON_CREATED': (int, 'Email', 0),
|
||||
'EMAIL_ON_EXTDOWN': (int, 'Email', 0),
|
||||
'EMAIL_ON_INTDOWN': (int, 'Email', 0),
|
||||
'EMAIL_ON_EXTUP': (int, 'Email', 0),
|
||||
'EMAIL_ON_INTUP': (int, 'Email', 0),
|
||||
'EMAIL_ON_PMSUPDATE': (int, 'Email', 0),
|
||||
'EMAIL_ON_CONCURRENT': (int, 'Email', 0),
|
||||
'EMAIL_ON_NEWDEVICE': (int, 'Email', 0),
|
||||
'ENABLE_HTTPS': (int, 'General', 0),
|
||||
'FACEBOOK_ENABLED': (int, 'Facebook', 0),
|
||||
'FACEBOOK_REDIRECT_URI': (str, 'Facebook', ''),
|
||||
'FACEBOOK_APP_ID': (str, 'Facebook', ''),
|
||||
'FACEBOOK_APP_SECRET': (str, 'Facebook', ''),
|
||||
'FACEBOOK_TOKEN': (str, 'Facebook', ''),
|
||||
'FACEBOOK_GROUP': (str, 'Facebook', ''),
|
||||
'FACEBOOK_INCL_PMSLINK': (int, 'Facebook', 0),
|
||||
'FACEBOOK_INCL_POSTER': (int, 'Facebook', 0),
|
||||
'FACEBOOK_INCL_SUBJECT': (int, 'Facebook', 1),
|
||||
'FACEBOOK_ON_PLAY': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_STOP': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_PAUSE': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_RESUME': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_BUFFER': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_WATCHED': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_CREATED': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_EXTDOWN': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_INTDOWN': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_EXTUP': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_INTUP': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_PMSUPDATE': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_CONCURRENT': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_NEWDEVICE': (int, 'Facebook', 0),
|
||||
'FIRST_RUN_COMPLETE': (int, 'General', 0),
|
||||
'FREEZE_DB': (int, 'General', 0),
|
||||
'GET_FILE_SIZES': (int, 'General', 0),
|
||||
@@ -191,27 +107,10 @@ _CONFIG_DEFINITIONS = {
|
||||
'GIT_USER': (str, 'General', 'Tautulli'),
|
||||
'GIT_REPO': (str, 'General', 'Tautulli'),
|
||||
'GROUP_HISTORY_TABLES': (int, 'General', 1),
|
||||
'GROWL_ENABLED': (int, 'Growl', 0),
|
||||
'GROWL_HOST': (str, 'Growl', ''),
|
||||
'GROWL_PASSWORD': (str, 'Growl', ''),
|
||||
'GROWL_ON_PLAY': (int, 'Growl', 0),
|
||||
'GROWL_ON_STOP': (int, 'Growl', 0),
|
||||
'GROWL_ON_PAUSE': (int, 'Growl', 0),
|
||||
'GROWL_ON_RESUME': (int, 'Growl', 0),
|
||||
'GROWL_ON_BUFFER': (int, 'Growl', 0),
|
||||
'GROWL_ON_WATCHED': (int, 'Growl', 0),
|
||||
'GROWL_ON_CREATED': (int, 'Growl', 0),
|
||||
'GROWL_ON_EXTDOWN': (int, 'Growl', 0),
|
||||
'GROWL_ON_INTDOWN': (int, 'Growl', 0),
|
||||
'GROWL_ON_EXTUP': (int, 'Growl', 0),
|
||||
'GROWL_ON_INTUP': (int, 'Growl', 0),
|
||||
'GROWL_ON_PMSUPDATE': (int, 'Growl', 0),
|
||||
'GROWL_ON_CONCURRENT': (int, 'Growl', 0),
|
||||
'GROWL_ON_NEWDEVICE': (int, 'Growl', 0),
|
||||
'HISTORY_TABLE_ACTIVITY': (int, 'General', 1),
|
||||
'HOME_SECTIONS': (list, 'General', ['current_activity','watch_stats','library_stats','recently_added']),
|
||||
'HOME_SECTIONS': (list, 'General', ['current_activity', 'watch_stats', 'library_stats', 'recently_added']),
|
||||
'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']),
|
||||
'HOME_STATS_CARDS': (list, 'General', ['top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music', \
|
||||
'HOME_STATS_CARDS': (list, 'General', ['top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music',
|
||||
'popular_music', 'last_watched', 'top_users', 'top_platforms', 'most_concurrent']),
|
||||
'HOME_REFRESH_INTERVAL': (int, 'General', 10),
|
||||
'HTTPS_CREATE_CERT': (int, 'General', 1),
|
||||
@@ -232,65 +131,8 @@ _CONFIG_DEFINITIONS = {
|
||||
'HTTP_USERNAME': (str, 'General', ''),
|
||||
'HTTP_PLEX_ADMIN': (int, 'General', 0),
|
||||
'HTTP_BASE_URL': (str, 'General', ''),
|
||||
'HIPCHAT_URL': (str, 'Hipchat', ''),
|
||||
'HIPCHAT_COLOR': (str, 'Hipchat', ''),
|
||||
'HIPCHAT_INCL_SUBJECT': (int, 'Hipchat', 1),
|
||||
'HIPCHAT_INCL_PMSLINK': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_INCL_POSTER': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_EMOTICON': (str, 'Hipchat', ''),
|
||||
'HIPCHAT_ENABLED': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_PLAY': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_STOP': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_PAUSE': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_RESUME': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_BUFFER': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_WATCHED': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_CREATED': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_EXTDOWN': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_INTDOWN': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_EXTUP': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_INTUP': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_PMSUPDATE': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_CONCURRENT': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_NEWDEVICE': (int, 'Hipchat', 0),
|
||||
'INTERFACE': (str, 'General', 'default'),
|
||||
'IP_LOGGING_ENABLE': (int, 'General', 0),
|
||||
'IFTTT_KEY': (str, 'IFTTT', ''),
|
||||
'IFTTT_EVENT': (str, 'IFTTT', 'tautulli'),
|
||||
'IFTTT_ENABLED': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_PLAY': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_STOP': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_PAUSE': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_RESUME': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_BUFFER': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_WATCHED': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_CREATED': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_EXTDOWN': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_INTDOWN': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_EXTUP': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_INTUP': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_PMSUPDATE': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_CONCURRENT': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_NEWDEVICE': (int, 'IFTTT', 0),
|
||||
'IMGUR_CLIENT_ID': (str, 'Monitoring', ''),
|
||||
'JOIN_APIKEY': (str, 'Join', ''),
|
||||
'JOIN_DEVICEID': (str, 'Join', ''),
|
||||
'JOIN_ENABLED': (int, 'Join', 0),
|
||||
'JOIN_INCL_SUBJECT': (int, 'Join', 1),
|
||||
'JOIN_ON_PLAY': (int, 'Join', 0),
|
||||
'JOIN_ON_STOP': (int, 'Join', 0),
|
||||
'JOIN_ON_PAUSE': (int, 'Join', 0),
|
||||
'JOIN_ON_RESUME': (int, 'Join', 0),
|
||||
'JOIN_ON_BUFFER': (int, 'Join', 0),
|
||||
'JOIN_ON_WATCHED': (int, 'Join', 0),
|
||||
'JOIN_ON_CREATED': (int, 'Join', 0),
|
||||
'JOIN_ON_EXTDOWN': (int, 'Join', 0),
|
||||
'JOIN_ON_INTDOWN': (int, 'Join', 0),
|
||||
'JOIN_ON_EXTUP': (int, 'Join', 0),
|
||||
'JOIN_ON_INTUP': (int, 'Join', 0),
|
||||
'JOIN_ON_PMSUPDATE': (int, 'Join', 0),
|
||||
'JOIN_ON_CONCURRENT': (int, 'Join', 0),
|
||||
'JOIN_ON_NEWDEVICE': (int, 'Join', 0),
|
||||
'JOURNAL_MODE': (str, 'Advanced', 'WAL'),
|
||||
'LAUNCH_BROWSER': (int, 'General', 1),
|
||||
'LAUNCH_STARTUP': (int, 'General', 1),
|
||||
@@ -298,23 +140,11 @@ _CONFIG_DEFINITIONS = {
|
||||
'LOG_DIR': (str, 'General', ''),
|
||||
'LOGGING_IGNORE_INTERVAL': (int, 'Monitoring', 120),
|
||||
'METADATA_CACHE_SECONDS': (int, 'Advanced', 1800),
|
||||
'MOVIE_LOGGING_ENABLE': (int, 'Monitoring', 1),
|
||||
'MOVIE_NOTIFY_ENABLE': (int, 'Monitoring', 0),
|
||||
'MOVIE_NOTIFY_ON_START': (int, 'Monitoring', 1),
|
||||
'MOVIE_NOTIFY_ON_STOP': (int, 'Monitoring', 0),
|
||||
'MOVIE_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
|
||||
'MOVIE_WATCHED_PERCENT': (int, 'Monitoring', 85),
|
||||
'MUSIC_LOGGING_ENABLE': (int, 'Monitoring', 1),
|
||||
'MUSIC_NOTIFY_ENABLE': (int, 'Monitoring', 0),
|
||||
'MUSIC_NOTIFY_ON_START': (int, 'Monitoring', 1),
|
||||
'MUSIC_NOTIFY_ON_STOP': (int, 'Monitoring', 0),
|
||||
'MUSIC_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
|
||||
'MUSIC_WATCHED_PERCENT': (int, 'Monitoring', 85),
|
||||
'MUSICBRAINZ_LOOKUP': (int, 'General', 0),
|
||||
'MONITOR_PMS_UPDATES': (int, 'Monitoring', 0),
|
||||
'MONITOR_REMOTE_ACCESS': (int, 'Monitoring', 0),
|
||||
'MONITORING_INTERVAL': (int, 'Monitoring', 60),
|
||||
'MONITORING_USE_WEBSOCKET': (int, 'Monitoring', 0),
|
||||
'NEWSLETTER_AUTH': (int, 'Newsletter', 0),
|
||||
'NEWSLETTER_PASSWORD': (str, 'Newsletter', ''),
|
||||
'NEWSLETTER_CUSTOM_DIR': (str, 'Newsletter', ''),
|
||||
@@ -322,319 +152,37 @@ _CONFIG_DEFINITIONS = {
|
||||
'NEWSLETTER_TEMPLATES': (str, 'Newsletter', 'newsletters'),
|
||||
'NEWSLETTER_DIR': (str, 'Newsletter', ''),
|
||||
'NEWSLETTER_SELF_HOSTED': (int, 'Newsletter', 0),
|
||||
'NEWSLETTER_STATIC_URL': (int, 'Newsletter', 0),
|
||||
'NMA_APIKEY': (str, 'NMA', ''),
|
||||
'NMA_ENABLED': (int, 'NMA', 0),
|
||||
'NMA_PRIORITY': (int, 'NMA', 0),
|
||||
'NMA_ON_PLAY': (int, 'NMA', 0),
|
||||
'NMA_ON_STOP': (int, 'NMA', 0),
|
||||
'NMA_ON_PAUSE': (int, 'NMA', 0),
|
||||
'NMA_ON_RESUME': (int, 'NMA', 0),
|
||||
'NMA_ON_BUFFER': (int, 'NMA', 0),
|
||||
'NMA_ON_WATCHED': (int, 'NMA', 0),
|
||||
'NMA_ON_CREATED': (int, 'NMA', 0),
|
||||
'NMA_ON_EXTDOWN': (int, 'NMA', 0),
|
||||
'NMA_ON_INTDOWN': (int, 'NMA', 0),
|
||||
'NMA_ON_EXTUP': (int, 'NMA', 0),
|
||||
'NMA_ON_INTUP': (int, 'NMA', 0),
|
||||
'NMA_ON_PMSUPDATE': (int, 'NMA', 0),
|
||||
'NMA_ON_CONCURRENT': (int, 'NMA', 0),
|
||||
'NMA_ON_NEWDEVICE': (int, 'NMA', 0),
|
||||
'NOTIFICATION_THREADS': (int, 'Advanced', 2),
|
||||
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
|
||||
'NOTIFY_CONTINUED_SESSION_THRESHOLD': (int, 'Monitoring', 15),
|
||||
'NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 1),
|
||||
'NOTIFY_GROUP_RECENTLY_ADDED_PARENT': (int, 'Monitoring', 1),
|
||||
'NOTIFY_GROUP_RECENTLY_ADDED': (int, 'Monitoring', 1),
|
||||
'NOTIFY_UPLOAD_POSTERS': (int, 'Monitoring', 0),
|
||||
'NOTIFY_RECENTLY_ADDED': (int, 'Monitoring', 0),
|
||||
'NOTIFY_RECENTLY_ADDED_DELAY': (int, 'Monitoring', 60),
|
||||
'NOTIFY_RECENTLY_ADDED_DELAY': (int, 'Monitoring', 300),
|
||||
'NOTIFY_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 0),
|
||||
'NOTIFY_RECENTLY_ADDED_UPGRADE': (int, 'Monitoring', 0),
|
||||
'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0),
|
||||
'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
|
||||
'NOTIFY_WATCHED_PERCENT': (int, 'Monitoring', 85),
|
||||
'NOTIFY_ON_START_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_START_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) started playing {title}.'),
|
||||
'NOTIFY_ON_STOP_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_STOP_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has stopped {title}.'),
|
||||
'NOTIFY_ON_PAUSE_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_PAUSE_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has paused {title}.'),
|
||||
'NOTIFY_ON_RESUME_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_RESUME_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has resumed {title}.'),
|
||||
'NOTIFY_ON_BUFFER_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_BUFFER_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) is buffering {title}.'),
|
||||
'NOTIFY_ON_WATCHED_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_WATCHED_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has watched {title}.'),
|
||||
'NOTIFY_ON_CREATED_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_CREATED_BODY_TEXT': (str, 'Monitoring', '{title} was recently added to Plex.'),
|
||||
'NOTIFY_ON_EXTDOWN_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_EXTDOWN_BODY_TEXT': (str, 'Monitoring', 'The Plex Media Server remote access is down.'),
|
||||
'NOTIFY_ON_INTDOWN_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_INTDOWN_BODY_TEXT': (str, 'Monitoring', 'The Plex Media Server is down.'),
|
||||
'NOTIFY_ON_EXTUP_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_EXTUP_BODY_TEXT': (str, 'Monitoring', 'The Plex Media Server remote access is back up.'),
|
||||
'NOTIFY_ON_INTUP_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_INTUP_BODY_TEXT': (str, 'Monitoring', 'The Plex Media Server is back up.'),
|
||||
'NOTIFY_ON_PMSUPDATE_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_PMSUPDATE_BODY_TEXT': (str, 'Monitoring', 'An update is available for the Plex Media Server (version {update_version}).'),
|
||||
'NOTIFY_ON_CONCURRENT_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_CONCURRENT_BODY_TEXT': (str, 'Monitoring', '{user} has {user_streams} concurrent streams.'),
|
||||
'NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT': (str, 'Monitoring', 'Tautulli ({server_name})'),
|
||||
'NOTIFY_ON_NEWDEVICE_BODY_TEXT': (str, 'Monitoring', '{user} is streaming from a new device: {player}.'),
|
||||
'NOTIFY_SCRIPTS_ARGS_TEXT': (str, 'Monitoring', ''),
|
||||
'OSX_NOTIFY_APP': (str, 'OSX_Notify', '/Applications/Tautulli'),
|
||||
'OSX_NOTIFY_ENABLED': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_PLAY': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_STOP': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_PAUSE': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_RESUME': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_BUFFER': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_WATCHED': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_CREATED': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_EXTDOWN': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_INTDOWN': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_EXTUP': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_INTUP': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_PMSUPDATE': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_CONCURRENT': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_NEWDEVICE': (int, 'OSX_Notify', 0),
|
||||
'PLEX_CLIENT_HOST': (str, 'Plex', ''),
|
||||
'PLEX_ENABLED': (int, 'Plex', 0),
|
||||
'PLEX_PASSWORD': (str, 'Plex', ''),
|
||||
'PLEX_USERNAME': (str, 'Plex', ''),
|
||||
'PLEX_ON_PLAY': (int, 'Plex', 0),
|
||||
'PLEX_ON_STOP': (int, 'Plex', 0),
|
||||
'PLEX_ON_PAUSE': (int, 'Plex', 0),
|
||||
'PLEX_ON_RESUME': (int, 'Plex', 0),
|
||||
'PLEX_ON_BUFFER': (int, 'Plex', 0),
|
||||
'PLEX_ON_WATCHED': (int, 'Plex', 0),
|
||||
'PLEX_ON_CREATED': (int, 'Plex', 0),
|
||||
'PLEX_ON_EXTDOWN': (int, 'Plex', 0),
|
||||
'PLEX_ON_INTDOWN': (int, 'Plex', 0),
|
||||
'PLEX_ON_EXTUP': (int, 'Plex', 0),
|
||||
'PLEX_ON_INTUP': (int, 'Plex', 0),
|
||||
'PLEX_ON_PMSUPDATE': (int, 'Plex', 0),
|
||||
'PLEX_ON_CONCURRENT': (int, 'Plex', 0),
|
||||
'PLEX_ON_NEWDEVICE': (int, 'Plex', 0),
|
||||
'PLEXPY_AUTO_UPDATE': (int, 'General', 0),
|
||||
'PROWL_ENABLED': (int, 'Prowl', 0),
|
||||
'PROWL_KEYS': (str, 'Prowl', ''),
|
||||
'PROWL_PRIORITY': (int, 'Prowl', 0),
|
||||
'PROWL_ON_PLAY': (int, 'Prowl', 0),
|
||||
'PROWL_ON_STOP': (int, 'Prowl', 0),
|
||||
'PROWL_ON_PAUSE': (int, 'Prowl', 0),
|
||||
'PROWL_ON_RESUME': (int, 'Prowl', 0),
|
||||
'PROWL_ON_BUFFER': (int, 'Prowl', 0),
|
||||
'PROWL_ON_WATCHED': (int, 'Prowl', 0),
|
||||
'PROWL_ON_CREATED': (int, 'Prowl', 0),
|
||||
'PROWL_ON_EXTDOWN': (int, 'Prowl', 0),
|
||||
'PROWL_ON_INTDOWN': (int, 'Prowl', 0),
|
||||
'PROWL_ON_EXTUP': (int, 'Prowl', 0),
|
||||
'PROWL_ON_INTUP': (int, 'Prowl', 0),
|
||||
'PROWL_ON_PMSUPDATE': (int, 'Prowl', 0),
|
||||
'PROWL_ON_CONCURRENT': (int, 'Prowl', 0),
|
||||
'PROWL_ON_NEWDEVICE': (int, 'Prowl', 0),
|
||||
'PUSHALOT_APIKEY': (str, 'Pushalot', ''),
|
||||
'PUSHALOT_ENABLED': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_PLAY': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_STOP': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_PAUSE': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_RESUME': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_BUFFER': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_WATCHED': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_CREATED': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_EXTDOWN': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_INTDOWN': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_EXTUP': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_INTUP': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_PMSUPDATE': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_CONCURRENT': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_NEWDEVICE': (int, 'Pushalot', 0),
|
||||
'PUSHBULLET_APIKEY': (str, 'PushBullet', ''),
|
||||
'PUSHBULLET_DEVICEID': (str, 'PushBullet', ''),
|
||||
'PUSHBULLET_CHANNEL_TAG': (str, 'PushBullet', ''),
|
||||
'PUSHBULLET_ENABLED': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_PLAY': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_STOP': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_PAUSE': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_RESUME': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_BUFFER': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_WATCHED': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_CREATED': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_EXTDOWN': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_INTDOWN': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_EXTUP': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_INTUP': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_PMSUPDATE': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_CONCURRENT': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_NEWDEVICE': (int, 'PushBullet', 0),
|
||||
'PUSHOVER_APITOKEN': (str, 'Pushover', ''),
|
||||
'PUSHOVER_ENABLED': (int, 'Pushover', 0),
|
||||
'PUSHOVER_HTML_SUPPORT': (int, 'Pushover', 1),
|
||||
'PUSHOVER_INCL_PMSLINK': (int, 'Pushover', 0),
|
||||
'PUSHOVER_INCL_URL': (int, 'Pushover', 1),
|
||||
'PUSHOVER_KEYS': (str, 'Pushover', ''),
|
||||
'PUSHOVER_PRIORITY': (int, 'Pushover', 0),
|
||||
'PUSHOVER_SOUND': (str, 'Pushover', ''),
|
||||
'PUSHOVER_ON_PLAY': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_STOP': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_PAUSE': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_RESUME': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_BUFFER': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_WATCHED': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_CREATED': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_EXTDOWN': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_INTDOWN': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_EXTUP': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_INTUP': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_PMSUPDATE': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_CONCURRENT': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_NEWDEVICE': (int, 'Pushover', 0),
|
||||
'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12),
|
||||
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
|
||||
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
|
||||
'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1),
|
||||
'REMOTE_ACCESS_PING_INTERVAL': (int, 'Advanced', 60),
|
||||
'REMOTE_ACCESS_PING_THRESHOLD': (int, 'Advanced', 3),
|
||||
'SESSION_DB_WRITE_ATTEMPTS': (int, 'Advanced', 5),
|
||||
'SHOW_ADVANCED_SETTINGS': (int, 'General', 0),
|
||||
'SLACK_ENABLED': (int, 'Slack', 0),
|
||||
'SLACK_HOOK': (str, 'Slack', ''),
|
||||
'SLACK_CHANNEL': (str, 'Slack', ''),
|
||||
'SLACK_ICON_EMOJI': (str, 'Slack', ''),
|
||||
'SLACK_INCL_PMSLINK': (int, 'Slack', 0),
|
||||
'SLACK_INCL_POSTER': (int, 'Slack', 0),
|
||||
'SLACK_INCL_SUBJECT': (int, 'Slack', 1),
|
||||
'SLACK_USERNAME': (str, 'Slack', ''),
|
||||
'SLACK_ON_PLAY': (int, 'Slack', 0),
|
||||
'SLACK_ON_STOP': (int, 'Slack', 0),
|
||||
'SLACK_ON_PAUSE': (int, 'Slack', 0),
|
||||
'SLACK_ON_RESUME': (int, 'Slack', 0),
|
||||
'SLACK_ON_BUFFER': (int, 'Slack', 0),
|
||||
'SLACK_ON_WATCHED': (int, 'Slack', 0),
|
||||
'SLACK_ON_CREATED': (int, 'Slack', 0),
|
||||
'SLACK_ON_EXTDOWN': (int, 'Slack', 0),
|
||||
'SLACK_ON_INTDOWN': (int, 'Slack', 0),
|
||||
'SLACK_ON_EXTUP': (int, 'Slack', 0),
|
||||
'SLACK_ON_INTUP': (int, 'Slack', 0),
|
||||
'SLACK_ON_PMSUPDATE': (int, 'Slack', 0),
|
||||
'SLACK_ON_CONCURRENT': (int, 'Slack', 0),
|
||||
'SLACK_ON_NEWDEVICE': (int, 'Slack', 0),
|
||||
'SCRIPTS_ENABLED': (int, 'Scripts', 0),
|
||||
'SCRIPTS_FOLDER': (str, 'Scripts', ''),
|
||||
'SCRIPTS_TIMEOUT': (int, 'Scripts', 30),
|
||||
'SCRIPTS_ON_PLAY': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_STOP': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_PAUSE': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_RESUME': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_BUFFER': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_WATCHED': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_CREATED': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_EXTDOWN': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_EXTUP': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_INTDOWN': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_INTUP': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_PMSUPDATE': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_CONCURRENT': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_NEWDEVICE': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_PLAY_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_STOP_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_PAUSE_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_RESUME_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_BUFFER_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_WATCHED_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_CREATED_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_EXTDOWN_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_EXTUP_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_INTDOWN_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_INTUP_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_PMSUPDATE_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_CONCURRENT_SCRIPT': (str, 'Scripts', ''),
|
||||
'SCRIPTS_ON_NEWDEVICE_SCRIPT': (str, 'Scripts', ''),
|
||||
'SYNCHRONOUS_MODE': (str, 'Advanced', 'NORMAL'),
|
||||
'TELEGRAM_BOT_TOKEN': (str, 'Telegram', ''),
|
||||
'TELEGRAM_ENABLED': (int, 'Telegram', 0),
|
||||
'TELEGRAM_CHAT_ID': (str, 'Telegram', ''),
|
||||
'TELEGRAM_DISABLE_WEB_PREVIEW': (int, 'Telegram', 0),
|
||||
'TELEGRAM_HTML_SUPPORT': (int, 'Telegram', 1),
|
||||
'TELEGRAM_INCL_POSTER': (int, 'Telegram', 0),
|
||||
'TELEGRAM_INCL_SUBJECT': (int, 'Telegram', 1),
|
||||
'TELEGRAM_ON_PLAY': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_STOP': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_PAUSE': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_RESUME': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_BUFFER': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_WATCHED': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_CREATED': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_EXTDOWN': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_INTDOWN': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_EXTUP': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_INTUP': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_PMSUPDATE': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_CONCURRENT': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_NEWDEVICE': (int, 'Telegram', 0),
|
||||
'THEMOVIEDB_APIKEY': (str, 'General', 'e9a6655bae34bf694a0f3e33338dc28e'),
|
||||
'THEMOVIEDB_LOOKUP': (int, 'General', 0),
|
||||
'TVMAZE_LOOKUP': (int, 'General', 0),
|
||||
'TV_LOGGING_ENABLE': (int, 'Monitoring', 1),
|
||||
'TV_NOTIFY_ENABLE': (int, 'Monitoring', 0),
|
||||
'TV_NOTIFY_ON_START': (int, 'Monitoring', 1),
|
||||
'TV_NOTIFY_ON_STOP': (int, 'Monitoring', 0),
|
||||
'TV_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
|
||||
'TV_WATCHED_PERCENT': (int, 'Monitoring', 85),
|
||||
'TWITTER_ENABLED': (int, 'Twitter', 0),
|
||||
'TWITTER_ACCESS_TOKEN': (str, 'Twitter', ''),
|
||||
'TWITTER_ACCESS_TOKEN_SECRET': (str, 'Twitter', ''),
|
||||
'TWITTER_CONSUMER_KEY': (str, 'Twitter', ''),
|
||||
'TWITTER_CONSUMER_SECRET': (str, 'Twitter', ''),
|
||||
'TWITTER_INCL_POSTER': (int, 'Twitter', 0),
|
||||
'TWITTER_INCL_SUBJECT': (int, 'Twitter', 1),
|
||||
'TWITTER_ON_PLAY': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_STOP': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_PAUSE': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_RESUME': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_BUFFER': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_WATCHED': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_CREATED': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_EXTDOWN': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_INTDOWN': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_EXTUP': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_INTUP': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_PMSUPDATE': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_CONCURRENT': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_NEWDEVICE': (int, 'Twitter', 0),
|
||||
'UPDATE_DB_INTERVAL': (int, 'General', 24),
|
||||
'UPDATE_SECTION_IDS': (int, 'General', 1),
|
||||
'UPDATE_SHOW_CHANGELOG': (int, 'General', 1),
|
||||
'UPDATE_LABELS': (int, 'General', 1),
|
||||
'UPDATE_LIBRARIES_DB_NOTIFY': (int, 'General', 1),
|
||||
'UPDATE_NOTIFIERS_DB': (int, 'General', 1),
|
||||
'VERBOSE_LOGS': (int, 'Advanced', 1),
|
||||
'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1),
|
||||
'VIDEO_LOGGING_ENABLE': (int, 'Monitoring', 1),
|
||||
'WEBSOCKET_MONITOR_PING_PONG': (int, 'Advanced', 0),
|
||||
'WEBSOCKET_CONNECTION_ATTEMPTS': (int, 'Advanced', 5),
|
||||
'WEBSOCKET_CONNECTION_TIMEOUT': (int, 'Advanced', 5),
|
||||
'WEEK_START_MONDAY': (int, 'General', 0),
|
||||
'XBMC_ENABLED': (int, 'XBMC', 0),
|
||||
'XBMC_HOST': (str, 'XBMC', ''),
|
||||
'XBMC_PASSWORD': (str, 'XBMC', ''),
|
||||
'XBMC_USERNAME': (str, 'XBMC', ''),
|
||||
'XBMC_ON_PLAY': (int, 'XBMC', 0),
|
||||
'XBMC_ON_STOP': (int, 'XBMC', 0),
|
||||
'XBMC_ON_PAUSE': (int, 'XBMC', 0),
|
||||
'XBMC_ON_RESUME': (int, 'XBMC', 0),
|
||||
'XBMC_ON_BUFFER': (int, 'XBMC', 0),
|
||||
'XBMC_ON_WATCHED': (int, 'XBMC', 0),
|
||||
'XBMC_ON_CREATED': (int, 'XBMC', 0),
|
||||
'XBMC_ON_EXTDOWN': (int, 'XBMC', 0),
|
||||
'XBMC_ON_INTDOWN': (int, 'XBMC', 0),
|
||||
'XBMC_ON_EXTUP': (int, 'XBMC', 0),
|
||||
'XBMC_ON_INTUP': (int, 'XBMC', 0),
|
||||
'XBMC_ON_PMSUPDATE': (int, 'XBMC', 0),
|
||||
'XBMC_ON_CONCURRENT': (int, 'XBMC', 0),
|
||||
'XBMC_ON_NEWDEVICE': (int, 'XBMC', 0),
|
||||
'JWT_SECRET': (str, 'Advanced', ''),
|
||||
'JWT_UPDATE_SECRET': (bool_int, 'Advanced', 0),
|
||||
'SYSTEM_ANALYTICS': (int, 'Advanced', 1),
|
||||
@@ -642,16 +190,88 @@ _CONFIG_DEFINITIONS = {
|
||||
}
|
||||
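Each entry in _CONFIG_DEFINITIONS above is a (type, ini_section, default) tuple. A minimal illustrative sketch of how such a table can be resolved into typed values (resolve_setting and the sample ini dict are made up for illustration; Tautulli's actual lookup goes through Config.check_setting):

# Illustrative only: resolve a definition tuple against a parsed ini mapping.
def resolve_setting(definitions, ini, key):
    value_type, section, default = definitions[key]
    raw = ini.get(section, {}).get(key.lower(), default)
    return value_type(raw)  # e.g. int('1') -> 1

_defs = {'JOIN_ENABLED': (int, 'Join', 0)}
print(resolve_setting(_defs, {'Join': {'join_enabled': '1'}}, 'JOIN_ENABLED'))  # 1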
_BLACKLIST_KEYS = ['_APITOKEN', '_TOKEN', '_KEY', '_SECRET', '_PASSWORD', '_APIKEY', '_ID', '_HOOK']
_WHITELIST_KEYS = ['HTTPS_KEY', 'UPDATE_SECTION_IDS']
_WHITELIST_KEYS = ['HTTPS_KEY']

_DO_NOT_IMPORT_KEYS = [
    'FIRST_RUN_COMPLETE', 'GET_FILE_SIZES_HOLD', 'GIT_PATH', 'PMS_LOGS_FOLDER',
    'BACKUP_DIR', 'CACHE_DIR', 'LOG_DIR', 'NEWSLETTER_DIR', 'NEWSLETTER_CUSTOM_DIR',
    'HTTP_HOST', 'HTTP_PORT', 'HTTP_ROOT',
    'HTTP_USERNAME', 'HTTP_PASSWORD', 'HTTP_HASH_PASSWORD', 'HTTP_HASHED_PASSWORD',
    'ENABLE_HTTPS', 'HTTPS_CREATE_CERT', 'HTTPS_CERT', 'HTTPS_CERT_CHAIN', 'HTTPS_KEY'
]
_DO_NOT_IMPORT_KEYS_DOCKER = [
    'PLEXPY_AUTO_UPDATE', 'GIT_REMOTE', 'GIT_BRANCH'
]

IS_IMPORTING = False
IMPORT_THREAD = None
def set_is_importing(value):
    global IS_IMPORTING
    IS_IMPORTING = value


def set_import_thread(config=None, backup=False):
    global IMPORT_THREAD
    if config:
        if IMPORT_THREAD:
            return
        IMPORT_THREAD = threading.Thread(target=import_tautulli_config,
                                         kwargs={'config': config, 'backup': backup})
    else:
        IMPORT_THREAD = None
def import_tautulli_config(config=None, backup=False):
    if IS_IMPORTING:
        logger.warn("Tautulli Config :: Another Tautulli config is currently being imported. "
                    "Please wait until it is complete before importing another config.")
        return False

    if backup:
        # Make a backup of the current config first
        logger.info("Tautulli Config :: Creating a config backup before importing.")
        if not make_backup():
            logger.error("Tautulli Config :: Failed to import Tautulli config: failed to create config backup")
            return False

    # Create a new Config object with the imported config file
    try:
        imported_config = Config(config, is_import=True)
    except:
        logger.error("Tautulli Config :: Failed to import Tautulli config: error reading imported config file")
        return False

    logger.info("Tautulli Config :: Importing Tautulli config '%s'...", config)
    set_is_importing(True)

    # Remove keys that should not be imported
    for key in _DO_NOT_IMPORT_KEYS:
        delattr(imported_config, key)
    if plexpy.DOCKER:
        for key in _DO_NOT_IMPORT_KEYS_DOCKER:
            delattr(imported_config, key)

    # Merge the imported config file into the current config file
    plexpy.CONFIG._config.merge(imported_config._config)
    plexpy.CONFIG.write()

    logger.info("Tautulli Config :: Tautulli config import complete.")
    set_import_thread(None)
    set_is_importing(False)

    # Restart to apply changes
    plexpy.SIGNAL = 'restart'
|
||||
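The import path above relies on ConfigObj's merge(): imported values overwrite the current ones, while keys stripped with delattr() keep their existing values. A standalone sketch of that merge step (file names are placeholders):

from configobj import ConfigObj

current = ConfigObj('config.ini', encoding='utf-8')            # running config
imported = ConfigObj('imported_config.ini', encoding='utf-8')  # file being imported

# Keys removed from `imported` beforehand (e.g. HTTP_PORT) are left untouched here.
current.merge(imported)
current.write()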
|
||||
def make_backup(cleanup=False, scheduler=False):
|
||||
""" Makes a backup of config file, removes all but the last 5 backups """
|
||||
|
||||
if scheduler:
|
||||
backup_file = 'config.backup-%s.sched.ini' % arrow.now().format('YYYYMMDDHHmmss')
|
||||
backup_file = 'config.backup-{}.sched.ini'.format(helpers.now())
|
||||
else:
|
||||
backup_file = 'config.backup-%s.ini' % arrow.now().format('YYYYMMDDHHmmss')
|
||||
backup_file = 'config.backup-{}.ini'.format(helpers.now())
|
||||
backup_folder = plexpy.CONFIG.BACKUP_DIR
|
||||
backup_file_fp = os.path.join(backup_folder, backup_file)
|
||||
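Both branches above now build the timestamp with helpers.now() (defined later in this diff, returning 'YYYYMMDDHHmmss' by default) instead of arrow; only the suffix differs for scheduled backups. A rough sketch of the resulting paths (directory and timestamp are placeholders):

import os

def backup_name(scheduled, ts='20240101123000'):
    suffix = '.sched.ini' if scheduled else '.ini'
    return 'config.backup-{}{}'.format(ts, suffix)

print(os.path.join('/config/backups', backup_name(True)))   # /config/backups/config.backup-20240101123000.sched.ini
print(os.path.join('/config/backups', backup_name(False)))  # /config/backups/config.backup-20240101123000.ini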
|
||||
@@ -687,14 +307,20 @@ def make_backup(cleanup=False, scheduler=False):
|
||||
class Config(object):
|
||||
""" Wraps access to particular values in a config file """
|
||||
|
||||
def __init__(self, config_file):
|
||||
def __init__(self, config_file, is_import=False):
|
||||
""" Initialize the config with values from a file """
|
||||
self._config_file = config_file
|
||||
self._config = ConfigObj(self._config_file, encoding='utf-8')
|
||||
try:
|
||||
self._config = ConfigObj(self._config_file, encoding='utf-8')
|
||||
except ParseError as e:
|
||||
logger.error("Tautulli Config :: Error reading configuration file: %s", e)
|
||||
raise
|
||||
|
||||
for key in _CONFIG_DEFINITIONS:
|
||||
self.check_setting(key)
|
||||
self._upgrade()
|
||||
self._blacklist()
|
||||
if not is_import:
|
||||
self._upgrade()
|
||||
self._blacklist()
|
||||
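With the new is_import flag, a config loaded for import skips _upgrade() and _blacklist(), so the foreign file is read as-is before import_tautulli_config() filters and merges it. Hedged usage sketch (paths are placeholders):

# Normal startup: settings are upgraded and secrets are added to the logger blacklist.
cfg = Config('/config/config.ini')

# Import path: read the foreign file verbatim; filtering/merging happens in import_tautulli_config().
imported = Config('/imports/config.ini', is_import=True)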
|
||||
def _blacklist(self):
|
||||
""" Add tokens and passwords to blacklisted words in logger """
|
||||
@@ -791,6 +417,16 @@ class Config(object):
|
||||
self._config[section][ini_key] = definition_type(value)
|
||||
return self._config[section][ini_key]
|
||||
|
||||
def __delattr__(self, name):
|
||||
"""
|
||||
Deletes a key from the configuration object.
|
||||
"""
|
||||
if not re.match(r'[A-Z_]+$', name):
|
||||
return super(Config, self).__delattr__(name)
|
||||
else:
|
||||
key, definition_type, section, ini_key, default = self._define(name)
|
||||
del self._config[section][ini_key]
|
||||
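The regex gate means only upper-case names are routed to the ini; anything else falls back to normal attribute deletion. A small sketch of the effect, assuming a loaded Config instance named cfg:

# Removes the http_password entry from its ini section in memory (name matches [A-Z_]+).
delattr(cfg, 'HTTP_PASSWORD')

# A lowercase name is treated as a plain Python attribute instead.
cfg.temp = 1
del cfg.temp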
|
||||
def process_kwargs(self, kwargs):
|
||||
"""
|
||||
Given a big bunch of key value pairs, apply them to the ini.
|
||||
@@ -804,14 +440,6 @@ class Config(object):
|
||||
Upgrades config file from previous versions and bumps up config version
|
||||
"""
|
||||
if self.CONFIG_VERSION == 0:
|
||||
# Separate out movie and tv notifications
|
||||
if self.MOVIE_NOTIFY_ENABLE == 1:
|
||||
self.TV_NOTIFY_ENABLE = 1
|
||||
# Separate out movie and tv logging
|
||||
if self.VIDEO_LOGGING_ENABLE == 0:
|
||||
self.MOVIE_LOGGING_ENABLE = 0
|
||||
self.TV_LOGGING_ENABLE = 0
|
||||
|
||||
self.CONFIG_VERSION = 1
|
||||
|
||||
if self.CONFIG_VERSION == 1:
|
||||
@@ -831,23 +459,6 @@ class Config(object):
|
||||
self.CONFIG_VERSION = 2
|
||||
|
||||
if self.CONFIG_VERSION == 2:
|
||||
def rep(s):
|
||||
return s.replace('{progress}', '{progress_duration}')
|
||||
|
||||
self.NOTIFY_ON_START_SUBJECT_TEXT = rep(self.NOTIFY_ON_START_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_START_BODY_TEXT = rep(self.NOTIFY_ON_START_BODY_TEXT)
|
||||
self.NOTIFY_ON_STOP_SUBJECT_TEXT = rep(self.NOTIFY_ON_STOP_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_STOP_BODY_TEXT = rep(self.NOTIFY_ON_STOP_BODY_TEXT)
|
||||
self.NOTIFY_ON_PAUSE_SUBJECT_TEXT = rep(self.NOTIFY_ON_PAUSE_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_PAUSE_BODY_TEXT = rep(self.NOTIFY_ON_PAUSE_BODY_TEXT)
|
||||
self.NOTIFY_ON_RESUME_SUBJECT_TEXT = rep(self.NOTIFY_ON_RESUME_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_RESUME_BODY_TEXT = rep(self.NOTIFY_ON_RESUME_BODY_TEXT)
|
||||
self.NOTIFY_ON_BUFFER_SUBJECT_TEXT = rep(self.NOTIFY_ON_BUFFER_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_BUFFER_BODY_TEXT = rep(self.NOTIFY_ON_BUFFER_BODY_TEXT)
|
||||
self.NOTIFY_ON_WATCHED_SUBJECT_TEXT = rep(self.NOTIFY_ON_WATCHED_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_WATCHED_BODY_TEXT = rep(self.NOTIFY_ON_WATCHED_BODY_TEXT)
|
||||
self.NOTIFY_SCRIPTS_ARGS_TEXT = rep(self.NOTIFY_SCRIPTS_ARGS_TEXT)
|
||||
|
||||
self.CONFIG_VERSION = 3
|
||||
|
||||
if self.CONFIG_VERSION == 3:
|
||||
@@ -880,37 +491,9 @@ class Config(object):
|
||||
self.CONFIG_VERSION = 7
|
||||
|
||||
if self.CONFIG_VERSION == 7:
|
||||
def rep(s):
|
||||
return s.replace('<tv>', '<episode>') \
|
||||
.replace('</tv>', '</episode>') \
|
||||
.replace('<music>', '<track>') \
|
||||
.replace('</music>', '</track>')
|
||||
|
||||
self.NOTIFY_ON_START_SUBJECT_TEXT = rep(self.NOTIFY_ON_START_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_START_BODY_TEXT = rep(self.NOTIFY_ON_START_BODY_TEXT)
|
||||
self.NOTIFY_ON_STOP_SUBJECT_TEXT = rep(self.NOTIFY_ON_STOP_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_STOP_BODY_TEXT = rep(self.NOTIFY_ON_STOP_BODY_TEXT)
|
||||
self.NOTIFY_ON_PAUSE_SUBJECT_TEXT = rep(self.NOTIFY_ON_PAUSE_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_PAUSE_BODY_TEXT = rep(self.NOTIFY_ON_PAUSE_BODY_TEXT)
|
||||
self.NOTIFY_ON_RESUME_SUBJECT_TEXT = rep(self.NOTIFY_ON_RESUME_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_RESUME_BODY_TEXT = rep(self.NOTIFY_ON_RESUME_BODY_TEXT)
|
||||
self.NOTIFY_ON_BUFFER_SUBJECT_TEXT = rep(self.NOTIFY_ON_BUFFER_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_BUFFER_BODY_TEXT = rep(self.NOTIFY_ON_BUFFER_BODY_TEXT)
|
||||
self.NOTIFY_ON_WATCHED_SUBJECT_TEXT = rep(self.NOTIFY_ON_WATCHED_SUBJECT_TEXT)
|
||||
self.NOTIFY_ON_WATCHED_BODY_TEXT = rep(self.NOTIFY_ON_WATCHED_BODY_TEXT)
|
||||
self.NOTIFY_SCRIPTS_ARGS_TEXT = rep(self.NOTIFY_SCRIPTS_ARGS_TEXT)
|
||||
|
||||
self.NOTIFY_GROUP_RECENTLY_ADDED_PARENT = self.NOTIFY_GROUP_RECENTLY_ADDED
|
||||
|
||||
self.MONITORING_USE_WEBSOCKET = 1
|
||||
|
||||
self.CONFIG_VERSION = 8
|
||||
|
||||
if self.CONFIG_VERSION == 8:
|
||||
self.MOVIE_WATCHED_PERCENT = self.NOTIFY_WATCHED_PERCENT
|
||||
self.TV_WATCHED_PERCENT = self.NOTIFY_WATCHED_PERCENT
|
||||
self.MUSIC_WATCHED_PERCENT = self.NOTIFY_WATCHED_PERCENT
|
||||
|
||||
self.CONFIG_VERSION = 9
|
||||
|
||||
if self.CONFIG_VERSION == 9:
|
||||
@@ -936,11 +519,16 @@ class Config(object):
|
||||
self.CONFIG_VERSION = 13
|
||||
|
||||
if self.CONFIG_VERSION == 13:
|
||||
|
||||
self.CONFIG_VERSION = 14
|
||||
|
||||
if self.CONFIG_VERSION == 14:
|
||||
if plexpy.DOCKER:
|
||||
self.PLEXPY_AUTO_UPDATE = 0
|
||||
|
||||
self.CONFIG_VERSION == 15
|
||||
self.CONFIG_VERSION = 15
|
||||
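The replaced line above fixes an accidental comparison: `self.CONFIG_VERSION == 15` evaluates to a boolean that is discarded, so the version was never bumped and the version-15 migration below would re-run. Minimal illustration:

version = 14
version == 15   # expression only; result is discarded, version stays 14
version = 15    # assignment; the config version is actually bumped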
|
||||
if self.CONFIG_VERSION == 15:
|
||||
if self.HTTP_ROOT and self.HTTP_ROOT != '/':
|
||||
self.JWT_UPDATE_SECRET = True
|
||||
|
||||
self.CONFIG_VERSION = 16
|
||||
|
@@ -17,7 +17,6 @@ from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
|
||||
import arrow
|
||||
import os
|
||||
import sqlite3
|
||||
import shutil
|
||||
@@ -26,11 +25,11 @@ import time
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import helpers
|
||||
import logger
|
||||
from helpers import cast_to_int, bool_true
|
||||
else:
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
from plexpy.helpers import cast_to_int, bool_true
|
||||
|
||||
|
||||
FILENAME = "tautulli.db"
|
||||
@@ -41,7 +40,7 @@ IS_IMPORTING = False
|
||||
|
||||
def set_is_importing(value):
|
||||
global IS_IMPORTING
|
||||
IS_IMPORTING = bool_true(value)
|
||||
IS_IMPORTING = value
|
||||
|
||||
|
||||
def validate_database(database=None):
|
||||
@@ -68,6 +67,11 @@ def validate_database(database=None):
|
||||
|
||||
|
||||
def import_tautulli_db(database=None, method=None, backup=False):
|
||||
if IS_IMPORTING:
|
||||
logger.warn("Tautulli Database :: Another Tautulli database is currently being imported. "
|
||||
"Please wait until it is complete before importing another database.")
|
||||
return False
|
||||
|
||||
db_validate = validate_database(database=database)
|
||||
if not db_validate == 'success':
|
||||
logger.error("Tautulli Database :: Failed to import Tautulli database: %s", db_validate)
|
||||
@@ -176,7 +180,7 @@ def import_tautulli_db(database=None, method=None, backup=False):
|
||||
for table_name in session_history_tables:
|
||||
db.action('DROP TABLE {table}_copy'.format(table=table_name))
|
||||
|
||||
db.action('VACUUM')
|
||||
vacuum()
|
||||
|
||||
logger.info("Tautulli Database :: Tautulli database import complete.")
|
||||
set_is_importing(False)
|
||||
@@ -195,7 +199,7 @@ def clear_table(table=None):
|
||||
logger.debug("Tautulli Database :: Clearing database table '%s'." % table)
|
||||
try:
|
||||
monitor_db.action('DELETE FROM %s' % table)
|
||||
monitor_db.action('VACUUM')
|
||||
vacuum()
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Database :: Failed to clear database table '%s': %s." % (table, e))
|
||||
@@ -214,16 +218,21 @@ def delete_recently_added():
|
||||
|
||||
def delete_rows_from_table(table, row_ids):
|
||||
if row_ids and isinstance(row_ids, str):
|
||||
row_ids = list(map(cast_to_int, row_ids.split(',')))
|
||||
row_ids = list(map(helpers.cast_to_int, row_ids.split(',')))
|
||||
|
||||
if row_ids:
|
||||
logger.info("Tautulli Database :: Deleting row ids %s from %s database table", row_ids, table)
|
||||
query = "DELETE FROM " + table + " WHERE id IN (%s) " % ','.join(['?'] * len(row_ids))
|
||||
monitor_db = MonitorDatabase()
|
||||
|
||||
# SQLite versions prior to 3.32.0 (2020-05-22) have a maximum variable limit of 999
|
||||
# https://sqlite.org/limits.html
|
||||
sqlite_max_variable_number = 999
|
||||
|
||||
monitor_db = MonitorDatabase()
|
||||
try:
|
||||
monitor_db.action(query, row_ids)
|
||||
return True
|
||||
for row_ids_group in helpers.chunk(row_ids, sqlite_max_variable_number):
|
||||
query = "DELETE FROM " + table + " WHERE id IN (%s) " % ','.join(['?'] * len(row_ids_group))
|
||||
monitor_db.action(query, row_ids_group)
|
||||
vacuum()
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Database :: Failed to delete rows from %s database table: %s" % (table, e))
|
||||
return False
|
||||
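Deleting in groups keeps every statement under the 999-variable limit of older SQLite builds. A self-contained sketch of the same pattern using the stdlib sqlite3 module (table name and ids are made up):

import sqlite3
from itertools import islice

def chunk(it, size):
    it = iter(it)
    return iter(lambda: tuple(islice(it, size)), ())

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE session_history (id INTEGER)')
conn.executemany('INSERT INTO session_history VALUES (?)', [(i,) for i in range(2500)])

row_ids = list(range(2500))
for group in chunk(row_ids, 999):  # 999, 999, 502 ids per DELETE
    placeholders = ','.join(['?'] * len(group))
    conn.execute('DELETE FROM session_history WHERE id IN (%s)' % placeholders, group)
conn.commit()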
@@ -266,6 +275,20 @@ def delete_library_history(section_id=None):
|
||||
return delete_session_history_rows(row_ids=row_ids)
|
||||
|
||||
|
||||
def vacuum():
    monitor_db = MonitorDatabase()

    logger.info("Tautulli Database :: Vacuuming database.")
    try:
        monitor_db.action('VACUUM')
    except Exception as e:
        logger.error("Tautulli Database :: Failed to vacuum database: %s" % e)


def optimize():
    vacuum()
|
||||
|
||||
def db_filename(filename=FILENAME):
|
||||
""" Returns the filepath to the db """
|
||||
|
||||
@@ -284,9 +307,9 @@ def make_backup(cleanup=False, scheduler=False):
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})
|
||||
|
||||
if scheduler:
|
||||
backup_file = 'tautulli.backup-{}{}.sched.db'.format(arrow.now().format('YYYYMMDDHHmmss'), corrupt)
|
||||
backup_file = 'tautulli.backup-{}{}.sched.db'.format(helpers.now(), corrupt)
|
||||
else:
|
||||
backup_file = 'tautulli.backup-{}{}.db'.format(arrow.now().format('YYYYMMDDHHmmss'), corrupt)
|
||||
backup_file = 'tautulli.backup-{}{}.db'.format(helpers.now(), corrupt)
|
||||
backup_folder = plexpy.CONFIG.BACKUP_DIR
|
||||
backup_file_fp = os.path.join(backup_folder, backup_file)
|
||||
|
||||
|
@@ -31,7 +31,7 @@ import datetime
|
||||
from functools import wraps
|
||||
import hashlib
|
||||
import imghdr
|
||||
from future.moves.itertools import zip_longest
|
||||
from future.moves.itertools import islice, zip_longest
|
||||
import ipwhois
|
||||
import ipwhois.exceptions
|
||||
import ipwhois.utils
|
||||
@@ -52,10 +52,12 @@ import xmltodict
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import logger
|
||||
import request
|
||||
from api2 import API2
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import logger
|
||||
from plexpy import request
|
||||
from plexpy.api2 import API2
|
||||
@@ -110,7 +112,7 @@ def radio(variable, pos):
|
||||
return ''
|
||||
|
||||
|
||||
def latinToAscii(unicrap):
|
||||
def latinToAscii(unicrap, replace=False):
|
||||
"""
|
||||
From couch potato
|
||||
"""
|
||||
@@ -148,7 +150,8 @@ def latinToAscii(unicrap):
|
||||
if ord(i) in xlate:
|
||||
r += xlate[ord(i)]
|
||||
elif ord(i) >= 0x80:
|
||||
pass
|
||||
if replace:
|
||||
r += '?'
|
||||
else:
|
||||
r += str(i)
|
||||
|
||||
@@ -210,24 +213,25 @@ def today():
|
||||
return yyyymmdd
|
||||
|
||||
|
||||
def now():
|
||||
now = datetime.datetime.now()
|
||||
|
||||
return now.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
def utc_now_iso():
|
||||
utcnow = datetime.datetime.utcnow()
|
||||
|
||||
return utcnow.isoformat()
|
||||
|
||||
|
||||
def timestamp_to_YMD(timestamp):
|
||||
return timestamp_to_datetime(timestamp).strftime("%Y-%m-%d")
|
||||
def now(sep=False):
    return timestamp_to_YMDHMS(timestamp(), sep=sep)


def timestamp_to_datetime(timestamp):
    return datetime.datetime.fromtimestamp(cast_to_int(str(timestamp)))

def timestamp_to_YMDHMS(ts, sep=False):
    dt = timestamp_to_datetime(ts)
    if sep:
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    return dt.strftime("%Y%m%d%H%M%S")


def timestamp_to_datetime(ts):
    return datetime.datetime.fromtimestamp(ts)
|
||||
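now() is now a thin wrapper over timestamp_to_YMDHMS(), so the same epoch value can be rendered compactly for filenames or with separators for display. Illustration with a fixed epoch (the exact strings depend on the local timezone; timestamp() is assumed to return the current epoch seconds as elsewhere in helpers):

import datetime

ts = 1704112200  # placeholder epoch value
dt = datetime.datetime.fromtimestamp(ts)
dt.strftime("%Y%m%d%H%M%S")       # what now() returns, e.g. '20240101123000'
dt.strftime("%Y-%m-%d %H:%M:%S")  # what now(sep=True) returns, e.g. '2024-01-01 12:30:00'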
|
||||
|
||||
def iso_to_YMD(iso):
|
||||
@@ -445,22 +449,25 @@ def create_https_certificates(ssl_cert, ssl_key):
|
||||
return False
|
||||
from certgen import createKeyPair, createSelfSignedCertificate, TYPE_RSA
|
||||
|
||||
issuer = common.PRODUCT
|
||||
serial = timestamp()
|
||||
not_before = 0
|
||||
not_after = 60 * 60 * 24 * 365 * 10 # ten years
|
||||
domains = ['DNS:' + d.strip() for d in plexpy.CONFIG.HTTPS_DOMAIN.split(',') if d]
|
||||
ips = ['IP:' + d.strip() for d in plexpy.CONFIG.HTTPS_IP.split(',') if d]
|
||||
altNames = ','.join(domains + ips)
|
||||
alt_names = ','.join(domains + ips).encode('utf-8')
|
||||
|
||||
# Create the self-signed Tautulli certificate
|
||||
logger.debug("Generating self-signed SSL certificate.")
|
||||
pkey = createKeyPair(TYPE_RSA, 2048)
|
||||
cert = createSelfSignedCertificate("Tautulli", pkey, serial, 0, 60 * 60 * 24 * 365 * 10, altNames) # ten years
|
||||
cert = createSelfSignedCertificate(issuer, pkey, serial, not_before, not_after, alt_names)
|
||||
|
||||
# Save the key and certificate to disk
|
||||
try:
|
||||
with open(ssl_cert, "w") as fp:
|
||||
fp.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
|
||||
fp.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf-8'))
|
||||
with open(ssl_key, "w") as fp:
|
||||
fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
|
||||
fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey).decode('utf-8'))
|
||||
except IOError as e:
|
||||
logger.error("Error creating SSL key and certificate: %s", e)
|
||||
return False
|
||||
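On Python 3, pyOpenSSL's dump_certificate()/dump_privatekey() return bytes, hence the added .decode('utf-8') before writing to text-mode files, and the subjectAltName string is likewise encoded to bytes for the generator. Sketch of the write step only (cert, crypto and ssl_cert are taken from the surrounding function, not redefined here):

pem_cert = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)  # bytes on Python 3
with open(ssl_cert, "w") as fp:                                # text-mode file
    fp.write(pem_cert.decode('utf-8'))                         # so decode before writing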
@@ -610,7 +617,8 @@ def whois_lookup(ip_address):
|
||||
nets = []
|
||||
err = None
|
||||
try:
|
||||
whois = ipwhois.IPWhois(ip_address).lookup_whois(retry_count=0)
|
||||
whois = ipwhois.IPWhois(ip_address).lookup_whois(retry_count=0,
|
||||
asn_methods=['dns', 'whois', 'http'])
|
||||
countries = ipwhois.utils.get_countries()
|
||||
nets = whois['nets']
|
||||
for net in nets:
|
||||
@@ -731,11 +739,17 @@ def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
|
||||
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
|
||||
)
|
||||
|
||||
# Cloudinary library has very poor support for non-ASCII characters on Python 2
|
||||
if plexpy.PYTHON2:
|
||||
_img_title = latinToAscii(img_title, replace=True)
|
||||
else:
|
||||
_img_title = img_title
|
||||
|
||||
try:
|
||||
response = upload(img_data,
|
||||
response = upload((img_title, img_data),
|
||||
public_id='{}_{}'.format(fallback, rating_key),
|
||||
tags=['tautulli', fallback, str(rating_key)],
|
||||
context={'title': img_title, 'rating_key': str(rating_key), 'fallback': fallback})
|
||||
context={'title': _img_title, 'rating_key': str(rating_key), 'fallback': fallback})
|
||||
logger.debug("Tautulli Helpers :: Image '{}' ({}) uploaded to Cloudinary.".format(img_title, fallback))
|
||||
img_url = response.get('url', '')
|
||||
except Exception as e:
|
||||
@@ -854,13 +868,28 @@ def build_datatables_json(kwargs, dt_columns, default_sort_col=None):
|
||||
if not default_sort_col:
|
||||
default_sort_col = dt_columns[0][0]
|
||||
|
||||
order_column = [c[0] for c in dt_columns].index(kwargs.pop("order_column", default_sort_col))
|
||||
column_names = [c[0] for c in dt_columns]
|
||||
order_columns = [c.strip() for c in kwargs.pop("order_column", default_sort_col).split(",")]
|
||||
order_dirs = [d.strip() for d in kwargs.pop("order_dir", "desc").split(",")]
|
||||
|
||||
order = []
|
||||
for c, d in zip_longest(order_columns, order_dirs, fillvalue=""):
|
||||
try:
|
||||
order_column = column_names.index(c)
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
if d.lower() in ("asc", "desc"):
|
||||
order_dir = d.lower()
|
||||
else:
|
||||
order_dir = "desc"
|
||||
|
||||
order.append({"column": order_column, "dir": order_dir})
|
||||
|
||||
# Build json data
|
||||
json_data = {"draw": 1,
|
||||
"columns": columns,
|
||||
"order": [{"column": order_column,
|
||||
"dir": kwargs.pop("order_dir", "desc")}],
|
||||
"order": order,
|
||||
"start": int(kwargs.pop("start", 0)),
|
||||
"length": int(kwargs.pop("length", 25)),
|
||||
"search": {"value": kwargs.pop("search", "")}
|
||||
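The rewritten ordering accepts comma-separated column and direction lists, silently skips unknown columns, and falls back to 'desc' for any missing or invalid direction. Hedged example of the resulting structure, assuming dt_columns begins with ('friendly_name', ...) and ('started', ...):

kwargs = {'order_column': 'friendly_name, started', 'order_dir': 'asc'}
# After the loop above:
# order == [{'column': 0, 'dir': 'asc'},
#           {'column': 1, 'dir': 'desc'}]   # second direction missing -> 'desc'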
@@ -1063,6 +1092,11 @@ def grouper(iterable, n, fillvalue=None):
|
||||
return zip_longest(fillvalue=fillvalue, *args)
|
||||
|
||||
|
||||
def chunk(it, size):
    it = iter(it)
    return iter(lambda: tuple(islice(it, size)), ())
|
||||
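chunk() lazily yields tuples of at most `size` items and ends cleanly on a short final group, for example:

list(chunk(range(7), 3))   # [(0, 1, 2), (3, 4, 5), (6,)]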
|
||||
|
||||
def traverse_map(obj, func):
|
||||
if isinstance(obj, list):
|
||||
new_obj = []
|
||||
@@ -1215,6 +1249,9 @@ def browse_path(path=None, include_hidden=False, filter_ext=''):
|
||||
}
|
||||
output.append(out)
|
||||
|
||||
if os.path.isfile(path):
|
||||
path = os.path.dirname(path)
|
||||
|
||||
if not os.path.isdir(path):
|
||||
return output
|
||||
|
||||
@@ -1252,6 +1289,10 @@ def browse_path(path=None, include_hidden=False, filter_ext=''):
|
||||
'icon': 'folder'
|
||||
}
|
||||
output.append(out)
|
||||
|
||||
if filter_ext == '.folderonly':
|
||||
break
|
||||
|
||||
for f in sorted(files):
|
||||
if not include_hidden and f.startswith('.'):
|
||||
continue
|
||||
|
@@ -88,6 +88,8 @@ def refresh_libraries():
|
||||
if result == 'insert':
|
||||
new_keys.append(section['section_id'])
|
||||
|
||||
add_live_tv_library(refresh=True)
|
||||
|
||||
query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR ' \
|
||||
'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids)))
|
||||
monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids)
|
||||
@@ -100,14 +102,6 @@ def refresh_libraries():
|
||||
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys)
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
#if plexpy.CONFIG.UPDATE_SECTION_IDS == 1 or plexpy.CONFIG.UPDATE_SECTION_IDS == -1:
|
||||
# # Start library section_id update on it's own thread
|
||||
# threading.Thread(target=libraries.update_section_ids).start()
|
||||
|
||||
#if plexpy.CONFIG.UPDATE_LABELS == 1 or plexpy.CONFIG.UPDATE_LABELS == -1:
|
||||
# # Start library labels update on it's own thread
|
||||
# threading.Thread(target=libraries.update_labels).start()
|
||||
|
||||
logger.info("Tautulli Libraries :: Libraries list refreshed.")
|
||||
return True
|
||||
else:
|
||||
@@ -115,28 +109,28 @@ def refresh_libraries():
|
||||
return False
|
||||
|
||||
|
||||
def add_live_tv_library():
|
||||
if not plexpy.CONFIG.ADD_LIVE_TV_LIBRARY:
|
||||
def add_live_tv_library(refresh=False):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
result = monitor_db.select_single('SELECT * FROM library_sections '
|
||||
'WHERE section_id = ? and server_id = ?',
|
||||
[common.LIVE_TV_SECTION_ID, plexpy.CONFIG.PMS_IDENTIFIER])
|
||||
|
||||
if result and not refresh or not result and refresh:
|
||||
return
|
||||
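Because of operator precedence the guard reads as (result and not refresh) or (not result and refresh): a manual add bails out if the Live TV section already exists, and a refresh bails out if it was never added. Summary of the four cases:

# result (row exists) | refresh | outcome
# True                | False   | return  (already added, nothing to do)
# False               | True    | return  (never added, refresh does not create it)
# False               | False   | upsert  (insert the Live TV section)
# True                | True    | upsert  (re-mark the existing section as active)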
|
||||
logger.info("Tautulli Libraries :: Adding Live TV library to the database.")
|
||||
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
section_keys = {'server_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'section_id': common.LIVE_TV_SECTION_ID}
|
||||
section_values = {'server_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'section_id': common.LIVE_TV_SECTION_ID,
|
||||
'section_name': common.LIVE_TV_SECTION_NAME,
|
||||
'section_type': 'live'
|
||||
'section_type': 'live',
|
||||
'is_active': 1
|
||||
}
|
||||
|
||||
result = monitor_db.upsert('library_sections', key_dict=section_keys, value_dict=section_values)
|
||||
|
||||
if result == 'insert':
|
||||
plexpy.CONFIG.__setattr__('ADD_LIVE_TV_LIBRARY', 0)
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
|
||||
def has_library_type(section_type):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
@@ -146,152 +140,6 @@ def has_library_type(section_type):
|
||||
return bool(result)
|
||||
|
||||
|
||||
def update_section_ids():
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = -1
|
||||
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
try:
|
||||
query = 'SELECT id, rating_key, grandparent_rating_key, media_type ' \
|
||||
'FROM session_history_metadata WHERE section_id IS NULL'
|
||||
history_results = monitor_db.select(query=query)
|
||||
query = 'SELECT section_id, section_type FROM library_sections'
|
||||
library_results = monitor_db.select(query=query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Libraries :: Unable to execute database query for update_section_ids: %s." % e)
|
||||
|
||||
logger.warn("Tautulli Libraries :: Unable to update section_id's in database.")
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = 1
|
||||
plexpy.CONFIG.write()
|
||||
return None
|
||||
|
||||
if not history_results:
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = 0
|
||||
plexpy.CONFIG.write()
|
||||
return None
|
||||
|
||||
logger.debug("Tautulli Libraries :: Updating section_id's in database.")
|
||||
|
||||
# Get rating_key: section_id mapping pairs
|
||||
key_mappings = {}
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
for library in library_results:
|
||||
section_id = library['section_id']
|
||||
section_type = library['section_type']
|
||||
|
||||
if section_type != 'photo':
|
||||
library_children = pms_connect.get_library_children_details(section_id=section_id,
|
||||
section_type=section_type)
|
||||
if library_children:
|
||||
children_list = library_children['children_list']
|
||||
key_mappings.update({child['rating_key']: child['section_id'] for child in children_list})
|
||||
else:
|
||||
logger.warn("Tautulli Libraries :: Unable to get a list of library items for section_id %s." % section_id)
|
||||
|
||||
error_keys = set()
|
||||
for item in history_results:
|
||||
rating_key = item['grandparent_rating_key'] if item['media_type'] != 'movie' else item['rating_key']
|
||||
section_id = key_mappings.get(str(rating_key), None)
|
||||
|
||||
if section_id:
|
||||
try:
|
||||
section_keys = {'id': item['id']}
|
||||
section_values = {'section_id': section_id}
|
||||
monitor_db.upsert('session_history_metadata', key_dict=section_keys, value_dict=section_values)
|
||||
except:
|
||||
error_keys.add(item['rating_key'])
|
||||
else:
|
||||
error_keys.add(item['rating_key'])
|
||||
|
||||
if error_keys:
|
||||
logger.info("Tautulli Libraries :: Updated all section_id's in database except for rating_keys: %s." %
|
||||
', '.join(str(key) for key in error_keys))
|
||||
else:
|
||||
logger.info("Tautulli Libraries :: Updated all section_id's in database.")
|
||||
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = 0
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
return True
|
||||
|
||||
def update_labels():
|
||||
plexpy.CONFIG.UPDATE_LABELS = -1
|
||||
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
try:
|
||||
query = 'SELECT section_id, section_type FROM library_sections'
|
||||
library_results = monitor_db.select(query=query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Libraries :: Unable to execute database query for update_labels: %s." % e)
|
||||
|
||||
logger.warn("Tautulli Libraries :: Unable to update labels in database.")
|
||||
plexpy.CONFIG.UPDATE_LABELS = 1
|
||||
plexpy.CONFIG.write()
|
||||
return None
|
||||
|
||||
if not library_results:
|
||||
plexpy.CONFIG.UPDATE_LABELS = 0
|
||||
plexpy.CONFIG.write()
|
||||
return None
|
||||
|
||||
logger.debug("Tautulli Libraries :: Updating labels in database.")
|
||||
|
||||
# Get rating_key: section_id mapping pairs
|
||||
key_mappings = {}
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
for library in library_results:
|
||||
section_id = library['section_id']
|
||||
section_type = library['section_type']
|
||||
|
||||
if section_type != 'photo':
|
||||
library_children = []
|
||||
library_labels = pms_connect.get_library_label_details(section_id=section_id)
|
||||
|
||||
if library_labels:
|
||||
for label in library_labels:
|
||||
library_children = pms_connect.get_library_children_details(section_id=section_id,
|
||||
section_type=section_type,
|
||||
label_key=label['label_key'])
|
||||
|
||||
if library_children:
|
||||
children_list = library_children['children_list']
|
||||
# rating_key_list = [child['rating_key'] for child in children_list]
|
||||
|
||||
for rating_key in [child['rating_key'] for child in children_list]:
|
||||
if key_mappings.get(rating_key):
|
||||
key_mappings[rating_key].append(label['label_title'])
|
||||
else:
|
||||
key_mappings[rating_key] = [label['label_title']]
|
||||
|
||||
else:
|
||||
logger.warn("Tautulli Libraries :: Unable to get a list of library items for section_id %s."
|
||||
% section_id)
|
||||
|
||||
error_keys = set()
|
||||
for rating_key, labels in key_mappings.items():
|
||||
try:
|
||||
labels = ';'.join(labels)
|
||||
monitor_db.action('UPDATE session_history_metadata SET labels = ? '
|
||||
'WHERE rating_key = ? OR parent_rating_key = ? OR grandparent_rating_key = ? ',
|
||||
args=[labels, rating_key, rating_key, rating_key])
|
||||
except:
|
||||
error_keys.add(rating_key)
|
||||
|
||||
if error_keys:
|
||||
logger.info("Tautulli Libraries :: Updated all labels in database except for rating_keys: %s." %
|
||||
', '.join(str(key) for key in error_keys))
|
||||
else:
|
||||
logger.info("Tautulli Libraries :: Updated all labels in database.")
|
||||
|
||||
plexpy.CONFIG.UPDATE_LABELS = 0
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class Libraries(object):
|
||||
|
||||
def __init__(self):
|
||||
@@ -326,7 +174,8 @@ class Libraries(object):
|
||||
'library_sections.child_count',
|
||||
'library_sections.thumb AS library_thumb',
|
||||
'library_sections.custom_thumb_url AS custom_thumb',
|
||||
'library_sections.art',
|
||||
'library_sections.art AS library_art',
|
||||
'library_sections.custom_art_url AS custom_art',
|
||||
'COUNT(DISTINCT %s) AS plays' % group_by,
|
||||
'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \
|
||||
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
|
||||
@@ -391,6 +240,11 @@ class Libraries(object):
|
||||
else:
|
||||
library_thumb = common.DEFAULT_COVER_THUMB
|
||||
|
||||
if item['custom_art'] and item['custom_art'] != item['library_art']:
|
||||
library_art = item['custom_art']
|
||||
else:
|
||||
library_art = item['library_art']
|
||||
|
||||
row = {'row_id': item['row_id'],
|
||||
'server_id': item['server_id'],
|
||||
'section_id': item['section_id'],
|
||||
@@ -400,7 +254,7 @@ class Libraries(object):
|
||||
'parent_count': item['parent_count'],
|
||||
'child_count': item['child_count'],
|
||||
'library_thumb': library_thumb,
|
||||
'library_art': item['art'],
|
||||
'library_art': library_art,
|
||||
'plays': item['plays'],
|
||||
'duration': item['duration'],
|
||||
'last_accessed': item['last_accessed'],
|
||||
@@ -935,7 +789,8 @@ class Libraries(object):
|
||||
if str(section_id).isdigit():
|
||||
query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
|
||||
'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
|
||||
'users.user_id, users.thumb, COUNT(DISTINCT %s) AS user_count ' \
|
||||
'users.user_id, users.thumb, users.custom_avatar_url AS custom_thumb, ' \
|
||||
'COUNT(DISTINCT %s) AS user_count ' \
|
||||
'FROM session_history ' \
|
||||
'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
|
||||
'JOIN users ON users.user_id = session_history.user_id ' \
|
||||
@@ -950,9 +805,16 @@ class Libraries(object):
|
||||
result = []
|
||||
|
||||
for item in result:
|
||||
if item['custom_thumb'] and item['custom_thumb'] != item['thumb']:
|
||||
user_thumb = item['custom_thumb']
|
||||
elif item['thumb']:
|
||||
user_thumb = item['thumb']
|
||||
else:
|
||||
user_thumb = common.DEFAULT_USER_THUMB
|
||||
|
||||
row = {'friendly_name': item['friendly_name'],
|
||||
'user_id': item['user_id'],
|
||||
'user_thumb': item['thumb'],
|
||||
'user_thumb': user_thumb,
|
||||
'total_plays': item['user_count']
|
||||
}
|
||||
user_stats.append(row)
|
||||
@@ -980,7 +842,7 @@ class Libraries(object):
|
||||
'JOIN session_history ON session_history_metadata.id = session_history.id ' \
|
||||
'WHERE section_id = ? ' \
|
||||
'GROUP BY session_history.rating_key ' \
|
||||
'ORDER BY started DESC LIMIT ?'
|
||||
'ORDER BY MAX(started) DESC LIMIT ?'
|
||||
result = monitor_db.select(query, args=[section_id, limit])
|
||||
else:
|
||||
result = []
|
||||
@@ -1149,39 +1011,3 @@ class Libraries(object):
|
||||
return 'Deleted duplicate libraries from the database.'
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Libraries :: Unable to delete duplicate libraries: %s." % e)
|
||||
|
||||
|
||||
def update_libraries_db_notify():
|
||||
logger.info("Tautulli Libraries :: Upgrading library notification toggles...")
|
||||
|
||||
# Set flag first in case something fails we don't want to keep re-adding the notifiers
|
||||
plexpy.CONFIG.__setattr__('UPDATE_LIBRARIES_DB_NOTIFY', 0)
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
libraries = Libraries()
|
||||
sections = libraries.get_sections()
|
||||
|
||||
for section in sections:
|
||||
section_details = libraries.get_details(section['section_id'])
|
||||
|
||||
if (section_details['do_notify'] == 1 and
|
||||
(section_details['section_type'] == 'movie' and not plexpy.CONFIG.MOVIE_NOTIFY_ENABLE) or
|
||||
(section_details['section_type'] == 'show' and not plexpy.CONFIG.TV_NOTIFY_ENABLE) or
|
||||
(section_details['section_type'] == 'artist' and not plexpy.CONFIG.MUSIC_NOTIFY_ENABLE)):
|
||||
do_notify = 0
|
||||
else:
|
||||
do_notify = section_details['do_notify']
|
||||
|
||||
if (section_details['keep_history'] == 1 and
|
||||
(section_details['section_type'] == 'movie' and not plexpy.CONFIG.MOVIE_LOGGING_ENABLE) or
|
||||
(section_details['section_type'] == 'show' and not plexpy.CONFIG.TV_LOGGING_ENABLE) or
|
||||
(section_details['section_type'] == 'artist' and not plexpy.CONFIG.MUSIC_LOGGING_ENABLE)):
|
||||
keep_history = 0
|
||||
else:
|
||||
keep_history = section_details['keep_history']
|
||||
|
||||
libraries.set_config(section_id=section_details['section_id'],
|
||||
custom_thumb=section_details['library_thumb'],
|
||||
do_notify=do_notify,
|
||||
keep_history=keep_history,
|
||||
do_notify_created=section_details['do_notify_created'])
|
||||
|
@@ -99,9 +99,9 @@ class BlacklistFilter(logging.Filter):
|
||||
for item in _BLACKLIST_WORDS:
|
||||
try:
|
||||
if item in record.msg:
|
||||
record.msg = record.msg.replace(item, 8 * '*' + item[-2:])
|
||||
record.msg = record.msg.replace(item, 16 * '*')
|
||||
if any(item in str(arg) for arg in record.args):
|
||||
record.args = tuple(arg.replace(item, 8 * '*' + item[-2:]) if isinstance(arg, str) else arg
|
||||
record.args = tuple(arg.replace(item, 16 * '*') if isinstance(arg, str) else arg
|
||||
for arg in record.args)
|
||||
except:
|
||||
pass
|
||||
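Blacklisted secrets are now fully masked with 16 asterisks instead of leaving the last two characters visible; the token and e-mail filters below get the same treatment. Illustration:

secret = 'abcd1234efgh5678'
msg = 'token=%s' % secret
msg.replace(secret, 8 * '*' + secret[-2:])   # old behaviour: 'token=********78'
msg.replace(secret, 16 * '*')                # new behaviour: 'token=****************'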
@@ -155,7 +155,7 @@ class PublicIPFilter(RegexFilter):
|
||||
def replace(self, text, ip):
|
||||
if helpers.is_public_ip(ip.replace('-', '.')):
|
||||
partition = '-' if '-' in ip else '.'
|
||||
return text.replace(ip, ip.partition(partition)[0] + (partition + '***') * 3)
|
||||
return text.replace(ip, partition.join(['***'] * 4))
|
||||
return text
|
||||
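Public IPs are now masked in every group rather than keeping the first octet, preserving only the separator style. Expected behaviour of the filter's replace() (addresses are examples):

# replace('Request from 203.0.113.42', '203.0.113.42')  -> 'Request from ***.***.***.***'
# replace('client 203-0-113-42 seen', '203-0-113-42')   -> 'client ***-***-***-***'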
|
||||
|
||||
@@ -172,7 +172,7 @@ class EmailFilter(RegexFilter):
|
||||
|
||||
def replace(self, text, email):
|
||||
email_parts = email.partition('@')
|
||||
return text.replace(email, email_parts[0][:2] + 8 * '*' + email_parts[1] + 8 * '*')
|
||||
return text.replace(email, 16 * '*' + email_parts[1] + 8 * '*')
|
||||
|
||||
|
||||
class PlexTokenFilter(RegexFilter):
|
||||
@@ -185,7 +185,7 @@ class PlexTokenFilter(RegexFilter):
|
||||
self.regex = re.compile(r'X-Plex-Token(?:=|%3D)([a-zA-Z0-9]+)')
|
||||
|
||||
def replace(self, text, token):
|
||||
return text.replace(token, 8 * '*' + token[-2:])
|
||||
return text.replace(token, 16 * '*')
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
|
@@ -19,7 +19,16 @@ import os
|
||||
import subprocess
|
||||
import sys
|
||||
import plistlib
|
||||
import rumps
|
||||
|
||||
try:
|
||||
import AppKit
|
||||
import Foundation
|
||||
HAS_PYOBJC = True
|
||||
except ImportError:
|
||||
HAS_PYOBJC = False
|
||||
|
||||
if HAS_PYOBJC:
|
||||
import rumps
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
@@ -59,11 +68,11 @@ class MacOSSystemTray(object):
|
||||
self.tray_icon = rumps.App(common.PRODUCT, icon=self.icon, menu=self.menu, quit_button=None)
|
||||
|
||||
def start(self):
|
||||
logger.info("Launching MacOS system tray icon.")
|
||||
logger.info("Launching MacOS menu bar icon.")
|
||||
try:
|
||||
self.tray_icon.run()
|
||||
except Exception as e:
|
||||
logger.error("Unable to launch system tray icon: %s." % e)
|
||||
logger.error("Unable to launch menu bar icon: %s." % e)
|
||||
|
||||
def shutdown(self):
|
||||
rumps.quit_application()
|
||||
|
@@ -18,6 +18,7 @@
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
|
||||
import requests
|
||||
import threading
|
||||
|
||||
import plexpy
|
||||
@@ -34,11 +35,18 @@ else:
|
||||
TEMP_DEVICE_TOKEN = None
|
||||
INVALIDATE_TIMER = None
|
||||
|
||||
_ONESIGNAL_APP_ID = '3b4b666a-d557-4b92-acdf-e2c8c4b95357'
|
||||
|
||||
|
||||
def set_temp_device_token(token=None):
|
||||
global TEMP_DEVICE_TOKEN
|
||||
TEMP_DEVICE_TOKEN = token
|
||||
|
||||
if TEMP_DEVICE_TOKEN:
|
||||
logger._BLACKLIST_WORDS.add(TEMP_DEVICE_TOKEN)
|
||||
else:
|
||||
logger._BLACKLIST_WORDS.discard(TEMP_DEVICE_TOKEN)
|
||||
|
||||
if TEMP_DEVICE_TOKEN is not None:
|
||||
global INVALIDATE_TIMER
|
||||
if INVALIDATE_TIMER:
|
||||
@@ -79,18 +87,21 @@ def get_mobile_device_by_token(device_token=None):
|
||||
return get_mobile_devices(device_token=device_token)
|
||||
|
||||
|
||||
def add_mobile_device(device_id=None, device_name=None, device_token=None, friendly_name=None):
|
||||
def add_mobile_device(device_id=None, device_name=None, device_token=None, friendly_name=None, onesignal_id=None):
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
keys = {'device_id': device_id}
|
||||
values = {'device_name': device_name,
|
||||
'device_token': device_token}
|
||||
'device_token': device_token,
|
||||
'onesignal_id': onesignal_id,
|
||||
'official': validate_onesignal_id(onesignal_id=onesignal_id)}
|
||||
|
||||
if friendly_name:
|
||||
values['friendly_name'] = friendly_name
|
||||
|
||||
try:
|
||||
result = db.upsert(table_name='mobile_devices', key_dict=keys, value_dict=values)
|
||||
blacklist_logger()
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli MobileApp :: Failed to register mobile device in the database: %s." % e)
|
||||
return
|
||||
@@ -131,6 +142,7 @@ def set_mobile_device_config(mobile_device_id=None, **kwargs):
|
||||
try:
|
||||
db.upsert(table_name='mobile_devices', key_dict=keys, value_dict=values)
|
||||
logger.info("Tautulli MobileApp :: Updated mobile device agent: mobile_device_id %s." % mobile_device_id)
|
||||
blacklist_logger()
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli MobileApp :: Unable to update mobile device: %s." % e)
|
||||
@@ -161,6 +173,17 @@ def set_last_seen(device_token=None):
|
||||
return
|
||||
|
||||
|
||||
def validate_onesignal_id(onesignal_id):
|
||||
if onesignal_id is None:
|
||||
return False
|
||||
|
||||
headers = {'Content-Type': 'application/json'}
|
||||
payload = {'app_id': _ONESIGNAL_APP_ID}
|
||||
|
||||
r = requests.get('https://onesignal.com/api/v1/players/{}'.format(onesignal_id), headers=headers, json=payload)
|
||||
return r.status_code == 200
|
||||
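add_mobile_device() above marks a registration as 'official' only when its OneSignal player id resolves against the Tautulli app id; a missing id returns False without any request. Usage sketch (the second call performs a real HTTP GET to onesignal.com):

validate_onesignal_id(None)       # False, short-circuits before the request
validate_onesignal_id('bad-id')   # False unless OneSignal answers with HTTP 200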
|
||||
|
||||
def blacklist_logger():
|
||||
devices = get_mobile_devices()
|
||||
for d in devices:
|
||||
|
@@ -17,6 +17,7 @@
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from io import open
|
||||
import os
|
||||
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
@@ -214,7 +215,7 @@ def get_newsletter(newsletter_uuid=None, newsletter_id_name=None):
|
||||
|
||||
if newsletter_file in os.listdir(newsletter_folder):
|
||||
try:
|
||||
with open(newsletter_file_fp, 'r') as n_file:
|
||||
with open(newsletter_file_fp, 'r', encoding='utf-8') as n_file:
|
||||
newsletter = n_file.read()
|
||||
return newsletter
|
||||
except OSError as e:
|
||||
|
@@ -320,6 +320,7 @@ def blacklist_logger():
|
||||
|
||||
def serve_template(templatename, **kwargs):
|
||||
if plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR:
|
||||
logger.info("Tautulli Newsletters :: Using custom newsletter template directory.")
|
||||
template_dir = plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR
|
||||
else:
|
||||
interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
|
||||
@@ -471,6 +472,8 @@ class Newsletter(object):
|
||||
|
||||
self.retrieve_data()
|
||||
|
||||
logger.info("Tautulli Newsletters :: Generating newsletter%s." % (' preview' if self.is_preview else ''))
|
||||
|
||||
newsletter_rendered, self.template_error = serve_template(
|
||||
templatename=self._TEMPLATE,
|
||||
uuid=self.uuid,
|
||||
|
@@ -601,39 +601,51 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
pms_identifier=plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
rating_key=plex_web_rating_key)
|
||||
|
||||
# Check external guids
|
||||
for guid in notify_params['guids']:
|
||||
if 'imdb://' in guid:
|
||||
notify_params['imdb_id'] = guid.split('imdb://')[1]
|
||||
elif 'tmdb://' in guid:
|
||||
notify_params['themoviedb_id'] = guid.split('tmdb://')[1]
|
||||
elif 'tvdb://' in guid:
|
||||
notify_params['thetvdb_id'] = guid.split('tvdb://')[1]
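The new `guids` list holds plain URI-style strings; a minimal sketch of the same prefix mapping on illustrative values (the sample guids below are hypothetical, not taken from the diff):

# Illustrative Plex external guid strings (hypothetical values).
guids = ['imdb://tt0111161', 'tmdb://278', 'tvdb://81189']

external_ids = {}
for guid in guids:
    # Each guid is '<agent>://<id>'; the agent prefix decides which ID field it fills.
    if 'imdb://' in guid:
        external_ids['imdb_id'] = guid.split('imdb://')[1]
    elif 'tmdb://' in guid:
        external_ids['themoviedb_id'] = guid.split('tmdb://')[1]
    elif 'tvdb://' in guid:
        external_ids['thetvdb_id'] = guid.split('tvdb://')[1]

print(external_ids)  # {'imdb_id': 'tt0111161', 'themoviedb_id': '278', 'thetvdb_id': '81189'}
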
# Get media IDs from guid and build URLs
|
||||
if 'imdb://' in notify_params['guid']:
|
||||
notify_params['imdb_id'] = notify_params['guid'].split('imdb://')[1].split('?')[0]
|
||||
if 'plex://' in notify_params['guid']:
|
||||
notify_params['plex_id'] = notify_params['guid'].split('plex://')[1].split('/')[1]
|
||||
|
||||
if 'imdb://' in notify_params['guid'] or notify_params['imdb_id']:
|
||||
notify_params['imdb_id'] = notify_params['imdb_id'] or notify_params['guid'].split('imdb://')[1].split('?')[0]
|
||||
notify_params['imdb_url'] = 'https://www.imdb.com/title/' + notify_params['imdb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/imdb/' + notify_params['imdb_id']
|
||||
|
||||
if 'thetvdb://' in notify_params['guid']:
|
||||
notify_params['thetvdb_id'] = notify_params['guid'].split('thetvdb://')[1].split('/')[0].split('?')[0]
|
||||
if 'thetvdb://' in notify_params['guid'] or notify_params['thetvdb_id']:
|
||||
notify_params['thetvdb_id'] = notify_params['thetvdb_id'] or notify_params['guid'].split('thetvdb://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['thetvdb_url'] = 'https://thetvdb.com/?tab=series&id=' + notify_params['thetvdb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/' + notify_params['thetvdb_id'] + '?id_type=show'
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/' + notify_params['thetvdb_id'] + '?type=show'
|
||||
|
||||
elif 'thetvdbdvdorder://' in notify_params['guid']:
|
||||
notify_params['thetvdb_id'] = notify_params['guid'].split('thetvdbdvdorder://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['thetvdb_url'] = 'https://thetvdb.com/?tab=series&id=' + notify_params['thetvdb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/' + notify_params['thetvdb_id'] + '?id_type=show'
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/' + notify_params['thetvdb_id'] + '?type=show'
|
||||
|
||||
if 'themoviedb://' in notify_params['guid']:
|
||||
if 'themoviedb://' in notify_params['guid'] or notify_params['themoviedb_id']:
|
||||
if notify_params['media_type'] == 'movie':
|
||||
notify_params['themoviedb_id'] = notify_params['guid'].split('themoviedb://')[1].split('?')[0]
|
||||
notify_params['themoviedb_id'] = notify_params['themoviedb_id'] or notify_params['guid'].split('themoviedb://')[1].split('?')[0]
|
||||
notify_params['themoviedb_url'] = 'https://www.themoviedb.org/movie/' + notify_params['themoviedb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params['themoviedb_id'] + '?id_type=movie'
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params['themoviedb_id'] + '?type=movie'
|
||||
|
||||
elif notify_params['media_type'] in ('show', 'season', 'episode'):
|
||||
notify_params['themoviedb_id'] = notify_params['guid'].split('themoviedb://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['themoviedb_id'] = notify_params['themoviedb_id'] or notify_params['guid'].split('themoviedb://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['themoviedb_url'] = 'https://www.themoviedb.org/tv/' + notify_params['themoviedb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params['themoviedb_id'] + '?id_type=show'
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params['themoviedb_id'] + '?type=show'
|
||||
|
||||
if 'lastfm://' in notify_params['guid']:
|
||||
notify_params['lastfm_id'] = '/'.join(notify_params['guid'].split('lastfm://')[1].split('?')[0].split('/')[:2])
|
||||
notify_params['lastfm_url'] = 'https://www.last.fm/music/' + notify_params['lastfm_id']
|
||||
|
||||
# Get TheMovieDB info
|
||||
if plexpy.CONFIG.THEMOVIEDB_LOOKUP:
|
||||
# Get TheMovieDB info (for movies and tv only)
|
||||
if plexpy.CONFIG.THEMOVIEDB_LOOKUP and notify_params['media_type'] in ('movie', 'show', 'season', 'episode'):
|
||||
if notify_params.get('themoviedb_id'):
|
||||
themoveidb_json = get_themoviedb_info(rating_key=rating_key,
|
||||
media_type=notify_params['media_type'],
|
||||
@@ -643,40 +655,64 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
notify_params['imdb_id'] = themoveidb_json['imdb_id']
|
||||
notify_params['imdb_url'] = 'https://www.imdb.com/title/' + themoveidb_json['imdb_id']
|
||||
|
||||
elif notify_params.get('thetvdb_id') or notify_params.get('imdb_id'):
|
||||
if notify_params['media_type'] in ('episode', 'track'):
|
||||
elif notify_params.get('thetvdb_id') or notify_params.get('imdb_id') or notify_params.get('plex_id'):
|
||||
if notify_params['media_type'] == 'episode':
|
||||
lookup_key = notify_params['grandparent_rating_key']
|
||||
elif notify_params['media_type'] in ('season', 'album'):
|
||||
lookup_title = notify_params['grandparent_title']
|
||||
lookup_year = notify_params['year']
|
||||
lookup_media_type = 'tv'
|
||||
elif notify_params['media_type'] == 'season':
|
||||
lookup_key = notify_params['parent_rating_key']
|
||||
lookup_title = notify_params['parent_title']
|
||||
lookup_year = notify_params['year']
|
||||
lookup_media_type = 'tv'
|
||||
else:
|
||||
lookup_key = rating_key
|
||||
lookup_title = notify_params['title']
|
||||
lookup_year = notify_params['year']
|
||||
lookup_media_type = 'tv' if notify_params['media_type'] == 'show' else 'movie'
|
||||
|
||||
themoviedb_info = lookup_themoviedb_by_id(rating_key=lookup_key,
|
||||
thetvdb_id=notify_params.get('thetvdb_id'),
|
||||
imdb_id=notify_params.get('imdb_id'))
|
||||
imdb_id=notify_params.get('imdb_id'),
|
||||
title=lookup_title,
|
||||
year=lookup_year,
|
||||
media_type=lookup_media_type)
|
||||
themoviedb_info.pop('rating_key', None)
|
||||
notify_params.update(themoviedb_info)
|
||||
|
||||
if themoviedb_info.get('imdb_id'):
|
||||
notify_params['imdb_url'] = 'https://www.imdb.com/title/' + themoviedb_info['imdb_id']
|
||||
if themoviedb_info.get('themoviedb_id'):
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/{}?type={}'.format(
|
||||
notify_params['themoviedb_id'], 'show' if lookup_media_type == 'tv' else 'movie')
|
||||
|
||||
# Get TVmaze info (for tv shows only)
|
||||
if plexpy.CONFIG.TVMAZE_LOOKUP:
|
||||
if notify_params['media_type'] in ('show', 'season', 'episode') and (notify_params.get('thetvdb_id') or notify_params.get('imdb_id')):
|
||||
if notify_params['media_type'] in ('episode', 'track'):
|
||||
if plexpy.CONFIG.TVMAZE_LOOKUP and notify_params['media_type'] in ('show', 'season', 'episode'):
|
||||
if notify_params.get('thetvdb_id') or notify_params.get('imdb_id') or notify_params.get('plex_id'):
|
||||
if notify_params['media_type'] == 'episode':
|
||||
lookup_key = notify_params['grandparent_rating_key']
|
||||
elif notify_params['media_type'] in ('season', 'album'):
|
||||
lookup_title = notify_params['grandparent_title']
|
||||
elif notify_params['media_type'] == 'season':
|
||||
lookup_key = notify_params['parent_rating_key']
|
||||
lookup_title = notify_params['parent_title']
|
||||
else:
|
||||
lookup_key = rating_key
|
||||
lookup_title = notify_params['title']
|
||||
|
||||
tvmaze_info = lookup_tvmaze_by_id(rating_key=lookup_key,
|
||||
thetvdb_id=notify_params.get('thetvdb_id'),
|
||||
imdb_id=notify_params.get('imdb_id'))
|
||||
imdb_id=notify_params.get('imdb_id'),
|
||||
title=lookup_title)
|
||||
tvmaze_info.pop('rating_key', None)
|
||||
notify_params.update(tvmaze_info)
|
||||
|
||||
if tvmaze_info.get('thetvdb_id'):
|
||||
notify_params['thetvdb_url'] = 'https://thetvdb.com/?tab=series&id=' + str(tvmaze_info['thetvdb_id'])
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/{}' + str(notify_params['thetvdb_id']) + '?type=show'
|
||||
if tvmaze_info.get('imdb_id'):
|
||||
notify_params['imdb_url'] = 'https://www.imdb.com/title/' + tvmaze_info['imdb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/imdb/' + notify_params['imdb_id']
|
||||
|
||||
# Get MusicBrainz info (for music only)
|
||||
if plexpy.CONFIG.MUSICBRAINZ_LOOKUP and notify_params['media_type'] in ('artist', 'album', 'track'):
|
||||
@@ -982,6 +1018,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
'duration': duration,
|
||||
'poster_title': notify_params['poster_title'],
|
||||
'poster_url': notify_params['poster_url'],
|
||||
'plex_id': notify_params['plex_id'],
|
||||
'plex_url': notify_params['plex_url'],
|
||||
'imdb_id': notify_params['imdb_id'],
|
||||
'imdb_url': notify_params['imdb_url'],
|
||||
@@ -1447,7 +1484,7 @@ def get_hash_image_info(img_hash=None):
|
||||
return result
|
||||
|
||||
|
||||
def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
|
||||
def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, title=None):
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
try:
|
||||
@@ -1463,11 +1500,21 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
|
||||
|
||||
if thetvdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up TVmaze info for thetvdb_id '{}'.".format(thetvdb_id))
|
||||
else:
|
||||
elif imdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up TVmaze info for imdb_id '{}'.".format(imdb_id))
|
||||
else:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up TVmaze info for '{}'.".format(title))
|
||||
|
||||
params = {'thetvdb': thetvdb_id} if thetvdb_id else {'imdb': imdb_id}
|
||||
response, err_msg, req_msg = request.request_response2('http://api.tvmaze.com/lookup/shows', params=params)
|
||||
if thetvdb_id or imdb_id:
|
||||
params = {'thetvdb': thetvdb_id} if thetvdb_id else {'imdb': imdb_id}
|
||||
response, err_msg, req_msg = request.request_response2(
|
||||
'http://api.tvmaze.com/lookup/shows', params=params)
|
||||
elif title:
|
||||
params = {'q': title}
|
||||
response, err_msg, req_msg = request.request_response2(
|
||||
'https://api.tvmaze.com/singlesearch/shows', params=params)
|
||||
else:
|
||||
return tvmaze_info
|
||||
|
||||
if response and not err_msg:
|
||||
tvmaze_json = response.json()
|
||||
@@ -1497,7 +1544,7 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
|
||||
return tvmaze_info
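A minimal sketch of the two TVmaze endpoints used above, written with plain `requests` instead of Tautulli's `request.request_response2` helper; the function name and example title are hypothetical.

import requests

def tvmaze_lookup(thetvdb_id=None, imdb_id=None, title=None, timeout=10):
    """Return TVmaze show JSON via an ID lookup, falling back to a title search."""
    if thetvdb_id or imdb_id:
        # /lookup/shows resolves an external TVDB or IMDB ID to one show.
        params = {'thetvdb': thetvdb_id} if thetvdb_id else {'imdb': imdb_id}
        url = 'https://api.tvmaze.com/lookup/shows'
    elif title:
        # /singlesearch/shows returns the best fuzzy match for a title.
        params = {'q': title}
        url = 'https://api.tvmaze.com/singlesearch/shows'
    else:
        return None

    r = requests.get(url, params=params, timeout=timeout)
    return r.json() if r.status_code == 200 else None

# Example (hypothetical title): tvmaze_lookup(title='Game of Thrones')
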
def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
|
||||
def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, title=None, year=None, media_type=None):
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
try:
|
||||
@@ -1513,13 +1560,24 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
|
||||
|
||||
if thetvdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for thetvdb_id '{}'.".format(thetvdb_id))
|
||||
else:
|
||||
elif imdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for imdb_id '{}'.".format(imdb_id))
|
||||
else:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for '{} ({})'.".format(title, year))
|
||||
|
||||
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY,
|
||||
'external_source': 'tvdb_id' if thetvdb_id else 'imdb_id'
|
||||
}
|
||||
response, err_msg, req_msg = request.request_response2('https://api.themoviedb.org/3/find/{}'.format(thetvdb_id or imdb_id), params=params)
|
||||
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY}
|
||||
|
||||
if thetvdb_id or imdb_id:
|
||||
params['external_source'] = 'tvdb_id' if thetvdb_id else 'imdb_id'
|
||||
response, err_msg, req_msg = request.request_response2(
|
||||
'https://api.themoviedb.org/3/find/{}'.format(thetvdb_id or imdb_id), params=params)
|
||||
elif title and year and media_type:
|
||||
params['query'] = title
|
||||
params['year'] = year
|
||||
response, err_msg, req_msg = request.request_response2(
|
||||
'https://api.themoviedb.org/3/search/{}'.format(media_type), params=params)
|
||||
else:
|
||||
return themoviedb_info
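Similarly, a sketch of the two The Movie Database calls above with plain `requests`; the API key placeholder is hypothetical and the parameters mirror the hunk above.

import requests

TMDB_API_KEY = 'your-tmdb-api-key'  # hypothetical placeholder

def tmdb_lookup(thetvdb_id=None, imdb_id=None, title=None, year=None, media_type=None, timeout=10):
    """Resolve a TMDB entry by external ID, falling back to a title/year search."""
    params = {'api_key': TMDB_API_KEY}
    if thetvdb_id or imdb_id:
        # /3/find/{external_id} maps a TVDB or IMDB ID to TMDB results.
        params['external_source'] = 'tvdb_id' if thetvdb_id else 'imdb_id'
        url = 'https://api.themoviedb.org/3/find/{}'.format(thetvdb_id or imdb_id)
    elif title and year and media_type:
        # /3/search/movie or /3/search/tv, as selected by media_type.
        params['query'] = title
        params['year'] = year
        url = 'https://api.themoviedb.org/3/search/{}'.format(media_type)
    else:
        return None

    r = requests.get(url, params=params, timeout=timeout)
    return r.json() if r.status_code == 200 else None
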
if response and not err_msg:
|
||||
themoviedb_find_json = response.json()
|
||||
@@ -1527,11 +1585,12 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
|
||||
themoviedb_id = themoviedb_find_json['tv_results'][0]['id']
|
||||
elif themoviedb_find_json.get('movie_results'):
|
||||
themoviedb_id = themoviedb_find_json['movie_results'][0]['id']
|
||||
elif themoviedb_find_json.get('results'):
|
||||
themoviedb_id = themoviedb_find_json['results'][0]['id']
|
||||
else:
|
||||
themoviedb_id = ''
|
||||
|
||||
if themoviedb_id:
|
||||
media_type = 'tv' if thetvdb_id else 'movie'
|
||||
themoviedb_url = 'https://www.themoviedb.org/{}/{}'.format(media_type, themoviedb_id)
|
||||
themoviedb_json = get_themoviedb_info(rating_key=rating_key,
|
||||
media_type=media_type,
|
||||
|
@@ -882,8 +882,6 @@ class ANDROIDAPP(Notifier):
|
||||
'priority': 3
|
||||
}
|
||||
|
||||
_ONESIGNAL_APP_ID = '3b4b666a-d557-4b92-acdf-e2c8c4b95357'
|
||||
|
||||
def agent_notify(self, subject='', body='', action='', notification_id=None, **kwargs):
|
||||
# Check mobile device is still registered
|
||||
device = mobile_app.get_mobile_devices(device_id=self.config['device_id'])
|
||||
@@ -930,24 +928,26 @@ class ANDROIDAPP(Notifier):
|
||||
#logger.debug("Nonce (base64): {}".format(base64.b64encode(nonce)))
|
||||
#logger.debug("Salt (base64): {}".format(base64.b64encode(salt)))
|
||||
|
||||
payload = {'app_id': self._ONESIGNAL_APP_ID,
|
||||
'include_player_ids': [self.config['device_id']],
|
||||
payload = {'app_id': mobile_app._ONESIGNAL_APP_ID,
|
||||
'include_player_ids': [device['onesignal_id']],
|
||||
'contents': {'en': 'Tautulli Notification'},
|
||||
'data': {'encrypted': True,
|
||||
'cipher_text': base64.b64encode(encrypted_data),
|
||||
'nonce': base64.b64encode(nonce),
|
||||
'salt': base64.b64encode(salt)}
|
||||
'salt': base64.b64encode(salt),
|
||||
'server_id': plexpy.CONFIG.PMS_UUID}
|
||||
}
|
||||
else:
|
||||
logger.warn("Tautulli Notifiers :: PyCryptodome library is missing. "
|
||||
"Android app notifications will be sent unecrypted. "
|
||||
"Install the library to encrypt the notifications.")
|
||||
|
||||
payload = {'app_id': self._ONESIGNAL_APP_ID,
|
||||
'include_player_ids': [self.config['device_id']],
|
||||
payload = {'app_id': mobile_app._ONESIGNAL_APP_ID,
|
||||
'include_player_ids': [device['onesignal_id']],
|
||||
'contents': {'en': 'Tautulli Notification'},
|
||||
'data': {'encrypted': False,
|
||||
'plain_text': plaintext_data}
|
||||
'plain_text': plaintext_data,
|
||||
'server_id': plexpy.CONFIG.PMS_UUID}
|
||||
}
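The hunk above only shows how the encrypted payload is assembled; the key derivation and cipher mode the Android app expects are not visible here. As an illustrative sketch only (not Tautulli's actual scheme), one way PyCryptodome can produce the base64 cipher_text/nonce/salt trio:

import base64
import json
import os

from Crypto.Cipher import AES           # PyCryptodome, the library referenced above
from Crypto.Protocol.KDF import PBKDF2

def encrypt_payload(plaintext_data, device_token):
    """Illustrative: derive a key from the device token and AES-GCM encrypt the JSON payload."""
    salt = os.urandom(16)
    key = PBKDF2(device_token, salt, dkLen=32)   # assumption: 256-bit key via PBKDF2
    cipher = AES.new(key, AES.MODE_GCM)          # assumption: GCM mode, nonce chosen by the cipher
    encrypted_data, tag = cipher.encrypt_and_digest(json.dumps(plaintext_data).encode('utf-8'))
    return {'cipher_text': base64.b64encode(encrypted_data + tag).decode('ascii'),
            'nonce': base64.b64encode(cipher.nonce).decode('ascii'),
            'salt': base64.b64encode(salt).decode('ascii')}
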
#logger.debug("OneSignal payload: {}".format(payload))
|
||||
@@ -960,7 +960,8 @@ class ANDROIDAPP(Notifier):
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
try:
|
||||
query = 'SELECT * FROM mobile_devices'
|
||||
query = 'SELECT * FROM mobile_devices WHERE official = 1 ' \
|
||||
'AND onesignal_id IS NOT NULL AND onesignal_id != ""'
|
||||
result = db.select(query=query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Notifiers :: Unable to retrieve Android app devices list: %s." % e)
|
||||
@@ -985,9 +986,9 @@ class ANDROIDAPP(Notifier):
|
||||
'The content of your notifications will be sent unencrypted!</strong><br>'
|
||||
'Please install the library to encrypt the notification contents. '
|
||||
'Instructions can be found in the '
|
||||
'<a href="https://github.com/%s/%s-Wiki/wiki/'
|
||||
'Frequently-Asked-Questions#notifications-pycryptodome'
|
||||
'" target="_blank">FAQ</a>.' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO),
|
||||
'<a href="' + helpers.anon_url(
|
||||
'https://github.com/%s/%s-Wiki/wiki/Frequently-Asked-Questions#notifications-pycryptodome'
|
||||
% (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO)) + '" target="_blank">FAQ</a>.' ,
|
||||
'input_type': 'help'
|
||||
})
|
||||
else:
|
||||
@@ -1000,7 +1001,7 @@ class ANDROIDAPP(Notifier):
|
||||
|
||||
config_option[-1]['description'] += '<br><br>Notifications are sent using the ' \
|
||||
'<a href="' + helpers.anon_url('https://onesignal.com') + '" target="_blank">' \
|
||||
'OneSignal</a> API. Some user data is collected and cannot be encrypted. ' \
|
||||
'OneSignal</a>. Some user data is collected and cannot be encrypted. ' \
|
||||
'Please read the <a href="' + helpers.anon_url(
|
||||
'https://onesignal.com/privacy_policy') + '" target="_blank">' \
|
||||
'OneSignal Privacy Policy</a> for more details.'
|
||||
@@ -1010,7 +1011,7 @@ class ANDROIDAPP(Notifier):
|
||||
if not devices:
|
||||
config_option.append({
|
||||
'label': 'Device',
|
||||
'description': 'No devices registered. '
|
||||
'description': 'No mobile devices registered with OneSignal. '
|
||||
'<a data-tab-destination="android_app" data-toggle="tab" data-dismiss="modal">'
|
||||
'Get the Android App</a> and register a device.',
|
||||
'input_type': 'help'
|
||||
@@ -1020,7 +1021,7 @@ class ANDROIDAPP(Notifier):
|
||||
'label': 'Device',
|
||||
'value': self.config['device_id'],
|
||||
'name': 'androidapp_device_id',
|
||||
'description': 'Set your Android app device or '
|
||||
'description': 'Set your mobile device or '
|
||||
'<a data-tab-destination="android_app" data-toggle="tab" data-dismiss="modal">'
|
||||
'register a new device</a> with Tautulli.',
|
||||
'input_type': 'select',
|
||||
@@ -1123,17 +1124,22 @@ class BROWSER(Notifier):
|
||||
return True
|
||||
|
||||
def _return_config_options(self):
|
||||
config_option = [{'label': 'Allow Notifications',
|
||||
config_option = [{'label': 'Note',
|
||||
'description': 'You may need to refresh the page after saving for changes to take effect.',
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'Allow Notifications',
|
||||
'value': 'Allow Notifications',
|
||||
'name': 'browser_allow_browser',
|
||||
'description': 'Click to allow browser notifications. You must click this button for each browser.',
|
||||
'description': 'Click to allow browser notifications. '
|
||||
'You must click this button for each browser.',
|
||||
'input_type': 'button'
|
||||
},
|
||||
{'label': 'Auto Hide Delay',
|
||||
'value': self.config['auto_hide_delay'],
|
||||
'name': 'browser_auto_hide_delay',
|
||||
'description': 'Set the number of seconds for the notification to remain visible. \
|
||||
Set 0 to disable auto hiding. (Note: Some browsers have a maximum time limit.)',
|
||||
'description': 'Set the number of seconds for the notification to remain visible. '
|
||||
'Set 0 to disable auto hiding. (Note: Some browsers have a maximum time limit.)',
|
||||
'input_type': 'number'
|
||||
}
|
||||
]
|
||||
@@ -1396,8 +1402,7 @@ class EMAIL(Notifier):
|
||||
success = True
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Notifiers :: {name} notification failed: {e}".format(
|
||||
name=self.NAME, e=str(e).decode('utf-8')))
|
||||
logger.error("Tautulli Notifiers :: %s notification failed: %s", self.NAME, e)
|
||||
|
||||
finally:
|
||||
if mailserver:
|
||||
@@ -2961,21 +2966,26 @@ class SCRIPTS(Notifier):
|
||||
def __init__(self, config=None):
|
||||
super(SCRIPTS, self).__init__(config=config)
|
||||
|
||||
self.script_exts = {'.bat': '',
|
||||
'.cmd': '',
|
||||
'.exe': '',
|
||||
'.php': 'php',
|
||||
'.pl': 'perl',
|
||||
'.ps1': 'powershell -executionPolicy bypass -file',
|
||||
'.py': 'python',
|
||||
'.pyw': 'pythonw',
|
||||
'.rb': 'ruby',
|
||||
'.sh': ''
|
||||
}
|
||||
self.script_exts = {
|
||||
'.bat': '',
|
||||
'.cmd': '',
|
||||
'.php': 'php',
|
||||
'.pl': 'perl',
|
||||
'.ps1': 'powershell -executionPolicy bypass -file',
|
||||
'.py': 'python' if plexpy.FROZEN else sys.executable,
|
||||
'.pyw': 'pythonw',
|
||||
'.rb': 'ruby',
|
||||
'.sh': ''
|
||||
}
|
||||
|
||||
self.pythonpath_override = 'nopythonpath'
|
||||
self.pythonpath = True
|
||||
self.prefix_overrides = ('python2', 'python3', 'python', 'pythonw', 'php', 'ruby', 'perl')
|
||||
self.prefix_overrides = {
|
||||
'python': ['.py'],
|
||||
'python2': ['.py'],
|
||||
'python3': ['.py'],
|
||||
'pythonw': ['.py', '.pyw']
|
||||
}
|
||||
self.script_killed = False
|
||||
|
||||
def list_scripts(self):
|
||||
@@ -3005,7 +3015,7 @@ class SCRIPTS(Notifier):
|
||||
'TAUTULLI_PUBLIC_URL': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT,
|
||||
'TAUTULLI_APIKEY': plexpy.CONFIG.API_KEY,
|
||||
'TAUTULLI_ENCODING': plexpy.SYS_ENCODING,
|
||||
'TAUTULLI_PYTHON_VERSION': '.'.join(map(str, plexpy.PYTHON_VERSION))
|
||||
'TAUTULLI_PYTHON_VERSION': common.PYTHON_VERSION
|
||||
}
|
||||
|
||||
if user_id:
|
||||
@@ -3078,21 +3088,24 @@ class SCRIPTS(Notifier):
|
||||
logger.error("Tautulli Notifiers :: No script folder specified.")
|
||||
return
|
||||
|
||||
script_args = helpers.split_args(kwargs.get('script_args', subject))
|
||||
|
||||
logger.debug("Tautulli Notifiers :: Trying to run notify script, action: %s, arguments: %s"
|
||||
% (action, script_args))
|
||||
|
||||
script = kwargs.get('script', self.config.get('script', ''))
|
||||
script_args = helpers.split_args(kwargs.get('script_args', subject))
|
||||
user_id = kwargs.get('parameters', {}).get('user_id')
|
||||
|
||||
logger.debug("Tautulli Notifiers :: Trying to run notify script: %s, arguments: %s, action: %s"
|
||||
% (script, script_args, action))
|
||||
|
||||
# Don't try to run the script if the action does not have one
|
||||
if action and not script:
|
||||
logger.debug("Tautulli Notifiers :: No script selected for action %s, exiting..." % action)
|
||||
logger.debug("Tautulli Notifiers :: No script selected for action '%s', exiting..." % action)
|
||||
return
|
||||
elif not script:
|
||||
logger.debug("Tautulli Notifiers :: No script selected, exiting...")
|
||||
return
|
||||
# Check for a valid script file
|
||||
elif not os.path.isfile(script) or not script.endswith(tuple(self.script_exts)):
|
||||
logger.error("Tautulli Notifiers :: Invalid script file '%s' specified, exiting..." % script)
|
||||
return
|
||||
|
||||
name, ext = os.path.splitext(script)
|
||||
prefix = self.script_exts.get(ext, '')
|
||||
@@ -3109,10 +3122,14 @@ class SCRIPTS(Notifier):
|
||||
del script_args[0]
|
||||
|
||||
# Allow overrides for shitty systems
|
||||
if prefix and script_args:
|
||||
if script_args[0] in self.prefix_overrides:
|
||||
if prefix and script_args and script_args[0] in self.prefix_overrides:
|
||||
if ext in self.prefix_overrides[script_args[0]]:
|
||||
script[0] = script_args[0]
|
||||
del script_args[0]
|
||||
else:
|
||||
logger.error("Tautulli Notifiers :: Invalid prefix override '%s' for '%s' script, exiting..."
|
||||
% (script_args[0], ext))
|
||||
return
|
||||
|
||||
script.extend(script_args)
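Putting the hunk above together, a minimal illustrative sketch of how the extension prefix and script path become the final command list; the helper name is hypothetical, argv lists are used to sidestep quoting, and the real runner also handles timeouts and the 'nopythonpath' flag, which are outside this hunk.

import os
import subprocess
import sys

# Extension -> interpreter prefix (as argv pieces), abbreviated from the mapping above.
SCRIPT_EXTS = {'.py': [sys.executable],
               '.sh': [],
               '.ps1': ['powershell', '-executionPolicy', 'bypass', '-file']}

def build_command(script_path, script_args):
    """Return the argv list used to launch a notification script."""
    _, ext = os.path.splitext(script_path)
    prefix = SCRIPT_EXTS.get(ext, [])
    return prefix + [script_path] + list(script_args)

# Example (hypothetical path/arguments):
# subprocess.call(build_command('/scripts/notify.py', ['on_play', 'Movie Title']))
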
@@ -3814,129 +3831,6 @@ class ZAPIER(Notifier):
|
||||
return config_option
|
||||
|
||||
|
||||
def upgrade_config_to_db():
|
||||
logger.info("Tautulli Notifiers :: Upgrading to new notification system...")
|
||||
|
||||
# Set flag first in case something fails we don't want to keep re-adding the notifiers
|
||||
plexpy.CONFIG.__setattr__('UPDATE_NOTIFIERS_DB', 0)
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
# Config section names from the {new: old} config
|
||||
section_overrides = {'xbmc': 'XBMC',
|
||||
'nma': 'NMA',
|
||||
'pushbullet': 'PushBullet',
|
||||
'osx': 'OSX_Notify',
|
||||
'ifttt': 'IFTTT'
|
||||
}
|
||||
|
||||
# Config keys from the {new: old} config
|
||||
config_key_overrides = {'plex': {'hosts': 'client_host'},
|
||||
'facebook': {'access_token': 'token',
|
||||
'group_id': 'group',
|
||||
'incl_poster': 'incl_card'},
|
||||
'join': {'api_key': 'apikey',
|
||||
'device_id': 'deviceid'},
|
||||
'hipchat': {'hook': 'url',
|
||||
'incl_poster': 'incl_card'},
|
||||
'nma': {'api_key': 'apikey'},
|
||||
'osx': {'notify_app': 'app'},
|
||||
'prowl': {'key': 'keys'},
|
||||
'pushalot': {'api_key': 'apikey'},
|
||||
'pushbullet': {'api_key': 'apikey',
|
||||
'device_id': 'deviceid'},
|
||||
'pushover': {'api_token': 'apitoken',
|
||||
'key': 'keys'},
|
||||
'scripts': {'script_folder': 'folder'},
|
||||
'slack': {'incl_poster': 'incl_card'}
|
||||
}
|
||||
|
||||
# Get Monitoring config section
|
||||
monitoring = plexpy.CONFIG._config['Monitoring']
|
||||
|
||||
# Get the new default notification subject and body text
|
||||
defualt_subject_text = {a['name']: a['subject'] for a in available_notification_actions()}
|
||||
defualt_body_text = {a['name']: a['body'] for a in available_notification_actions()}
|
||||
|
||||
# Get the old notification subject and body text
|
||||
notify_text = {}
|
||||
for action in get_notify_actions():
|
||||
subject_key = 'notify_' + action + '_subject_text'
|
||||
body_key = 'notify_' + action + '_body_text'
|
||||
notify_text[action + '_subject'] = monitoring.get(subject_key, defualt_subject_text[action])
|
||||
notify_text[action + '_body'] = monitoring.get(body_key, defualt_body_text[action])
|
||||
|
||||
# Check through each notification agent
|
||||
for agent in get_notify_agents():
|
||||
agent_id = AGENT_IDS[agent]
|
||||
|
||||
# Get the old config section for the agent
|
||||
agent_section = section_overrides.get(agent, agent.capitalize())
|
||||
agent_config = plexpy.CONFIG._config.get(agent_section)
|
||||
agent_config_key = agent_section.lower()
|
||||
|
||||
# Make sure there is an existing config section (to prevent adding v2 agents)
|
||||
if not agent_config:
|
||||
continue
|
||||
|
||||
# Get all the actions for the agent
|
||||
agent_actions = {}
|
||||
for action in get_notify_actions():
|
||||
a_key = agent_config_key + '_' + action
|
||||
agent_actions[action] = helpers.cast_to_int(agent_config.get(a_key, 0))
|
||||
|
||||
# Check if any of the actions were enabled
|
||||
# If so, the agent will be added to the database
|
||||
if any(agent_actions.values()):
|
||||
# Get the new default config for the agent
|
||||
notifier_default_config = get_agent_class(agent_id).config
|
||||
|
||||
# Update the new config with the old config values
|
||||
notifier_config = {}
|
||||
for conf, val in notifier_default_config.items():
|
||||
c_key = agent_config_key + '_' + config_key_overrides.get(agent, {}).get(conf, conf)
|
||||
notifier_config[agent + '_' + conf] = agent_config.get(c_key, val)
|
||||
|
||||
# Special handling for scripts - one script with multiple actions
|
||||
if agent == 'scripts':
|
||||
# Get the old script arguments
|
||||
script_args = monitoring.get('notify_scripts_args_text', '')
|
||||
|
||||
# Get the old scripts for each action
|
||||
action_scripts = {}
|
||||
for action in get_notify_actions():
|
||||
s_key = agent + '_' + action + '_script'
|
||||
action_scripts[action] = agent_config.get(s_key, '')
|
||||
|
||||
# Reverse the dict to {script: [actions]}
|
||||
script_actions = {}
|
||||
for k, v in action_scripts.items():
|
||||
if v: script_actions.setdefault(v, set()).add(k)
|
||||
|
||||
# Add a new script notifier for each script if the action was enabled
|
||||
for script, actions in script_actions.items():
|
||||
if any(agent_actions[a] for a in actions):
|
||||
temp_config = notifier_config
|
||||
temp_config.update({a: 0 for a in agent_actions})
|
||||
temp_config.update({a + '_subject': '' for a in agent_actions})
|
||||
for a in actions:
|
||||
if agent_actions[a]:
|
||||
temp_config[a] = agent_actions[a]
|
||||
temp_config[a + '_subject'] = script_args
|
||||
temp_config[agent + '_script'] = script
|
||||
|
||||
# Add a new notifier and update the config
|
||||
notifier_id = add_notifier_config(agent_id=agent_id)
|
||||
set_notifier_config(notifier_id=notifier_id, agent_id=agent_id, **temp_config)
|
||||
|
||||
else:
|
||||
notifier_config.update(agent_actions)
|
||||
notifier_config.update(notify_text)
|
||||
|
||||
# Add a new notifier and update the config
|
||||
notifier_id = add_notifier_config(agent_id=agent_id)
|
||||
set_notifier_config(notifier_id=notifier_id, agent_id=agent_id, **notifier_config)
|
||||
|
||||
|
||||
def check_browser_enabled():
|
||||
global BROWSER_NOTIFIERS
|
||||
BROWSER_NOTIFIERS = {}
|
||||
|
@@ -42,8 +42,8 @@ else:
|
||||
from plexpy import session
|
||||
|
||||
|
||||
def get_server_resources(return_presence=False, return_server=False, **kwargs):
|
||||
if not return_presence:
|
||||
def get_server_resources(return_presence=False, return_server=False, return_info=False, **kwargs):
|
||||
if not return_presence and not return_info:
|
||||
logger.info("Tautulli PlexTV :: Requesting resources for server...")
|
||||
|
||||
server = {'pms_name': plexpy.CONFIG.PMS_NAME,
|
||||
@@ -56,9 +56,13 @@ def get_server_resources(return_presence=False, return_server=False, **kwargs):
|
||||
'pms_is_cloud': plexpy.CONFIG.PMS_IS_CLOUD,
|
||||
'pms_url': plexpy.CONFIG.PMS_URL,
|
||||
'pms_url_manual': plexpy.CONFIG.PMS_URL_MANUAL,
|
||||
'pms_identifier': plexpy.CONFIG.PMS_IDENTIFIER
|
||||
'pms_identifier': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'pms_plexpass': plexpy.CONFIG.PMS_PLEXPASS
|
||||
}
|
||||
|
||||
if return_info:
|
||||
return server
|
||||
|
||||
if kwargs:
|
||||
server.update(kwargs)
|
||||
for k in ['pms_ssl', 'pms_is_remote', 'pms_is_cloud', 'pms_url_manual']:
|
||||
@@ -125,6 +129,9 @@ def get_server_resources(return_presence=False, return_server=False, **kwargs):
|
||||
if return_server:
|
||||
return server
|
||||
|
||||
logger.info("Tautulli PlexTV :: Selected server: %s (%s) (%s - Version %s)",
|
||||
server['pms_name'], server['pms_url'], server['pms_platform'], server['pms_version'])
|
||||
|
||||
plexpy.CONFIG.process_kwargs(server)
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
|
@@ -513,6 +513,7 @@ class PmsConnect(object):
|
||||
genres = []
|
||||
labels = []
|
||||
collections = []
|
||||
guids = []
|
||||
|
||||
if m.getElementsByTagName('Director'):
|
||||
for director in m.getElementsByTagName('Director'):
|
||||
@@ -538,6 +539,10 @@ class PmsConnect(object):
|
||||
for collection in m.getElementsByTagName('Collection'):
|
||||
collections.append(helpers.get_xml_attr(collection, 'tag'))
|
||||
|
||||
if m.getElementsByTagName('Guid'):
|
||||
for guid in m.getElementsByTagName('Guid'):
|
||||
guids.append(helpers.get_xml_attr(guid, 'id'))
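For context, the `Guid` children added to Plex metadata responses look like the fragment below; a minimal sketch of collecting their `id` attributes with the standard library (the sample XML is illustrative, and minidom stands in for helpers.get_xml_attr()).

from xml.dom import minidom

# Illustrative Plex metadata fragment (hypothetical values).
xml_data = """<Video title="Example Movie">
  <Guid id="imdb://tt0111161"/>
  <Guid id="tmdb://278"/>
</Video>"""

video = minidom.parseString(xml_data).getElementsByTagName('Video')[0]

# Collect the external id strings, as the hunk above does for recently added items.
guids = [guid.getAttribute('id') for guid in video.getElementsByTagName('Guid')]
print(guids)  # ['imdb://tt0111161', 'tmdb://278']
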
recent_item = {'media_type': helpers.get_xml_attr(m, 'type'),
|
||||
'section_id': helpers.get_xml_attr(m, 'librarySectionID'),
|
||||
'library_name': helpers.get_xml_attr(m, 'librarySectionTitle'),
|
||||
@@ -578,6 +583,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': helpers.get_xml_attr(m, 'title'),
|
||||
'child_count': helpers.get_xml_attr(m, 'childCount')
|
||||
}
|
||||
@@ -672,6 +678,7 @@ class PmsConnect(object):
|
||||
genres = []
|
||||
labels = []
|
||||
collections = []
|
||||
guids = []
|
||||
|
||||
if metadata_main.getElementsByTagName('Director'):
|
||||
for director in metadata_main.getElementsByTagName('Director'):
|
||||
@@ -697,6 +704,10 @@ class PmsConnect(object):
|
||||
for collection in metadata_main.getElementsByTagName('Collection'):
|
||||
collections.append(helpers.get_xml_attr(collection, 'tag'))
|
||||
|
||||
if metadata_main.getElementsByTagName('Guid'):
|
||||
for guid in metadata_main.getElementsByTagName('Guid'):
|
||||
guids.append(helpers.get_xml_attr(guid, 'id'))
|
||||
|
||||
if metadata_type == 'movie':
|
||||
metadata = {'media_type': metadata_type,
|
||||
'section_id': section_id,
|
||||
@@ -740,6 +751,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
'live': int(helpers.get_xml_attr(metadata_main, 'live') == '1')
|
||||
@@ -793,6 +805,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
'live': int(helpers.get_xml_attr(metadata_main, 'live') == '1')
|
||||
@@ -849,6 +862,7 @@ class PmsConnect(object):
|
||||
'genres': show_details.get('genres', []),
|
||||
'labels': show_details.get('labels', []),
|
||||
'collections': show_details.get('collections', []),
|
||||
'guids': show_details.get('guids', []),
|
||||
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
|
||||
helpers.get_xml_attr(metadata_main, 'title')),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
@@ -921,6 +935,7 @@ class PmsConnect(object):
|
||||
'genres': show_details.get('genres', []),
|
||||
'labels': show_details.get('labels', []),
|
||||
'collections': show_details.get('collections', []),
|
||||
'guids': show_details.get('guids', []),
|
||||
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
|
||||
helpers.get_xml_attr(metadata_main, 'title')),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
@@ -970,6 +985,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
'live': int(helpers.get_xml_attr(metadata_main, 'live') == '1')
|
||||
@@ -1020,6 +1036,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
|
||||
helpers.get_xml_attr(metadata_main, 'title')),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
@@ -1073,6 +1090,7 @@ class PmsConnect(object):
|
||||
'genres': album_details.get('genres', []),
|
||||
'labels': album_details.get('labels', []),
|
||||
'collections': album_details.get('collections', []),
|
||||
'guids': album_details.get('guids', []),
|
||||
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'title'),
|
||||
track_artist),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
@@ -1122,6 +1140,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
'live': int(helpers.get_xml_attr(metadata_main, 'live') == '1')
|
||||
@@ -1172,6 +1191,7 @@ class PmsConnect(object):
|
||||
'genres': photo_album_details.get('genres', []),
|
||||
'labels': photo_album_details.get('labels', []),
|
||||
'collections': photo_album_details.get('collections', []),
|
||||
'guids': photo_album_details.get('guids', []),
|
||||
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle') or library_name,
|
||||
helpers.get_xml_attr(metadata_main, 'title')),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
@@ -1225,6 +1245,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
|
||||
'children_count': helpers.cast_to_int(helpers.get_xml_attr(metadata_main, 'leafCount')),
|
||||
'live': int(helpers.get_xml_attr(metadata_main, 'live') == '1')
|
||||
@@ -1273,6 +1294,7 @@ class PmsConnect(object):
|
||||
'genres': genres,
|
||||
'labels': labels,
|
||||
'collections': collections,
|
||||
'guids': guids,
|
||||
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
|
||||
'extra_type': helpers.get_xml_attr(metadata_main, 'extraType'),
|
||||
'sub_type': helpers.get_xml_attr(metadata_main, 'subtype'),
|
||||
@@ -1548,6 +1570,9 @@ class PmsConnect(object):
|
||||
if a.getElementsByTagName('Track'):
|
||||
session_data = a.getElementsByTagName('Track')
|
||||
for session_ in session_data:
|
||||
# Filter out background theme music sessions
|
||||
if helpers.get_xml_attr(session_, 'guid').startswith('library://'):
|
||||
continue
|
||||
session_output = self.get_session_each(session_, skip_cache=skip_cache)
|
||||
session_list.append(session_output)
|
||||
if a.getElementsByTagName('Video'):
|
||||
@@ -2018,6 +2043,10 @@ class PmsConnect(object):
|
||||
source_subtitle_details = next((p for p in source_media_part_streams if p['id'] == subtitle_id),
|
||||
next((p for p in source_media_part_streams if p['type'] == '3'), source_subtitle_details))
|
||||
|
||||
# Override the thumb for clips
|
||||
if media_type == 'clip' and metadata_details.get('extra_type') and metadata_details['art']:
|
||||
metadata_details['thumb'] = metadata_details['art'].replace('/art', '/thumb')
|
||||
|
||||
# Overrides for live sessions
|
||||
if stream_details['live'] and transcode_session:
|
||||
stream_details['stream_container_decision'] = 'transcode'
|
||||
@@ -2971,8 +3000,6 @@ class PmsConnect(object):
|
||||
return key_list
|
||||
|
||||
def get_server_response(self):
|
||||
# Refresh Plex remote access port mapping first
|
||||
self.put_refresh_reachability()
|
||||
account_data = self.get_account(output_format='xml')
|
||||
|
||||
try:
|
||||
|
@@ -295,20 +295,20 @@ def server_message(response, return_msg=False):
|
||||
try:
|
||||
soup = BeautifulSoup(response.content, "html5lib")
|
||||
except Exception:
|
||||
pass
|
||||
soup = None
|
||||
|
||||
# Find body and cleanup common tags to grab content, which probably
|
||||
# contains the message.
|
||||
message = soup.find("body")
|
||||
elements = ("header", "script", "footer", "nav", "input", "textarea")
|
||||
if soup:
|
||||
# Find body and cleanup common tags to grab content, which probably
|
||||
# contains the message.
|
||||
message = soup.find("body")
|
||||
elements = ("header", "script", "footer", "nav", "input", "textarea")
|
||||
|
||||
for element in elements:
|
||||
for element in elements:
|
||||
for tag in soup.find_all(element):
|
||||
tag.replaceWith("")
|
||||
|
||||
for tag in soup.find_all(element):
|
||||
tag.replaceWith("")
|
||||
|
||||
message = message.text if message else soup.text
|
||||
message = message.strip()
|
||||
message = message.text if message else soup.text
|
||||
message = message.strip()
|
||||
|
||||
# Second attempt is to just take the response
|
||||
if message is None:
|
||||
|
@@ -245,6 +245,7 @@ class Users(object):
|
||||
custom_where = ['users.user_id', user_id]
|
||||
|
||||
columns = ['session_history.id AS history_row_id',
|
||||
'MIN(session_history.started) AS first_seen',
|
||||
'MAX(session_history.started) AS last_seen',
|
||||
'session_history.ip_address',
|
||||
'COUNT(session_history.id) AS play_count',
|
||||
@@ -306,6 +307,7 @@ class Users(object):
|
||||
|
||||
row = {'history_row_id': item['history_row_id'],
|
||||
'last_seen': item['last_seen'],
|
||||
'first_seen': item['first_seen'],
|
||||
'ip_address': item['ip_address'],
|
||||
'play_count': item['play_count'],
|
||||
'platform': platform,
|
||||
@@ -600,7 +602,7 @@ class Users(object):
|
||||
'WHERE user_id = ? ' \
|
||||
'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \
|
||||
' ELSE session_history.rating_key END) ' \
|
||||
'ORDER BY started DESC LIMIT ?'
|
||||
'ORDER BY MAX(started) DESC LIMIT ?'
|
||||
result = monitor_db.select(query, args=[user_id, limit])
|
||||
else:
|
||||
result = []
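The ORDER BY change above matters because the query groups plays per item: with GROUP BY, a bare `started` refers to an arbitrary row within each group, whereas `MAX(started)` orders items by their most recent play. A small sqlite3 sketch with hypothetical data:

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE session_history (rating_key INTEGER, started INTEGER)')
# Two plays of item 1 and one play of item 2 (illustrative timestamps).
con.executemany('INSERT INTO session_history VALUES (?, ?)', [(1, 100), (1, 300), (2, 200)])

# Ordering by MAX(started) ranks each grouped item by its latest play.
rows = con.execute('SELECT rating_key, MAX(started) AS started '
                   'FROM session_history '
                   'GROUP BY rating_key '
                   'ORDER BY MAX(started) DESC').fetchall()
print(rows)  # [(1, 300), (2, 200)]
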
@@ -17,5 +17,5 @@
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
PLEXPY_BRANCH = "beta"
|
||||
PLEXPY_RELEASE_VERSION = "v2.5.0-beta"
|
||||
PLEXPY_BRANCH = "master"
|
||||
PLEXPY_RELEASE_VERSION = "v2.5.5"
|
@@ -176,7 +176,7 @@ def run():
|
||||
logger.info("Tautulli WebSocket :: Ready")
|
||||
plexpy.WS_CONNECTED = True
|
||||
except (websocket.WebSocketException, IOError, Exception) as e:
|
||||
logger.error("Tautulli WebSocket :: %s." % e)
|
||||
logger.error("Tautulli WebSocket :: %s.", e)
|
||||
|
||||
if plexpy.WS_CONNECTED:
|
||||
on_connect()
|
||||
@@ -209,7 +209,7 @@ def run():
|
||||
logger.info("Tautulli WebSocket :: Ready")
|
||||
plexpy.WS_CONNECTED = True
|
||||
except (websocket.WebSocketException, IOError, Exception) as e:
|
||||
logger.error("Tautulli WebSocket :: %s." % e)
|
||||
logger.error("Tautulli WebSocket :: %s.", e)
|
||||
|
||||
else:
|
||||
close()
|
||||
@@ -219,7 +219,7 @@ def run():
|
||||
if ws_shutdown:
|
||||
break
|
||||
|
||||
logger.error("Tautulli WebSocket :: %s." % e)
|
||||
logger.error("Tautulli WebSocket :: %s.", e)
|
||||
close()
|
||||
break
|
||||
|
||||
@@ -255,42 +255,55 @@ def process(opcode, data):
|
||||
try:
|
||||
data = data.decode('utf-8')
|
||||
logger.websocket_debug(data)
|
||||
info = json.loads(data)
|
||||
event = json.loads(data)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli WebSocket :: Error decoding message from websocket: %s" % e)
|
||||
logger.websocket_error(data)
|
||||
return False
|
||||
|
||||
info = info.get('NotificationContainer', info)
|
||||
info_type = info.get('type')
|
||||
event = event.get('NotificationContainer', event)
|
||||
event_type = event.get('type')
|
||||
|
||||
if not info_type:
|
||||
if not event_type:
|
||||
return False
|
||||
|
||||
if info_type == 'playing':
|
||||
time_line = info.get('PlaySessionStateNotification', info.get('_children', {}))
|
||||
if event_type == 'playing':
|
||||
event_data = event.get('PlaySessionStateNotification', event.get('_children', {}))
|
||||
|
||||
if not time_line:
|
||||
logger.debug("Tautulli WebSocket :: Session found but unable to get timeline data.")
|
||||
if not event_data:
|
||||
logger.debug("Tautulli WebSocket :: Session event found but unable to get websocket data.")
|
||||
return False
|
||||
|
||||
try:
|
||||
activity = activity_handler.ActivityHandler(timeline=time_line[0])
|
||||
activity = activity_handler.ActivityHandler(timeline=event_data[0])
|
||||
activity.process()
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli WebSocket :: Failed to process session data: %s." % e)
|
||||
|
||||
if info_type == 'timeline':
|
||||
time_line = info.get('TimelineEntry', info.get('_children', {}))
|
||||
if event_type == 'timeline':
|
||||
event_data = event.get('TimelineEntry', event.get('_children', {}))
|
||||
|
||||
if not time_line:
|
||||
logger.debug("Tautulli WebSocket :: Timeline event found but unable to get timeline data.")
|
||||
if not event_data:
|
||||
logger.debug("Tautulli WebSocket :: Timeline event found but unable to get websocket data.")
|
||||
return False
|
||||
|
||||
try:
|
||||
activity = activity_handler.TimelineHandler(timeline=time_line[0])
|
||||
activity = activity_handler.TimelineHandler(timeline=event_data[0])
|
||||
activity.process()
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli WebSocket :: Failed to process timeline data: %s." % e)
|
||||
|
||||
if event_type == 'reachability':
|
||||
event_data = event.get('ReachabilityNotification', event.get('_children', {}))
|
||||
|
||||
if not event_data:
|
||||
logger.debug("Tautulli WebSocket :: Reachability event found but unable to get websocket data.")
|
||||
return False
|
||||
|
||||
try:
|
||||
activity = activity_handler.ReachabilityHandler(data=event_data[0])
|
||||
activity.process()
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli WebSocket :: Failed to process reachability data: %s." % e)
|
||||
|
||||
return True
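A condensed sketch of the dispatch pattern above: the decoded NotificationContainer is routed by its `type`, and each event type carries its payload under a different key. The function name and sample message are hypothetical; the handler classes themselves are omitted.

import json

# Event type -> payload key inside the NotificationContainer, matching the branches above.
EVENT_PAYLOAD_KEYS = {'playing': 'PlaySessionStateNotification',
                      'timeline': 'TimelineEntry',
                      'reachability': 'ReachabilityNotification'}

def extract_event(data):
    """Decode a websocket message and return (event_type, first payload entry), or None."""
    event = json.loads(data)
    event = event.get('NotificationContainer', event)
    event_type = event.get('type')
    if event_type not in EVENT_PAYLOAD_KEYS:
        return None
    payload = event.get(EVENT_PAYLOAD_KEYS[event_type], event.get('_children', []))
    return (event_type, payload[0]) if payload else None

# Example (hypothetical message):
# extract_event('{"NotificationContainer": {"type": "playing", '
#                '"PlaySessionStateNotification": [{"state": "playing"}]}}')
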
@@ -41,6 +41,13 @@ else:
|
||||
from plexpy.users import Users, refresh_users
|
||||
from plexpy.plextv import PlexTV
|
||||
|
||||
# Monkey patch SameSite support into cookies.
|
||||
# https://stackoverflow.com/a/50813092
|
||||
try:
|
||||
from http.cookies import Morsel
|
||||
except ImportError:
|
||||
from Cookie import Morsel
|
||||
Morsel._reserved[str('samesite')] = str('SameSite')
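For reference, the patch is needed because `http.cookies` on older Python versions rejects cookie attributes it does not know about; once 'samesite' is registered in `Morsel._reserved`, it can be set on a response cookie as in this minimal sketch (cookie name and value are hypothetical):

from http.cookies import Morsel, SimpleCookie

# Register the attribute so Morsel accepts it (a no-op on Python 3.8+,
# where SameSite support is built in).
Morsel._reserved[str('samesite')] = str('SameSite')

cookie = SimpleCookie()
cookie['tautulli_token_example'] = 'jwt-value'
cookie['tautulli_token_example']['httponly'] = True
cookie['tautulli_token_example']['samesite'] = 'lax'
print(cookie.output())  # Set-Cookie: tautulli_token_example=jwt-value; HttpOnly; SameSite=lax
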
JWT_ALGORITHM = 'HS256'
|
||||
JWT_COOKIE_NAME = 'tautulli_token_'
|
||||
@@ -141,7 +148,7 @@ def check_credentials(username=None, password=None, token=None, admin_login='0',
|
||||
return True, user_details, 'admin'
|
||||
|
||||
if plexpy.CONFIG.HTTP_PLEX_ADMIN or (not admin_login == '1' and plexpy.CONFIG.ALLOW_GUEST_ACCESS):
|
||||
plex_login = plex_user_login(username=username, password=password, token=token, headers=headers)
|
||||
plex_login = plex_user_login(token=token, headers=headers)
|
||||
if plex_login is not None:
|
||||
return True, plex_login[0], plex_login[1]
|
||||
|
||||
@@ -296,9 +303,13 @@ class AuthController(object):
|
||||
self.on_logout(payload['user'], payload['user_group'])
|
||||
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
|
||||
cherrypy.response.cookie[jwt_cookie] = 'expire'
|
||||
cherrypy.response.cookie[jwt_cookie] = ''
|
||||
cherrypy.response.cookie[jwt_cookie]['expires'] = 0
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = '/'
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = plexpy.HTTP_ROOT.rstrip('/') or '/'
|
||||
|
||||
if plexpy.HTTP_ROOT != '/':
|
||||
# Also expire the JWT on the root path
|
||||
cherrypy.response.headers['Set-Cookie'] = jwt_cookie + '=""; expires=Thu, 01 Jan 1970 12:00:00 GMT; path=/'
|
||||
|
||||
cherrypy.request.login = None
|
||||
|
||||
@@ -344,7 +355,9 @@ class AuthController(object):
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
|
||||
cherrypy.response.cookie[jwt_cookie] = jwt_token
|
||||
cherrypy.response.cookie[jwt_cookie]['expires'] = int(time_delta.total_seconds())
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = '/'
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = plexpy.HTTP_ROOT.rstrip('/') or '/'
|
||||
cherrypy.response.cookie[jwt_cookie]['httponly'] = True
|
||||
cherrypy.response.cookie[jwt_cookie]['samesite'] = 'lax'
|
||||
|
||||
cherrypy.request.login = payload
|
||||
cherrypy.response.status = 200
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of Tautulli.
|
||||
#
|
||||
@@ -578,6 +578,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_library_sections.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -907,6 +908,7 @@ class WebInterface(object):
|
||||
return library_details
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_library.")
|
||||
return library_details
|
||||
else:
|
||||
logger.warn("Library details requested but no section_id received.")
|
||||
|
||||
@@ -956,6 +958,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_library_watch_time_stats.")
|
||||
return result
|
||||
else:
|
||||
logger.warn("Library watch time stats requested but no section_id received.")
|
||||
|
||||
@@ -999,6 +1002,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_library_user_stats.")
|
||||
return result
|
||||
else:
|
||||
logger.warn("Library user stats requested but no section_id received.")
|
||||
|
||||
@@ -1378,8 +1382,8 @@ class WebInterface(object):
|
||||
user_id (str): The id of the Plex user
|
||||
|
||||
Optional parameters:
|
||||
order_column (str): "last_seen", "ip_address", "platform", "player",
|
||||
"last_played", "play_count"
|
||||
order_column (str): "last_seen", "first_seen", "ip_address", "platform",
|
||||
"player", "last_played", "play_count"
|
||||
order_dir (str): "desc" or "asc"
|
||||
start (int): Row to start from, 0
|
||||
length (int): Number of items to return, 25
|
||||
@@ -1397,6 +1401,7 @@ class WebInterface(object):
|
||||
"ip_address": "xxx.xxx.xxx.xxx",
|
||||
"last_played": "Game of Thrones - The Red Woman",
|
||||
"last_seen": 1462591869,
|
||||
"first_seen": 1583968210,
|
||||
"live": 0,
|
||||
"media_index": 1,
|
||||
"media_type": "episode",
|
||||
@@ -1423,6 +1428,7 @@ class WebInterface(object):
|
||||
if not kwargs.get('json_data'):
|
||||
# TODO: Find some way to automatically get the columns
|
||||
dt_columns = [("last_seen", True, False),
|
||||
("first_seen", True, False),
|
||||
("ip_address", True, True),
|
||||
("platform", True, True),
|
||||
("player", True, True),
|
||||
@@ -1535,6 +1541,7 @@ class WebInterface(object):
|
||||
return user_details
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_user.")
|
||||
return user_details
|
||||
else:
|
||||
logger.warn("User details requested but no user_id received.")
|
||||
|
||||
@@ -1583,6 +1590,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_user_watch_time_stats.")
|
||||
return result
|
||||
else:
|
||||
logger.warn("User watch time stats requested but no user_id received.")
|
||||
|
||||
@@ -1626,6 +1634,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_user_player_stats.")
|
||||
return result
|
||||
else:
|
||||
logger.warn("User watch time stats requested but no user_id received.")
|
||||
|
||||
@@ -2075,6 +2084,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_date.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2118,6 +2128,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_dayofweek.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2161,6 +2172,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_hourofday.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2204,6 +2216,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_per_month.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2247,6 +2260,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_top_10_platforms.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2290,6 +2304,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_top_10_users.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2332,6 +2347,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_stream_type.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2374,6 +2390,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_source_resolution.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2416,6 +2433,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_plays_by_stream_resolution.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2458,6 +2476,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_stream_type_by_top_10_users.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2500,6 +2519,7 @@ class WebInterface(object):
|
||||
return result
|
||||
else:
|
||||
logger.warn("Unable to retrieve data for get_stream_type_by_top_10_platforms.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
@@ -2993,13 +3013,7 @@ class WebInterface(object):
|
||||
"time_format": plexpy.CONFIG.TIME_FORMAT,
|
||||
"week_start_monday": checked(plexpy.CONFIG.WEEK_START_MONDAY),
|
||||
"get_file_sizes": checked(plexpy.CONFIG.GET_FILE_SIZES),
|
||||
"grouping_global_history": checked(plexpy.CONFIG.GROUPING_GLOBAL_HISTORY),
|
||||
"grouping_user_history": checked(plexpy.CONFIG.GROUPING_USER_HISTORY),
|
||||
"grouping_charts": checked(plexpy.CONFIG.GROUPING_CHARTS),
|
||||
"monitor_pms_updates": checked(plexpy.CONFIG.MONITOR_PMS_UPDATES),
|
||||
"monitor_remote_access": checked(plexpy.CONFIG.MONITOR_REMOTE_ACCESS),
|
||||
"remote_access_ping_interval": plexpy.CONFIG.REMOTE_ACCESS_PING_INTERVAL,
|
||||
"remote_access_ping_threshold": plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD,
|
||||
"refresh_libraries_interval": plexpy.CONFIG.REFRESH_LIBRARIES_INTERVAL,
|
||||
"refresh_libraries_on_startup": checked(plexpy.CONFIG.REFRESH_LIBRARIES_ON_STARTUP),
|
||||
"refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
|
||||
@@ -3072,12 +3086,12 @@ class WebInterface(object):
|
||||
checked_configs = [
|
||||
"launch_browser", "launch_startup", "enable_https", "https_create_cert",
|
||||
"api_enabled", "freeze_db", "check_github",
|
||||
"grouping_global_history", "grouping_user_history", "grouping_charts", "group_history_tables",
|
||||
"group_history_tables",
|
||||
"pms_url_manual", "week_start_monday",
|
||||
"refresh_libraries_on_startup", "refresh_users_on_startup",
|
||||
"notify_consecutive", "notify_recently_added_upgrade",
|
||||
"notify_group_recently_added_grandparent", "notify_group_recently_added_parent",
|
||||
"monitor_pms_updates", "monitor_remote_access", "get_file_sizes", "log_blacklist", "http_hash_password",
|
||||
"monitor_pms_updates", "get_file_sizes", "log_blacklist", "http_hash_password",
|
||||
"allow_guest_access", "cache_images", "http_proxy", "http_basic_auth", "notify_concurrent_by_ip",
|
||||
"history_table_activity", "plexpy_auto_update",
|
||||
"themoviedb_lookup", "tvmaze_lookup", "musicbrainz_lookup", "http_plex_admin",
|
||||
@@ -3130,8 +3144,6 @@ class WebInterface(object):
|
||||
kwargs.get('refresh_users_interval') != str(plexpy.CONFIG.REFRESH_USERS_INTERVAL) or \
|
||||
kwargs.get('pms_update_check_interval') != str(plexpy.CONFIG.PMS_UPDATE_CHECK_INTERVAL) or \
|
||||
kwargs.get('monitor_pms_updates') != plexpy.CONFIG.MONITOR_PMS_UPDATES or \
|
||||
kwargs.get('monitor_remote_access') != plexpy.CONFIG.MONITOR_REMOTE_ACCESS or \
|
||||
kwargs.get('remote_access_ping_interval') != str(plexpy.CONFIG.REMOTE_ACCESS_PING_INTERVAL) or \
|
||||
kwargs.get('pms_url_manual') != plexpy.CONFIG.PMS_URL_MANUAL:
|
||||
reschedule = True
|
||||
|
||||
@@ -3756,7 +3768,7 @@ class WebInterface(object):
|
||||
app (str): "tautulli" or "plexwatch" or "plexivity"
|
||||
database_file (file): The database file to import (multipart/form-data)
|
||||
or
|
||||
database_path (str): The full path to the plexwatch database file
|
||||
database_path (str): The full path to the database file to import
|
||||
method (str): For Tautulli only, "merge" or "overwrite"
|
||||
table_name (str): For PlexWatch or Plexivity only, "processed" or "grouped"
|
||||
|
||||
@@ -3770,7 +3782,7 @@ class WebInterface(object):
|
||||
Returns:
|
||||
json:
|
||||
{"result": "success",
|
||||
"message": "Import has started. Check the logs to monitor any problems."
|
||||
"message": "Database import has started. Check the logs to monitor any problems."
|
||||
}
|
||||
```
|
||||
"""
|
||||
@@ -3799,7 +3811,7 @@ class WebInterface(object):
'method': method,
'backup': helpers.bool_true(backup)}).start()
return {'result': 'success',
'message': 'Import has started. Check the logs to monitor any problems.'}
'message': 'Database import has started. Check the logs to monitor any problems.'}
else:
if database_file:
helpers.delete_file(database_path)

@@ -3814,7 +3826,7 @@ class WebInterface(object):
'table_name': table_name,
'import_ignore_interval': import_ignore_interval}).start()
return {'result': 'success',
'message': 'Import has started. Check the logs to monitor any problems.'}
'message': 'Database import has started. Check the logs to monitor any problems.'}
else:
if database_file:
helpers.delete_file(database_path)

@@ -3829,7 +3841,7 @@ class WebInterface(object):
'table_name': table_name,
'import_ignore_interval': import_ignore_interval}).start()
return {'result': 'success',
'message': 'Import has started. Check the logs to monitor any problems.'}
'message': 'Database import has started. Check the logs to monitor any problems.'}
else:
if database_file:
helpers.delete_file(database_path)

@@ -3838,6 +3850,56 @@ class WebInterface(object):
else:
return {'result': 'error', 'message': 'App not recognized for import'}
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def import_config(self, config_file=None, config_path=None, backup=False, **kwargs):
""" Import a Tautulli config file.

```
Required parameters:
config_file (file): The config file to import (multipart/form-data)
or
config_path (str): The full path to the config file to import

Optional parameters:
backup (bool): true or false whether to backup
the current config before importing

Returns:
json:
{"result": "success",
"message": "Config import has started. Check the logs to monitor any problems. "
"Tautulli will restart automatically."
}
```
"""
if database.IS_IMPORTING:
return {'result': 'error',
'message': 'Database import is in progress. Please wait until it is finished to import a config.'}

if config_file:
config_path = os.path.join(plexpy.CONFIG.CACHE_DIR, config_file.filename + '.import.ini')
logger.info("Received config file '%s' for import. Saving to cache '%s'.",
config_file.filename, config_path)
with open(config_path, 'wb') as f:
while True:
data = config_file.file.read(8192)
if not data:
break
f.write(data)

if not config_path:
return {'result': 'error', 'message': 'No config specified for import'}

config.set_import_thread(config=config_path, backup=helpers.bool_true(backup))

return {'result': 'success',
'message': 'Config import has started. Check the logs to monitor any problems. '
'Tautulli will restart automatically.'}
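Likewise, a minimal sketch of driving the new import_config command over the v2 API using the config_path form of the call; the URL, API key, and path are placeholder assumptions, and the path must be readable by the Tautulli server:

```python
# Hypothetical sketch: import a Tautulli config file via the v2 API.
# TAUTULLI_URL, API_KEY, and the config path are assumptions for illustration.
import requests

TAUTULLI_URL = "http://localhost:8181"  # assumed default host/port
API_KEY = "YOUR_API_KEY"

params = {
    "apikey": API_KEY,
    "cmd": "import_config",
    "config_path": "/path/to/config.ini",  # must be readable by the Tautulli server
    "backup": "true",                       # back up the current config first
}

r = requests.get(f"{TAUTULLI_URL}/api/v2", params=params)
print(r.json())  # expected to report that the config import has started
```

Per the handler above, the call is rejected while a database import is still in progress, and Tautulli restarts itself once the import thread has been started.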
@cherrypy.expose
@requireAuth(member_of("admin"))
def import_database_tool(self, app=None, **kwargs):
@@ -3851,6 +3913,11 @@ class WebInterface(object):
logger.warn("No app specified for import.")
return

@cherrypy.expose
@requireAuth(member_of("admin"))
def import_config_tool(self, **kwargs):
return serve_template(templatename="config_import.html", title="Import Tautulli Configuration")

@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@@ -3998,6 +4065,40 @@ class WebInterface(object):
logger.warn('Unable to retrieve the PMS identifier.')
return result

@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def get_server_info(self, **kwargs):
""" Get the PMS server information.

```
Required parameters:
None

Optional parameters:
None

Returns:
json:
{"pms_identifier": "08u2phnlkdshf890bhdlksghnljsahgleikjfg9t",
"pms_ip": "10.10.10.1",
"pms_is_remote": 0,
"pms_name": "Winterfell-Server",
"pms_platform": "Windows",
"pms_plexpass": 1,
"pms_port": 32400,
"pms_ssl": 0,
"pms_url": "http://10.10.10.1:32400",
"pms_url_manual": 0,
"pms_version": "1.20.0.3133-fede5bdc7"
}
```
"""
server = plextv.get_server_resources(return_info=True)
server.pop('pms_is_cloud', None)
return server
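A minimal sketch of querying the new get_server_info command via the v2 API; the base URL and API key are placeholder assumptions, and the result is assumed to arrive inside Tautulli's usual v2 "response"/"data" envelope:

```python
# Hypothetical sketch: fetch PMS server information via the v2 API.
# TAUTULLI_URL and API_KEY are assumptions for illustration.
import requests

TAUTULLI_URL = "http://localhost:8181"  # assumed default host/port
API_KEY = "YOUR_API_KEY"

r = requests.get(
    f"{TAUTULLI_URL}/api/v2",
    params={"apikey": API_KEY, "cmd": "get_server_info"},
)
data = r.json()["response"]["data"]  # assumed v2 envelope; fields as documented above
print(data["pms_name"], data["pms_version"])
```

Note that the handler above pops pms_is_cloud from the payload, so only the keys shown in the docstring should come back.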
@cherrypy.expose
@requireAuth(member_of("admin"))
@addtoapi()

@@ -4020,6 +4121,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_server_pref.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()
@@ -4113,7 +4215,8 @@ class WebInterface(object):
def do_state_change(self, signal, title, timer, **kwargs):
message = title
quote = self.random_arnold_quotes()
plexpy.SIGNAL = signal
if signal:
plexpy.SIGNAL = signal

if plexpy.CONFIG.HTTP_ROOT.strip('/'):
new_http_root = '/' + plexpy.CONFIG.HTTP_ROOT.strip('/') + '/'
@@ -4162,6 +4265,13 @@ class WebInterface(object):
def reset_git_install(self, **kwargs):
return self.do_state_change('reset', 'Resetting to {}'.format(common.RELEASE), 120)

@cherrypy.expose
@requireAuth(member_of("admin"))
def restart_import_config(self, **kwargs):
if config.IMPORT_THREAD:
config.IMPORT_THREAD.start()
return self.do_state_change(None, 'Importing a Config', 15)

@cherrypy.expose
@requireAuth(member_of("admin"))
def get_changelog(self, latest_only=False, since_prev_release=False, update_shown=False, **kwargs):
@@ -4250,7 +4360,7 @@ class WebInterface(object):

@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth()
@requireAuth(member_of("admin"))
@addtoapi('notify_recently_added')
def send_manual_on_created(self, notifier_id='', rating_key='', **kwargs):
""" Send a recently added notification using Tautulli.
@@ -4658,6 +4768,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for search_results.")
return result

@cherrypy.expose
@requireAuth()

@@ -4765,6 +4876,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_new_rating_keys.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -4794,7 +4906,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_old_rating_keys.")

return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -4855,6 +4967,7 @@ class WebInterface(object):
"grandparent_thumb": "/library/metadata/1219/thumb/1462175063",
"grandparent_title": "Game of Thrones",
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"guids": [],
"labels": [],
"last_viewed_at": "1462165717",
"library_name": "TV Shows",

@@ -4975,6 +5088,7 @@ class WebInterface(object):
return metadata
else:
logger.warn("Unable to retrieve data for get_metadata_details.")
return metadata

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5021,6 +5135,7 @@ class WebInterface(object):
"grandparent_thumb": "/library/metadata/1219/thumb/1462175063",
"grandparent_title": "Game of Thrones",
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"guids": [],
"labels": [],
"last_viewed_at": "1462165717",
"library_name": "TV Shows",

@@ -5067,6 +5182,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_recently_added_details.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()
@@ -5167,6 +5283,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_servers_info.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5197,6 +5314,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_server_identity.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5222,6 +5340,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_server_friendly_name.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5521,6 +5640,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_activity.")
return {}
except Exception as e:
logger.exception("Unable to retrieve data for get_activity: %s" % e)

@@ -5562,6 +5682,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_full_libraries_list.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5612,6 +5733,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_full_users_list.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5666,6 +5788,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_synced_items.")
return result

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5778,6 +5901,7 @@ class WebInterface(object):
return result
else:
logger.warn("Unable to retrieve data for get_home_stats.")
return result

@cherrypy.expose
@requireAuth(member_of("admin"))
@@ -5831,8 +5955,9 @@ class WebInterface(object):
if args and 'v2' in args[0]:
return API2()._api_run(**kwargs)
else:
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
return json.dumps(API2()._api_responds(result_type='error',
msg='Please use the /api/v2 endpoint.'))
msg='Please use the /api/v2 endpoint.')).encode('utf-8')

@cherrypy.expose
@cherrypy.tools.json_out()

@@ -5949,12 +6074,6 @@ class WebInterface(object):
whois_info = helpers.whois_lookup(ip_address)
return whois_info

@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth()
def get_plexpy_url(self, **kwargs):
return helpers.get_plexpy_url()

@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))

@@ -6235,7 +6354,7 @@ class WebInterface(object):

if raw:
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
return json.dumps(newsletter_agent.raw_data(preview=preview))
return json.dumps(newsletter_agent.raw_data(preview=preview)).encode('utf-8')

return newsletter_agent.generate_newsletter(preview=preview)

@@ -6246,7 +6365,7 @@ class WebInterface(object):
return "Failed to retrieve newsletter: missing newsletter_id parameter"

@cherrypy.expose
@requireAuth()
@requireAuth(member_of("admin"))
def support(self, **kwargs):
return serve_template(templatename="support.html", title="Support")
@@ -263,8 +263,8 @@ def initialize(options):
cherrypy.engine.signals.subscribe()
cherrypy.engine.start()
cherrypy.engine.block()
except IOError:
sys.stderr.write('Failed to start on port: %i. Is something else running?\n' % (options['http_port']))
except IOError as e:
logger.error("Tautulli WebStart :: Failed to start Tautulli: %s", e)
sys.exit(1)

cherrypy.server.wait()
start.sh

@@ -1,2 +1,33 @@
#!/usr/bin/env bash
python Tautulli.py &> /dev/null &

if [[ "$TAUTULLI_DOCKER" == "True" ]]; then
if [[ -n $PUID && -n $PGID ]]; then
getent group "$PGID" 2>&1 > /dev/null || groupadd -g "$PGID" tautulli
getent passwd "$PUID" 2>&1 > /dev/null || useradd -r -u "$PUID" -g "$PGID" tautulli

user=$(getent passwd "$PUID" | cut -d: -f1)
group=$(getent group "$PGID" | cut -d: -f1)
usermod -a -G root "$user"

chown -R "$user":"$group" /config

echo "Running Tautulli using user $user (uid=$PUID) and group $group (gid=$PGID)"
su "$user" -g "$group" -c "python /app/Tautulli.py --datadir /config"
else
python Tautulli.py --datadir /config
fi
else
python_versions=("python3" "python3.8" "python3.7" "python3.6" "python" "python2" "python2.7")
for cmd in "${python_versions[@]}"; do
if command -v "$cmd" >/dev/null; then
echo "Starting Tautulli with $cmd."
if [[ "$(uname -s)" == "Darwin" ]]; then
$cmd Tautulli.py &> /dev/null &
else
$cmd Tautulli.py --quiet --daemon
fi
exit
fi
done
echo "Unable to start Tautulli. No Python interpreter was found in the following options:" "${python_versions[@]}"
fi