Compare commits

111 Commits

| Author | SHA1 | Date |
|---|---|---|
| | ee2bb22dc9 | |
| | 3852275b74 | |
| | 6e1067e43e | |
| | 110baa67d5 | |
| | 3fb7f4ddd1 | |
| | 38f0a44fa0 | |
| | af3c826f7d | |
| | cbee080b54 | |
| | ecbf38c005 | |
| | 91e28df1a5 | |
| | 1df28243c3 | |
| | b867dc9be2 | |
| | 97f80adf0b | |
| | 2fc7b08909 | |
| | defceed696 | |
| | 249533ac51 | |
| | 12aee8762e | |
| | d9325b7adf | |
| | 4975cad4fa | |
| | 33fc079318 | |
| | b3b2752554 | |
| | 505cf25ca3 | |
| | 9747e3ba98 | |
| | 729191722a | |
| | ff2cf73f23 | |
| | 9c4d97c0f8 | |
| | be911e7700 | |
| | 00629c0983 | |
| | 52ebc9a908 | |
| | a029d6a931 | |
| | 7641e3b081 | |
| | b54210480f | |
| | 0d9c1c640e | |
| | 7f84353c69 | |
| | c319a4a5cc | |
| | 60f13df992 | |
| | dea51e32a5 | |
| | 7019f5618b | |
| | 9106c068ac | |
| | 0b845294fb | |
| | 7e850dd88d | |
| | 877bf7060e | |
| | 9326d03a57 | |
| | 4787f42d2e | |
| | 56a9ccd818 | |
| | 1019fecc9e | |
| | 1855f93c1c | |
| | 52e6a44aa4 | |
| | 0b77808af6 | |
| | 9233ed5c53 | |
| | ee68c0f622 | |
| | 366823cee9 | |
| | 40e1eb9a49 | |
| | 1af419a860 | |
| | 397f18c435 | |
| | 2e5dd05a6c | |
| | a9fb8ddfb8 | |
| | 562c726787 | |
| | 5f82c1dc17 | |
| | 222800bdb6 | |
| | 5dd3636571 | |
| | 2296a9fbb3 | |
| | 63b5a7c036 | |
| | b74ca2670e | |
| | 393f4e0e58 | |
| | 3a9ca29e99 | |
| | 32995fef24 | |
| | a73c99fc64 | |
| | a5834470ba | |
| | da3bc127dc | |
| | 0dddc4d58f | |
| | a4d5d9157b | |
| | c70d5d4398 | |
| | 7c08b07ef5 | |
| | e426b5dd35 | |
| | 2fdf619582 | |
| | d9eed14b7a | |
| | 8230ffb8a4 | |
| | 7098930b19 | |
| | 56244245a4 | |
| | dd2f12fa8e | |
| | 9598247a0d | |
| | 230ee90b1c | |
| | e705bedc91 | |
| | b5ebe7590c | |
| | 6d0831ceaa | |
| | 19e00ee2f2 | |
| | 80723d224e | |
| | 0c82bb023a | |
| | 0a86f24095 | |
| | b41249cfa8 | |
| | 6659802689 | |
| | 964c503223 | |
| | 15568bf20a | |
| | d10cd324bb | |
| | 2a22ab8c33 | |
| | ca736cdae2 | |
| | d589c57dd2 | |
| | 9b0caf2a47 | |
| | f8b00bbd67 | |
| | 91a8c0e7a0 | |
| | 2089172384 | |
| | 1ab87e5334 | |
| | b5e6861032 | |
| | 189930918a | |
| | ff1bd0a4b8 | |
| | e544d0dd07 | |
| | 3e0b240154 | |
| | 199119cafb | |
| | 89ab665923 | |
| | dfb60de6d2 | |
@@ -1,10 +0,0 @@
.git
.github
.gitignore
contrib
init-scripts
package
pylintrc
*.md
!CHANGELOG*.md
start.bat

.github/FUNDING.yml (3 changed lines, vendored)
@@ -1,3 +0,0 @@
github: JonnyWong16
patreon: Tautulli
custom: ["https://bit.ly/2InPp15"]

.github/workflows/publish-docker.yml (93 changed lines, vendored)
@@ -1,93 +0,0 @@
name: Publish Docker
on:
  push:
    branches: [master, beta, nightly, python3]
    tags: [v*]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v2

      - name: Prepare
        id: prepare
        run: |
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
          elif [[ $GITHUB_REF == refs/heads/master ]]; then
            echo ::set-output name=tag::latest
          else
            echo ::set-output name=tag::${GITHUB_REF#refs/heads/}
          fi
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            echo ::set-output name=branch::master
          else
            echo ::set-output name=branch::${GITHUB_REF#refs/heads/}
          fi
          echo ::set-output name=commit::${GITHUB_SHA}
          echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
          echo ::set-output name=docker_platforms::linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6
          echo ::set-output name=docker_image::${{ secrets.DOCKER_REPO }}/tautulli

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          platforms: all

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          version: latest

      - name: Cache Docker Layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Login to DockerHub
        uses: docker/login-action@v1
        if: success()
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v1
        if: success()
        with:
          registry: ghcr.io
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.GHCR_TOKEN }}

      - name: Docker Build and Push
        uses: docker/build-push-action@v2
        if: success()
        with:
          context: .
          file: ./Dockerfile
          push: true
          platforms: ${{ steps.prepare.outputs.docker_platforms }}
          build-args: |
            TAG=${{ steps.prepare.outputs.tag }}
            BRANCH=${{ steps.prepare.outputs.branch }}
            COMMIT=${{ steps.prepare.outputs.commit }}
            BUILD_DATE=${{ steps.prepare.outputs.build_date }}
          tags: |
            ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
            ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
          title: ${{ github.workflow }}
          nofail: true

.github/workflows/publish-release.yml (204 changed lines, vendored)
@@ -1,204 +0,0 @@
name: Publish Release
on:
  push:
    branches: [master, beta, nightly, python3]
    tags: [v*]

jobs:
  build-windows:
    runs-on: windows-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v2

      - name: Set Release Version
        id: get_version
        shell: bash
        run: |
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            VERSION_NSIS=${GITHUB_REF#refs/tags/v}.1
            echo ::set-output name=VERSION_NSIS::${VERSION_NSIS/%-beta.1/.0}
            echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/v}
            echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
          else
            echo ::set-output name=VERSION_NSIS::0.0.0.0
            echo ::set-output name=VERSION::0.0.0
            echo ::set-output name=RELEASE_VERSION::${GITHUB_SHA::7}
          fi
          echo $GITHUB_SHA > version.txt

      - name: Set Up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Cache Dependencies
        id: cache_dependencies
        uses: actions/cache@v2
        with:
          path: ~\AppData\Local\pip\Cache
          key: ${{ runner.os }}-pip-${{ hashFiles('package/requirements-windows.txt') }}
          restore-keys: ${{ runner.os }}-pip-

      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r package/requirements-windows.txt

      - name: Build Package
        run: |
          pyinstaller -y ./package/Tautulli-windows.spec

      - name: Create Installer
        uses: joncloud/makensis-action@v1.2
        with:
          script-file: ./package/Tautulli.nsi
          arguments: /DVERSION=${{ steps.get_version.outputs.VERSION_NSIS }} /DINSTALLER_NAME=..\Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
          include-more-plugins: true
          include-custom-plugins-path: package/nsis-plugins

      - name: Upload Installer
        uses: actions/upload-artifact@v2
        with:
          name: Tautulli-windows-installer
          path: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe

      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
          title: Build Windows Installer
          nofail: true

  build-macos:
    runs-on: macos-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v2

      - name: Set Release Version
        id: get_version
        shell: bash
        run: |
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV
            echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/v}
            echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
          else
            echo "VERSION=0.0.0" >> $GITHUB_ENV
            echo ::set-output name=VERSION::0.0.0
            echo ::set-output name=RELEASE_VERSION::${GITHUB_SHA::7}
          fi
          echo $GITHUB_SHA > version.txt

      - name: Set Up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Cache Dependencies
        id: cache_dependencies
        uses: actions/cache@v2
        with:
          path: ~/Library/Caches/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('package/requirements-macos.txt') }}
          restore-keys: ${{ runner.os }}-pip-

      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r package/requirements-macos.txt

      - name: Build Package
        run: |
          pyinstaller -y ./package/Tautulli-macos.spec

      - name: Create Installer
        run: |
          sudo pkgbuild --install-location /Applications --version ${{ steps.get_version.outputs.VERSION }} --component ./dist/Tautulli.app --scripts ./package/macos-scripts Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg

      - name: Upload Installer
        uses: actions/upload-artifact@v2
        with:
          name: Tautulli-macos-installer
          path: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg

      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
          title: Build MacOS Installer
          nofail: true

  release:
    needs: [build-windows, build-macos]
    if: startsWith(github.ref, 'refs/tags/') && always()
    runs-on: ubuntu-latest
    steps:
      - name: Get Build Job Status
        uses: technote-space/workflow-conclusion-action@v1

      - name: Checkout Code
        uses: actions/checkout@v2

      - name: Set Release Version
        id: get_version
        run: |
          echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}

      - name: Download Windows Installer
        if: env.WORKFLOW_CONCLUSION == 'success'
        uses: actions/download-artifact@v2
        with:
          name: Tautulli-windows-installer

      - name: Download MacOS Installer
        if: env.WORKFLOW_CONCLUSION == 'success'
        uses: actions/download-artifact@v2
        with:
          name: Tautulli-macos-installer

      - name: Get Changelog
        id: get_changelog
        run: echo ::set-output name=CHANGELOG::"$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md | sed '$d' | sed '$d' | sed '$d' | sed ':a;N;$!ba;s/\n/%0A/g' )"

      - name: Create Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ steps.get_version.outputs.RELEASE_VERSION }}
          release_name: Tautulli ${{ steps.get_version.outputs.RELEASE_VERSION }}
          body: |
            ## Changelog

            ##${{ steps.get_changelog.outputs.CHANGELOG }}
          draft: false
          prerelease: ${{ endsWith(steps.get_version.outputs.RELEASE_VERSION, '-beta') }}

      - name: Upload Windows Installer
        if: env.WORKFLOW_CONCLUSION == 'success'
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
          asset_name: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
          asset_content_type: application/vnd.microsoft.portable-executable

      - name: Upload MacOS Installer
        if: env.WORKFLOW_CONCLUSION == 'success'
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
          asset_name: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
          asset_content_type: application/vnd.apple.installer+xml

.gitignore (341 changed lines, vendored)
@@ -1,83 +1,280 @@
# Compiled source #
###################
*.pyc
*.py~
*.pyproj
*.sln
# Created by https://www.toptal.com/developers/gitignore/api/pycharm+all,python,linux,windows
# Edit at https://www.toptal.com/developers/gitignore?templates=pycharm+all,python,linux,windows

# PlexPy files #
######################
### Linux ###
*~

# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*

# KDE directory preferences
.directory

# Linux trash folder which might appear on any partition or disk
.Trash-*

# .nfs files are created when an open file is removed but is still being accessed
.nfs*

### PyCharm+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

### PyCharm+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360

.idea/

# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023

*.iml
modules.xml
.idea/misc.xml
*.ipr

# Sonarlint plugin
.idea/sonarlint

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
pytestdebug.log

# Translations
*.mo
*.pot

# Django stuff:
*.log
*.db*
*.db-journal
*.ini
release.lock
version.lock
logs/*
backups/*
cache/*
exports/*
newsletters/*
*.mmdb
version.txt
branch.txt
local_settings.py
db.sqlite3
db.sqlite3-journal

# HTTPS Cert/Key #
##################
/*.crt
/*.key
/*.csr
/*.pem
# Flask stuff:
instance/
.webassets-cache

# Mergetool
*.orgin
# Scrapy stuff:
.scrapy

# OS generated files #
######################
.DS_Store?
.DS_Store
ehthumbs.db
Icon?
# Sphinx documentation
docs/_build/
doc/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
pythonenv*

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# profiling data
.prof

### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db

#Ignore files generated by PyCharm
*.idea/*
# Dump file
*.stackdump

#Ignore files generated by vi
*.swp
# Folder config file
[Dd]esktop.ini

#Ignore files build by Visual Studio
*.obj
*.exe
*.pdb
*.user
*.aps
*.pch
*.vspscc
*_i.c
*_p.c
*.ncb
*.suo
*.tlb
*.tlh
*.bak
*.cache
*.ilk
*.log
[Bb]in
[Dd]ebug*/
*.lib
*.sbr
obj/
[Rr]elease*/
_ReSharper*/
[Tt]est[Rr]esult*
/cache
/logs
.project
.pydevproject
# Recycle Bin used on file shares
$RECYCLE.BIN/

#Ignore files generated by pyinstaller
/build
/dist
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp

# Windows shortcuts
*.lnk

# End of https://www.toptal.com/developers/gitignore/api/pycharm+all,python,linux,windows
/config.ini
/release.lock
/tautulli.db
/version.lock
/cache/

API.md (3 changed lines)
@@ -1,3 +0,0 @@
# API Reference

Tautulli API documentation has been moved to the [wiki page](https://github.com/Tautulli/Tautulli-Wiki/wiki/Tautulli-API-Reference).

CHANGELOG.md (44 changed lines)
@@ -1,5 +1,49 @@
# Changelog

## v2.6.5 (2021-01-09)

* Other:
  * Fix: Some IP addresses not being masked in the logs.
  * New: Auto-updater for Windows exe installer.
  * Change: Allow Snap package to access the user home directory.
  * Change: Migrate Snap user data to a persistent location that is retained if Tautulli is reinstalled.


## v2.6.4 (2020-12-20)

* Other:
  * Fix: Restore Snap data folder from previous installs.


## v2.6.3 (2020-12-19)

* Announcements:
  * This is the last Tautulli version to support Python 2. Python 3 will be required to continue receiving updates. You can check your Python version on the settings page.
* Exporter:
  * Fix: Accessible and exists attributes were blank for media info export level 9.
* UI:
  * Fix: Guest usernames were not masked on mouse hover.
* Other:
  * Fix: macOS menu bar icon for light and dark mode.
  * New: Tautulli can officially be installed on Linux using a Snap package. See the installation wiki for details.


## v2.6.2 (2020-12-05)

* Notifications:
  * Change: Send a notification of a user new device for the first time only. This can be toggled off in the settings.
* Exporter:
  * Fix: Allow exporting child fields only without requiring the parent fields as well.
  * Fix: Exporting individual collection would fail.
  * Change: Remove accessible and exists fields from the default media info export levels. This prevents the Plex server from reading the media files unnecessarily.
* Other:
  * Fix: Enable high resolution for the macOS system tray icon and menu.
  * New: Added rate limiting for failed login attempts.
  * Change: Use a white logo for the macOS system tray icon.
* API:
  * New: Added machine_id to the get_history API response.


## v2.6.1 (2020-11-03)

* Other:

@@ -1,41 +0,0 @@
# Contributing to Tautulli

## Pull Requests
If you think you can contribute code to the Tautulli repository, do not hesitate to submit a pull request.

### Branches
All pull requests should be based on the `nightly` branch, to minimize cross merges. When you want to develop a new feature, clone the repository with `git clone origin/nightly -b FEATURE_NAME`. Use meaningful commit messages.

### Python Code

#### Compatibility
The code should work with Python 2.7.17 or Python 3.6+. Note that Tautulli runs on many different platforms.

Re-use existing code. Do not hesitate to add logging in your code. You can the logger module `plexpy.logger.*` for this. Web requests are invoked via `plexpy.request.*` and derived ones. Use these methods to automatically add proper and meaningful error handling.

#### Code conventions
Although Tautulli did not adapt a code convention in the past, we try to follow the [PEP8](http://legacy.python.org/dev/peps/pep-0008/) conventions for future code. A short summary to remind you (copied from http://wiki.ros.org/PyStyleGuide):

* 4 space indentation
* 80 characters per line
* `package_name`
* `ClassName`
* `method_name`
* `field_name`
* `_private_something`
* `self.__really_private_field`
* `_global`

#### Documentation
Document your code. Use docstrings See [PEP-257](https://www.python.org/dev/peps/pep-0257/) for more information.

### HTML/Template code

#### Compatibility
HTML5 compatible browsers are targeted.

#### Conventions
* 4 space indentation
* `methodName`
* `variableName`
* `ClassName`

Dockerfile (26 changed lines)
@@ -1,26 +0,0 @@
FROM tautulli/tautulli-baseimage:python3

LABEL maintainer="Tautulli"

ARG BRANCH
ARG COMMIT

ENV TAUTULLI_DOCKER=True
ENV TZ=UTC

WORKDIR /app

RUN \
  groupadd -g 1000 tautulli && \
  useradd -u 1000 -g 1000 tautulli && \
  echo ${BRANCH} > /app/branch.txt && \
  echo ${COMMIT} > /app/version.txt

COPY . /app

CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
ENTRYPOINT [ "./start.sh" ]

VOLUME /config
EXPOSE 8181
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1

@@ -20,31 +20,27 @@
import os
import sys

# Ensure lib added to path, before any other imports
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))

from future.builtins import str

import appdirs
import argparse
import datetime
import locale
import pytz
import signal
import shutil
import time
import threading
import tzlocal

import plexpy
from plexpy import common, config, database, helpers, logger, webstart
import jellypy
from jellypy import common, config, database, helpers, logger, webstart
if common.PLATFORM == 'Windows':
from plexpy import windows
from jellypy import windows
elif common.PLATFORM == 'Darwin':
from plexpy import macos
from jellypy import macos

# Register signals, such as CTRL + C
signal.signal(signal.SIGINT, plexpy.sig_handler)
signal.signal(signal.SIGTERM, plexpy.sig_handler)
signal.signal(signal.SIGINT, jellypy.sig_handler)
signal.signal(signal.SIGTERM, jellypy.sig_handler)


def main():

@@ -55,28 +51,28 @@ def main():

# Fixed paths to Tautulli
if hasattr(sys, 'frozen') and hasattr(sys, '_MEIPASS'):
plexpy.FROZEN = True
plexpy.FULL_PATH = os.path.abspath(sys.executable)
plexpy.PROG_DIR = sys._MEIPASS
jellypy.FROZEN = True
jellypy.FULL_PATH = os.path.abspath(sys.executable)
jellypy.PROG_DIR = sys._MEIPASS
else:
plexpy.FULL_PATH = os.path.abspath(__file__)
plexpy.PROG_DIR = os.path.dirname(plexpy.FULL_PATH)
jellypy.FULL_PATH = os.path.abspath(__file__)
jellypy.PROG_DIR = os.path.dirname(jellypy.FULL_PATH)

plexpy.ARGS = sys.argv[1:]
jellypy.ARGS = sys.argv[1:]

# From sickbeard
plexpy.SYS_PLATFORM = sys.platform
plexpy.SYS_ENCODING = None
jellypy.SYS_PLATFORM = sys.platform
jellypy.SYS_ENCODING = None

try:
locale.setlocale(locale.LC_ALL, "")
plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
jellypy.SYS_LANGUAGE, jellypy.SYS_ENCODING = locale.getdefaultlocale()
except (locale.Error, IOError):
pass

# for OSes that are poorly configured I'll just force UTF-8
if not plexpy.SYS_ENCODING or plexpy.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
plexpy.SYS_ENCODING = 'UTF-8'
if not jellypy.SYS_ENCODING or jellypy.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
jellypy.SYS_ENCODING = 'UTF-8'

# Set up and gather command line arguments
parser = argparse.ArgumentParser(

@@ -106,48 +102,50 @@ def main():
args = parser.parse_args()

if args.verbose:
plexpy.VERBOSE = True
jellypy.VERBOSE = True
if args.quiet:
plexpy.QUIET = True
jellypy.QUIET = True

# Do an intial setup of the logger.
# Require verbose for pre-initilization to see critical errors
logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)
logger.initLogger(console=not jellypy.QUIET, log_dir=False, verbose=True)

try:
plexpy.SYS_TIMEZONE = tzlocal.get_localzone()
jellypy.SYS_TIMEZONE = tzlocal.get_localzone()
except (pytz.UnknownTimeZoneError, LookupError, ValueError) as e:
logger.error("Could not determine system timezone: %s" % e)
plexpy.SYS_TIMEZONE = pytz.UTC
jellypy.SYS_TIMEZONE = pytz.UTC

plexpy.SYS_UTC_OFFSET = datetime.datetime.now(plexpy.SYS_TIMEZONE).strftime('%z')
jellypy.SYS_UTC_OFFSET = datetime.datetime.now(jellypy.SYS_TIMEZONE).strftime('%z')

if helpers.bool_true(os.getenv('TAUTULLI_DOCKER', False)):
plexpy.DOCKER = True
jellypy.DOCKER = True
if helpers.bool_true(os.getenv('TAUTULLI_SNAP', False)):
jellypy.SNAP = True

if args.dev:
plexpy.DEV = True
jellypy.DEV = True
logger.debug("Tautulli is running in the dev environment.")

if args.daemon:
if sys.platform == 'win32':
logger.warn("Daemonizing not supported under Windows, starting normally")
else:
plexpy.DAEMON = True
plexpy.QUIET = True
jellypy.DAEMON = True
jellypy.QUIET = True

if args.nofork:
plexpy.NOFORK = True
jellypy.NOFORK = True
logger.info("Tautulli is running as a service, it will not fork when restarted.")

if args.pidfile:
plexpy.PIDFILE = str(args.pidfile)
jellypy.PIDFILE = str(args.pidfile)

# If the pidfile already exists, plexpy may still be running, so
# If the pidfile already exists, jellypy may still be running, so
# exit
if os.path.exists(plexpy.PIDFILE):
if os.path.exists(jellypy.PIDFILE):
try:
with open(plexpy.PIDFILE, 'r') as fp:
with open(jellypy.PIDFILE, 'r') as fp:
pid = int(fp.read())
except IOError as e:
raise SystemExit("Unable to read PID file: %s", e)

@@ -157,20 +155,20 @@ def main():
except OSError:
logger.warn("PID file '%s' already exists, but PID %d is "
"not running. Ignoring PID file." %
(plexpy.PIDFILE, pid))
(jellypy.PIDFILE, pid))
else:
# The pidfile exists and points to a live PID. plexpy may
# The pidfile exists and points to a live PID. jellypy may
# still be running, so exit.
raise SystemExit("PID file '%s' already exists. Exiting." %
plexpy.PIDFILE)
jellypy.PIDFILE)

# The pidfile is only useful in daemon mode, make sure we can write the
# file properly
if plexpy.DAEMON:
plexpy.CREATEPID = True
if jellypy.DAEMON:
jellypy.CREATEPID = True

try:
with open(plexpy.PIDFILE, 'w') as fp:
with open(jellypy.PIDFILE, 'w') as fp:
fp.write("pid\n")
except IOError as e:
raise SystemExit("Unable to write PID file: %s", e)

@@ -180,98 +178,107 @@ def main():

# Determine which data directory and config file to use
if args.datadir:
plexpy.DATA_DIR = args.datadir
elif plexpy.FROZEN:
plexpy.DATA_DIR = appdirs.user_data_dir("Tautulli", False)
jellypy.DATA_DIR = args.datadir
elif jellypy.FROZEN:
jellypy.DATA_DIR = appdirs.user_data_dir("Tautulli", False)
else:
plexpy.DATA_DIR = plexpy.PROG_DIR
jellypy.DATA_DIR = jellypy.PROG_DIR

# Migrate Snap data dir
if jellypy.SNAP:
snap_common = os.environ['SNAP_COMMON']
old_data_dir = os.path.join(snap_common, 'Tautulli')
if os.path.exists(old_data_dir) and os.listdir(old_data_dir):
jellypy.SNAP_MIGRATE = True
logger.info("Migrating Snap user data.")
shutil.move(old_data_dir, jellypy.DATA_DIR)

if args.config:
config_file = args.config
else:
config_file = os.path.join(plexpy.DATA_DIR, config.FILENAME)
config_file = os.path.join(jellypy.DATA_DIR, config.FILENAME)

# Try to create the DATA_DIR if it doesn't exist
if not os.path.exists(plexpy.DATA_DIR):
if not os.path.exists(jellypy.DATA_DIR):
try:
os.makedirs(plexpy.DATA_DIR)
os.makedirs(jellypy.DATA_DIR)
except OSError:
raise SystemExit(
'Could not create data directory: ' + plexpy.DATA_DIR + '. Exiting....')
'Could not create data directory: ' + jellypy.DATA_DIR + '. Exiting....')

# Make sure the DATA_DIR is writeable
if not os.access(plexpy.DATA_DIR, os.W_OK):
if not os.access(jellypy.DATA_DIR, os.W_OK):
raise SystemExit(
'Cannot write to the data directory: ' + plexpy.DATA_DIR + '. Exiting...')
'Cannot write to the data directory: ' + jellypy.DATA_DIR + '. Exiting...')

# Put the database in the DATA_DIR
plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, database.FILENAME)
jellypy.DB_FILE = os.path.join(jellypy.DATA_DIR, database.FILENAME)

# Move 'plexpy.db' to 'tautulli.db'
if os.path.isfile(os.path.join(plexpy.DATA_DIR, 'plexpy.db')) and \
not os.path.isfile(os.path.join(plexpy.DATA_DIR, plexpy.DB_FILE)):
# Move 'jellypy.db' to 'tautulli.db'
if os.path.isfile(os.path.join(jellypy.DATA_DIR, 'jellypy.db')) and \
not os.path.isfile(os.path.join(jellypy.DATA_DIR, jellypy.DB_FILE)):
try:
os.rename(os.path.join(plexpy.DATA_DIR, 'plexpy.db'), plexpy.DB_FILE)
os.rename(os.path.join(jellypy.DATA_DIR, 'jellypy.db'), jellypy.DB_FILE)
except OSError as e:
raise SystemExit("Unable to rename plexpy.db to tautulli.db: %s", e)
raise SystemExit("Unable to rename jellypy.db to tautulli.db: %s", e)

if plexpy.DAEMON:
plexpy.daemonize()
if jellypy.DAEMON:
jellypy.daemonize()

# Read config and start logging
plexpy.initialize(config_file)
jellypy.initialize(config_file)

# Start the background threads
plexpy.start()
jellypy.start()

# Force the http port if neccessary
if args.port:
plexpy.HTTP_PORT = args.port
logger.info('Using forced web server port: %i', plexpy.HTTP_PORT)
jellypy.HTTP_PORT = args.port
logger.info('Using forced web server port: %i', jellypy.HTTP_PORT)
else:
plexpy.HTTP_PORT = int(plexpy.CONFIG.HTTP_PORT)
jellypy.HTTP_PORT = int(jellypy.CONFIG.HTTP_PORT)

# Check if pyOpenSSL is installed. It is required for certificate generation
# and for CherryPy.
if plexpy.CONFIG.ENABLE_HTTPS:
if jellypy.CONFIG.ENABLE_HTTPS:
try:
import OpenSSL
except ImportError:
logger.warn("The pyOpenSSL module is missing. Install this "
"module to enable HTTPS. HTTPS will be disabled.")
plexpy.CONFIG.ENABLE_HTTPS = False
jellypy.CONFIG.ENABLE_HTTPS = False

# Try to start the server. Will exit here is address is already in use.
webstart.start()

if common.PLATFORM == 'Windows':
if plexpy.CONFIG.SYS_TRAY_ICON:
plexpy.WIN_SYS_TRAY_ICON = windows.WindowsSystemTray()
plexpy.WIN_SYS_TRAY_ICON.start()
if jellypy.CONFIG.SYS_TRAY_ICON:
jellypy.WIN_SYS_TRAY_ICON = windows.WindowsSystemTray()
jellypy.WIN_SYS_TRAY_ICON.start()
windows.set_startup()
elif common.PLATFORM == 'Darwin':
macos.set_startup()

# Open webbrowser
if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not plexpy.DEV:
plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT,
plexpy.HTTP_ROOT)
if jellypy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not jellypy.DEV:
jellypy.launch_browser(jellypy.CONFIG.HTTP_HOST, jellypy.HTTP_PORT,
jellypy.HTTP_ROOT)

if common.PLATFORM == 'Darwin' and plexpy.CONFIG.SYS_TRAY_ICON:
if common.PLATFORM == 'Darwin' and jellypy.CONFIG.SYS_TRAY_ICON:
if not macos.HAS_PYOBJC:
logger.warn("The pyobjc module is missing. Install this "
"module to enable the MacOS menu bar icon.")
plexpy.CONFIG.SYS_TRAY_ICON = False
jellypy.CONFIG.SYS_TRAY_ICON = False

if plexpy.CONFIG.SYS_TRAY_ICON:
if jellypy.CONFIG.SYS_TRAY_ICON:
# MacOS menu bar icon must be run on the main thread and is blocking
# Start the rest of Tautulli on a new thread
thread = threading.Thread(target=wait)
thread.daemon = True
thread.start()

plexpy.MAC_SYS_TRAY_ICON = macos.MacOSSystemTray()
plexpy.MAC_SYS_TRAY_ICON.start()
jellypy.MAC_SYS_TRAY_ICON = macos.MacOSSystemTray()
jellypy.MAC_SYS_TRAY_ICON.start()
else:
wait()
else:

@@ -283,29 +290,29 @@ def wait():

# Wait endlessly for a signal to happen
while True:
if not plexpy.SIGNAL:
if not jellypy.SIGNAL:
try:
time.sleep(1)
except KeyboardInterrupt:
plexpy.SIGNAL = 'shutdown'
jellypy.SIGNAL = 'shutdown'
else:
logger.info('Received signal: %s', plexpy.SIGNAL)
logger.info('Received signal: %s', jellypy.SIGNAL)

if plexpy.SIGNAL == 'shutdown':
plexpy.shutdown()
elif plexpy.SIGNAL == 'restart':
plexpy.shutdown(restart=True)
elif plexpy.SIGNAL == 'checkout':
plexpy.shutdown(restart=True, checkout=True)
elif plexpy.SIGNAL == 'reset':
plexpy.shutdown(restart=True, reset=True)
elif plexpy.SIGNAL == 'update':
plexpy.shutdown(restart=True, update=True)
if jellypy.SIGNAL == 'shutdown':
jellypy.shutdown()
elif jellypy.SIGNAL == 'restart':
jellypy.shutdown(restart=True)
elif jellypy.SIGNAL == 'checkout':
jellypy.shutdown(restart=True, checkout=True)
elif jellypy.SIGNAL == 'reset':
jellypy.shutdown(restart=True, reset=True)
elif jellypy.SIGNAL == 'update':
jellypy.shutdown(restart=True, update=True)
else:
logger.error('Unknown signal. Shutting down...')
plexpy.shutdown()
jellypy.shutdown()

plexpy.SIGNAL = None
jellypy.SIGNAL = None


if __name__ == "__main__":

PlexPy.py (24 changed lines)
@@ -1,24 +0,0 @@
#!/usr/bin/env python

# -*- coding: utf-8 -*-

# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from Tautulli import main

# Call main() from Tautulli.py
if __name__ == "__main__":
    main()

README.md (72 changed lines)
@@ -1,65 +1,33 @@
# Tautulli
# JellyPy 

A python based web application for monitoring, analytics and notifications for [Plex Media Server](https://plex.tv).
A python based web application for monitoring, analytics and notifications for [Jellyfin](https://jellyfin.org/).

This project is based on code from [Headphones](https://github.com/rembo10/headphones) and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
This project is based on [Tautulli](https://github.com/Tautulli/Tautulli) (v2.6.5 at the time).

## Features
JellyPy only supports Jellyfin. If you are running run Plex, head over to Tautulli.

* Responsive web design viewable on desktop, tablet and mobile web browsers.
* Themed to complement Plex/Web.
* Easy configuration setup (no separate web server required).
* Monitor current Plex Media Server activity.
* Fully customizable notifications for stream activity and recently added media.
* Top statistics on home page with configurable duration and measurement metric.
* Global watching history with search/filtering & dynamic column sorting.
* Full user list with general information and comparison stats.
* Individual user information including devices IP addresses.
* Complete library statistics and media file information.
* Rich analytics presented using Highcharts graphing.
* Beautiful content information pages.
* Full sync list data on all users syncing items from your library.
* And many more!!
## Status

## Preview
Working on getting basic functionality up. It's going to take some time, based on that Jellyfin's API is
not well documented (read as: not documented at all).

* [Full preview gallery available on our website](https://tautulli.com)
- [x] Login to Jellyfin
- [ ] Libraries/Media
- [ ] Activity
- [ ] History
- [ ] User



## Major Differences compared to Tautulli

* Dropped Plex/PMS Support
* Dropped Google Analytics
* Dropped Python2 support
* Dropped import from varius abondonded projects

## Installation & Support

[](https://python.org/downloads)
[](https://hub.docker.com/r/tautulli/tautulli)
[](https://hub.docker.com/r/tautulli/tautulli)
[](https://github.com/Tautulli/Tautulli/releases/latest)

| Status | Branch: `master` | Branch: `beta` | Branch: `nightly` |
| --- | --- | --- | --- |
| Release | [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/releases/latest) | [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/commits/beta) | [](https://github.com/Tautulli/Tautulli/commits/nightly) <br> [](https://github.com/Tautulli/Tautulli/commits/nightly) |
| Docker | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Amaster) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Abeta) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Anightly) |
| Installer | [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Release"+branch%3Amaster) | [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Release"+branch%3Abeta) | [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Release"+branch%3Anightly) |

[](https://github.com/Tautulli/Tautulli-Wiki/wiki)
[](https://tautulli.com/discord)
[](https://www.reddit.com/r/Tautulli/)
[](https://forums.plex.tv/t/tautulli-monitor-your-plex-media-server/225242)

* Read the [Installation Guides](https://github.com/Tautulli/Tautulli-Wiki/wiki/Installation) for instructions to install Tautulli.
* The [Frequently Asked Questions](https://github.com/Tautulli/Tautulli-Wiki/wiki/Frequently-Asked-Questions) in the wiki can help you with common problems.
* Support is available on [Discord](https://tautulli.com/discord), [Reddit](https://www.reddit.com/r/Tautulli), or the [Plex Forums](https://forums.plex.tv/t/tautulli-monitor-your-plex-media-server/225242).

## Issues & Feature Requests

[](https://github.com/Tautulli/Tautulli-Issues)
[](https://feathub.com/Tautulli/Tautulli)

* Please see the [Issues Repository](https://github.com/Tautulli/Tautulli-Issues).
TODO

## License

[](https://github.com/Tautulli/Tautulli/blob/master/LICENSE)

This is free software under the GPL v3 open source license. Feel free to do with it what you wish, but any modification must be open sourced. A copy of the license is included.

This software includes Highsoft software libraries which you may freely distribute for non-commercial use. Commerical users must licence this software, for more information visit https://shop.highsoft.com/faq/non-commercial#non-commercial-redistribution.
[](https://git.harting.dev/anonfunc/JellyPy/src/branch/master/LICENSE)

@@ -1,8 +1,8 @@
<%
import plexpy
from plexpy import version
from plexpy.helpers import anon_url
from plexpy.notifiers import BROWSER_NOTIFIERS
import jellypy
from jellypy import version
from jellypy.helpers import anon_url
from jellypy.notifiers import BROWSER_NOTIFIERS
%>
<!doctype html>

@@ -59,7 +59,9 @@
% endif
% if plexpy.INSTALL_TYPE == 'docker':
Update your Docker container or <a href="#" id="updateDismiss">Dismiss</a>
% elif plexpy.INSTALL_TYPE in ('windows', 'macos'):
% elif plexpy.INSTALL_TYPE == 'snap':
Update your Snap package or <a href="#" id="updateDismiss">Dismiss</a>
% elif plexpy.INSTALL_TYPE == 'macos':
<a href="${anon_url('https://github.com/%s/%s/releases/tag/%s' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO, plexpy.LATEST_RELEASE))}" target="_blank" rel="noreferrer">Download</a> and install the latest version or <a href="#" id="updateDismiss">Dismiss</a>
% else:
<a href="update">Update</a> or <a href="#" id="updateDismiss">Dismiss</a>

@@ -204,7 +206,7 @@ ${next.modalIncludes()}
</div>
</div>
<div class="modal-footer">
<span id="incorrect-login" style="padding-right: 25px; display: none;">Incorrect username or password.</span>
<span id="sign-in-alert" style="padding-right: 25px; display: none;"></span>
<button id="sign-in" type="submit" class="btn btn-bright login-button"><i class="fa fa-sign-in"></i> Sign In</button>
</div>
<input type="hidden" id="admin_login" name="admin_login" value="1" />

@@ -235,6 +237,7 @@ ${next.modalIncludes()}
<li class="active"><a href="#github-donation" role="tab" data-toggle="tab">GitHub</a></li>
<li><a href="#patreon-donation" role="tab" data-toggle="tab">Patreon</a></li>
<li><a href="#paypal-donation" role="tab" data-toggle="tab">PayPal</a></li>
<li><a href="#crypto-donation" role="tab" data-toggle="tab">Crypto</a></li>
</ul>
<div class="tab-content">
<div role="tabpanel" class="tab-pane active" id="github-donation" style="text-align: center">

@@ -261,6 +264,14 @@ ${next.modalIncludes()}
<img src="images/gold-rect-paypal-34px.png" alt="PayPal">
</a>
</div>
<div role="tabpanel" class="tab-pane" id="crypto-donation" style="text-align: center">
<p>
Click the button below to continue to Coinbase.
</p>
<a href="https://blankrefer.com/?https://commerce.coinbase.com/checkout/8a9fa08c-8a38-409e-9220-868124c4ba0c" target="_blank" rel="noreferrer" class="donate-with-crypto">
<span>Donate with Crypto</span>
</a>
</div>
</div>
</div>
<div class="modal-footer">

@@ -337,7 +348,9 @@ ${next.modalIncludes()}
}
if (result.install_type === 'docker') {
msg += 'Update your Docker container or <a href="#" id="updateDismiss">Dismiss</a>';
} else if (result.install_type === 'windows' || result.install_type === 'macos') {
} else if (result.install_type === 'snap') {
msg += 'Update your Snap package or <a href="#" id="updateDismiss">Dismiss</a>';
} else if (result.install_type === 'macos') {
msg += '<a href="' + result.release_url + '" target="_blank" rel="noreferrer">Download</a> and install the latest version or <a href="#" id="updateDismiss">Dismiss</a>'
} else {
msg += '<a href="update">Update</a> or <a href="#" id="updateDismiss">Dismiss</a>';

@@ -446,12 +459,16 @@ ${next.modalIncludes()}
data: $(this).serialize(),
dataType: 'json',
statusCode: {
200: function() {
200: function(xhr, status) {
window.location = "${http_root}";
},
401: function() {
$('#incorrect-login').show();
$('#username').focus();
401: function(xhr, status) {
$('#sign-in-alert').text('Incorrect username or password.').show();
$('#username').focus();
},
429: function(xhr, status) {
var retry = Math.ceil(xhr.getResponseHeader('Retry-After') / 60)
$('#sign-in-alert').text('Too many login attempts. Try again in ' + retry + ' minute(s).').show();
}
},
complete: function() {

@@ -13,8 +13,8 @@ DOCUMENTATION :: END
import os
import sys
import plexpy
from plexpy import common, logger
from plexpy.helpers import anon_url
from jellypy import common, logger
from jellypy.helpers import anon_url
%>

<table class="config-info-table small-muted">

@@ -89,14 +89,14 @@ select.form-control {
margin-bottom: 4px;
padding-left: 5px;
}
.selectize-control.form-control.selectize-pms-ip .selectize-input {
.selectize-control.form-control.selectize-jellyfin-ip .selectize-input {
padding-left: 12px !important;
border-top-left-radius: 3px;
border-bottom-left-radius: 3px;
min-height: 32px !important;
height: 32px !important;
}
.input-group .selectize-control.form-control.selectize-pms-ip .selectize-input > div {
.input-group .selectize-control.form-control.selectize-jellyfin-ip .selectize-input > div {
max-width: 450px;
overflow: hidden;
text-overflow: ellipsis;

@@ -104,18 +104,18 @@ select.form-control {
.wizard-input-section p.welcome-message {
margin: 20px 0;
}
.wizard-input-section .selectize-control.form-control.selectize-pms-ip .selectize-input > div {
.wizard-input-section .selectize-control.form-control.selectize-jellyfin-ip .selectize-input > div {
max-width: 360px;
overflow: hidden;
text-overflow: ellipsis;
}
#selectize-pms-ip-container .selectize-dropdown.form-control.selectize-pms-ip {
#selectize-jellyfin-ip-container .selectize-dropdown.form-control.selectize-jellyfin-ip {
margin-left: 15px;
}
.wizard-input-section .selectize-control.form-control.selectize-pms-ip .selectize-dropdown .selectize-dropdown-content {
.wizard-input-section .selectize-control.form-control.selectize-jellyfin-ip .selectize-dropdown .selectize-dropdown-content {
max-height: 150px;
}
.wizard-input-section .selectize-dropdown.form-control.selectize-pms-ip {
.wizard-input-section .selectize-dropdown.form-control.selectize-jellyfin-ip {
margin-top: 0 !important;
}
#condition-widget .fa-plus,

@@ -4368,3 +4368,66 @@ a[data-tab-destination] {
.news-body a:hover {
color: #f9be03;
}

a.donate-with-crypto,
a.donate-with-crypto > span {
background: none;
border: none;
border-radius: 0;
box-sizing: border-box;
clear: none;
clip: auto;
cursor: default;
display: block;
float: none;
height: auto;
margin: 0;
max-height: none;
min-height: none;
padding: 0;
opacity: 1;
text-shadow: none;
vertical-align: baseline;
visibility: visible;
width: auto;
}
a.donate-with-crypto {
user-select: none;
user-drag: none;
-webkit-user-drag: none;
text-decoration: none;
background: #1652f0 linear-gradient(#1652f0, #0655ab);
cursor: pointer;
transition: background 0.2s ease-in-out, padding 0.2s;
border-radius: 6px;
display: inline-block;
height: 40px;
padding: 9px 15px 11px 15px;
position: relative;
min-width: 160px;
}
a.donate-with-crypto:hover {
background: #1652f0;
}
a.donate-with-crypto > span {
color: white;
font: normal 500 14px/20px -apple-system, BlinkMacSystemFont, '.SFNSText-Regular', 'San Francisco', 'Roboto', 'Segoe UI', 'Helvetica Neue', 'Lucida Grande', sans-serif;
letter-spacing: 0;
overflow: hidden;
text-align: center;
text-overflow: ellipsis;
text-shadow: 0 1px 0 rgba(0, 0, 0, 0.15);
white-space: nowrap;
}
a.donate-with-crypto::after {
border-radius: 6px;
box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.2);
content: '';
display: block;
width: 100%;
height: 100%;
position: absolute;
opacity: 1;
top: 0;
left: 0;
}

@@ -27,7 +27,7 @@ DOCUMENTATION :: END
</%doc>

<%!
from plexpy import helpers
from jellypy import helpers
%>

% if data != None:

@@ -27,7 +27,7 @@ DOCUMENTATION :: END
</%doc>

<%!
from plexpy import helpers
from jellypy import helpers
%>

% if data != None:

@@ -14,9 +14,9 @@ data :: Usable parameters
DOCUMENTATION :: END
</%doc>
<%
import plexpy
from plexpy import exporter
from plexpy.helpers import anon_url
import jellypy
from jellypy import exporter
from jellypy.helpers import anon_url
export = exporter.Export()
thumb_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[0]])
art_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[1]])

Binary file changed (not shown); previous size 123 KiB.

data/interfaces/default/images/logo-flat-white.ico (new binary file, 200 KiB; not shown)

data/interfaces/default/images/logo-flat-white.png (new binary file, 15 KiB; not shown)

@@ -5,7 +5,7 @@
</%def>

<%def name="body()">
<% from plexpy import PLEX_SERVER_UP %>
<% from jellypy import PLEX_SERVER_UP %>
<div class="container-fluid">
% for section in config['home_sections']:
% if section == 'current_activity':

@@ -212,6 +212,28 @@
</div>
</div>
</div>
<% from plexpy.helpers import anon_url %>
<div id="python2-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="python2-modal">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
<h4 class="modal-title">Unable to Update</h4>
</div>
<div class="modal-body" style="text-align: center;">
<p>Tautulli is still running using Python 2 and cannot be updated past v2.6.3.</p>
<p>Python 3 is required to continue receiving updates.</p>
<p>
<strong>Please see the <a href="${anon_url('https://github.com/Tautulli/Tautulli-Wiki/wiki/Upgrading-to-Python-3-%28Tautulli-v2.5%29')}" target="_blank" rel="noreferrer">wiki</a>
for instructions on how to upgrade to Python 3.</strong>
</p>
</div>
<div class="modal-footer">
<input type="button" class="btn btn-bright" data-dismiss="modal" value="Close">
</div>
</div>
</div>
</div>
% endif

<div class="modal fade" id="ip-info-modal" tabindex="-1" role="dialog" aria-labelledby="ip-info-modal">

@@ -258,22 +280,32 @@

var error_msg = 'There was an error communicating with your Plex Server.' + msg_settings;

% if 'current_activity' in config['home_sections'] or 'recently_added' in config['home_sections']:
var server_status;
server_status = setInterval(function() {
$.getJSON('server_status', function (data) {
if (data.connected === true) {
clearInterval(server_status);
% if 'current_activity' in config['home_sections']:
$('#currentActivity').html('<div id="dashboard-checking-activity" class="text-muted"><i class="fa fa-refresh fa-spin"></i> Checking for activity...</div>');
$('#recentlyAdded').html('<div id="dashboard-checking-recently-added" class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>');
activityConnected();
% endif
% if 'recently_added' in config['home_sections']:
$('#recentlyAdded').html('<div id="dashboard-checking-recently-added" class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>');
recentlyAddedConnected();
% endif
} else if (data.connected === false) {
clearInterval(server_status);
% if 'current_activity' in config['home_sections']:
$('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">' + error_msg + '</div>');
% endif
% if 'recently_added' in config['home_sections']:
$('#recentlyAdded').html('<div id="dashboard-no-recently-added" class="text-muted">' + error_msg + '</div>');
% endif
}
});
}, 1000);
% endif
</script>
% if 'current_activity' in config['home_sections']:
<script>

@@ -1010,4 +1042,16 @@
});
</script>
% endif
% if _session['user_group'] == 'admin':
<script>
const queryString = window.location.search;
const urlParams = new URLSearchParams(queryString);
if (urlParams.get('update') === 'python2') {
$("#python2-modal").modal({
backdrop: 'static',
keyboard: false
});
}
</script>
% endif
</%def>

@@ -39,9 +39,9 @@ DOCUMENTATION :: END
|
||||
from collections import defaultdict
|
||||
import re
|
||||
|
||||
from plexpy import notifiers
|
||||
from plexpy.common import MEDIA_TYPE_HEADERS, MEDIA_FLAGS_AUDIO, MEDIA_FLAGS_VIDEO
|
||||
from plexpy.helpers import page, get_percent, cast_to_int
|
||||
from jellypy import notifiers
|
||||
from jellypy.common import MEDIA_TYPE_HEADERS, MEDIA_FLAGS_AUDIO, MEDIA_FLAGS_VIDEO
|
||||
from jellypy.helpers import page, get_percent, cast_to_int
|
||||
|
||||
# Get audio codec file
|
||||
def af(codec):
|
||||
|
||||
@@ -28,7 +28,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data != None:
|
||||
<%
|
||||
from plexpy.helpers import cast_to_int, page
|
||||
from jellypy.helpers import cast_to_int, page
|
||||
%>
|
||||
% if data['children_count'] > 0:
|
||||
<div class="item-children-wrapper">
|
||||
|
||||
@@ -28,8 +28,8 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data != None:
|
||||
<%
|
||||
from plexpy.common import MEDIA_TYPE_HEADERS
|
||||
from plexpy.helpers import page
|
||||
from jellypy.common import MEDIA_TYPE_HEADERS
|
||||
from jellypy.helpers import page
|
||||
types = ('movie', 'show', 'artist', 'album')
|
||||
%>
|
||||
% for media_type in types:
|
||||
|
||||
@@ -54,7 +54,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data != None:
|
||||
<%
|
||||
from plexpy.helpers import page
|
||||
from jellypy.helpers import page
|
||||
%>
|
||||
% if data['results_count'] > 0:
|
||||
% if 'collection' in data['results_list'] and data['results_list']['collection']:
|
||||
|
||||
@@ -145,7 +145,7 @@ function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
dataString = $(formID).serialize();
|
||||
}
|
||||
// Loader Image
|
||||
var loader = $("<div class='msg ajaxLoader-" + url +"'><i class='fa fa-refresh fa-spin'></i> Saving...</div>");
|
||||
var loader = $("<div class='msg ajaxLoader-" + url + "'><i class='fa fa-refresh fa-spin'></i> Saving...</div>");
|
||||
// Data Success Message
|
||||
var dataSucces = $(elem).data('success');
|
||||
if (typeof dataSucces === "undefined") {
|
||||
@@ -248,10 +248,10 @@ getBrowsePath = function (key, path, filter_ext) {
|
||||
path: path,
|
||||
filter_ext: filter_ext
|
||||
},
|
||||
success: function(data) {
|
||||
success: function (data) {
|
||||
deferred.resolve(data);
|
||||
},
|
||||
error: function() {
|
||||
error: function () {
|
||||
deferred.reject();
|
||||
}
|
||||
});
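A usage sketch for the reformatted helper above; it assumes getBrowsePath returns the jQuery Deferred it resolves, which is not shown in this hunk, and the path and extension filter are made-up values.
// Hypothetical usage (assumes the function returns its Deferred/promise):
getBrowsePath(null, '/config', '.db').done(function (data) {
    console.log('browse results:', data);
}).fail(function () {
    console.log('browse request failed');
});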
|
||||
@@ -350,13 +350,13 @@ function getPercent(value1, value2) {
|
||||
|
||||
function millisecondsToMinutes(ms, roundToMinute) {
|
||||
if (ms > 0) {
|
||||
var minutes = Math.floor(ms / 60000);
|
||||
var seconds = ((ms % 60000) / 1000).toFixed(0);
|
||||
if (roundToMinute) {
|
||||
return (seconds >= 30 ? (minutes + 1) : minutes);
|
||||
} else {
|
||||
return (seconds == 60 ? (minutes + 1) + ":00" : minutes + ":" + (seconds < 10 ? "0" : "") + seconds);
|
||||
}
|
||||
var minutes = Math.floor(ms / 60000);
|
||||
var seconds = ((ms % 60000) / 1000).toFixed(0);
|
||||
if (roundToMinute) {
|
||||
return (seconds >= 30 ? (minutes + 1) : minutes);
|
||||
} else {
|
||||
return (seconds == 60 ? (minutes + 1) + ":00" : minutes + ":" + (seconds < 10 ? "0" : "") + seconds);
|
||||
}
|
||||
} else {
|
||||
if (roundToMinute) {
|
||||
return '0';
|
||||
@@ -366,7 +366,7 @@ function millisecondsToMinutes(ms, roundToMinute) {
|
||||
}
|
||||
}
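To make the flattened conversion logic above concrete, two illustrative calls to millisecondsToMinutes; the input value is made up, and the humanDuration change below is formatting only, so no behavioral example is implied for it.
// Illustrative values only:
millisecondsToMinutes(95000);        // "1:35"  (1 minute, 35 seconds)
millisecondsToMinutes(95000, true);  // 2       (35 seconds rounds the minute up)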
|
||||
|
||||
function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
|
||||
function humanDuration(ms, sig = 'dhm', units = 'ms', return_seconds = 300000) {
|
||||
var factors = {
|
||||
d: 86400000,
|
||||
h: 3600000,
|
||||
@@ -385,7 +385,7 @@ function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
|
||||
|
||||
ms = ms * factors[units];
|
||||
|
||||
h = ms % factors['d'];
|
||||
h = ms % factors['d'];
|
||||
d = Math.trunc(ms / factors['d']);
|
||||
|
||||
m = h % factors['h'];
|
||||
@@ -460,6 +460,7 @@ function getCookie(cname) {
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
var Accordion = function (el, multiple, close) {
|
||||
this.el = el || {};
|
||||
this.multiple = multiple || false;
|
||||
@@ -496,6 +497,7 @@ function clearSearchButton(tableName, table) {
|
||||
table.search('').draw();
|
||||
});
|
||||
}
|
||||
|
||||
// Taken from https://github.com/Hellowlol/HTPC-Manager
|
||||
window.onerror = function (message, file, line) {
|
||||
var e = {
|
||||
@@ -504,7 +506,8 @@ window.onerror = function (message, file, line) {
|
||||
'file': file,
|
||||
'line': line
|
||||
};
|
||||
$.post("log_js_errors", e, function (data) { });
|
||||
$.post("log_js_errors", e, function (data) {
|
||||
});
|
||||
};
|
||||
|
||||
$('*').on('click', '.refresh_pms_image', function (e) {
|
||||
@@ -554,14 +557,11 @@ function forceMinMax(elem) {
|
||||
var default_val = parseInt(elem.data('default'));
|
||||
if (isNaN(val)) {
|
||||
elem.val(default_val);
|
||||
}
|
||||
else if (min !== undefined && val < min) {
|
||||
} else if (min !== undefined && val < min) {
|
||||
elem.val(min);
|
||||
}
|
||||
else if (max !== undefined && val > max) {
|
||||
} else if (max !== undefined && val > max) {
|
||||
elem.val(max);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
elem.val(val);
|
||||
}
|
||||
}
|
||||
@@ -570,14 +570,14 @@ function capitalizeFirstLetter(string) {
|
||||
return string.charAt(0).toUpperCase() + string.slice(1);
|
||||
}
|
||||
|
||||
$.fn.slideToggleBool = function(bool, options) {
|
||||
return bool ? $(this).slideDown(options) : $(this).slideUp(options);
|
||||
$.fn.slideToggleBool = function (bool, options) {
|
||||
return bool ? $(this).slideDown(options) : $(this).slideUp(options);
|
||||
};
|
||||
|
||||
function openPlexXML(endpoint, plextv, params) {
|
||||
var data = $.extend({endpoint: endpoint, plextv: plextv}, params);
|
||||
$.getJSON('return_plex_xml_url', data, function(xml_url) {
|
||||
window.open(xml_url, '_blank');
|
||||
$.getJSON('return_plex_xml_url', data, function (xml_url) {
|
||||
window.open(xml_url, '_blank');
|
||||
});
|
||||
}
|
||||
|
||||
@@ -609,6 +609,7 @@ function setLocalStorage(key, value, path) {
|
||||
}
|
||||
localStorage.setItem(key_path, value);
|
||||
}
|
||||
|
||||
function getLocalStorage(key, default_value, path) {
|
||||
var key_path = key;
|
||||
if (path !== false) {
|
||||
@@ -624,7 +625,7 @@ function getLocalStorage(key, default_value, path) {
|
||||
}
|
||||
|
||||
function uuidv4() {
|
||||
return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, function(c) {
|
||||
return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, function (c) {
|
||||
var cryptoObj = window.crypto || window.msCrypto; // for IE 11
|
||||
return (c ^ cryptoObj.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
|
||||
});
|
||||
@@ -648,44 +649,44 @@ function getPlexHeaders() {
|
||||
|
||||
var plex_oauth_window = null;
|
||||
const plex_oauth_loader = '<style>' +
|
||||
'.login-loader-container {' +
|
||||
'font-family: "Open Sans", Arial, sans-serif;' +
|
||||
'position: absolute;' +
|
||||
'top: 0;' +
|
||||
'right: 0;' +
|
||||
'bottom: 0;' +
|
||||
'left: 0;' +
|
||||
'}' +
|
||||
'.login-loader-message {' +
|
||||
'color: #282A2D;' +
|
||||
'text-align: center;' +
|
||||
'position: absolute;' +
|
||||
'left: 50%;' +
|
||||
'top: 25%;' +
|
||||
'transform: translate(-50%, -50%);' +
|
||||
'}' +
|
||||
'.login-loader {' +
|
||||
'border: 5px solid #ccc;' +
|
||||
'-webkit-animation: spin 1s linear infinite;' +
|
||||
'animation: spin 1s linear infinite;' +
|
||||
'border-top: 5px solid #282A2D;' +
|
||||
'border-radius: 50%;' +
|
||||
'width: 50px;' +
|
||||
'height: 50px;' +
|
||||
'position: relative;' +
|
||||
'left: calc(50% - 25px);' +
|
||||
'}' +
|
||||
'@keyframes spin {' +
|
||||
'0% { transform: rotate(0deg); }' +
|
||||
'100% { transform: rotate(360deg); }' +
|
||||
'}' +
|
||||
'.login-loader-container {' +
|
||||
'font-family: "Open Sans", Arial, sans-serif;' +
|
||||
'position: absolute;' +
|
||||
'top: 0;' +
|
||||
'right: 0;' +
|
||||
'bottom: 0;' +
|
||||
'left: 0;' +
|
||||
'}' +
|
||||
'.login-loader-message {' +
|
||||
'color: #282A2D;' +
|
||||
'text-align: center;' +
|
||||
'position: absolute;' +
|
||||
'left: 50%;' +
|
||||
'top: 25%;' +
|
||||
'transform: translate(-50%, -50%);' +
|
||||
'}' +
|
||||
'.login-loader {' +
|
||||
'border: 5px solid #ccc;' +
|
||||
'-webkit-animation: spin 1s linear infinite;' +
|
||||
'animation: spin 1s linear infinite;' +
|
||||
'border-top: 5px solid #282A2D;' +
|
||||
'border-radius: 50%;' +
|
||||
'width: 50px;' +
|
||||
'height: 50px;' +
|
||||
'position: relative;' +
|
||||
'left: calc(50% - 25px);' +
|
||||
'}' +
|
||||
'@keyframes spin {' +
|
||||
'0% { transform: rotate(0deg); }' +
|
||||
'100% { transform: rotate(360deg); }' +
|
||||
'}' +
|
||||
'</style>' +
|
||||
'<div class="login-loader-container">' +
|
||||
'<div class="login-loader-message">' +
|
||||
'<div class="login-loader"></div>' +
|
||||
'<br>' +
|
||||
'Redirecting to the Plex login page...' +
|
||||
'</div>' +
|
||||
'<div class="login-loader-message">' +
|
||||
'<div class="login-loader"></div>' +
|
||||
'<br>' +
|
||||
'Redirecting to the Plex login page...' +
|
||||
'</div>' +
|
||||
'</div>';
|
||||
|
||||
function closePlexOAuthWindow() {
|
||||
@@ -702,10 +703,10 @@ getPlexOAuthPin = function () {
|
||||
url: 'https://plex.tv/api/v2/pins?strong=true',
|
||||
type: 'POST',
|
||||
headers: x_plex_headers,
|
||||
success: function(data) {
|
||||
success: function (data) {
|
||||
deferred.resolve({pin: data.id, code: data.code});
|
||||
},
|
||||
error: function() {
|
||||
error: function () {
|
||||
closePlexOAuthWindow();
|
||||
deferred.reject();
|
||||
}
|
||||
@@ -751,7 +752,7 @@ function PlexOAuth(success, error, pre) {
|
||||
type: 'GET',
|
||||
headers: x_plex_headers,
|
||||
success: function (data) {
|
||||
if (data.authToken){
|
||||
if (data.authToken) {
|
||||
closePlexOAuthWindow();
|
||||
if (typeof success === "function") {
|
||||
success(data.authToken)
|
||||
@@ -767,8 +768,10 @@ function PlexOAuth(success, error, pre) {
|
||||
}
|
||||
},
|
||||
complete: function () {
|
||||
if (!plex_oauth_window.closed && polling === pin){
|
||||
setTimeout(function() {poll()}, 1000);
|
||||
if (!plex_oauth_window.closed && polling === pin) {
|
||||
setTimeout(function () {
|
||||
poll()
|
||||
}, 1000);
|
||||
}
|
||||
},
|
||||
timeout: 10000
|
||||
@@ -783,7 +786,7 @@ function PlexOAuth(success, error, pre) {
|
||||
}
|
||||
|
||||
function encodeData(data) {
|
||||
return Object.keys(data).map(function(key) {
|
||||
return Object.keys(data).map(function (key) {
|
||||
return [key, data[key]].map(encodeURIComponent).join("=");
|
||||
}).join("&");
|
||||
}
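A small worked example of the reformatted encodeData helper above, using made-up values:
// Illustrative values only:
encodeData({ code: 'abc 123', clientID: 'tautulli' });
// -> "code=abc%20123&clientID=tautulli"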
|
||||
@@ -808,17 +811,39 @@ function page(endpoint, ...args) {
|
||||
function pms_image_proxy(img, rating_key, width, height, opacity, background, blur, fallback, refresh, clip, img_format) {
|
||||
var params = {};
|
||||
|
||||
if (img != null) { params.img = img; }
|
||||
if (rating_key != null) { params.rating_key = rating_key; }
|
||||
if (width != null) { params.width = width; }
|
||||
if (height != null) { params.height = height; }
|
||||
if (opacity != null) { params.opacity = opacity; }
|
||||
if (background != null) { params.background = background; }
|
||||
if (blur != null) { params.blur = blur; }
|
||||
if (fallback != null) { params.fallback = fallback; }
|
||||
if (refresh != null) { params.refresh = true; }
|
||||
if (clip != null) { params.clip = true; }
|
||||
if (img_format != null) { params.img_format = img_format; }
|
||||
if (img != null) {
|
||||
params.img = img;
|
||||
}
|
||||
if (rating_key != null) {
|
||||
params.rating_key = rating_key;
|
||||
}
|
||||
if (width != null) {
|
||||
params.width = width;
|
||||
}
|
||||
if (height != null) {
|
||||
params.height = height;
|
||||
}
|
||||
if (opacity != null) {
|
||||
params.opacity = opacity;
|
||||
}
|
||||
if (background != null) {
|
||||
params.background = background;
|
||||
}
|
||||
if (blur != null) {
|
||||
params.blur = blur;
|
||||
}
|
||||
if (fallback != null) {
|
||||
params.fallback = fallback;
|
||||
}
|
||||
if (refresh != null) {
|
||||
params.refresh = true;
|
||||
}
|
||||
if (clip != null) {
|
||||
params.clip = true;
|
||||
}
|
||||
if (img_format != null) {
|
||||
params.img_format = img_format;
|
||||
}
|
||||
|
||||
return params;
|
||||
}
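For reference, an illustrative call to the expanded parameter builder above; the image path and rating key are made-up values, not taken from this changeset.
// Hypothetical usage:
var params = pms_image_proxy('/library/metadata/123/thumb', 123, 300, 450);
// -> { img: '/library/metadata/123/thumb', rating_key: 123, width: 300, height: 450 }
// Arguments left undefined (opacity, background, blur, ...) are simply omitted from params.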
|
||||
@@ -832,7 +857,9 @@ function info_page(rating_key, guid, history, live) {
|
||||
params.rating_key = rating_key;
|
||||
}
|
||||
|
||||
if (history) { params.source = 'history'; }
|
||||
if (history) {
|
||||
params.source = 'history';
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
@@ -840,7 +867,9 @@ function info_page(rating_key, guid, history, live) {
|
||||
function library_page(section_id) {
|
||||
var params = {};
|
||||
|
||||
if (section_id != null) { params.section_id = section_id; }
|
||||
if (section_id != null) {
|
||||
params.section_id = section_id;
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
@@ -848,8 +877,12 @@ function library_page(section_id) {
|
||||
function user_page(user_id, user) {
|
||||
var params = {};
|
||||
|
||||
if (user_id != null) { params.user_id = user_id; }
|
||||
if (user != null) { params.user = user; }
|
||||
if (user_id != null) {
|
||||
params.user_id = user_id;
|
||||
}
|
||||
if (user != null) {
|
||||
params.user = user;
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
@@ -36,8 +36,8 @@ DOCUMENTATION :: END
|
||||
<%def name="body()">
|
||||
% if data:
|
||||
<%
|
||||
from plexpy.common import LIVE_TV_SECTION_ID
|
||||
from plexpy.helpers import page
|
||||
from jellypy.common import LIVE_TV_SECTION_ID
|
||||
from jellypy.helpers import page
|
||||
%>
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
|
||||
@@ -32,7 +32,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data:
|
||||
<%
|
||||
from plexpy.helpers import page
|
||||
from jellypy.helpers import page
|
||||
%>
|
||||
<div class="dashboard-recent-media-row">
|
||||
<div id="recently-added-row-scroller" style="left: 0;">
|
||||
|
||||
@@ -25,7 +25,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data:
|
||||
<%
|
||||
from plexpy.helpers import page
|
||||
from jellypy.helpers import page
|
||||
|
||||
types = ('movie', 'show', 'artist', 'photo')
|
||||
headers = {'movie': ('Movie Libraries', ('Movies', '', '')),
|
||||
|
||||
@@ -19,7 +19,7 @@ DOCUMENTATION :: END
|
||||
</%doc>
|
||||
|
||||
% if data:
|
||||
<% from plexpy.helpers import page %>
|
||||
<% from jellypy.helpers import page %>
|
||||
% for a in data:
|
||||
<ul class="list-unstyled">
|
||||
<div class="user-player-instance">
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<%
|
||||
import plexpy
|
||||
plex_login = plexpy.CONFIG.HTTP_PLEX_ADMIN or plexpy.CONFIG.ALLOW_GUEST_ACCESS
|
||||
import jellypy
|
||||
plex_login = jellypy.CONFIG.HTTP_PLEX_ADMIN or jellypy.CONFIG.ALLOW_GUEST_ACCESS
|
||||
%>
|
||||
<!doctype html>
|
||||
|
||||
@@ -159,16 +159,20 @@
|
||||
data: data,
|
||||
dataType: 'json',
|
||||
statusCode: {
|
||||
200: function() {
|
||||
200: function(xhr, status) {
|
||||
window.location = "${redirect_uri or http_root}";
|
||||
},
|
||||
401: function() {
|
||||
401: function(xhr, status) {
|
||||
if (plex) {
|
||||
$('#sign-in-alert').text('Invalid Plex Login.').show();
|
||||
} else {
|
||||
$('#sign-in-alert').text('Incorrect username or password.').show();
|
||||
$('#username').focus();
|
||||
}
|
||||
},
|
||||
429: function(xhr, status) {
|
||||
var retry = Math.ceil(xhr.getResponseHeader('Retry-After') / 60)
|
||||
$('#sign-in-alert').text('Too many login attempts. Try again in ' + retry + ' minute(s).').show();
|
||||
}
|
||||
},
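// Worked example for the 429 handler above: a Retry-After header of "90" (seconds)
// gives Math.ceil(90 / 60) = 2, so the alert reads
// "Too many login attempts. Try again in 2 minute(s)." (header value is illustrative only)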
|
||||
complete: function() {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<%inherit file="base.html"/>
|
||||
<%!
|
||||
from plexpy import helpers
|
||||
from jellypy import helpers
|
||||
%>
|
||||
|
||||
<%def name="headIncludes()">
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
% if newsletter:
|
||||
<%!
|
||||
import json
|
||||
from plexpy import notifiers
|
||||
from plexpy.helpers import anon_url, checked
|
||||
from jellypy import notifiers
|
||||
from jellypy.helpers import anon_url, checked
|
||||
|
||||
all_notifiers = sorted(notifiers.get_notifiers(), key=lambda k: (k['agent_label'].lower(), k['friendly_name'], k['id']))
|
||||
email_notifiers = [n for n in all_notifiers if n['agent_name'] == 'email']
|
||||
|
||||
@@ -9,7 +9,7 @@ Version: 0.1
|
||||
DOCUMENTATION :: END
|
||||
</%doc>
|
||||
|
||||
<% from plexpy import newsletter_handler %>
|
||||
<% from jellypy import newsletter_handler %>
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for newsletter in sorted(newsletters_list, key=lambda k: (k['agent_label'], k['friendly_name'], k['id'])):
|
||||
<li class="newsletter-agent pointer" data-id="${newsletter['id']}">
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
% if notifier:
|
||||
<%
|
||||
import json
|
||||
from plexpy import notifiers, users
|
||||
from plexpy.helpers import checked
|
||||
from jellypy import notifiers, users
|
||||
from jellypy.helpers import checked
|
||||
available_notification_actions = notifiers.available_notification_actions(agent_id=notifier['agent_id'])
|
||||
|
||||
user_emails = [{'user': u['friendly_name'] or u['username'], 'email': u['email']} for u in users.Users().get_users() if u['email']]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<%
|
||||
import datetime
|
||||
import plexpy
|
||||
from plexpy import activity_handler, helpers
|
||||
import jellypy
|
||||
from jellypy import activity_handler, helpers
|
||||
|
||||
if queue == 'active sessions':
|
||||
filter_key = 'session_key-'
|
||||
|
||||
@@ -32,7 +32,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data != None:
|
||||
<%
|
||||
from plexpy.helpers import cast_to_int, page
|
||||
from jellypy.helpers import cast_to_int, page
|
||||
%>
|
||||
% if data:
|
||||
<div class="dashboard-recent-media-row">
|
||||
|
||||
@@ -11,10 +11,10 @@ DOCUMENTATION :: END
|
||||
|
||||
<%!
|
||||
import datetime
|
||||
import plexpy
|
||||
from plexpy import common, helpers
|
||||
import jellypy
|
||||
from jellypy import common, helpers
|
||||
|
||||
scheduled_jobs = [j.id for j in plexpy.SCHED.get_jobs()]
|
||||
scheduled_jobs = [j.id for j in jellypy.SCHED.get_jobs()]
|
||||
%>
|
||||
|
||||
<table class="config-scheduler-table small-muted">
|
||||
|
||||
@@ -3,12 +3,12 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
import plexpy
|
||||
from plexpy import common, notifiers, newsletters
|
||||
from plexpy.helpers import anon_url, checked
|
||||
import jellypy
|
||||
from jellypy import common, notifiers, newsletters
|
||||
from jellypy.helpers import anon_url, checked
|
||||
|
||||
docker_setting = 'disabled' if plexpy.DOCKER else ''
|
||||
docker_msg = '<span class="setting-message small">(Controlled by Docker Container)</span>' if plexpy.DOCKER else ''
|
||||
docker_setting = 'disabled' if jellypy.DOCKER else ''
|
||||
docker_msg = '<span class="setting-message small">(Controlled by Docker Container)</span>' if jellypy.DOCKER else ''
|
||||
|
||||
available_notification_agents = sorted(notifiers.available_notification_agents(), key=lambda k: k['label'].lower())
|
||||
available_newsletter_agents = sorted(newsletters.available_newsletter_agents(), key=lambda k: k['label'].lower())
|
||||
@@ -220,7 +220,7 @@
|
||||
<p class="help-block">Check for Tautulli updates periodically.</p>
|
||||
</div>
|
||||
<div id="git_update_options">
|
||||
% if not plexpy.FROZEN:
|
||||
% if not plexpy.SNAP and not (plexpy.FROZEN and common.PLATFORM == 'Darwin'):
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="plexpy_auto_update" name="plexpy_auto_update" value="1" ${config['plexpy_auto_update']} ${docker_setting}> Update Automatically ${docker_msg | n}
|
||||
@@ -977,15 +977,21 @@
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_consecutive" id="notify_consecutive" value="1" ${config['notify_consecutive']}> Allow Consecutive Notifications
|
||||
<input type="checkbox" name="notify_consecutive" id="notify_consecutive" value="1" ${config['notify_consecutive']}> Allow Playback Stop Notifications Exceeding Watched Percent
|
||||
</label>
|
||||
<p class="help-block">Enable to allow sending of consecutive notifications (i.e. both watched & stopped notifications).</p>
|
||||
<p class="help-block">
|
||||
Enable to allow sending of playback stop notifications after the watched percent is exceeded.
|
||||
Disable to only send playback stop notifications below the watched percent.
|
||||
</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_concurrent_by_ip" id="notify_concurrent_by_ip" value="1" ${config['notify_concurrent_by_ip']}> User Concurrent Streams Notifications by IP Address
|
||||
</label>
|
||||
<p class="help-block">Enable to only send a notification of concurrent streams by a single user from different IP addresses.</p>
|
||||
<p class="help-block">
|
||||
Enable to only send a concurrent streams notification by a single user from different IP addresses.
|
||||
Disable to send a concurrent streams notification anytime the concurrent stream threshold is exceeded regardless of IP address.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_concurrent_threshold">User Concurrent Stream Threshold</label>
|
||||
@@ -997,6 +1003,15 @@
|
||||
</div>
|
||||
<p class="help-block">The number of concurrent streams by a single user for Tautulli to trigger a notification. Minimum 2.</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_new_device_initial_only" id="notify_new_device_initial_only" value="1" ${config['notify_new_device_initial_only']}> User New Device Notification First Time Only
|
||||
</label>
|
||||
<p class="help-block">
|
||||
Enable to only send a new device notification the first time a user streams from a new device.
|
||||
Disable to send a new device notification every time a user streams from the device until it is recorded in history (i.e. exceeds the ignore interval).
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="notify_concurrent_threshold">Continued Session Threshold</label>
|
||||
<div class="row">
|
||||
@@ -2009,7 +2024,7 @@ Rating: {rating}/10 --> Rating: /10
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-configuration-table").html(xhr.responseText);
|
||||
$("#jellypy-configuration-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -2020,7 +2035,7 @@ Rating: {rating}/10 --> Rating: /10
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-scheduler-table").html(xhr.responseText);
|
||||
$("#jellypy-scheduler-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -2031,7 +2046,7 @@ Rating: {rating}/10 --> Rating: /10
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-notifiers-table").html(xhr.responseText);
|
||||
$("#jellypy-notifiers-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -2056,7 +2071,7 @@ Rating: {rating}/10 --> Rating: /10
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-newsletters-table").html(xhr.responseText);
|
||||
$("#jellypy-newsletters-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -2081,7 +2096,7 @@ Rating: {rating}/10 --> Rating: /10
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-mobile-devices-table").html(xhr.responseText);
|
||||
$("#jellypy-mobile-devices-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -2911,10 +2926,6 @@ $(document).ready(function() {
|
||||
});
|
||||
});
|
||||
|
||||
$('#http_base_url').change(function () {
|
||||
$(this).val($(this).val().replace(/\/*$/, ''));
|
||||
});
|
||||
|
||||
$('#http_root').change(function() {
|
||||
setBaseURLSuffix();
|
||||
});
|
||||
@@ -3094,7 +3105,7 @@ $(document).ready(function() {
|
||||
if (news_item.subtitle) { content.append(subtitle); }
|
||||
content.append(body);
|
||||
var li = $('<li/>').append(header).append(content)
|
||||
if (index === 0 && Math.abs(now.diff(date, 'days')) < 7) {
|
||||
if (index === 0 && Math.abs(now.diff(date, 'days')) <= 30) {
|
||||
li.addClass('open');
|
||||
content.css('display', 'block');
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data:
|
||||
<%
|
||||
from plexpy.common import VIDEO_RESOLUTION_OVERRIDES, AUDIO_CODEC_OVERRIDES
|
||||
from jellypy.common import VIDEO_RESOLUTION_OVERRIDES, AUDIO_CODEC_OVERRIDES
|
||||
%>
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<%inherit file="base.html"/>
|
||||
<%!
|
||||
from plexpy.helpers import anon_url
|
||||
from jellypy.helpers import anon_url
|
||||
%>
|
||||
<%def name="headIncludes()">
|
||||
</%def>
|
||||
|
||||
@@ -26,7 +26,7 @@ DOCUMENTATION :: END
|
||||
|
||||
<%inherit file="base.html"/>
|
||||
<%!
|
||||
from plexpy import helpers
|
||||
from jellypy import helpers
|
||||
%>
|
||||
|
||||
<%def name="headIncludes()">
|
||||
|
||||
@@ -28,7 +28,7 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data:
|
||||
<%
|
||||
from plexpy.helpers import page
|
||||
from jellypy.helpers import page
|
||||
%>
|
||||
<div class="dashboard-recent-media-row">
|
||||
<div id="recently-watched-row-scroller" style="left: 0;">
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<%
|
||||
import plexpy
|
||||
from plexpy import common, helpers
|
||||
import jellypy
|
||||
from jellypy import common, helpers
|
||||
%>
|
||||
|
||||
<!doctype html>
|
||||
@@ -41,7 +41,7 @@
|
||||
<body>
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<div class="wizard" id="setup-wizard" data-title="Tautulli Setup Wizard">
|
||||
<div class="wizard" id="setup-wizard" data-title="JellyPy Setup Wizard">
|
||||
<form>
|
||||
<div class="wizard-card" data-cardname="card1">
|
||||
<div style="float: right;">
|
||||
@@ -50,10 +50,7 @@
|
||||
<h3 style="line-height: 50px;">Welcome!</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="welcome-message">
|
||||
Thanks for taking the time to try out Tautulli. Hope you find it useful.
|
||||
</p>
|
||||
<p class="welcome-message">
|
||||
Tautulli requires a permanent internet connection to ensure a reliable experience.
|
||||
Thanks for taking the time to try out JellyPy. Hope you find it useful.
|
||||
</p>
|
||||
<p class="welcome-message">
|
||||
This wizard will help you get set up, to continue press Next.
|
||||
@@ -65,122 +62,150 @@
|
||||
<h3>Authentication</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
Please setup an admin username and password for Tautulli.
|
||||
Please setup an admin username and password for JellyPy.
|
||||
</p>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<label for="http_username">HTTP Username</label>
|
||||
<label for="http_username">Username</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-8">
|
||||
<input type="text" class="form-control auth-settings" id="http_username" name="http_username" value="" size="30">
|
||||
<input type="text" class="form-control auth-settings" id="http_username"
|
||||
name="http_username" value="" size="30">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<label for="http_password">HTTP Password</label>
|
||||
<label for="http_password">Password</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-8">
|
||||
<input type="password" class="form-control auth-settings" id="http_password" name="http_password" value="" size="30" autocomplete="new-password">
|
||||
<input type="password" class="form-control auth-settings" id="http_password"
|
||||
name="http_password" value="" size="30" autocomplete="new-password">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<input type="hidden" class="form-control" name="http_hash_password" id="http_hash_password" value="1">
|
||||
<input type="hidden" class="form-control" name="http_hash_password" id="http_hash_password"
|
||||
value="1">
|
||||
<input type="hidden" class="form-control" name="http_plex_admin" id="http_plex_admin" value="1">
|
||||
<input type="hidden" id="authentication_valid" data-validate="validateAuthentication" value="">
|
||||
<span style="display: none;" id="authentication-status"></span>
|
||||
</div>
|
||||
|
||||
<div class="wizard-card" data-cardname="card3">
|
||||
<h3>Plex Account</h3>
|
||||
<h3>Jellyfin Server</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
Tautulli requires a Plex.tv account. Click the button below to sign in on Plex.tv. You may need to allow popups in your browser.
|
||||
</p>
|
||||
</div>
|
||||
<input type="hidden" class="form-control" name="pms_token" id="pms_token" value="" data-validate="validatePMStoken">
|
||||
<a class="btn btn-dark" id="sign-in-plex" href="#" role="button">Sign In with Plex</a>
|
||||
<span style="margin-left: 10px; display: none;" id="pms-token-status"></span>
|
||||
</div>
|
||||
|
||||
<div class="wizard-card" data-cardname="card4">
|
||||
<h3>Plex Media Server</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
Select your Plex Media Server from the dropdown menu or enter an IP address or hostname.
|
||||
Select your Jellyfin Server from the dropdown menu or enter an IP address or hostname.
|
||||
</p>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<label for="pms_ip_selectize">Plex IP Address or Hostname</label>
|
||||
<label for="jellyfin_ip_selectize">Jellyfin IP Address or Hostname</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-12">
|
||||
<select class="form-control pms-settings selectize-pms-ip" id="pms_ip_selectize">
|
||||
% if config['pms_identifier']:
|
||||
<option value="${config['pms_ip']}:${config['pms_port']}"
|
||||
data-identifier="${config['pms_identifier']}"
|
||||
data-ip="${config['pms_ip']}"
|
||||
data-port="${config['pms_port']}"
|
||||
data-local="${int(not int(config['pms_is_remote']))}"
|
||||
data-ssl="${config['pms_ssl']}"
|
||||
data-is_cloud="${config['pms_is_cloud']}"
|
||||
data-label="${config['pms_name'] or 'Local'}"
|
||||
selected>${config['pms_ip']}</option>
|
||||
<select class="form-control jellyfin-settings selectize-jellyfin-ip" id="jellyfin_ip_selectize">
|
||||
% if config['jellyfin_identifier']:
|
||||
<option value="${config['jellyfin_ip']}:${config['jellyfin_port']}"
|
||||
data-identifier="${config['jellyfin_identifier']}"
|
||||
data-ip="${config['jellyfin_ip']}"
|
||||
data-port="${config['jellyfin_port']}"
|
||||
data-local="${int(not int(config['jellyfin_is_remote']))}"
|
||||
data-ssl="${config['jellyfin_ssl']}"
|
||||
data-is_cloud="${config['jellyfin_is_cloud']}"
|
||||
data-label="${config['jellyfin_name'] or 'Local'}"
|
||||
selected>${config['jellyfin_ip']}
|
||||
</option>
|
||||
% endif
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<label for="pms_port">Plex Port</label>
|
||||
<label for="jellyfin_port">Jellyfin Port</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-3">
|
||||
<input type="text" class="form-control pms-settings" name="pms_port" id="pms_port" placeholder="32400" value="${config['pms_port']}" required>
|
||||
<input type="text" class="form-control jellyfin-settings" name="jellyfin_port" id="jellyfin_port"
|
||||
placeholder="8096" value="${config['jellyfin_port']}" required>
|
||||
</div>
|
||||
<div class="col-xs-4">
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle pms-settings" data-id="pms_ssl" value="1" ${helpers.checked(config['pms_ssl'])}> Use SSL
|
||||
<input type="hidden" id="pms_ssl" name="pms_ssl" value="${config['pms_ssl']}">
|
||||
<input type="checkbox" id="jellyfin_ssl_checkbox"
|
||||
class="checkbox-toggle jellyfin-settings" data-id="jellyfin_ssl" value="1"
|
||||
${helpers.checked(config['jellyfin_ssl'])}> Use SSL
|
||||
<input type="hidden" id="jellyfin_ssl" name="jellyfin_ssl" value="${config['jellyfin_ssl']}">
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-xs-4">
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${helpers.checked(config['pms_is_remote'])}> Remote Server
|
||||
<input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
|
||||
<input type="checkbox" id="jellyfin_is_remote_checkbox"
|
||||
class="checkbox-toggle jellyfin-settings" data-id="jellyfin_is_remote" value="1"
|
||||
${helpers.checked(config['jellyfin_is_remote'])}> Remote Server
|
||||
<input type="hidden" id="jellyfin_is_remote" name="jellyfin_is_remote"
|
||||
value="${config['jellyfin_is_remote']}">
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<input type="hidden" id="pms_valid" data-validate="validatePMSip" value="">
|
||||
<input type="hidden" id="pms_ip" name="pms_ip" value="${config['pms_ip']}">
|
||||
<input type="hidden" id="pms_is_cloud" name="pms_is_cloud" value="${config['pms_is_cloud']}">
|
||||
<input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
<a class="btn btn-dark" id="verify-plex-server" href="#" role="button">Verify</a>
|
||||
<span style="margin-left: 10px; display: none;" id="pms-verify-status"></span>
|
||||
<input type="hidden" id="jellyfin_ip" name="jellyfin_ip" value="${config['jellyfin_ip']}">
|
||||
</div>
|
||||
|
||||
<div class="wizard-card" data-cardname="card4">
|
||||
<h3>Jellyfin Auth</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
Authenticate with your Jellyfin instance.
|
||||
</p>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<label for="jellyfin_user">Jellyfin Username</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-4">
|
||||
<input type="text" class="form-control jellyfin-settings" name="jellyfin_user"
|
||||
id="jellyfin_user" value="" required>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<label for="jellyfin_password">Jellyfin Password</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-4">
|
||||
<input type="password" class="form-control jellyfin-settings" name="jellyfin_password"
|
||||
id="jellyfin_password" value="" required>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<a class="btn btn-dark" id="verify-jellyfin-server" href="#" role="button">Verify</a>
|
||||
<span style="margin-left: 10px; display: none;" id="jellyfin-verify-status"></span>
|
||||
</div>
|
||||
|
||||
<div class="wizard-card" data-cardname="card5">
|
||||
<h3>Activity Logging</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
Tautulli will keep a history of all streaming activity on your Plex server.
|
||||
JellyPy will keep a history of all streaming activity on your Jellyfin server.
|
||||
</p>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<label for="logging_ignore_interval">Ignore Interval</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-4">
|
||||
<input type="text" class="form-control pms-monitoring" id="logging_ignore_interval" name="logging_ignore_interval" placeholder="120" value="${config['logging_ignore_interval']}" data-validate="validateIgnoreInterval" required>
|
||||
<input type="text" class="form-control jellyfin-monitoring" id="logging_ignore_interval"
|
||||
name="logging_ignore_interval" placeholder="120"
|
||||
value="${config['logging_ignore_interval']}"
|
||||
data-validate="validateIgnoreInterval" required>
|
||||
</div>
|
||||
<span style="margin-left: 10px; line-height: 35px; display: none;" id="ignore-int-status"></span>
|
||||
<span style="margin-left: 10px; line-height: 35px; display: none;"
|
||||
id="ignore-int-status"></span>
|
||||
</div>
|
||||
<p class="help-block">The interval (in seconds) an item must be in a playing state before logging it. 0 to disable.</p>
|
||||
<p class="help-block">The interval (in seconds) an item must be in a playing state before
|
||||
logging it. 0 to disable.</p>
|
||||
</div>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
Additional options to disable history logging for certain libraries or users can be found by editing them
|
||||
Additional options to disable history logging for certain libraries or users can be found by
|
||||
editing them
|
||||
on the <strong>Libraries</strong> or <strong>Users</strong> pages.
|
||||
</p>
|
||||
</div>
|
||||
@@ -190,24 +215,13 @@
|
||||
<h3>Notifications</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
Tautulli can send a wide variety of notifications to alert you of activity on your Plex server.
|
||||
JellyPy can send a wide variety of notifications to alert you of activity on your Jellyfin
|
||||
server.
|
||||
</p>
|
||||
<p class="help-block">
|
||||
To set up a notification agent, navigate to the <strong>Settings</strong> page
|
||||
and to the <strong>Notification Agents</strong> tab after you have completed this setup wizard.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="wizard-card" data-cardname="card7">
|
||||
<h3>Database Import</h3>
|
||||
<div class="wizard-input-section">
|
||||
<p class="help-block">
|
||||
If you have an existing Tautulli, PlexWatch, or Plexivity database, you can import the data into Tautulli.
|
||||
</p>
|
||||
<p class="help-block">
|
||||
To import a database, navigate to the <strong>Settings</strong> page
|
||||
and to the <strong>Import & Backups</strong> tab after you have completed this setup wizard.
|
||||
and to the <strong>Notification Agents</strong> tab after you have completed this setup
|
||||
wizard.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -215,33 +229,39 @@
|
||||
<div style="display: none;">
|
||||
<input type="checkbox" name="first_run" id="first_run" value="1" checked>
|
||||
<input type="checkbox" name="group_history_tables" id="group_history_tables" value="1" checked>
|
||||
<input type="checkbox" name="history_table_activity" id="history_table_activity" value="1" checked>
|
||||
<input type="checkbox" name="history_table_activity" id="history_table_activity" value="1"
|
||||
checked>
|
||||
<input type="checkbox" name="sys_tray_icon" id="sys_tray_icon" value="1" checked>
|
||||
<input type="checkbox" name="launch_startup" id="launch_startup" value="1" checked>
|
||||
<input type="checkbox" name="launch_browser" id="launch_browser" value="1" checked>
|
||||
<input type="checkbox" name="api_enabled" id="api_enabled" value="1" checked>
|
||||
<input type="checkbox" name="refresh_users_on_startup" id="refresh_users_on_startup" value="1" checked>
|
||||
<input type="checkbox" name="refresh_libraries_on_startup" id="refresh_libraries_on_startup" value="1" checked>
|
||||
<input type="checkbox" name="refresh_users_on_startup" id="refresh_users_on_startup" value="1"
|
||||
checked>
|
||||
<input type="checkbox" name="refresh_libraries_on_startup" id="refresh_libraries_on_startup"
|
||||
value="1" checked>
|
||||
<input type="checkbox" name="check_github" id="check_github" value="1" checked>
|
||||
<input type="checkbox" name="log_blacklist" id="log_blacklist" value="1" checked>
|
||||
<input type="checkbox" name="cache_images" id="cache_images" value="1" checked>
|
||||
<input type="checkbox" name="notify_group_recently_added_grandparent" id="notify_group_recently_added_grandparent" value="1" checked>
|
||||
<input type="checkbox" name="notify_group_recently_added_parent" id="notify_group_recently_added_parent" value="1" checked>
|
||||
<input type="checkbox" name="notify_group_recently_added_grandparent"
|
||||
id="notify_group_recently_added_grandparent" value="1" checked>
|
||||
<input type="checkbox" name="notify_group_recently_added_parent"
|
||||
id="notify_group_recently_added_parent" value="1" checked>
|
||||
<input type="checkbox" name="server_changed" id="server_changed" value="1" checked>
|
||||
<input type="checkbox" name="first_run_complete" id="first_run_complete" value="1" checked>
|
||||
<input type="text" name="home_stats_cards" id="home_stats_cards" value="first_run_wizard">
|
||||
<input type="text" name="home_library_cards" id="home_library_cards" value="first_run_wizard">
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
</form>
|
||||
<div class="wizard-success">
|
||||
<h3>Setup Complete!</h3>
|
||||
<br/>
|
||||
<p>Setup is now complete. For more configuration options please visit the Settings menu on the home page.</p>
|
||||
<p>Setup is now complete. For more configuration options please visit the Settings menu on the home
|
||||
page.</p>
|
||||
<br/>
|
||||
<i class="fa fa-refresh fa-spin"></i> Waiting <span class="countdown">5</span> seconds to ensure authentication token is registered...
|
||||
<i class="fa fa-refresh fa-spin"></i> Waiting <span class="countdown">5</span> seconds to ensure
|
||||
authentication token is registered...
|
||||
|
||||
</div>
|
||||
</div>
|
||||
@@ -258,7 +278,6 @@
|
||||
function validateAuthentication(el) {
|
||||
var http_username = $("#http_username").val();
|
||||
var http_password = $("#http_password").val();
|
||||
var valid_authentication = el.val();
|
||||
var retValue = {};
|
||||
|
||||
if (http_username === "" || http_password === "") {
|
||||
@@ -273,15 +292,15 @@
|
||||
return retValue;
|
||||
}
|
||||
|
||||
function validatePMSip(el) {
|
||||
var valid_pms_ip = el.val();
|
||||
function validateJellyfinIp(el) {
|
||||
var valid_jellyfin_ip = el.val();
|
||||
var retValue = {};
|
||||
|
||||
if (valid_pms_ip === "") {
|
||||
if (valid_jellyfin_ip === "") {
|
||||
retValue.status = false;
|
||||
retValue.msg = "Please verify your server.";
|
||||
$("#pms-verify-status").html('<i class="fa fa-exclamation-circle"></i> Please verify your server.');
|
||||
$('#pms-verify-status').fadeIn('fast').delay(2000).fadeOut('fast');
|
||||
$("#jellyfin-verify-status").html('<i class="fa fa-exclamation-circle"></i> Please verify your server.');
|
||||
$('#jellyfin-verify-status').fadeIn('fast').delay(2000).fadeOut('fast');
|
||||
} else {
|
||||
retValue.status = true;
|
||||
}
|
||||
@@ -289,15 +308,15 @@
|
||||
return retValue;
|
||||
}
|
||||
|
||||
function validatePMStoken(el) {
|
||||
var valid_pms_token = el.val();
|
||||
function validateJellyfinToken(el) {
|
||||
var valid_jellyfin_token = el.val();
|
||||
var retValue = {};
|
||||
|
||||
if (valid_pms_token === "") {
|
||||
if (valid_jellyfin_token === "") {
|
||||
retValue.status = false;
|
||||
retValue.msg = "Please authenticate.";
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Please authenticate.');
|
||||
$('#pms-token-status').fadeIn('fast').delay(2000).fadeOut('fast');
|
||||
$("#jellyfin-token-status").html('<i class="fa fa-exclamation-circle"></i> Please authenticate.');
|
||||
$('#jellyfin-token-status').fadeIn('fast').delay(2000).fadeOut('fast');
|
||||
} else {
|
||||
retValue.status = true;
|
||||
}
|
||||
@@ -325,71 +344,73 @@
|
||||
return $.isNumeric(n) && (Math.floor(n) == n) && (n >= 0)
|
||||
}
|
||||
|
||||
$(document).ready(function() {
|
||||
$(document).ready(function () {
|
||||
|
||||
$.fn.wizard.logging = false;
|
||||
var options = {
|
||||
keyboard : false,
|
||||
contentHeight : 450,
|
||||
contentWidth : 700,
|
||||
backdrop: 'static',
|
||||
buttons: {submitText: 'Finish'},
|
||||
submitUrl: "configUpdate"
|
||||
};
|
||||
var wizard = $("#setup-wizard").wizard(options);
|
||||
wizard.show();
|
||||
$.fn.wizard.logging = false;
|
||||
var options = {
|
||||
keyboard: false,
|
||||
contentHeight: 450,
|
||||
contentWidth: 700,
|
||||
backdrop: 'static',
|
||||
buttons: {submitText: 'Finish'},
|
||||
submitUrl: "configUpdate"
|
||||
};
|
||||
var wizard = $("#setup-wizard").wizard(options);
|
||||
wizard.show();
|
||||
|
||||
// Change button classes
|
||||
wizard.find('.wizard-back').addClass('btn-dark');
|
||||
wizard.on('incrementCard', function(wizard) {
|
||||
wizard.find('.wizard-next.btn-success').removeClass('btn-success').addClass('btn-bright');
|
||||
});
|
||||
wizard.on('decrementCard', function(wizard) {
|
||||
wizard.find('.wizard-next').removeClass('btn-bright').text('Next');
|
||||
});
|
||||
// Change button classes
|
||||
wizard.find('.wizard-back').addClass('btn-dark');
|
||||
wizard.on('incrementCard', function (wizard) {
|
||||
wizard.find('.wizard-next.btn-success').removeClass('btn-success').addClass('btn-bright');
|
||||
});
|
||||
wizard.on('decrementCard', function (wizard) {
|
||||
wizard.find('.wizard-next').removeClass('btn-bright').text('Next');
|
||||
});
|
||||
|
||||
wizard.on("submit", function(wizard) {
|
||||
// Probably should not success before we know, but hopefully validation is good enough.
|
||||
wizard.submitSuccess();
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: wizard.args.submitUrl,
|
||||
data: wizard.serialize(),
|
||||
dataType: "json",
|
||||
complete: function (data) {
|
||||
$(".countdown").countdown(function () { location.reload(); }, 5, "");
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
$('.checkbox-toggle').click(function () {
|
||||
var configToggle = $(this).data('id');
|
||||
if ($(this).is(':checked')) {
|
||||
$('#'+configToggle).val(1);
|
||||
} else {
|
||||
$('#'+configToggle).val(0);
|
||||
}
|
||||
});
|
||||
|
||||
var $select_pms = $('#pms_ip_selectize').selectize({
|
||||
createOnBlur: true,
|
||||
openOnFocus: true,
|
||||
maxItems: 1,
|
||||
closeAfterSelect: true,
|
||||
sortField: 'label',
|
||||
searchField: ['label', 'value'],
|
||||
inputClass: 'form-control selectize-input',
|
||||
render: {
|
||||
item: function (item, escape) {
|
||||
if (!item.label) {
|
||||
$.extend(item,
|
||||
$(this.revertSettings.$children)
|
||||
.filter('[value="' + item.value + '"]').data()
|
||||
);
|
||||
wizard.on("submit", function (wizard) {
|
||||
// Probably should not success before we know, but hopefully validation is good enough.
|
||||
wizard.submitSuccess();
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: wizard.args.submitUrl,
|
||||
data: wizard.serialize(),
|
||||
dataType: "json",
|
||||
complete: function (data) {
|
||||
$(".countdown").countdown(function () {
|
||||
location.reload();
|
||||
}, 5, "");
|
||||
}
|
||||
var label = item.label || item.value;
|
||||
var caption = item.label ? item.ip : null;
|
||||
return '<div data-identifier="' + item.clientIdentifier +
|
||||
})
|
||||
});
|
||||
|
||||
$('.checkbox-toggle').click(function () {
|
||||
var configToggle = $(this).data('id');
|
||||
if ($(this).is(':checked')) {
|
||||
$('#' + configToggle).val(1);
|
||||
} else {
|
||||
$('#' + configToggle).val(0);
|
||||
}
|
||||
});
|
||||
|
||||
var $select_jellyfin = $('#jellyfin_ip_selectize').selectize({
|
||||
createOnBlur: true,
|
||||
openOnFocus: true,
|
||||
maxItems: 1,
|
||||
closeAfterSelect: true,
|
||||
sortField: 'label',
|
||||
searchField: ['label', 'value'],
|
||||
inputClass: 'form-control selectize-input',
|
||||
render: {
|
||||
item: function (item, escape) {
|
||||
if (!item.label) {
|
||||
$.extend(item,
|
||||
$(this.revertSettings.$children)
|
||||
.filter('[value="' + item.value + '"]').data()
|
||||
);
|
||||
}
|
||||
var label = item.label || item.value;
|
||||
var caption = item.label ? item.ip : null;
|
||||
return '<div data-identifier="' + item.clientIdentifier +
|
||||
'" data-ip="' + item.ip +
|
||||
'" data-port="' + item.port +
|
||||
'" data-local="' + item.local +
|
||||
@@ -399,11 +420,11 @@ $(document).ready(function() {
|
||||
'<span class="item-text">' + escape(label) + '</span>' +
|
||||
(caption ? '<span class="item-value">' + escape(caption) + '</span>' : '') +
|
||||
'</div>';
|
||||
},
|
||||
option: function (item, escape) {
|
||||
var label = item.label || item.value;
|
||||
var caption = item.label ? item.value : null;
|
||||
return '<div data-identifier="' + item.clientIdentifier +
|
||||
},
|
||||
option: function (item, escape) {
|
||||
var label = item.label || item.value;
|
||||
var caption = item.label ? item.value : null;
|
||||
return '<div data-identifier="' + item.clientIdentifier +
|
||||
'" data-ip="' + item.ip +
|
||||
'" data-port="' + item.port +
|
||||
'" data-local="' + item.local +
|
||||
@@ -413,152 +434,131 @@ $(document).ready(function() {
|
||||
escape(label) +
|
||||
(caption ? '<span class="caption">' + escape(caption) + '</span>' : '') +
|
||||
'</div>';
|
||||
}
|
||||
},
|
||||
create: function(input) {
|
||||
return {label: '', value: input};
|
||||
},
|
||||
onInitialize: function () {
|
||||
var s = this;
|
||||
this.revertSettings.$children.each(function () {
|
||||
$.extend(s.options[this.value], $(this).data());
|
||||
});
|
||||
},
|
||||
onChange: function (item) {
|
||||
var pms_ip_selected = this.getItem(item)[0];
|
||||
var identifier = $(pms_ip_selected).data('identifier');
|
||||
var ip = $(pms_ip_selected).data('ip');
|
||||
var port = $(pms_ip_selected).data('port');
|
||||
var local = $(pms_ip_selected).data('local');
|
||||
var ssl = $(pms_ip_selected).data('ssl');
|
||||
var is_cloud = $(pms_ip_selected).data('is_cloud');
|
||||
var value = $(pms_ip_selected).data('value');
|
||||
|
||||
$("#pms_valid").val(identifier !== 'undefined' ? 'valid' : '');
|
||||
$("#pms-verify-status").html(identifier !== 'undefined' ? '<i class="fa fa-check"></i> Server found!' : '').fadeIn('fast');
|
||||
|
||||
$("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
|
||||
$('#pms_ip').val(ip !== 'undefined' ? ip : value);
|
||||
$('#pms_port').val(port !== 'undefined' ? port : 32400);
|
||||
$('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
|
||||
$('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
|
||||
$('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
|
||||
$('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
|
||||
$('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);
|
||||
|
||||
if (is_cloud === true) {
|
||||
$('#pms_port').prop('readonly', true);
|
||||
$('#pms_is_remote_checkbox').prop('disabled', true);
|
||||
$('#pms_ssl_checkbox').prop('disabled', true);
|
||||
} else {
|
||||
$('#pms_port').prop('readonly', false);
|
||||
$('#pms_is_remote_checkbox').prop('disabled', false);
|
||||
$('#pms_ssl_checkbox').prop('disabled', false);
|
||||
}
|
||||
},
|
||||
onDropdownOpen: function() {
|
||||
this.clear();
|
||||
}
|
||||
});
|
||||
var select_pms = $select_pms[0].selectize;
|
||||
|
||||
function getServerOptions(token) {
|
||||
/* Set token and returns server options */
|
||||
$.ajax({
|
||||
url: 'discover',
|
||||
data: {
|
||||
token: token
|
||||
}
|
||||
},
|
||||
success: function (result) {
|
||||
if (result) {
|
||||
var existing_ip = $('#pms_ip').val();
|
||||
var existing_port = $('#pms_port').val();
|
||||
result.forEach(function (item) {
|
||||
if (item.ip === existing_ip && item.port === existing_port) {
|
||||
select_pms.updateOption(item.value, item);
|
||||
} else {
|
||||
select_pms.addOption(item);
|
||||
create: function (input) {
|
||||
return {label: '', value: input};
|
||||
},
|
||||
onInitialize: function () {
|
||||
var s = this;
|
||||
this.revertSettings.$children.each(function () {
|
||||
$.extend(s.options[this.value], $(this).data());
|
||||
});
|
||||
},
|
||||
onChange: function (item) {
|
||||
var jellyfin_ip_selected = this.getItem(item)[0];
|
||||
var identifier = $(jellyfin_ip_selected).data('identifier');
|
||||
var ip = $(jellyfin_ip_selected).data('ip');
|
||||
var port = $(jellyfin_ip_selected).data('port');
|
||||
var local = $(jellyfin_ip_selected).data('local');
|
||||
var ssl = $(jellyfin_ip_selected).data('ssl');
|
||||
var value = $(jellyfin_ip_selected).data('value');
|
||||
|
||||
$("#jellyfin_valid").val(identifier !== 'undefined' ? 'valid' : '');
|
||||
$("#jellyfin-verify-status").html(identifier !== 'undefined' ? '<i class="fa fa-check"></i> Server found!' : '').fadeIn('fast');
|
||||
|
||||
$("#jellyfin_identifier").val(identifier !== 'undefined' ? identifier : '');
|
||||
$('#jellyfin_ip').val(ip !== 'undefined' ? ip : value);
|
||||
$('#jellyfin_port').val(port !== 'undefined' ? port : 8096);
|
||||
$('#jellyfin_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
|
||||
$('#jellyfin_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
|
||||
$('#jellyfin_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
|
||||
$('#jellyfin_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
|
||||
|
||||
|
||||
$('#jellyfin_port').prop('readonly', false);
|
||||
$('#jellyfin_is_remote_checkbox').prop('disabled', false);
|
||||
$('#jellyfin_ssl_checkbox').prop('disabled', false);
|
||||
},
|
||||
onDropdownOpen: function () {
|
||||
this.clear();
|
||||
}
|
||||
});
|
||||
var select_jellyfin = $select_jellyfin[0].selectize;
function getServerOptions(token) {
|
||||
/* Set token and return server options */
|
||||
$.ajax({
|
||||
url: 'discover',
|
||||
data: {
|
||||
token: token
|
||||
},
|
||||
success: function (result) {
|
||||
if (result) {
|
||||
var existing_ip = $('#jellyfin_ip').val();
|
||||
var existing_port = $('#jellyfin_port').val();
|
||||
result.forEach(function (item) {
|
||||
if (item.ip === existing_ip && item.port === existing_port) {
|
||||
select_jellyfin.updateOption(item.value, item);
|
||||
} else {
|
||||
select_jellyfin.addOption(item);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
}
var jellyfin_verified = false;
|
||||
var authenticated = false;
$("#verify-jellyfin-server").click(function () {
|
||||
if (!(jellyfin_verified)) {
|
||||
var jellyfin_ip = $("#jellyfin_ip").val().trim();
|
||||
var jellyfin_port = $("#jellyfin_port").val().trim();
|
||||
var jellyfin_ssl = $("#jellyfin_ssl").val();
|
||||
var jellyfin_is_remote = $("#jellyfin_is_remote").val();
|
||||
var jellyfin_user = $("#jellyfin_user").val().trim();
|
||||
var jellyfin_password = $("#jellyfin_password").val();
if ((jellyfin_ip !== '') || (jellyfin_port !== '')) {
|
||||
$("#jellyfin-verify-status").html('<i class="fa fa-refresh fa-spin"></i> Verifying server...');
|
||||
$('#jellyfin-verify-status').fadeIn('fast');
|
||||
$.ajax({
|
||||
url: 'check_login',
|
||||
data: {
|
||||
hostname: jellyfin_ip,
|
||||
port: jellyfin_port,
|
||||
ssl: jellyfin_ssl,
|
||||
remote: jellyfin_is_remote,
|
||||
user: jellyfin_user,
|
||||
password: jellyfin_password
|
||||
},
|
||||
cache: true,
|
||||
async: true,
|
||||
timeout: 5000,
|
||||
error: function (jqXHR, textStatus, errorThrown) {
|
||||
$("#jellyfin-verify-status").html('<i class="fa fa-exclamation-circle"></i> Error verifying server: ' + textStatus);
|
||||
$('#jellyfin-verify-status').fadeIn('fast');
|
||||
},
|
||||
success: function (xhr, status) {
|
||||
var result = xhr;
|
||||
var identifier = result.identifier;
|
||||
if (identifier) {
|
||||
$("#jellyfin_identifier").val(identifier);
|
||||
$("#jellyfin-verify-status").html('<i class="fa fa-check"></i> Login successfull!');
|
||||
$('#jellyfin-verify-status').fadeIn('fast');
|
||||
jellyfin_verified = true;
|
||||
$("#jellyfin_valid").val("valid");
|
||||
} else {
|
||||
$("#jellyfin-verify-status").html('<i class="fa fa-exclamation-circle"></i> This is not a Jellyfin Server!');
|
||||
$('#jellyfin-verify-status').fadeIn('fast');
|
||||
}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
$("#jellyfin-verify-status").html('<i class="fa fa-exclamation-circle"></i> Please enter both fields.');
|
||||
$('#jellyfin-verify-status').fadeIn('fast');
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
var pms_verified = false;
|
||||
var authenticated = false;
|
||||
});
$("#verify-plex-server").click(function () {
|
||||
if (!(pms_verified)) {
|
||||
var pms_ip = $("#pms_ip").val().trim();
|
||||
var pms_port = $("#pms_port").val().trim();
|
||||
var pms_identifier = $("#pms_identifier").val();
|
||||
var pms_ssl = $("#pms_ssl").val();
|
||||
var pms_is_remote = $("#pms_is_remote").val();
|
||||
if ((pms_ip !== '') || (pms_port !== '')) {
|
||||
$("#pms-verify-status").html('<i class="fa fa-refresh fa-spin"></i> Verifying server...');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
$.ajax({
|
||||
url: 'get_server_id',
|
||||
data: {
|
||||
hostname: pms_ip,
|
||||
port: pms_port,
|
||||
identifier: pms_identifier,
|
||||
ssl: pms_ssl,
|
||||
remote: pms_is_remote
|
||||
},
|
||||
cache: true,
|
||||
async: true,
|
||||
timeout: 5000,
|
||||
error: function (jqXHR, textStatus, errorThrown) {
|
||||
$("#pms-verify-status").html('<i class="fa fa-exclamation-circle"></i> Error verifying server: ' + textStatus);
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
},
|
||||
success: function(xhr, status) {
|
||||
var result = xhr;
|
||||
var identifier = result.identifier;
|
||||
if (identifier) {
|
||||
$("#pms_identifier").val(identifier);
|
||||
$("#pms-verify-status").html('<i class="fa fa-check"></i> Server found!');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
pms_verified = true;
|
||||
$("#pms_valid").val("valid");
|
||||
} else {
|
||||
$("#pms-verify-status").html('<i class="fa fa-exclamation-circle"></i> This is not a Plex Server!');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
$("#pms-verify-status").html('<i class="fa fa-exclamation-circle"></i> Please enter both fields.');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
}
|
||||
}
|
||||
$(".jellyfin-settings").change(function () {
|
||||
jellyfin_verified = false;
|
||||
$("#jellyfin_valid").val("");
|
||||
$("#jellyfin-verify-status").html("");
|
||||
});
|
||||
});
$( ".pms-settings" ).change(function() {
|
||||
pms_verified = false;
|
||||
$("#pms_valid").val("");
|
||||
$("#pms-verify-status").html("");
|
||||
});
function OAuthPreFunction() {
|
||||
$("#pms_token").val('');
|
||||
$("#pms-token-status").html('<i class="fa fa-refresh fa-spin"></i> Waiting for authentication...').fadeIn('fast');
|
||||
}
|
||||
function OAuthSuccessCallback(authToken) {
|
||||
$("#pms_token").val(authToken);
|
||||
$("#pms-token-status").html('<i class="fa fa-check"></i> Authentication successful.').fadeIn('fast');
|
||||
authenticated = true;
|
||||
getServerOptions(authToken);
|
||||
}
|
||||
function OAuthErrorCallback() {
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Error communicating with Plex.tv.').fadeIn('fast');
|
||||
}
$('#sign-in-plex').click(function() {
|
||||
PlexOAuth(OAuthSuccessCallback, OAuthErrorCallback, OAuthPreFunction);
|
||||
});
|
||||
});
|
||||
</script>
</body>
@@ -1,11 +1,11 @@
|
||||
% if data:
|
||||
<%
|
||||
import plexpy
|
||||
from plexpy.helpers import grouper, get_img_service
|
||||
import jellypy
|
||||
from jellypy.helpers import grouper, get_img_service
|
||||
|
||||
recently_added = data['recently_added']
|
||||
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
|
||||
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'newsletter/'
|
||||
elif preview:
|
||||
base_url = 'newsletter/'
|
||||
else:
|
||||
@@ -13,7 +13,7 @@
|
||||
|
||||
service = get_img_service(include_self=True)
|
||||
if service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url_image = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'image/'
|
||||
base_url_image = jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'image/'
|
||||
elif preview and service and service != 'self-hosted':
|
||||
base_url_image = 'image/'
|
||||
else:
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
% if data:
|
||||
<%
|
||||
import plexpy
|
||||
from plexpy.helpers import grouper, get_img_service
|
||||
import jellypy
|
||||
from jellypy.helpers import grouper, get_img_service
|
||||
|
||||
recently_added = data['recently_added']
|
||||
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
|
||||
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and jellypy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'newsletter/'
|
||||
elif preview:
|
||||
base_url = 'newsletter/'
|
||||
else:
|
||||
base_url = ''
|
||||
|
||||
service = get_img_service(include_self=True)
|
||||
if service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url_image = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'image/'
|
||||
if service == 'self-hosted' and jellypy.CONFIG.HTTP_BASE_URL:
|
||||
base_url_image = jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'image/'
|
||||
elif preview and service and service != 'self-hosted':
|
||||
base_url_image = 'image/'
|
||||
else:
|
||||
|
||||
@@ -13,73 +13,45 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import range
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import future.moves.queue as queue
|
||||
import queue
|
||||
import sqlite3
|
||||
import sys
|
||||
import subprocess
|
||||
import sys
|
||||
import threading
|
||||
import uuid
|
||||
|
||||
# Some cut down versions of Python may not include this module and it's not critical for us
|
||||
try:
|
||||
import webbrowser
|
||||
|
||||
no_browser = False
|
||||
except ImportError:
|
||||
no_browser = True
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.interval import IntervalTrigger
|
||||
from UniversalAnalytics import Tracker
|
||||
import pytz
|
||||
|
||||
PYTHON2 = sys.version_info[0] == 2
|
||||
|
||||
if PYTHON2:
|
||||
import activity_handler
|
||||
import activity_pinger
|
||||
import common
|
||||
import database
|
||||
import datafactory
|
||||
import exporter
|
||||
import libraries
|
||||
import logger
|
||||
import mobile_app
|
||||
import newsletters
|
||||
import newsletter_handler
|
||||
import notification_handler
|
||||
import notifiers
|
||||
import plextv
|
||||
import users
|
||||
import versioncheck
|
||||
import web_socket
|
||||
import webstart
|
||||
import config
|
||||
else:
|
||||
from plexpy import activity_handler
|
||||
from plexpy import activity_pinger
|
||||
from plexpy import common
|
||||
from plexpy import database
|
||||
from plexpy import datafactory
|
||||
from plexpy import exporter
|
||||
from plexpy import libraries
|
||||
from plexpy import logger
|
||||
from plexpy import mobile_app
|
||||
from plexpy import newsletters
|
||||
from plexpy import newsletter_handler
|
||||
from plexpy import notification_handler
|
||||
from plexpy import notifiers
|
||||
from plexpy import plextv
|
||||
from plexpy import users
|
||||
from plexpy import versioncheck
|
||||
from plexpy import web_socket
|
||||
from plexpy import webstart
|
||||
from plexpy import config
|
||||
|
||||
from jellypy import activity_handler
|
||||
from jellypy import activity_pinger
|
||||
from jellypy import common
|
||||
from jellypy import database
|
||||
from jellypy import datafactory
|
||||
from jellypy import exporter
|
||||
from jellypy import libraries
|
||||
from jellypy import logger
|
||||
from jellypy import mobile_app
|
||||
from jellypy import newsletters
|
||||
from jellypy import newsletter_handler
|
||||
from jellypy import notification_handler
|
||||
from jellypy import notifiers
|
||||
from jellypy import users
|
||||
from jellypy import versioncheck
|
||||
from jellypy import web_socket
|
||||
from jellypy import webstart
|
||||
from jellypy import config
|
||||
|
||||
PROG_DIR = None
|
||||
FULL_PATH = None
|
||||
@@ -98,6 +70,8 @@ CREATEPID = False
|
||||
PIDFILE = None
|
||||
NOFORK = False
|
||||
DOCKER = False
|
||||
SNAP = False
|
||||
SNAP_MIGRATE = False
|
||||
FROZEN = False
|
||||
|
||||
SCHED = None
|
||||
@@ -172,6 +146,18 @@ def initialize(config_file):
|
||||
if _INITIALIZED:
|
||||
return False
|
||||
|
||||
if SNAP_MIGRATE:
|
||||
snap_common = os.environ['SNAP_COMMON']
|
||||
old_data_dir = os.path.join(snap_common, 'Tautulli')
|
||||
CONFIG.HTTPS_CERT = CONFIG.HTTPS_CERT.replace(old_data_dir, DATA_DIR)
|
||||
CONFIG.HTTPS_CERT_CHAIN = CONFIG.HTTPS_CERT_CHAIN.replace(old_data_dir, DATA_DIR)
|
||||
CONFIG.HTTPS_KEY = CONFIG.HTTPS_KEY.replace(old_data_dir, DATA_DIR)
|
||||
CONFIG.LOG_DIR = CONFIG.LOG_DIR.replace(old_data_dir, DATA_DIR)
|
||||
CONFIG.BACKUP_DIR = CONFIG.BACKUP_DIR.replace(old_data_dir, DATA_DIR)
|
||||
CONFIG.CACHE_DIR = CONFIG.CACHE_DIR.replace(old_data_dir, DATA_DIR)
|
||||
CONFIG.EXPORT_DIR = CONFIG.EXPORT_DIR.replace(old_data_dir, DATA_DIR)
|
||||
CONFIG.NEWSLETTER_DIR = CONFIG.NEWSLETTER_DIR.replace(old_data_dir, DATA_DIR)
|
||||
|
||||
if CONFIG.HTTP_PORT < 21 or CONFIG.HTTP_PORT > 65535:
|
||||
logger.warn("HTTP_PORT out of bounds: 21 < %s < 65535", CONFIG.HTTP_PORT)
|
||||
CONFIG.HTTP_PORT = 8181
|
||||
@@ -194,6 +180,8 @@ def initialize(config_file):
|
||||
|
||||
if DOCKER:
|
||||
build = '[Docker] '
|
||||
elif SNAP:
|
||||
build = '[Snap] '
|
||||
elif FROZEN:
|
||||
build = '[Bundle] '
|
||||
else:
|
||||
@@ -251,9 +239,9 @@ def initialize(config_file):
|
||||
mobile_app.blacklist_logger()
|
||||
|
||||
# Check if Tautulli has a uuid
|
||||
if CONFIG.PMS_UUID == '' or not CONFIG.PMS_UUID:
|
||||
if CONFIG.JELLYFIN_UUID == '' or not CONFIG.JELLYFIN_UUID:
|
||||
logger.debug("Generating UUID...")
|
||||
CONFIG.PMS_UUID = generate_uuid()
|
||||
CONFIG.JELLYFIN_UUID = generate_uuid()
|
||||
CONFIG.write()
|
||||
|
||||
# Check if Tautulli has an API key
|
||||
@@ -422,7 +410,7 @@ def initialize_scheduler():
|
||||
|
||||
# Update check
|
||||
github_minutes = CONFIG.CHECK_GITHUB_INTERVAL if CONFIG.CHECK_GITHUB_INTERVAL and CONFIG.CHECK_GITHUB else 0
|
||||
pms_update_check_hours = CONFIG.PMS_UPDATE_CHECK_INTERVAL if 1 <= CONFIG.PMS_UPDATE_CHECK_INTERVAL else 24
|
||||
JELLYFIN_update_check_hours = CONFIG.JELLYFIN_UPDATE_CHECK_INTERVAL if 1 <= CONFIG.JELLYFIN_UPDATE_CHECK_INTERVAL else 24
|
||||
|
||||
schedule_job(versioncheck.check_update, 'Check GitHub for updates',
|
||||
hours=0, minutes=github_minutes, seconds=0, args=(True, True))
|
||||
@@ -436,12 +424,13 @@ def initialize_scheduler():
|
||||
schedule_job(config.make_backup, 'Backup Tautulli config',
|
||||
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
|
||||
|
||||
if WS_CONNECTED and CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
|
||||
schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
||||
hours=12 * (not bool(CONFIG.PMS_URL_MANUAL)), minutes=0, seconds=0)
|
||||
if WS_CONNECTED and CONFIG.JELLYFIN_IP and CONFIG.JELLYFIN_TOKEN:
|
||||
# TODO: Jellyfin
|
||||
# schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
||||
# hours=12 * (not bool(CONFIG.JELLYFIN_URL_MANUAL)), minutes=0, seconds=0)
|
||||
|
||||
schedule_job(activity_pinger.check_server_updates, 'Check for Plex updates',
|
||||
hours=pms_update_check_hours * bool(CONFIG.MONITOR_PMS_UPDATES), minutes=0, seconds=0)
|
||||
hours=JELLYFIN_update_check_hours * bool(CONFIG.MONITOR_JELLYFIN_UPDATES), minutes=0, seconds=0)
|
||||
|
||||
# Refresh the users list and libraries list
|
||||
user_hours = CONFIG.REFRESH_USERS_INTERVAL if 1 <= CONFIG.REFRESH_USERS_INTERVAL <= 24 else 12
|
||||
@@ -459,8 +448,9 @@ def initialize_scheduler():
|
||||
|
||||
else:
|
||||
# Cancel all jobs
|
||||
schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
# TODO: Jellyfin
|
||||
# schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
||||
# hours=0, minutes=0, seconds=0)
|
||||
|
||||
schedule_job(activity_pinger.check_server_updates, 'Check for Plex updates',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
@@ -486,7 +476,7 @@ def initialize_scheduler():
|
||||
|
||||
def schedule_job(func, name, hours=0, minutes=0, seconds=0, args=None):
|
||||
"""
|
||||
Start scheduled job if starting or restarting plexpy.
|
||||
Start scheduled job if starting or restarting jellypy.
|
||||
Reschedule job if Interval Settings have changed.
|
||||
Remove job if Interval Settings changed to 0
|
||||
|
||||
@@ -498,12 +488,16 @@ def schedule_job(func, name, hours=0, minutes=0, seconds=0, args=None):
|
||||
SCHED.remove_job(name)
|
||||
logger.info("Removed background task: %s", name)
|
||||
elif job.trigger.interval != datetime.timedelta(hours=hours, minutes=minutes):
|
||||
SCHED.reschedule_job(name, trigger=IntervalTrigger(
|
||||
hours=hours, minutes=minutes, seconds=seconds, timezone=pytz.UTC), args=args)
|
||||
SCHED.reschedule_job(
|
||||
name, trigger=IntervalTrigger(
|
||||
hours=hours, minutes=minutes, seconds=seconds, timezone=pytz.UTC),
|
||||
args=args)
|
||||
logger.info("Re-scheduled background task: %s", name)
|
||||
elif hours > 0 or minutes > 0 or seconds > 0:
|
||||
SCHED.add_job(func, id=name, trigger=IntervalTrigger(
|
||||
hours=hours, minutes=minutes, seconds=seconds, timezone=pytz.UTC), args=args)
|
||||
SCHED.add_job(
|
||||
func, id=name, trigger=IntervalTrigger(
|
||||
hours=hours, minutes=minutes, seconds=seconds, timezone=pytz.UTC),
|
||||
args=args, misfire_grace_time=None)
|
||||
logger.info("Scheduled background task: %s", name)
|
||||
|
||||
|
||||
@@ -533,37 +527,26 @@ def start():
|
||||
# Cancel processing exports
|
||||
exporter.cancel_exports()
|
||||
|
||||
if CONFIG.SYSTEM_ANALYTICS:
|
||||
global TRACKER
|
||||
TRACKER = initialize_tracker()
|
||||
|
||||
# Send system analytics events
|
||||
if not CONFIG.FIRST_RUN_COMPLETE:
|
||||
analytics_event(category='system', action='install')
|
||||
|
||||
elif _UPDATE:
|
||||
analytics_event(category='system', action='update')
|
||||
|
||||
analytics_event(category='system', action='start')
|
||||
|
||||
_STARTED = True
|
||||
|
||||
|
||||
def startup_refresh():
|
||||
# Get the real PMS urls for SSL and remote access
|
||||
if CONFIG.PMS_TOKEN and CONFIG.PMS_IP and CONFIG.PMS_PORT:
|
||||
plextv.get_server_resources()
|
||||
# Get the real JELLYFIN urls for SSL and remote access
|
||||
if CONFIG.JELLYFIN_TOKEN and CONFIG.JELLYFIN_IP and CONFIG.JELLYFIN_PORT:
|
||||
pass
|
||||
# TODO: Jellyfin
|
||||
# plextv.get_server_resources()
|
||||
|
||||
# Connect server after server resource is refreshed
|
||||
if CONFIG.FIRST_RUN_COMPLETE:
|
||||
activity_pinger.connect_server(log=True, startup=True)
|
||||
|
||||
# Refresh the users list on startup
|
||||
if CONFIG.PMS_TOKEN and CONFIG.REFRESH_USERS_ON_STARTUP:
|
||||
if CONFIG.JELLYFIN_TOKEN and CONFIG.REFRESH_USERS_ON_STARTUP:
|
||||
users.refresh_users()
|
||||
|
||||
# Refresh the libraries list on startup
|
||||
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
|
||||
if CONFIG.JELLYFIN_IP and CONFIG.JELLYFIN_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
|
||||
libraries.refresh_libraries()
|
||||
|
||||
|
||||
@@ -701,19 +684,19 @@ def dbcheck():
|
||||
'on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, '
|
||||
'on_error INTEGER DEFAULT 0, on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, '
|
||||
'on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, '
|
||||
'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, '
|
||||
'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_JELLYFINupdate INTEGER DEFAULT 0, '
|
||||
'on_concurrent INTEGER DEFAULT 0, on_newdevice INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, '
|
||||
'on_plexpydbcorrupt INTEGER DEFAULT 0, '
|
||||
'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, '
|
||||
'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, '
|
||||
'on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, '
|
||||
'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, '
|
||||
'on_extup_subject TEXT, on_intup_subject TEXT, on_JELLYFINupdate_subject TEXT, '
|
||||
'on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, '
|
||||
'on_plexpydbcorrupt_subject TEXT, '
|
||||
'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, '
|
||||
'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, '
|
||||
'on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, '
|
||||
'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, '
|
||||
'on_extup_body TEXT, on_intup_body TEXT, on_JELLYFINupdate_body TEXT, '
|
||||
'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, '
|
||||
'on_plexpydbcorrupt_body TEXT, '
|
||||
'custom_conditions TEXT, custom_conditions_logic TEXT)'
|
||||
@@ -748,7 +731,7 @@ def dbcheck():
|
||||
# recently_added table :: This table keeps record of recently added items
|
||||
c_db.execute(
|
||||
'CREATE TABLE IF NOT EXISTS recently_added (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||
'added_at INTEGER, pms_identifier TEXT, section_id INTEGER, '
|
||||
'added_at INTEGER, JELLYFIN_identifier TEXT, section_id INTEGER, '
|
||||
'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, '
|
||||
'media_info TEXT)'
|
||||
)
|
||||
@@ -2275,7 +2258,12 @@ def upgrade():
|
||||
return
|
||||
|
||||
|
||||
def shutdown(restart=False, update=False, checkout=False, reset=False):
|
||||
def shutdown(restart=False, update=False, checkout=False, reset=False,
|
||||
_shutdown=True):
|
||||
if FROZEN and common.PLATFORM == 'Windows' and update:
|
||||
restart = False
|
||||
_shutdown = False
|
||||
|
||||
webstart.stop()
|
||||
|
||||
# Shutdown the websocket connection
|
||||
@@ -2348,61 +2336,21 @@ def shutdown(restart=False, update=False, checkout=False, reset=False):
|
||||
else:
|
||||
logger.info("Tautulli is shutting down...")
|
||||
|
||||
logger.shutdown()
|
||||
if _shutdown:
|
||||
logger.shutdown()
|
||||
|
||||
if WIN_SYS_TRAY_ICON:
|
||||
WIN_SYS_TRAY_ICON.shutdown()
|
||||
elif MAC_SYS_TRAY_ICON:
|
||||
MAC_SYS_TRAY_ICON.shutdown()
|
||||
if WIN_SYS_TRAY_ICON:
|
||||
WIN_SYS_TRAY_ICON.shutdown()
|
||||
elif MAC_SYS_TRAY_ICON:
|
||||
MAC_SYS_TRAY_ICON.shutdown()
|
||||
|
||||
os._exit(0)
|
||||
os._exit(0)
|
||||
|
||||
|
||||
def generate_uuid():
|
||||
return uuid.uuid4().hex
|
||||
|
||||
|
||||
def initialize_tracker():
|
||||
data = {
|
||||
'dataSource': 'server',
|
||||
'appName': common.PRODUCT,
|
||||
'appVersion': common.RELEASE,
|
||||
'appId': INSTALL_TYPE,
|
||||
'appInstallerId': CONFIG.GIT_BRANCH,
|
||||
'dimension1': '{} {}'.format(common.PLATFORM, common.PLATFORM_RELEASE), # App Platform
|
||||
'dimension2': common.PLATFORM_LINUX_DISTRO, # Linux Distro
|
||||
'dimension3': common.PYTHON_VERSION,
|
||||
'userLanguage': SYS_LANGUAGE,
|
||||
'documentEncoding': SYS_ENCODING,
|
||||
'noninteractive': True
|
||||
}
|
||||
|
||||
tracker = Tracker.create('UA-111522699-2', client_id=CONFIG.PMS_UUID, hash_client_id=True,
|
||||
user_agent=common.USER_AGENT)
|
||||
tracker.set(data)
|
||||
|
||||
return tracker
|
||||
|
||||
|
||||
def analytics_event(category, action, label=None, value=None, **kwargs):
|
||||
data = {'category': category, 'action': action}
|
||||
|
||||
if label is not None:
|
||||
data['label'] = label
|
||||
|
||||
if value is not None:
|
||||
data['value'] = value
|
||||
|
||||
if kwargs:
|
||||
data.update(kwargs)
|
||||
|
||||
if TRACKER:
|
||||
try:
|
||||
TRACKER.send('event', data)
|
||||
except Exception as e:
|
||||
logger.warn("Failed to send analytics event for category '%s', action '%s': %s" % (category, action, e))
|
||||
|
||||
|
||||
def check_folder_writable(folder, fallback, name):
|
||||
if not folder:
|
||||
folder = fallback
|
||||
@@ -2435,7 +2383,7 @@ def get_tautulli_info():
|
||||
'tautulli_version': common.RELEASE,
|
||||
'tautulli_branch': CONFIG.GIT_BRANCH,
|
||||
'tautulli_commit': CURRENT_VERSION,
|
||||
'tautulli_platform':common.PLATFORM,
|
||||
'tautulli_platform': common.PLATFORM,
|
||||
'tautulli_platform_release': common.PLATFORM_RELEASE,
|
||||
'tautulli_platform_version': common.PLATFORM_VERSION,
|
||||
'tautulli_platform_linux_distro': common.PLATFORM_LINUX_DISTRO,
|
||||
@@ -13,33 +13,20 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import time
|
||||
|
||||
from apscheduler.triggers.date import DateTrigger
|
||||
import pytz
|
||||
from apscheduler.triggers.date import DateTrigger
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import activity_processor
|
||||
import datafactory
|
||||
import helpers
|
||||
import logger
|
||||
import notification_handler
|
||||
import pmsconnect
|
||||
else:
|
||||
from plexpy import activity_processor
|
||||
from plexpy import datafactory
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
from plexpy import notification_handler
|
||||
from plexpy import pmsconnect
|
||||
import jellypy
|
||||
|
||||
from jellypy import activity_processor
|
||||
from jellypy import datafactory
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
from jellypy import notification_handler
|
||||
|
||||
ACTIVITY_SCHED = None
|
||||
|
||||
@@ -72,27 +59,29 @@ class ActivityHandler(object):
|
||||
|
||||
def get_metadata(self, skip_cache=False):
|
||||
cache_key = None if skip_cache else self.get_session_key()
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key)
|
||||
|
||||
if metadata:
|
||||
return metadata
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key)
|
||||
#
|
||||
# if metadata:
|
||||
# return metadata
|
||||
|
||||
return None
|
||||
|
||||
def get_live_session(self, skip_cache=False):
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
session_list = pms_connect.get_current_activity(skip_cache=skip_cache)
|
||||
|
||||
if session_list:
|
||||
for session in session_list['sessions']:
|
||||
if int(session['session_key']) == self.get_session_key():
|
||||
# Live sessions don't have rating keys in sessions
|
||||
# Get it from the websocket data
|
||||
if not session['rating_key']:
|
||||
session['rating_key'] = self.get_rating_key()
|
||||
session['rating_key_websocket'] = self.get_rating_key()
|
||||
return session
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# session_list = pms_connect.get_current_activity(skip_cache=skip_cache)
|
||||
#
|
||||
# if session_list:
|
||||
# for session in session_list['sessions']:
|
||||
# if int(session['session_key']) == self.get_session_key():
|
||||
# # Live sessions don't have rating keys in sessions
|
||||
# # Get it from the websocket data
|
||||
# if not session['rating_key']:
|
||||
# session['rating_key'] = self.get_rating_key()
|
||||
# session['rating_key_websocket'] = self.get_rating_key()
|
||||
# return session
|
||||
|
||||
return None
|
||||
|
||||
@@ -134,7 +123,7 @@ class ActivityHandler(object):
|
||||
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
|
||||
|
||||
# Send notification after updating db
|
||||
#plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
|
||||
# jellypy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
|
||||
|
||||
# Write the new session to our temp session table
|
||||
self.update_db_session(session=session, notify=True)
|
||||
@@ -162,7 +151,7 @@ class ActivityHandler(object):
|
||||
# Retrieve the session data from our temp table
|
||||
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'})
|
||||
|
||||
# Write it to the history table
|
||||
monitor_proc = activity_processor.ActivityProcessor()
|
||||
@@ -198,7 +187,7 @@ class ActivityHandler(object):
|
||||
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||
|
||||
if not still_paused:
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'})
|
||||
|
||||
def on_resume(self):
|
||||
if self.is_valid_session():
|
||||
@@ -214,11 +203,12 @@ class ActivityHandler(object):
|
||||
# Retrieve the session data from our temp table
|
||||
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_resume'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_resume'})
|
||||
|
||||
def on_change(self):
|
||||
if self.is_valid_session():
|
||||
logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
|
||||
logger.debug(
|
||||
"Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
|
||||
|
||||
# Update the session state and viewOffset
|
||||
self.update_db_session()
|
||||
@@ -227,7 +217,7 @@ class ActivityHandler(object):
|
||||
ap = activity_processor.ActivityProcessor()
|
||||
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_change'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_change'})
|
||||
|
||||
def on_buffer(self):
|
||||
if self.is_valid_session():
|
||||
@@ -255,14 +245,14 @@ class ActivityHandler(object):
|
||||
(self.get_session_key(), buffer_last_triggered))
|
||||
time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered)
|
||||
|
||||
if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
|
||||
time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
|
||||
if current_buffer_count >= jellypy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
|
||||
time_since_last_trigger >= jellypy.CONFIG.BUFFER_WAIT:
|
||||
ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
|
||||
|
||||
# Retrieve the session data from our temp table
|
||||
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'})
|
||||
|
||||
def on_error(self):
|
||||
if self.is_valid_session():
|
||||
@@ -275,7 +265,7 @@ class ActivityHandler(object):
|
||||
ap = activity_processor.ActivityProcessor()
|
||||
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_error'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_error'})
|
||||
|
||||
# This function receives events from our websocket connection
|
||||
def process(self):
|
||||
@@ -316,8 +306,8 @@ class ActivityHandler(object):
|
||||
|
||||
# Make sure the same item is being played
|
||||
if (this_rating_key == last_rating_key
|
||||
or this_rating_key == last_rating_key_websocket
|
||||
or this_live_uuid == last_live_uuid) \
|
||||
or this_rating_key == last_rating_key_websocket
|
||||
or this_live_uuid == last_live_uuid) \
|
||||
and this_guid == last_guid:
|
||||
# Update the session state and viewOffset
|
||||
if this_state == 'playing':
|
||||
@@ -358,10 +348,10 @@ class ActivityHandler(object):
|
||||
# The only purpose of this is for notifications
|
||||
if not db_session['watched'] and this_state != 'buffering':
|
||||
progress_percent = helpers.get_percent(self.timeline['viewOffset'], db_session['duration'])
|
||||
watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
|
||||
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
|
||||
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
|
||||
'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
|
||||
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
|
||||
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
|
||||
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT,
|
||||
'clip': jellypy.CONFIG.TV_WATCHED_PERCENT
|
||||
}
|
||||
|
||||
if progress_percent >= watched_percent.get(db_session['media_type'], 101):
|
||||
@@ -373,9 +363,9 @@ class ActivityHandler(object):
|
||||
session=db_session, notify_action='on_watched', notified=False)
|
||||
|
||||
for d in watched_notifiers:
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(),
|
||||
'notifier_id': d['notifier_id'],
|
||||
'notify_action': 'on_watched'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(),
|
||||
'notifier_id': d['notifier_id'],
|
||||
'notify_action': 'on_watched'})
|
||||
|
||||
else:
|
||||
# We don't have this session in our table yet, start a new one.
|
||||
@@ -401,11 +391,12 @@ class TimelineHandler(object):
|
||||
return None
|
||||
|
||||
def get_metadata(self):
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
metadata = pms_connect.get_metadata_details(self.get_rating_key())
|
||||
|
||||
if metadata:
|
||||
return metadata
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# metadata = pms_connect.get_metadata_details(self.get_rating_key())
|
||||
#
|
||||
# if metadata:
|
||||
# return metadata
|
||||
|
||||
return None
|
||||
|
||||
@@ -439,9 +430,9 @@ class TimelineHandler(object):
|
||||
|
||||
# Add a new media item to the recently added queue
|
||||
if media_type and section_id > 0 and \
|
||||
((state_type == 0 and metadata_state == 'created')): # or \
|
||||
#(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_UPGRADE and state_type in (1, 5) and \
|
||||
#media_state == 'analyzing' and queue_size is None)):
|
||||
((state_type == 0 and metadata_state == 'created')): # or \
|
||||
# (jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_UPGRADE and state_type in (1, 5) and \
|
||||
# media_state == 'analyzing' and queue_size is None)):
|
||||
|
||||
if media_type in ('episode', 'track'):
|
||||
metadata = self.get_metadata()
|
||||
@@ -460,14 +451,15 @@ class TimelineHandler(object):
|
||||
|
||||
RECENTLY_ADDED_QUEUE[rating_key] = set([grandparent_rating_key])
|
||||
|
||||
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) added to recently added queue."
|
||||
% (title, str(rating_key), str(grandparent_rating_key)))
|
||||
logger.debug(
|
||||
"Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) added to recently added queue."
|
||||
% (title, str(rating_key), str(grandparent_rating_key)))
|
||||
|
||||
# Schedule a callback to clear the recently added queue
|
||||
schedule_callback('rating_key-{}'.format(grandparent_rating_key),
|
||||
func=clear_recently_added_queue,
|
||||
args=[grandparent_rating_key, grandparent_title],
|
||||
seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
|
||||
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
|
||||
|
||||
elif media_type in ('season', 'album'):
|
||||
metadata = self.get_metadata()
|
||||
@@ -479,14 +471,15 @@ class TimelineHandler(object):
|
||||
parent_set.add(rating_key)
|
||||
RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set
|
||||
|
||||
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) added to recently added queue."
|
||||
% (title, str(rating_key), str(parent_rating_key)))
|
||||
logger.debug(
|
||||
"Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) added to recently added queue."
|
||||
% (title, str(rating_key), str(parent_rating_key)))
|
||||
|
||||
# Schedule a callback to clear the recently added queue
|
||||
schedule_callback('rating_key-{}'.format(parent_rating_key),
|
||||
func=clear_recently_added_queue,
|
||||
args=[parent_rating_key, parent_title],
|
||||
seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
|
||||
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
|
||||
|
||||
else:
|
||||
queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set())
|
||||
@@ -499,12 +492,12 @@ class TimelineHandler(object):
|
||||
schedule_callback('rating_key-{}'.format(rating_key),
|
||||
func=clear_recently_added_queue,
|
||||
args=[rating_key, title],
|
||||
seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
|
||||
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
|
||||
|
||||
# A movie, show, or artist is done processing
|
||||
elif media_type in ('movie', 'show', 'artist') and section_id > 0 and \
|
||||
state_type == 5 and metadata_state is None and queue_size is None and \
|
||||
rating_key in RECENTLY_ADDED_QUEUE:
|
||||
state_type == 5 and metadata_state is None and queue_size is None and \
|
||||
rating_key in RECENTLY_ADDED_QUEUE:
|
||||
|
||||
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) done processing metadata."
|
||||
% (title, str(rating_key)))
|
||||
@@ -531,15 +524,17 @@ class ReachabilityHandler(object):
|
||||
return False
|
||||
|
||||
def remote_access_enabled(self):
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
pref = pms_connect.get_server_pref(pref='PublishServerOnPlexOnlineKey')
|
||||
return helpers.bool_true(pref)
|
||||
return False
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# pref = pms_connect.get_server_pref(pref='PublishServerOnPlexOnlineKey')
|
||||
# return helpers.bool_true(pref)
|
||||
|
||||
def on_down(self, server_response):
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
|
||||
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
|
||||
|
||||
def on_up(self, server_response):
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
|
||||
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
|
||||
|
||||
def process(self):
|
||||
# Check if remote access is enabled
|
||||
@@ -547,42 +542,43 @@ class ReachabilityHandler(object):
|
||||
return
|
||||
|
||||
# Do nothing if remote access is still up and hasn't changed
|
||||
if self.is_reachable() and plexpy.PLEX_REMOTE_ACCESS_UP:
|
||||
if self.is_reachable() and jellypy.PLEX_REMOTE_ACCESS_UP:
|
||||
return
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
server_response = pms_connect.get_server_response()
|
||||
|
||||
if server_response:
|
||||
# Waiting for port mapping
|
||||
if server_response['mapping_state'] == 'waiting':
|
||||
logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.")
|
||||
|
||||
elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
|
||||
logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason'])
|
||||
logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.")
|
||||
|
||||
plexpy.PLEX_REMOTE_ACCESS_UP = False
|
||||
|
||||
if not ACTIVITY_SCHED.get_job('on_extdown'):
|
||||
logger.debug("Tautulli ReachabilityHandler :: Schedule remote access down callback in %d seconds.",
|
||||
plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
|
||||
schedule_callback('on_extdown', func=self.on_down, args=[server_response],
|
||||
seconds=plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
|
||||
|
||||
elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
|
||||
logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.")
|
||||
|
||||
plexpy.PLEX_REMOTE_ACCESS_UP = True
|
||||
|
||||
if ACTIVITY_SCHED.get_job('on_extdown'):
|
||||
logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.")
|
||||
schedule_callback('on_extdown', remove_job=True)
|
||||
else:
|
||||
self.on_up(server_response)
|
||||
|
||||
elif plexpy.PLEX_REMOTE_ACCESS_UP is None:
|
||||
plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# server_response = pms_connect.get_server_response()
|
||||
#
|
||||
# if server_response:
|
||||
# # Waiting for port mapping
|
||||
# if server_response['mapping_state'] == 'waiting':
|
||||
# logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.")
|
||||
#
|
||||
# elif jellypy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
|
||||
# logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason'])
|
||||
# logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.")
|
||||
#
|
||||
# jellypy.PLEX_REMOTE_ACCESS_UP = False
|
||||
#
|
||||
# if not ACTIVITY_SCHED.get_job('on_extdown'):
|
||||
# logger.debug("Tautulli ReachabilityHandler :: Schedule remote access down callback in %d seconds.",
|
||||
# jellypy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
|
||||
# schedule_callback('on_extdown', func=self.on_down, args=[server_response],
|
||||
# seconds=jellypy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
|
||||
#
|
||||
# elif jellypy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
|
||||
# logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.")
|
||||
#
|
||||
# jellypy.PLEX_REMOTE_ACCESS_UP = True
|
||||
#
|
||||
# if ACTIVITY_SCHED.get_job('on_extdown'):
|
||||
# logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.")
|
||||
# schedule_callback('on_extdown', remove_job=True)
|
||||
# else:
|
||||
# self.on_up(server_response)
|
||||
#
|
||||
# elif jellypy.PLEX_REMOTE_ACCESS_UP is None:
|
||||
# jellypy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
|
||||
|
||||
|
||||
def del_keys(key):
|
||||
@@ -606,7 +602,8 @@ def schedule_callback(id, func=None, remove_job=False, args=None, **kwargs):
|
||||
ACTIVITY_SCHED.add_job(
|
||||
func, args=args, id=id, trigger=DateTrigger(
|
||||
run_date=datetime.datetime.now(pytz.UTC) + datetime.timedelta(**kwargs),
|
||||
timezone=pytz.UTC))
|
||||
timezone=pytz.UTC),
|
||||
misfire_grace_time=None)
|
||||
|
||||
|
||||
def force_stop_stream(session_key, title, user):
|
||||
@@ -617,18 +614,20 @@ def force_stop_stream(session_key, title, user):
|
||||
|
||||
if row_id:
|
||||
# If session is written to the database successfully, remove the session from the session table
|
||||
logger.info("Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
|
||||
% (session['session_key'], session['rating_key']))
|
||||
logger.info(
|
||||
"Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
|
||||
% (session['session_key'], session['rating_key']))
|
||||
ap.delete_session(row_id=row_id)
|
||||
delete_metadata_cache(session_key)
|
||||
|
||||
else:
|
||||
session['write_attempts'] += 1
|
||||
|
||||
if session['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
|
||||
logger.warn("Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
|
||||
"Will try again in 30 seconds. Write attempt %s."
|
||||
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
|
||||
if session['write_attempts'] < jellypy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
|
||||
logger.warn(
|
||||
"Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
|
||||
"Will try again in 30 seconds. Write attempt %s."
|
||||
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
|
||||
ap.increment_write_attempts(session_key=session_key)
|
||||
|
||||
# Reschedule for 30 seconds later
|
||||
@@ -636,11 +635,13 @@ def force_stop_stream(session_key, title, user):
|
||||
args=[session_key, session['full_title'], session['user']], seconds=30)
|
||||
|
||||
else:
|
||||
logger.warn("Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
|
||||
"Removing session from the database. Write attempt %s."
|
||||
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
|
||||
logger.info("Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
|
||||
% (session['session_key'], session['rating_key']))
|
||||
logger.warn(
|
||||
"Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
|
||||
"Removing session from the database. Write attempt %s."
|
||||
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
|
||||
logger.info(
|
||||
"Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
|
||||
% (session['session_key'], session['rating_key']))
|
||||
ap.delete_session(session_key=session_key)
|
||||
delete_metadata_cache(session_key)
|
||||
|
||||
@@ -648,14 +649,14 @@ def force_stop_stream(session_key, title, user):
|
||||
def clear_recently_added_queue(rating_key, title):
|
||||
child_keys = RECENTLY_ADDED_QUEUE[rating_key]
|
||||
|
||||
if plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT and len(child_keys) > 1:
|
||||
if jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT and len(child_keys) > 1:
|
||||
on_created(rating_key, child_keys=child_keys)
|
||||
|
||||
elif child_keys:
|
||||
for child_key in child_keys:
|
||||
grandchild_keys = RECENTLY_ADDED_QUEUE.get(child_key, [])
|
||||
|
||||
if plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT and len(grandchild_keys) > 1:
|
||||
if jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT and len(grandchild_keys) > 1:
|
||||
on_created(child_key, child_keys=grandchild_keys)
|
||||
|
||||
elif grandchild_keys:
|
||||
@@ -674,46 +675,47 @@ def clear_recently_added_queue(rating_key, title):
|
||||
|
||||
def on_created(rating_key, **kwargs):
|
||||
logger.debug("Tautulli TimelineHandler :: Library item %s added to Plex." % str(rating_key))
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
metadata = pms_connect.get_metadata_details(rating_key)
|
||||
|
||||
if metadata:
|
||||
notify = True
|
||||
# now = helpers.timestamp()
|
||||
#
|
||||
# if helpers.cast_to_int(metadata['added_at']) < now - 86400: # Updated more than 24 hours ago
|
||||
# logger.debug("Tautulli TimelineHandler :: Library item %s added more than 24 hours ago. Not notifying."
|
||||
# % str(rating_key))
|
||||
# notify = False
|
||||
|
||||
data_factory = datafactory.DataFactory()
|
||||
if 'child_keys' not in kwargs:
|
||||
if data_factory.get_recently_added_item(rating_key):
|
||||
logger.debug("Tautulli TimelineHandler :: Library item %s added already. Not notifying again."
|
||||
% str(rating_key))
|
||||
notify = False
|
||||
|
||||
if notify:
|
||||
data = {'timeline_data': metadata, 'notify_action': 'on_created'}
|
||||
data.update(kwargs)
|
||||
plexpy.NOTIFY_QUEUE.put(data)
|
||||
|
||||
all_keys = [rating_key]
|
||||
if 'child_keys' in kwargs:
|
||||
all_keys.extend(kwargs['child_keys'])
|
||||
|
||||
for key in all_keys:
|
||||
data_factory.set_recently_added_item(key)
|
||||
|
||||
logger.debug("Added %s items to the recently_added database table." % str(len(all_keys)))
|
||||
|
||||
else:
|
||||
logger.error("Tautulli TimelineHandler :: Unable to retrieve metadata for rating_key %s" % str(rating_key))
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# metadata = pms_connect.get_metadata_details(rating_key)
|
||||
#
|
||||
# if metadata:
|
||||
# notify = True
|
||||
# # now = helpers.timestamp()
|
||||
# #
|
||||
# # if helpers.cast_to_int(metadata['added_at']) < now - 86400: # Updated more than 24 hours ago
|
||||
# # logger.debug("Tautulli TimelineHandler :: Library item %s added more than 24 hours ago. Not notifying."
|
||||
# # % str(rating_key))
|
||||
# # notify = False
|
||||
#
|
||||
# data_factory = datafactory.DataFactory()
|
||||
# if 'child_keys' not in kwargs:
|
||||
# if data_factory.get_recently_added_item(rating_key):
|
||||
# logger.debug("Tautulli TimelineHandler :: Library item %s added already. Not notifying again."
|
||||
# % str(rating_key))
|
||||
# notify = False
|
||||
#
|
||||
# if notify:
|
||||
# data = {'timeline_data': metadata, 'notify_action': 'on_created'}
|
||||
# data.update(kwargs)
|
||||
# jellypy.NOTIFY_QUEUE.put(data)
|
||||
#
|
||||
# all_keys = [rating_key]
|
||||
# if 'child_keys' in kwargs:
|
||||
# all_keys.extend(kwargs['child_keys'])
|
||||
#
|
||||
# for key in all_keys:
|
||||
# data_factory.set_recently_added_item(key)
|
||||
#
|
||||
# logger.debug("Added %s items to the recently_added database table." % str(len(all_keys)))
|
||||
#
|
||||
# else:
|
||||
# logger.error("Tautulli TimelineHandler :: Unable to retrieve metadata for rating_key %s" % str(rating_key))
|
||||
|
||||
|
||||
def delete_metadata_cache(session_key):
|
||||
try:
|
||||
os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, 'session_metadata/metadata-sessionKey-%s.json' % session_key))
|
||||
os.remove(os.path.join(jellypy.CONFIG.CACHE_DIR, 'session_metadata/metadata-sessionKey-%s.json' % session_key))
|
||||
except OSError as e:
|
||||
logger.error("Tautulli ActivityHandler :: Failed to remove metadata cache file (sessionKey %s): %s"
|
||||
% (session_key, e))
|
||||
@@ -13,35 +13,17 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
|
||||
import threading
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import activity_handler
|
||||
import activity_processor
|
||||
import database
|
||||
import helpers
|
||||
import libraries
|
||||
import logger
|
||||
import notification_handler
|
||||
import plextv
|
||||
import pmsconnect
|
||||
import web_socket
|
||||
else:
|
||||
from plexpy import activity_handler
|
||||
from plexpy import activity_processor
|
||||
from plexpy import database
|
||||
from plexpy import helpers
|
||||
from plexpy import libraries
|
||||
from plexpy import logger
|
||||
from plexpy import notification_handler
|
||||
from plexpy import plextv
|
||||
from plexpy import pmsconnect
|
||||
from plexpy import web_socket
|
||||
import jellypy
|
||||
|
||||
from jellypy import activity_handler
|
||||
from jellypy import activity_processor
|
||||
from jellypy import database
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
from jellypy import notification_handler
|
||||
from jellypy import web_socket
|
||||
|
||||
monitor_lock = threading.Lock()
|
||||
ext_ping_count = 0
|
||||
@@ -50,7 +32,6 @@ int_ping_count = 0
|
||||
|
||||
|
||||
def check_active_sessions(ws_request=False):
|
||||
|
||||
with monitor_lock:
|
||||
monitor_db = database.MonitorDatabase()
|
||||
monitor_process = activity_processor.ActivityProcessor()
|
||||
@@ -60,8 +41,10 @@ def check_active_sessions(ws_request=False):
|
||||
for stream in db_streams:
|
||||
activity_handler.delete_metadata_cache(stream['session_key'])
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
session_list = pms_connect.get_current_activity()
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# session_list = pms_connect.get_current_activity()
|
||||
session_list = None
|
||||
|
||||
logger.debug("Tautulli Monitor :: Checking for active streams.")
|
||||
|
||||
@@ -82,28 +65,32 @@ def check_active_sessions(ws_request=False):
|
||||
if session['state'] == 'paused':
|
||||
logger.debug("Tautulli Monitor :: Session %s paused." % stream['session_key'])
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_pause'})
|
||||
jellypy.NOTIFY_QUEUE.put(
|
||||
{'stream_data': stream.copy(), 'notify_action': 'on_pause'})
|
||||
|
||||
if session['state'] == 'playing' and stream['state'] == 'paused':
|
||||
logger.debug("Tautulli Monitor :: Session %s resumed." % stream['session_key'])
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_resume'})
|
||||
jellypy.NOTIFY_QUEUE.put(
|
||||
{'stream_data': stream.copy(), 'notify_action': 'on_resume'})
|
||||
|
||||
if session['state'] == 'error':
|
||||
logger.debug("Tautulli Monitor :: Session %s encountered an error." % stream['session_key'])
|
||||
logger.debug(
|
||||
"Tautulli Monitor :: Session %s encountered an error." % stream['session_key'])
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_error'})
|
||||
jellypy.NOTIFY_QUEUE.put(
|
||||
{'stream_data': stream.copy(), 'notify_action': 'on_error'})
|
||||
|
||||
if stream['state'] == 'paused' and not ws_request:
|
||||
# The stream is still paused so we need to increment the paused_counter
|
||||
# Using the set config parameter as the interval, probably not the most accurate but
|
||||
# it will have to do for now. If it's a websocket request don't use this method.
|
||||
paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
|
||||
paused_counter = int(stream['paused_counter']) + jellypy.CONFIG.MONITORING_INTERVAL
|
||||
monitor_db.action('UPDATE sessions SET paused_counter = ? '
|
||||
'WHERE session_key = ? AND rating_key = ?',
|
||||
[paused_counter, stream['session_key'], stream['rating_key']])
|
||||
|
||||
if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
|
||||
if session['state'] == 'buffering' and jellypy.CONFIG.BUFFER_THRESHOLD > 0:
|
||||
# The stream is buffering so we need to increment the buffer_count
|
||||
# We're just going to increment on every monitor ping,
|
||||
# would be difficult to keep track otherwise
|
||||
@@ -117,11 +104,11 @@ def check_active_sessions(ws_request=False):
|
||||
'WHERE session_key = ? AND rating_key = ?',
|
||||
[stream['session_key'], stream['rating_key']])
|
||||
|
||||
if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
|
||||
if buffer_values[0]['buffer_count'] >= jellypy.CONFIG.BUFFER_THRESHOLD:
|
||||
# Push any notifications -
|
||||
# Push it on its own thread so we don't hold up our db actions
|
||||
# Our first buffer notification
|
||||
if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
|
||||
if buffer_values[0]['buffer_count'] == jellypy.CONFIG.BUFFER_THRESHOLD:
|
||||
logger.info("Tautulli Monitor :: User '%s' has triggered a buffer warning."
|
||||
% stream['user'])
|
||||
# Set the buffer trigger time
|
||||
@@ -130,26 +117,30 @@ def check_active_sessions(ws_request=False):
|
||||
'WHERE session_key = ? AND rating_key = ?',
|
||||
[stream['session_key'], stream['rating_key']])
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
|
||||
jellypy.NOTIFY_QUEUE.put(
|
||||
{'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
|
||||
|
||||
else:
|
||||
# Subsequent buffer notifications after wait time
|
||||
if helpers.timestamp() > buffer_values[0]['buffer_last_triggered'] + \
|
||||
plexpy.CONFIG.BUFFER_WAIT:
|
||||
logger.info("Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
|
||||
% stream['user'])
|
||||
jellypy.CONFIG.BUFFER_WAIT:
|
||||
logger.info(
|
||||
"Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
|
||||
% stream['user'])
|
||||
# Set the buffer trigger time
|
||||
monitor_db.action('UPDATE sessions '
|
||||
'SET buffer_last_triggered = strftime("%s","now") '
|
||||
'WHERE session_key = ? AND rating_key = ?',
|
||||
[stream['session_key'], stream['rating_key']])
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
|
||||
jellypy.NOTIFY_QUEUE.put(
|
||||
{'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
|
||||
|
||||
logger.debug("Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
|
||||
% (stream['session_key'],
|
||||
buffer_values[0]['buffer_count'],
|
||||
buffer_values[0]['buffer_last_triggered']))
|
||||
logger.debug(
|
||||
"Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
|
||||
% (stream['session_key'],
|
||||
buffer_values[0]['buffer_count'],
|
||||
buffer_values[0]['buffer_last_triggered']))
|
||||
|
||||
# Check if the user has reached the offset in the media we defined as the "watched" percent
|
||||
# Don't trigger if state is buffer as some clients push the progress to the end when
|
||||
@@ -157,11 +148,15 @@ def check_active_sessions(ws_request=False):
|
||||
if session['state'] != 'buffering':
|
||||
progress_percent = helpers.get_percent(session['view_offset'], session['duration'])
|
||||
notify_states = notification_handler.get_notify_state(session=session)
|
||||
if (session['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
|
||||
session['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
|
||||
session['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT) \
|
||||
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
|
||||
if (session[
|
||||
'media_type'] == 'movie' and progress_percent >= jellypy.CONFIG.MOVIE_WATCHED_PERCENT or
|
||||
session[
|
||||
'media_type'] == 'episode' and progress_percent >= jellypy.CONFIG.TV_WATCHED_PERCENT or
|
||||
session[
|
||||
'media_type'] == 'track' and progress_percent >= jellypy.CONFIG.MUSIC_WATCHED_PERCENT) \
|
||||
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
|
||||
jellypy.NOTIFY_QUEUE.put(
|
||||
{'stream_data': stream.copy(), 'notify_action': 'on_watched'})
|
||||
|
||||
else:
|
||||
# The user has stopped playing a stream
|
||||
@@ -173,17 +168,21 @@ def check_active_sessions(ws_request=False):
|
||||
stream['stopped'] = helpers.timestamp()
|
||||
monitor_db.action('UPDATE sessions SET stopped = ?, state = ? '
|
||||
'WHERE session_key = ? AND rating_key = ?',
|
||||
[stream['stopped'], 'stopped', stream['session_key'], stream['rating_key']])
|
||||
[stream['stopped'], 'stopped', stream['session_key'],
|
||||
stream['rating_key']])
|
||||
|
||||
progress_percent = helpers.get_percent(stream['view_offset'], stream['duration'])
|
||||
notify_states = notification_handler.get_notify_state(session=stream)
|
||||
if (stream['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
|
||||
stream['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
|
||||
stream['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT) \
|
||||
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
|
||||
if (stream[
|
||||
'media_type'] == 'movie' and progress_percent >= jellypy.CONFIG.MOVIE_WATCHED_PERCENT or
|
||||
stream[
|
||||
'media_type'] == 'episode' and progress_percent >= jellypy.CONFIG.TV_WATCHED_PERCENT or
|
||||
stream[
|
||||
'media_type'] == 'track' and progress_percent >= jellypy.CONFIG.MUSIC_WATCHED_PERCENT) \
|
||||
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_stop'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_stop'})
|
||||
|
||||
# Write the item history on playback stop
|
||||
row_id = monitor_process.write_session_history(session=stream)
|
||||
@@ -196,15 +195,17 @@ def check_active_sessions(ws_request=False):
|
||||
else:
|
||||
stream['write_attempts'] += 1
|
||||
|
||||
if stream['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
|
||||
logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
|
||||
"Will try again on the next pass. Write attempt %s."
|
||||
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
|
||||
if stream['write_attempts'] < jellypy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
|
||||
logger.warn(
|
||||
"Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
|
||||
"Will try again on the next pass. Write attempt %s."
|
||||
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
|
||||
monitor_process.increment_write_attempts(session_key=stream['session_key'])
|
||||
else:
|
||||
logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
|
||||
"Removing session from the database. Write attempt %s."
|
||||
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
|
||||
logger.warn(
|
||||
"Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
|
||||
"Removing session from the database. Write attempt %s."
|
||||
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
|
||||
logger.debug("Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
|
||||
% (stream['session_key'], stream['rating_key']))
|
||||
monitor_process.delete_session(session_key=stream['session_key'])
|
||||
@@ -216,19 +217,22 @@ def check_active_sessions(ws_request=False):
|
||||
if new_session:
|
||||
logger.debug("Tautulli Monitor :: Session %s started by user %s (%s) with ratingKey %s (%s)%s."
|
||||
% (str(session['session_key']), str(session['user_id']), session['username'],
|
||||
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
|
||||
str(session['rating_key']), session['full_title'],
|
||||
'[Live TV]' if session['live'] else ''))
|
||||
|
||||
else:
|
||||
logger.debug("Tautulli Monitor :: Unable to read session list.")
|
||||
|
||||
|
||||
def connect_server(log=True, startup=False):
if plexpy.CONFIG.PMS_IS_CLOUD:
if jellypy.CONFIG.PMS_IS_CLOUD:
if log:
logger.info("Tautulli Monitor :: Checking for Plex Cloud server status...")

plex_tv = plextv.PlexTV()
status = plex_tv.get_cloud_server_status()
# TODO: Jellyfin
# plex_tv = plextv.PlexTV()
# status = plex_tv.get_cloud_server_status()
status = None

if status is True:
|
||||
logger.info("Tautulli Monitor :: Plex Cloud server is active.")
|
||||
@@ -256,20 +260,20 @@ def connect_server(log=True, startup=False):
|
||||
|
||||
|
||||
def check_server_updates():
|
||||
|
||||
with monitor_lock:
|
||||
logger.info("Tautulli Monitor :: Checking for PMS updates...")
|
||||
|
||||
plex_tv = plextv.PlexTV()
|
||||
download_info = plex_tv.get_plex_downloads()
|
||||
|
||||
if download_info:
|
||||
logger.info("Tautulli Monitor :: Current PMS version: %s", plexpy.CONFIG.PMS_VERSION)
|
||||
|
||||
if download_info['update_available']:
|
||||
logger.info("Tautulli Monitor :: PMS update available version: %s", download_info['version'])
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_pmsupdate', 'pms_download_info': download_info})
|
||||
|
||||
else:
|
||||
logger.info("Tautulli Monitor :: No PMS update available.")
|
||||
# TODO: Jellyfin
# plex_tv = plextv.PlexTV()
# download_info = plex_tv.get_plex_downloads()
#
# if download_info:
# logger.info("Tautulli Monitor :: Current PMS version: %s", jellypy.CONFIG.PMS_VERSION)
#
# if download_info['update_available']:
# logger.info("Tautulli Monitor :: PMS update available version: %s", download_info['version'])
#
# jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_pmsupdate', 'pms_download_info': download_info})
#
# else:
# logger.info("Tautulli Monitor :: No PMS update available.")
@@ -13,28 +13,16 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
|
||||
from collections import defaultdict
|
||||
import json
|
||||
from collections import defaultdict
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import database
|
||||
import helpers
|
||||
import libraries
|
||||
import logger
|
||||
import pmsconnect
|
||||
import users
|
||||
else:
|
||||
from plexpy import database
|
||||
from plexpy import helpers
|
||||
from plexpy import libraries
|
||||
from plexpy import logger
|
||||
from plexpy import pmsconnect
|
||||
from plexpy import users
|
||||
import jellypy
|
||||
|
||||
from jellypy import database
|
||||
from jellypy import helpers
|
||||
from jellypy import libraries
|
||||
from jellypy import logger
|
||||
from jellypy import users
|
||||
|
||||
|
||||
class ActivityProcessor(object):
|
||||
@@ -165,7 +153,7 @@ class ActivityProcessor(object):
|
||||
# Check if any notification agents have notifications enabled
|
||||
if notify:
|
||||
session.update(timestamp)
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
|
||||
|
||||
# Add Live TV library if it hasn't been added
|
||||
if values['live']:
|
||||
@@ -229,16 +217,17 @@ class ActivityProcessor(object):
|
||||
(session['session_key'], session['rating_key'], session['media_type']))
|
||||
return session['id']
|
||||
|
||||
real_play_time = stopped - helpers.cast_to_int(session['started']) - helpers.cast_to_int(session['paused_counter'])
|
||||
real_play_time = stopped - helpers.cast_to_int(session['started']) - helpers.cast_to_int(
|
||||
session['paused_counter'])
|
||||
|
||||
if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
|
||||
if not is_import and jellypy.CONFIG.LOGGING_IGNORE_INTERVAL:
|
||||
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
|
||||
(real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
|
||||
(real_play_time < int(jellypy.CONFIG.LOGGING_IGNORE_INTERVAL)):
|
||||
logging_enabled = False
|
||||
logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs "
|
||||
"which is less than %s seconds, so we're not logging it." %
|
||||
(session['session_key'], session['rating_key'], str(real_play_time),
|
||||
plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
|
||||
jellypy.CONFIG.LOGGING_IGNORE_INTERVAL))
|
||||
if not is_import and session['media_type'] == 'track':
|
||||
if real_play_time < 15 and helpers.cast_to_int(session['duration']) >= 30:
|
||||
logging_enabled = False
|
||||
@@ -249,35 +238,41 @@ class ActivityProcessor(object):
|
||||
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
|
||||
(real_play_time < int(import_ignore_interval)):
|
||||
logging_enabled = False
|
||||
logger.debug("Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
|
||||
"seconds, so we're not logging it." %
|
||||
(session['rating_key'], str(real_play_time), import_ignore_interval))
|
||||
logger.debug(
|
||||
"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
|
||||
"seconds, so we're not logging it." %
|
||||
(session['rating_key'], str(real_play_time), import_ignore_interval))
|
||||
|
||||
if not is_import and not user_details['keep_history']:
|
||||
logging_enabled = False
|
||||
logger.debug("Tautulli ActivityProcessor :: History logging for user '%s' is disabled." % user_details['username'])
|
||||
logger.debug("Tautulli ActivityProcessor :: History logging for user '%s' is disabled." % user_details[
|
||||
'username'])
|
||||
elif not is_import and not library_details['keep_history']:
|
||||
logging_enabled = False
|
||||
logger.debug("Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name'])
|
||||
logger.debug(
|
||||
"Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details[
|
||||
'section_name'])
|
||||
|
||||
if logging_enabled:
|
||||
|
||||
# Fetch metadata first so we can return false if it fails
|
||||
if not is_import:
|
||||
logger.debug("Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
if session['live']:
|
||||
metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']),
|
||||
cache_key=session['session_key'],
|
||||
return_cache=True)
|
||||
else:
|
||||
metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
|
||||
if not metadata:
|
||||
return False
|
||||
else:
|
||||
media_info = {}
|
||||
if 'media_info' in metadata and len(metadata['media_info']) > 0:
|
||||
media_info = metadata['media_info'][0]
|
||||
logger.debug(
|
||||
"Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# if session['live']:
|
||||
# metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']),
|
||||
# cache_key=session['session_key'],
|
||||
# return_cache=True)
|
||||
# else:
|
||||
# metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
|
||||
# if not metadata:
|
||||
# return False
|
||||
# else:
|
||||
# media_info = {}
|
||||
# if 'media_info' in metadata and len(metadata['media_info']) > 0:
|
||||
# media_info = metadata['media_info'][0]
|
||||
else:
|
||||
metadata = import_metadata
|
||||
## TODO: Fix media info from imports. Temporary media info from import session.
|
||||
@@ -360,9 +355,9 @@ class ActivityProcessor(object):
|
||||
'view_offset': result[1]['view_offset'],
|
||||
'reference_id': result[1]['reference_id']}
|
||||
|
||||
watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
|
||||
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
|
||||
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT
|
||||
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
|
||||
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
|
||||
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT
|
||||
}
|
||||
prev_progress_percent = helpers.get_percent(prev_session['view_offset'], session['duration'])
|
||||
media_watched_percent = watched_percent.get(session['media_type'], 0)
|
||||
@@ -383,7 +378,7 @@ class ActivityProcessor(object):
|
||||
args = [new_session['id'], new_session['id']]
|
||||
|
||||
self.db.action(query=query, args=args)
|
||||
|
||||
|
||||
# logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
|
||||
# % last_id)
|
||||
|
||||
@@ -661,4 +656,4 @@ class ActivityProcessor(object):
|
||||
'WHERE user_id = ? AND machine_id = ? AND media_type = ? '
|
||||
'ORDER BY stopped DESC',
|
||||
[user_id, machine_id, media_type])
|
||||
return int(started - last_session.get('stopped', 0) >= plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD)
|
||||
return int(started - last_session.get('stopped', 0) >= jellypy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD)
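The watched-percent checks above are repeated (and now wrapped awkwardly across lines) in check_active_sessions() and again as the watched_percent dict in ActivityProcessor. A small helper along these lines, shown only as an illustration and not part of the diff, restates the same rule once using the same jellypy.CONFIG values and notification-state guard:

```python
# Illustrative only -- this helper is not part of the changeset; it mirrors the
# repeated "watched percent" condition from the code above.
import jellypy
from jellypy import helpers, notification_handler


def should_notify_watched(session):
    """True if the session passed its media type's watched threshold and no
    'on_watched' notification has been queued for it yet."""
    thresholds = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
                  'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
                  'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT}
    if session['media_type'] not in thresholds:
        return False
    progress_percent = helpers.get_percent(session['view_offset'], session['duration'])
    if progress_percent < thresholds[session['media_type']]:
        return False
    notify_states = notification_handler.get_notify_state(session=session)
    return not any(d['notify_action'] == 'on_watched' for d in notify_states)
```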
@@ -15,14 +15,6 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
|
||||
from hashing_passwords import check_hash
|
||||
from io import open
|
||||
|
||||
import hashlib
|
||||
import inspect
|
||||
import json
|
||||
@@ -31,39 +23,25 @@ import random
|
||||
import re
|
||||
import time
|
||||
import traceback
|
||||
from io import open
|
||||
|
||||
import cherrypy
|
||||
import xmltodict
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import config
|
||||
import database
|
||||
import helpers
|
||||
import libraries
|
||||
import logger
|
||||
import mobile_app
|
||||
import notification_handler
|
||||
import notifiers
|
||||
import newsletter_handler
|
||||
import newsletters
|
||||
import plextv
|
||||
import users
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import config
|
||||
from plexpy import database
|
||||
from plexpy import helpers
|
||||
from plexpy import libraries
|
||||
from plexpy import logger
|
||||
from plexpy import mobile_app
|
||||
from plexpy import notification_handler
|
||||
from plexpy import notifiers
|
||||
from plexpy import newsletter_handler
|
||||
from plexpy import newsletters
|
||||
from plexpy import plextv
|
||||
from plexpy import users
|
||||
import jellypy
|
||||
from jellypy import common
|
||||
from jellypy import config
|
||||
from jellypy import database
|
||||
from jellypy import helpers
|
||||
from jellypy import libraries
|
||||
from jellypy import logger
|
||||
from jellypy import mobile_app
|
||||
from jellypy import notification_handler
|
||||
from jellypy import notifiers
|
||||
from jellypy import newsletter_handler
|
||||
from jellypy import newsletters
|
||||
from jellypy import users
|
||||
from jellypy.password import check_hash
|
||||
|
||||
|
||||
class API2(object):
|
||||
@@ -91,7 +69,8 @@ class API2(object):
|
||||
if md is True:
|
||||
docs[f] = inspect.getdoc(getattr(self, f)) if inspect.getdoc(getattr(self, f)) else None
|
||||
else:
|
||||
docs[f] = ' '.join(inspect.getdoc(getattr(self, f)).split()) if inspect.getdoc(getattr(self, f)) else None
|
||||
docs[f] = ' '.join(inspect.getdoc(getattr(self, f)).split()) if inspect.getdoc(
|
||||
getattr(self, f)) else None
|
||||
return docs
|
||||
|
||||
def docs_md(self):
|
||||
@@ -107,15 +86,15 @@ class API2(object):
|
||||
def _api_validate(self, *args, **kwargs):
|
||||
""" Sets class vars and remove unneeded parameters. """
|
||||
|
||||
if not plexpy.CONFIG.API_ENABLED:
|
||||
if not jellypy.CONFIG.API_ENABLED:
|
||||
self._api_msg = 'API not enabled'
|
||||
self._api_response_code = 404
|
||||
|
||||
elif not plexpy.CONFIG.API_KEY:
|
||||
elif not jellypy.CONFIG.API_KEY:
|
||||
self._api_msg = 'API key not generated'
|
||||
self._api_response_code = 401
|
||||
|
||||
elif len(plexpy.CONFIG.API_KEY) != 32:
|
||||
elif len(jellypy.CONFIG.API_KEY) != 32:
|
||||
self._api_msg = 'API key not generated correctly'
|
||||
self._api_response_code = 401
|
||||
|
||||
@@ -128,7 +107,8 @@ class API2(object):
|
||||
self._api_response_code = 400
|
||||
|
||||
elif 'cmd' in kwargs and kwargs.get('cmd') not in self._api_valid_methods:
|
||||
self._api_msg = 'Unknown command: %s. Possible commands are: %s' % (kwargs.get('cmd', ''), ', '.join(sorted(self._api_valid_methods)))
|
||||
self._api_msg = 'Unknown command: %s. Possible commands are: %s' % (
|
||||
kwargs.get('cmd', ''), ', '.join(sorted(self._api_valid_methods)))
|
||||
self._api_response_code = 400
|
||||
|
||||
self._api_callback = kwargs.pop('callback', None)
|
||||
@@ -142,8 +122,8 @@ class API2(object):
|
||||
if 'app' in kwargs and helpers.bool_true(kwargs.pop('app')):
|
||||
self._api_app = True
|
||||
|
||||
if plexpy.CONFIG.API_ENABLED and not self._api_msg or self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
|
||||
if not self._api_app and self._api_apikey == plexpy.CONFIG.API_KEY:
|
||||
if jellypy.CONFIG.API_ENABLED and not self._api_msg or self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
|
||||
if not self._api_app and self._api_apikey == jellypy.CONFIG.API_KEY:
|
||||
self._api_authenticated = True
|
||||
|
||||
elif self._api_app and self._api_apikey == mobile_app.get_temp_device_token() and \
|
||||
@@ -203,7 +183,7 @@ class API2(object):
|
||||
]
|
||||
```
|
||||
"""
|
||||
logfile = os.path.join(plexpy.CONFIG.LOG_DIR, logger.FILENAME)
|
||||
logfile = os.path.join(jellypy.CONFIG.LOG_DIR, logger.FILENAME)
|
||||
templog = []
|
||||
start = int(start)
|
||||
end = int(end)
|
||||
@@ -232,7 +212,6 @@ class API2(object):
|
||||
continue
|
||||
|
||||
if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
|
||||
|
||||
d = {
|
||||
'time': temp_loglevel_and_time[0],
|
||||
'loglevel': loglvl,
|
||||
@@ -290,11 +269,11 @@ class API2(object):
|
||||
```
|
||||
"""
|
||||
|
||||
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
|
||||
interface_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/')
|
||||
interface_list = [name for name in os.listdir(interface_dir) if
|
||||
os.path.isdir(os.path.join(interface_dir, name))]
|
||||
|
||||
conf = plexpy.CONFIG._config
|
||||
conf = jellypy.CONFIG._config
|
||||
config = {}
|
||||
|
||||
# Truthify the dict
|
||||
@@ -332,7 +311,7 @@ class API2(object):
|
||||
None
|
||||
```
|
||||
"""
|
||||
if not plexpy.CONFIG.API_SQL:
|
||||
if not jellypy.CONFIG.API_SQL:
|
||||
self._api_msg = 'SQL not enabled for the API.'
|
||||
return
|
||||
|
||||
@@ -342,12 +321,12 @@ class API2(object):
|
||||
|
||||
# allow the user to shoot themselves
# in the foot but not in the head...
if not len(os.listdir(plexpy.CONFIG.BACKUP_DIR)):
|
||||
if not len(os.listdir(jellypy.CONFIG.BACKUP_DIR)):
|
||||
self.backup_db()
|
||||
else:
|
||||
# Only make a backup if there is no backup less than 24 h old
if not any(os.path.getctime(os.path.join(plexpy.CONFIG.BACKUP_DIR, file_)) > (time.time() - 86400)
|
||||
and file_.endswith('.db') for file_ in os.listdir(plexpy.CONFIG.BACKUP_DIR)):
|
||||
if not any(os.path.getctime(os.path.join(jellypy.CONFIG.BACKUP_DIR, file_)) > (time.time() - 86400)
|
||||
and file_.endswith('.db') for file_ in os.listdir(jellypy.CONFIG.BACKUP_DIR)):
|
||||
self.backup_db()
|
||||
|
||||
db = database.MonitorDatabase()
|
||||
@@ -363,7 +342,7 @@ class API2(object):
|
||||
return data
|
||||
|
||||
def backup_db(self):
|
||||
""" Create a manual backup of the `plexpy.db` file."""
|
||||
""" Create a manual backup of the `jellypy.db` file."""
|
||||
|
||||
data = database.make_backup()
|
||||
self._api_result_type = 'success' if data else 'error'
|
||||
@@ -373,14 +352,14 @@ class API2(object):
|
||||
def restart(self, **kwargs):
|
||||
""" Restart Tautulli."""
|
||||
|
||||
plexpy.SIGNAL = 'restart'
|
||||
jellypy.SIGNAL = 'restart'
|
||||
self._api_msg = 'Restarting Tautulli'
|
||||
self._api_result_type = 'success'
|
||||
|
||||
def update(self, **kwargs):
|
||||
""" Update Tautulli."""
|
||||
|
||||
plexpy.SIGNAL = 'update'
|
||||
jellypy.SIGNAL = 'update'
|
||||
self._api_msg = 'Updating Tautulli'
|
||||
self._api_result_type = 'success'
|
||||
|
||||
@@ -471,11 +450,13 @@ class API2(object):
|
||||
|
||||
mobile_app.set_temp_device_token(True)
|
||||
|
||||
plex_server = plextv.get_server_resources(return_info=True)
|
||||
tautulli = plexpy.get_tautulli_info()
|
||||
# TODO: Jellyfin
|
||||
# plex_server = plextv.get_server_resources(return_info=True)
|
||||
tautulli = jellypy.get_tautulli_info()
|
||||
|
||||
data = {"server_id": plexpy.CONFIG.PMS_UUID}
|
||||
data.update(plex_server)
|
||||
data = {"server_id": jellypy.CONFIG.PMS_UUID}
|
||||
# TODO: Jellyfin
|
||||
# data.update(plex_server)
|
||||
data.update(tautulli)
|
||||
|
||||
return data
|
||||
@@ -646,32 +627,32 @@ General optional parameters:
|
||||
"""
|
||||
data = None
|
||||
apikey = hashlib.sha224(str(random.getrandbits(256)).encode('utf-8')).hexdigest()[0:32]
|
||||
if plexpy.CONFIG.HTTP_USERNAME and plexpy.CONFIG.HTTP_PASSWORD:
|
||||
if jellypy.CONFIG.HTTP_USERNAME and jellypy.CONFIG.HTTP_PASSWORD:
|
||||
authenticated = False
|
||||
if plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == plexpy.CONFIG.HTTP_USERNAME and check_hash(password, plexpy.CONFIG.HTTP_PASSWORD):
|
||||
if jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == jellypy.CONFIG.HTTP_USERNAME and check_hash(password, jellypy.CONFIG.HTTP_PASSWORD):
|
||||
authenticated = True
|
||||
elif not plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
|
||||
elif not jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == jellypy.CONFIG.HTTP_USERNAME and password == jellypy.CONFIG.HTTP_PASSWORD:
|
||||
authenticated = True
|
||||
|
||||
if authenticated:
|
||||
if plexpy.CONFIG.API_KEY:
|
||||
data = plexpy.CONFIG.API_KEY
|
||||
if jellypy.CONFIG.API_KEY:
|
||||
data = jellypy.CONFIG.API_KEY
|
||||
else:
|
||||
data = apikey
|
||||
plexpy.CONFIG.API_KEY = apikey
|
||||
plexpy.CONFIG.write()
|
||||
jellypy.CONFIG.API_KEY = apikey
|
||||
jellypy.CONFIG.write()
|
||||
else:
|
||||
self._api_msg = 'Authentication is enabled, please add the correct username and password to the parameters'
|
||||
else:
|
||||
if plexpy.CONFIG.API_KEY:
|
||||
data = plexpy.CONFIG.API_KEY
|
||||
if jellypy.CONFIG.API_KEY:
|
||||
data = jellypy.CONFIG.API_KEY
|
||||
else:
|
||||
# Make an apikey if one doesn't exist
data = apikey
|
||||
plexpy.CONFIG.API_KEY = apikey
|
||||
plexpy.CONFIG.write()
|
||||
jellypy.CONFIG.API_KEY = apikey
|
||||
jellypy.CONFIG.write()
|
||||
|
||||
return data
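For reference, the key produced by get_apikey above is the first 32 hex characters of a SHA-224 digest of 256 random bits, which is exactly the length that _api_validate() checks with len(jellypy.CONFIG.API_KEY) != 32. A standalone illustration follows; note that random.getrandbits() is not a cryptographic source, and secrets.token_hex(16) (an alternative, not what the code does) would give an equally valid 32-character key from a CSPRNG:

```python
# Standalone illustration of the API key format used by get_apikey().
import hashlib
import random
import secrets

apikey = hashlib.sha224(str(random.getrandbits(256)).encode('utf-8')).hexdigest()[0:32]
assert len(apikey) == 32      # _api_validate() rejects keys that are not 32 chars

# Hardened alternative with the same shape (an assumption, not part of the diff):
alt_apikey = secrets.token_hex(16)
assert len(alt_apikey) == 32
```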
@@ -778,7 +759,8 @@ General optional parameters:
|
||||
|
||||
result = call(**self._api_kwargs)
|
||||
except Exception as e:
|
||||
logger.api_error('Tautulli APIv2 :: Failed to run %s with %s: %s' % (self._api_cmd, self._api_kwargs, e))
|
||||
logger.api_error(
|
||||
'Tautulli APIv2 :: Failed to run %s with %s: %s' % (self._api_cmd, self._api_kwargs, e))
|
||||
self._api_response_code = 500
|
||||
if self._api_debug:
|
||||
cherrypy.request.show_tracebacks = True
|
||||
@@ -13,6 +13,7 @@ from OpenSSL import crypto
|
||||
TYPE_RSA = crypto.TYPE_RSA
|
||||
TYPE_DSA = crypto.TYPE_DSA
|
||||
|
||||
|
||||
def createKeyPair(type, bits):
|
||||
"""
|
||||
Create a public/private key pair.
|
||||
@@ -24,6 +25,7 @@ def createKeyPair(type, bits):
|
||||
pkey.generate_key(type, bits)
|
||||
return pkey
|
||||
|
||||
|
||||
def createCertRequest(pkey, digest="sha256", **name):
|
||||
"""
|
||||
Create a certificate request.
|
||||
@@ -50,6 +52,7 @@ def createCertRequest(pkey, digest="sha256", **name):
|
||||
req.sign(pkey, digest)
|
||||
return req
|
||||
|
||||
|
||||
def createCertificate(req, issuerCertKey, serial, validityPeriod, digest="sha256"):
|
||||
"""
|
||||
Generate a certificate given a certificate request.
|
||||
@@ -76,6 +79,7 @@ def createCertificate(req, issuerCertKey, serial, validityPeriod, digest="sha256
|
||||
cert.sign(issuerKey, digest)
|
||||
return cert
|
||||
|
||||
|
||||
def createSelfSignedCertificate(issuerName, issuerKey, serial, notBefore, notAfter, altNames, digest="sha256"):
|
||||
"""
|
||||
Generate a certificate given a certificate request.
|
||||
jellypy/common.py (new file, 902 lines)
@@ -0,0 +1,902 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of Tautulli.
|
||||
#
|
||||
# Tautulli is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Tautulli is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import platform
|
||||
from collections import OrderedDict
|
||||
|
||||
import distro
|
||||
|
||||
from jellypy import version
|
||||
|
||||
# Identify Our Application
|
||||
PRODUCT = 'JellPy'
|
||||
PLATFORM = platform.system()
|
||||
PLATFORM_RELEASE = platform.release()
|
||||
PLATFORM_VERSION = platform.version()
|
||||
PLATFORM_LINUX_DISTRO = ' '.join(x for x in distro.linux_distribution() if x)
|
||||
PLATFORM_DEVICE_NAME = platform.node()
|
||||
PYTHON_VERSION = platform.python_version()
|
||||
BRANCH = version.JELLYPY_BRANCH
|
||||
RELEASE = version.JELLYPY_VERSION
|
||||
|
||||
USER_AGENT = '{}/{} ({} {})'.format(PRODUCT, RELEASE, PLATFORM, PLATFORM_RELEASE)
|
||||
|
||||
DEFAULT_USER_THUMB = "interfaces/default/images/gravatar-default-80x80.png"
|
||||
DEFAULT_POSTER_THUMB = "interfaces/default/images/poster.png"
|
||||
DEFAULT_COVER_THUMB = "interfaces/default/images/cover.png"
|
||||
DEFAULT_ART = "interfaces/default/images/art.png"
|
||||
DEFAULT_LIVE_TV_POSTER_THUMB = "interfaces/default/images/poster-live.png"
|
||||
DEFAULT_LIVE_TV_ART = "interfaces/default/images/art-live.png"
|
||||
DEFAULT_LIVE_TV_ART_FULL = "interfaces/default/images/art-live-full.png"
|
||||
DEFAULT_LIVE_TV_THUMB = "interfaces/default/images/libraries/live.png"
|
||||
|
||||
ONLINE_POSTER_THUMB = "https://tautulli.com/images/poster.png"
|
||||
ONLINE_COVER_THUMB = "https://tautulli.com/images/cover.png"
|
||||
ONLINE_ART = "https://tautulli.com/images/art.png"
|
||||
|
||||
LIVE_TV_SECTION_ID = 999999 # Fake section_id for Live TV library
|
||||
LIVE_TV_SECTION_NAME = "Live TV" # Fake section_name for Live TV library
|
||||
|
||||
DEFAULT_IMAGES = {
|
||||
'poster': DEFAULT_POSTER_THUMB,
|
||||
'cover': DEFAULT_COVER_THUMB,
|
||||
'art': DEFAULT_ART,
|
||||
'poster-live': DEFAULT_LIVE_TV_POSTER_THUMB,
|
||||
'art-live': DEFAULT_LIVE_TV_ART,
|
||||
'art-live-full': DEFAULT_LIVE_TV_ART_FULL
|
||||
}
|
||||
|
||||
MEDIA_TYPE_HEADERS = {
|
||||
'movie': 'Movies',
|
||||
'show': 'TV Shows',
|
||||
'season': 'Seasons',
|
||||
'episode': 'Episodes',
|
||||
'artist': 'Artists',
|
||||
'album': 'Albums',
|
||||
'track': 'Tracks',
|
||||
'video': 'Videos',
|
||||
'audio': 'Tracks',
|
||||
'photo': 'Photos'
|
||||
}
|
||||
|
||||
PLATFORM_NAME_OVERRIDES = {
|
||||
'Konvergo': 'Plex Media Player',
|
||||
'Mystery 3': 'Playstation 3',
|
||||
'Mystery 4': 'Playstation 4',
|
||||
'Mystery 5': 'Xbox 360',
|
||||
'WebMAF': 'Playstation 4',
|
||||
'windows': 'Windows',
|
||||
'osx': 'macOS'
|
||||
}
|
||||
|
||||
PMS_PLATFORM_NAME_OVERRIDES = {
|
||||
'MacOSX': 'Mac'
|
||||
}
|
||||
|
||||
PLATFORM_NAMES = {
|
||||
'android': 'android',
|
||||
'apple tv': 'atv',
|
||||
'chrome': 'chrome',
|
||||
'chromecast': 'chromecast',
|
||||
'dlna': 'dlna',
|
||||
'firefox': 'firefox',
|
||||
'internet explorer': 'ie',
|
||||
'ios': 'ios',
|
||||
'ipad': 'ios',
|
||||
'iphone': 'ios',
|
||||
'kodi': 'kodi',
|
||||
'linux': 'linux',
|
||||
'nexus': 'android',
|
||||
'macos': 'macos',
|
||||
'microsoft edge': 'msedge',
|
||||
'netcast': 'lg',
|
||||
'opera': 'opera',
|
||||
'osx': 'macos',
|
||||
'playstation': 'playstation',
|
||||
'plex home theater': 'plex',
|
||||
'plex media player': 'plex',
|
||||
'plexamp': 'plexamp',
|
||||
'plextogether': 'synclounge',
|
||||
'roku': 'roku',
|
||||
'safari': 'safari',
|
||||
'samsung': 'samsung',
|
||||
'synclounge': 'synclounge',
|
||||
'tivo': 'tivo',
|
||||
'tizen': 'samsung',
|
||||
'tvos': 'atv',
|
||||
'vizio': 'opera',
|
||||
'webos': 'lg',
|
||||
'wiiu': 'wiiu',
|
||||
'windows': 'windows',
|
||||
'windows phone': 'wp',
|
||||
'xbmc': 'xbmc',
|
||||
'xbox': 'xbox'
|
||||
}
|
||||
PLATFORM_NAMES = OrderedDict(sorted(list(PLATFORM_NAMES.items()), key=lambda k: k[0], reverse=True))
|
||||
|
||||
MEDIA_FLAGS_AUDIO = {
|
||||
'ac.?3': 'dolby_digital',
|
||||
'truehd': 'dolby_truehd',
|
||||
'(dca|dta)': 'dts',
|
||||
'dts(hd_|-hd|-)?ma': 'dca-ma',
|
||||
'vorbis': 'ogg'
|
||||
}
|
||||
MEDIA_FLAGS_VIDEO = {
|
||||
'avc1': 'h264',
|
||||
'wmv(1|2)': 'wmv',
|
||||
'wmv3': 'wmvhd'
|
||||
}
|
||||
|
||||
AUDIO_CODEC_OVERRIDES = {
|
||||
'truehd': 'TrueHD'
|
||||
}
|
||||
|
||||
VIDEO_RESOLUTION_OVERRIDES = {
|
||||
'sd': 'SD',
|
||||
'4k': '4k'
|
||||
}
|
||||
|
||||
AUDIO_CHANNELS = {
|
||||
'1': 'Mono',
|
||||
'2': 'Stereo',
|
||||
'3': '2.1',
|
||||
'4': '3.1',
|
||||
'6': '5.1',
|
||||
'7': '6.1',
|
||||
'8': '7.1'
|
||||
}
|
||||
|
||||
VIDEO_QUALITY_PROFILES = {
|
||||
20000: '20 Mbps 1080p',
|
||||
12000: '12 Mbps 1080p',
|
||||
10000: '10 Mbps 1080p',
|
||||
8000: '8 Mbps 1080p',
|
||||
4000: '4 Mbps 720p',
|
||||
3000: '3 Mbps 720p',
|
||||
2000: '2 Mbps 720p',
|
||||
1500: '1.5 Mbps 480p',
|
||||
720: '0.7 Mbps 328p',
|
||||
320: '0.3 Mbps 240p',
|
||||
208: '0.2 Mbps 160p',
|
||||
96: '0.096 Mbps',
|
||||
64: '0.064 Mbps'
|
||||
}
|
||||
VIDEO_QUALITY_PROFILES = OrderedDict(sorted(list(VIDEO_QUALITY_PROFILES.items()), key=lambda k: k[0], reverse=True))
|
||||
|
||||
AUDIO_QUALITY_PROFILES = {
|
||||
512: '512 kbps',
|
||||
320: '320 kbps',
|
||||
256: '256 kbps',
|
||||
192: '192 kbps',
|
||||
128: '128 kbps',
|
||||
96: '96 kbps'
|
||||
}
|
||||
AUDIO_QUALITY_PROFILES = OrderedDict(sorted(list(AUDIO_QUALITY_PROFILES.items()), key=lambda k: k[0], reverse=True))
|
||||
|
||||
HW_DECODERS = [
|
||||
'dxva2',
|
||||
'videotoolbox',
|
||||
'mediacodecndk',
|
||||
'vaapi',
|
||||
'nvdec'
|
||||
]
|
||||
HW_ENCODERS = [
|
||||
'qsv',
|
||||
'mf',
|
||||
'videotoolbox',
|
||||
'mediacodecndk',
|
||||
'vaapi',
|
||||
'nvenc',
|
||||
'x264'
|
||||
]
|
||||
|
||||
EXTRA_TYPES = {
|
||||
'1': 'Trailer',
|
||||
'2': 'Deleted Scene',
|
||||
'3': 'Interview',
|
||||
'5': 'Behind the Scenes',
|
||||
'6': 'Scene',
|
||||
'10': 'Featurette',
|
||||
'11': 'Short'
|
||||
}
|
||||
|
||||
SCHEDULER_LIST = [
|
||||
('Check GitHub for updates', 'websocket'),
|
||||
('Check for server response', 'websocket'),
|
||||
('Check for active sessions', 'websocket'),
|
||||
('Check for recently added items', 'websocket'),
|
||||
('Check for server remote access', 'websocket'),
|
||||
('Check for Plex updates', 'scheduled'),
|
||||
('Refresh users list', 'scheduled'),
|
||||
('Refresh libraries list', 'scheduled'),
|
||||
('Refresh Plex server URLs', 'scheduled'),
|
||||
('Optimize Tautulli database', 'scheduled'),
|
||||
('Backup Tautulli database', 'scheduled'),
|
||||
('Backup Tautulli config', 'scheduled')
|
||||
]
|
||||
SCHEDULER_LIST = OrderedDict(SCHEDULER_LIST)
|
||||
|
||||
DATE_TIME_FORMATS = [
|
||||
{
|
||||
'category': 'Year',
|
||||
'parameters': [
|
||||
{'value': 'YYYY', 'description': 'Numeric, four digits', 'example': '1999, 2003'},
|
||||
{'value': 'YY', 'description': 'Numeric, two digits', 'example': '99, 03'}
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Month',
|
||||
'parameters': [
|
||||
{'value': 'MMMM', 'description': 'Textual, full', 'example': 'January-December'},
|
||||
{'value': 'MMM', 'description': 'Textual, three letters', 'example': 'Jan-Dec'},
|
||||
{'value': 'MM', 'description': 'Numeric, with leading zeros', 'example': '01-12'},
|
||||
{'value': 'M', 'description': 'Numeric, without leading zeros', 'example': '1-12'},
|
||||
{'value': 'Mo', 'description': 'Numeric, with suffix', 'example': '1st, 2nd ... 12th'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Day of the Year',
|
||||
'parameters': [
|
||||
{'value': 'DDDD', 'description': 'Numeric, with leading zeros', 'example': '001-365'},
|
||||
{'value': 'DDD', 'description': 'Numeric, without leading zeros', 'example': '1-365'},
|
||||
{'value': 'DDDo', 'description': 'Numeric, with suffix', 'example': '1st, 2nd, ... 365th'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Day of the Month',
|
||||
'parameters': [
|
||||
{'value': 'DD', 'description': 'Numeric, with leading zeros', 'example': '01-31'},
|
||||
{'value': 'D', 'description': 'Numeric, without leading zeros', 'example': '1-31'},
|
||||
{'value': 'Do', 'description': 'Numeric, with suffix', 'example': '1st, 2nd ... 31st'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Day of the Week',
|
||||
'parameters': [
|
||||
{'value': 'dddd', 'description': 'Textual, full', 'example': 'Sunday-Saturday'},
|
||||
{'value': 'ddd', 'description': 'Textual, three letters', 'example': 'Sun-Sat'},
|
||||
{'value': 'dd', 'description': 'Textual, two letters', 'example': 'Su-Sa'},
|
||||
{'value': 'd', 'description': 'Numeric', 'example': '0-6'},
|
||||
{'value': 'do', 'description': 'Numeric, with suffix', 'example': '0th, 1st ... 6th'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Hour',
|
||||
'parameters': [
|
||||
{'value': 'HH', 'description': '24-hour, with leading zeros', 'example': '00-23'},
|
||||
{'value': 'H', 'description': '24-hour, without leading zeros', 'example': '0-23'},
|
||||
{'value': 'hh', 'description': '12-hour, with leading zeros', 'example': '01-12'},
|
||||
{'value': 'h', 'description': '12-hour, without leading zeros', 'example': '1-12'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Minute',
|
||||
'parameters': [
|
||||
{'value': 'mm', 'description': 'Numeric, with leading zeros', 'example': '00-59'},
|
||||
{'value': 'm', 'description': 'Numeric, without leading zeros', 'example': '0-59'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Second',
|
||||
'parameters': [
|
||||
{'value': 'ss', 'description': 'Numeric, with leading zeros', 'example': '00-59'},
|
||||
{'value': 's', 'description': 'Numeric, without leading zeros', 'example': '0-59'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'AM / PM',
|
||||
'parameters': [
|
||||
{'value': 'A', 'description': 'AM/PM uppercase', 'example': 'AM, PM'},
|
||||
{'value': 'a', 'description': 'am/pm lowercase', 'example': 'am, pm'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Timezone',
|
||||
'parameters': [
|
||||
{'value': 'ZZ', 'description': 'UTC offset', 'example': '+0100, -0700'},
|
||||
{'value': 'Z', 'description': 'UTC offset', 'example': '+01:00, -07:00'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Timestamp',
|
||||
'parameters': [
|
||||
{'value': 'X', 'description': 'Unix timestamp', 'example': 'E.g. 1456887825'},
|
||||
]
|
||||
},
|
||||
]
|
||||
|
||||
NOTIFICATION_PARAMETERS = [
|
||||
{
|
||||
'category': 'Global',
|
||||
'parameters': [
|
||||
{'name': 'Tautulli Version', 'type': 'str', 'value': 'tautulli_version',
|
||||
'description': 'The current version of Tautulli.'},
|
||||
{'name': 'Tautulli Remote', 'type': 'str', 'value': 'tautulli_remote',
|
||||
'description': 'The current git remote of Tautulli.'},
|
||||
{'name': 'Tautulli Branch', 'type': 'str', 'value': 'tautulli_branch',
|
||||
'description': 'The current git branch of Tautulli.'},
|
||||
{'name': 'Tautulli Commit', 'type': 'str', 'value': 'tautulli_commit',
|
||||
'description': 'The current git commit hash of Tautulli.'},
|
||||
{'name': 'Server Name', 'type': 'str', 'value': 'server_name',
|
||||
'description': 'The name of your Plex Server.'},
|
||||
{'name': 'Server IP', 'type': 'str', 'value': 'server_ip',
|
||||
'description': 'The connection IP address for your Plex Server.'},
|
||||
{'name': 'Server Port', 'type': 'int', 'value': 'server_port',
|
||||
'description': 'The connection port for your Plex Server.'},
|
||||
{'name': 'Server URL', 'type': 'str', 'value': 'server_url',
|
||||
'description': 'The connection URL for your Plex Server.'},
|
||||
{'name': 'Server Platform', 'type': 'str', 'value': 'server_platform',
|
||||
'description': 'The platform of your Plex Server.'},
|
||||
{'name': 'Server Version', 'type': 'str', 'value': 'server_version',
|
||||
'description': 'The current version of your Plex Server.'},
|
||||
{'name': 'Server ID', 'type': 'str', 'value': 'server_machine_id',
|
||||
'description': 'The unique identifier for your Plex Server.'},
|
||||
{'name': 'Action', 'type': 'str', 'value': 'action',
|
||||
'description': 'The action that triggered the notification.'},
|
||||
{'name': 'Current Year', 'type': 'int', 'value': 'current_year',
|
||||
'description': 'The year when the notification is triggered.'},
|
||||
{'name': 'Current Month', 'type': 'int', 'value': 'current_month',
|
||||
'description': 'The month when the notification is triggered.', 'example': '1 to 12'},
|
||||
{'name': 'Current Day', 'type': 'int', 'value': 'current_day',
|
||||
'description': 'The day when the notification is triggered.', 'example': '1 to 31'},
|
||||
{'name': 'Current Hour', 'type': 'int', 'value': 'current_hour',
|
||||
'description': 'The hour when the notification is triggered.', 'example': '0 to 23'},
|
||||
{'name': 'Current Minute', 'type': 'int', 'value': 'current_minute',
|
||||
'description': 'The minute when the notification is triggered.', 'example': '0 to 59'},
|
||||
{'name': 'Current Second', 'type': 'int', 'value': 'current_second',
|
||||
'description': 'The second when the notification is triggered.', 'example': '0 to 59'},
|
||||
{'name': 'Current Weekday', 'type': 'int', 'value': 'current_weekday',
|
||||
'description': 'The ISO weekday when the notification is triggered.', 'example': '1 (Mon) to 7 (Sun)'},
|
||||
{'name': 'Current Week', 'type': 'int', 'value': 'current_week',
|
||||
'description': 'The ISO week number when the notification is triggered.', 'example': '1 to 52'},
|
||||
{'name': 'Datestamp', 'type': 'str', 'value': 'datestamp',
|
||||
'description': 'The date (in date format) when the notification is triggered.'},
|
||||
{'name': 'Timestamp', 'type': 'str', 'value': 'timestamp',
|
||||
'description': 'The time (in time format) when the notification is triggered.'},
|
||||
{'name': 'Unix Time', 'type': 'int', 'value': 'unixtime',
|
||||
'description': 'The unix timestamp when the notification is triggered.'},
|
||||
{'name': 'UTC Time', 'type': 'int', 'value': 'utctime',
|
||||
'description': 'The UTC timestamp in ISO format when the notification is triggered.'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Stream Details',
|
||||
'parameters': [
|
||||
{'name': 'Streams', 'type': 'int', 'value': 'streams',
|
||||
'description': 'The total number of concurrent streams.'},
|
||||
{'name': 'Direct Plays', 'type': 'int', 'value': 'direct_plays',
|
||||
'description': 'The total number of concurrent direct plays.'},
|
||||
{'name': 'Direct Streams', 'type': 'int', 'value': 'direct_streams',
|
||||
'description': 'The total number of concurrent direct streams.'},
|
||||
{'name': 'Transcodes', 'type': 'int', 'value': 'transcodes',
|
||||
'description': 'The total number of concurrent transcodes.'},
|
||||
{'name': 'Total Bandwidth', 'type': 'int', 'value': 'total_bandwidth',
|
||||
'description': 'The total Plex Streaming Brain reserved bandwidth (in kbps).',
|
||||
'help_text': 'not the used bandwidth'},
|
||||
{'name': 'LAN Bandwidth', 'type': 'int', 'value': 'lan_bandwidth',
|
||||
'description': 'The total Plex Streaming Brain reserved LAN bandwidth (in kbps).',
|
||||
'help_text': 'not the used bandwidth'},
|
||||
{'name': 'WAN Bandwidth', 'type': 'int', 'value': 'wan_bandwidth',
|
||||
'description': 'The total Plex Streaming Brain reserved WAN bandwidth (in kbps).',
|
||||
'help_text': 'not the used bandwidth'},
|
||||
{'name': 'User Streams', 'type': 'int', 'value': 'user_streams',
|
||||
'description': 'The number of concurrent streams by the user streaming.'},
|
||||
{'name': 'User Direct Plays', 'type': 'int', 'value': 'user_direct_plays',
|
||||
'description': 'The number of concurrent direct plays by the user streaming.'},
|
||||
{'name': 'User Direct Streams', 'type': 'int', 'value': 'user_direct_streams',
|
||||
'description': 'The number of concurrent direct streams by the user streaming.'},
|
||||
{'name': 'User Transcodes', 'type': 'int', 'value': 'user_transcodes',
|
||||
'description': 'The number of concurrent transcodes by the user streaming.'},
|
||||
{'name': 'User', 'type': 'str', 'value': 'user', 'description': 'The friendly name of the user streaming.'},
|
||||
{'name': 'Username', 'type': 'str', 'value': 'username',
|
||||
'description': 'The username of the user streaming.'},
|
||||
{'name': 'User Email', 'type': 'str', 'value': 'user_email',
|
||||
'description': 'The email address of the user streaming.'},
|
||||
{'name': 'User Thumb', 'type': 'str', 'value': 'user_thumb',
|
||||
'description': 'The profile picture URL of the user streaming.'},
|
||||
{'name': 'Device', 'type': 'str', 'value': 'device',
|
||||
'description': 'The type of client device being used for playback.'},
|
||||
{'name': 'Platform', 'type': 'str', 'value': 'platform',
|
||||
'description': 'The type of client platform being used for playback.'},
|
||||
{'name': 'Product', 'type': 'str', 'value': 'product',
|
||||
'description': 'The type of client product being used for playback.'},
|
||||
{'name': 'Player', 'type': 'str', 'value': 'player',
|
||||
'description': 'The name of the player being used for playback.'},
|
||||
{'name': 'Initial Stream', 'type': 'int', 'value': 'initial_stream',
|
||||
'description': 'If the stream is the initial stream of a continuous streaming session.',
|
||||
'example': '0 or 1'},
|
||||
{'name': 'IP Address', 'type': 'str', 'value': 'ip_address',
|
||||
'description': 'The IP address of the device being used for playback.'},
|
||||
{'name': 'Stream Duration', 'type': 'int', 'value': 'stream_duration',
|
||||
'description': 'The duration (in minutes) for the stream.'},
|
||||
{'name': 'Stream Time', 'type': 'str', 'value': 'stream_time',
|
||||
'description': 'The duration (in time format) of the stream.'},
|
||||
{'name': 'Remaining Duration', 'type': 'int', 'value': 'remaining_duration',
|
||||
'description': 'The remaining duration (in minutes) of the stream.'},
|
||||
{'name': 'Remaining Time', 'type': 'str', 'value': 'remaining_time',
|
||||
'description': 'The remaining duration (in time format) of the stream.'},
|
||||
{'name': 'Progress Duration', 'type': 'int', 'value': 'progress_duration',
|
||||
'description': 'The last reported offset (in minutes) of the stream.'},
|
||||
{'name': 'Progress Time', 'type': 'str', 'value': 'progress_time',
|
||||
'description': 'The last reported offset (in time format) of the stream.'},
|
||||
{'name': 'Progress Percent', 'type': 'int', 'value': 'progress_percent',
|
||||
'description': 'The last reported progress percent of the stream.'},
|
||||
{'name': 'Transcode Decision', 'type': 'str', 'value': 'transcode_decision',
|
||||
'description': 'The transcode decision of the stream.'},
|
||||
{'name': 'Container Decision', 'type': 'str', 'value': 'container_decision',
|
||||
'description': 'The container transcode decision of the stream.'},
|
||||
{'name': 'Video Decision', 'type': 'str', 'value': 'video_decision',
|
||||
'description': 'The video transcode decision of the stream.'},
|
||||
{'name': 'Audio Decision', 'type': 'str', 'value': 'audio_decision',
|
||||
'description': 'The audio transcode decision of the stream.'},
|
||||
{'name': 'Subtitle Decision', 'type': 'str', 'value': 'subtitle_decision',
|
||||
'description': 'The subtitle transcode decision of the stream.'},
|
||||
{'name': 'Quality Profile', 'type': 'str', 'value': 'quality_profile',
|
||||
'description': 'The Plex quality profile of the stream.', 'example': 'e.g. Original, 4 Mbps 720p, etc.'},
|
||||
{'name': 'Optimized Version', 'type': 'int', 'value': 'optimized_version',
|
||||
'description': 'If the stream is an optimized version.', 'example': '0 or 1'},
|
||||
{'name': 'Optimized Version Profile', 'type': 'str', 'value': 'optimized_version_profile',
|
||||
'description': 'The optimized version profile of the stream.'},
|
||||
{'name': 'Synced Version', 'type': 'int', 'value': 'synced_version',
|
||||
'description': 'If the stream is a synced version.', 'example': '0 or 1'},
{'name': 'Live', 'type': 'int', 'value': 'live', 'description': 'If the stream is live TV.',
|
||||
'example': '0 or 1'},
|
||||
{'name': 'Channel Call Sign', 'type': 'str', 'value': 'channel_call_sign',
|
||||
'description': 'The Live TV channel call sign.'},
|
||||
{'name': 'Channel Identifier', 'type': 'str', 'value': 'channel_identifier',
|
||||
'description': 'The Live TV channel number.'},
|
||||
{'name': 'Channel Thumb', 'type': 'str', 'value': 'channel_thumb',
|
||||
'description': 'The URL for the Live TV channel logo.'},
|
||||
{'name': 'Secure', 'type': 'int', 'value': 'secure',
|
||||
'description': 'If the stream is using a secure connection.', 'example': '0 or 1'},
|
||||
{'name': 'Relayed', 'type': 'int', 'value': 'relayed', 'description': 'If the stream is using Plex Relay.',
|
||||
'example': '0 or 1'},
|
||||
{'name': 'Stream Local', 'type': 'int', 'value': 'stream_local', 'description': 'If the stream is local.',
|
||||
'example': '0 or 1'},
|
||||
{'name': 'Stream Location', 'type': 'str', 'value': 'stream_location',
|
||||
'description': 'The network location of the stream.', 'example': 'lan or wan'},
|
||||
{'name': 'Stream Bandwidth', 'type': 'int', 'value': 'stream_bandwidth',
|
||||
'description': 'The Plex Streaming Brain reserved bandwidth (in kbps) of the stream.',
|
||||
'help_text': 'not the used bandwidth'},
|
||||
{'name': 'Stream Container', 'type': 'str', 'value': 'stream_container',
|
||||
'description': 'The media container of the stream.'},
|
||||
{'name': 'Stream Bitrate', 'type': 'int', 'value': 'stream_bitrate',
|
||||
'description': 'The bitrate (in kbps) of the stream.'},
|
||||
{'name': 'Stream Aspect Ratio', 'type': 'float', 'value': 'stream_aspect_ratio',
|
||||
'description': 'The aspect ratio of the stream.'},
|
||||
{'name': 'Stream Video Codec', 'type': 'str', 'value': 'stream_video_codec',
|
||||
'description': 'The video codec of the stream.'},
|
||||
{'name': 'Stream Video Codec Level', 'type': 'int', 'value': 'stream_video_codec_level',
|
||||
'description': 'The video codec level of the stream.'},
|
||||
{'name': 'Stream Video Bitrate', 'type': 'int', 'value': 'stream_video_bitrate',
|
||||
'description': 'The video bitrate (in kbps) of the stream.'},
|
||||
{'name': 'Stream Video Bit Depth', 'type': 'int', 'value': 'stream_video_bit_depth',
|
||||
'description': 'The video bit depth of the stream.'},
|
||||
{'name': 'Stream Video Chroma Subsampling', 'type': 'str', 'value': 'stream_video_chroma_subsampling',
|
||||
'description': 'The video chroma subsampling of the stream.'},
|
||||
{'name': 'Stream Video Color Primaries', 'type': 'str', 'value': 'stream_video_color_primaries',
|
||||
'description': 'The video color primaries of the stream.'},
|
||||
{'name': 'Stream Video Color Range', 'type': 'str', 'value': 'stream_video_color_range',
|
||||
'description': 'The video color range of the stream.'},
|
||||
{'name': 'Stream Video Color Space', 'type': 'str', 'value': 'stream_video_color_space',
|
||||
'description': 'The video color space of the stream.'},
|
||||
{'name': 'Stream Video Color Transfer Function', 'type': 'str', 'value': 'stream_video_color_trc',
|
||||
'description': 'The video transfer function of the stream.'},
|
||||
{'name': 'Stream Video Dynamic Range', 'type': 'str', 'value': 'stream_video_dynamic_range',
|
||||
'description': 'The video dynamic range of the stream.', 'example': 'HDR or SDR'},
|
||||
{'name': 'Stream Video Framerate', 'type': 'str', 'value': 'stream_video_framerate',
|
||||
'description': 'The video framerate of the stream.'},
|
||||
{'name': 'Stream Video Full Resolution', 'type': 'str', 'value': 'stream_video_full_resolution',
|
||||
'description': 'The video resolution of the stream with scan type.'},
|
||||
{'name': 'Stream Video Ref Frames', 'type': 'int', 'value': 'stream_video_ref_frames',
|
||||
'description': 'The video reference frames of the stream.'},
|
||||
{'name': 'Stream Video Resolution', 'type': 'str', 'value': 'stream_video_resolution',
|
||||
'description': 'The video resolution of the stream.'},
|
||||
{'name': 'Stream Video Scan Type', 'type': 'str', 'value': 'stream_video_scan_type',
|
||||
'description': 'The video scan type of the stream.'},
|
||||
{'name': 'Stream Video Height', 'type': 'int', 'value': 'stream_video_height',
|
||||
'description': 'The video height of the stream.'},
|
||||
{'name': 'Stream Video Width', 'type': 'int', 'value': 'stream_video_width',
|
||||
'description': 'The video width of the stream.'},
|
||||
{'name': 'Stream Video Language', 'type': 'str', 'value': 'stream_video_language',
|
||||
'description': 'The video language of the stream.'},
|
||||
{'name': 'Stream Video Language Code', 'type': 'str', 'value': 'stream_video_language_code',
|
||||
'description': 'The video language code of the stream.'},
|
||||
{'name': 'Stream Audio Bitrate', 'type': 'int', 'value': 'stream_audio_bitrate',
|
||||
'description': 'The audio bitrate of the stream.'},
|
||||
{'name': 'Stream Audio Bitrate Mode', 'type': 'str', 'value': 'stream_audio_bitrate_mode',
|
||||
'description': 'The audio bitrate mode of the stream.', 'example': 'cbr or vbr'},
|
||||
{'name': 'Stream Audio Codec', 'type': 'str', 'value': 'stream_audio_codec',
|
||||
'description': 'The audio codec of the stream.'},
|
||||
{'name': 'Stream Audio Channels', 'type': 'float', 'value': 'stream_audio_channels',
|
||||
'description': 'The audio channels of the stream.'},
|
||||
{'name': 'Stream Audio Channel Layout', 'type': 'str', 'value': 'stream_audio_channel_layout',
|
||||
'description': 'The audio channel layout of the stream.'},
|
||||
{'name': 'Stream Audio Sample Rate', 'type': 'int', 'value': 'stream_audio_sample_rate',
|
||||
'description': 'The audio sample rate (in Hz) of the stream.'},
|
||||
{'name': 'Stream Audio Language', 'type': 'str', 'value': 'stream_audio_language',
|
||||
'description': 'The audio language of the stream.'},
|
||||
{'name': 'Stream Audio Language Code', 'type': 'str', 'value': 'stream_audio_language_code',
|
||||
'description': 'The audio language code of the stream.'},
|
||||
{'name': 'Stream Subtitle Codec', 'type': 'str', 'value': 'stream_subtitle_codec',
|
||||
'description': 'The subtitle codec of the stream.'},
|
||||
{'name': 'Stream Subtitle Container', 'type': 'str', 'value': 'stream_subtitle_container',
|
||||
'description': 'The subtitle container of the stream.'},
|
||||
{'name': 'Stream Subtitle Format', 'type': 'str', 'value': 'stream_subtitle_format',
|
||||
'description': 'The subtitle format of the stream.'},
|
||||
{'name': 'Stream Subtitle Forced', 'type': 'int', 'value': 'stream_subtitle_forced',
|
||||
'description': 'If the subtitles are forced.', 'example': '0 or 1'},
|
||||
{'name': 'Stream Subtitle Language', 'type': 'str', 'value': 'stream_subtitle_language',
|
||||
'description': 'The subtitle language of the stream.'},
|
||||
{'name': 'Stream Subtitle Language Code', 'type': 'str', 'value': 'stream_subtitle_language_code',
|
||||
'description': 'The subtitle language code of the stream.'},
|
||||
{'name': 'Stream Subtitle Location', 'type': 'str', 'value': 'stream_subtitle_location',
|
||||
'description': 'The subtitle location of the stream.'},
|
||||
{'name': 'Transcode Container', 'type': 'str', 'value': 'transcode_container',
|
||||
'description': 'The media container of the transcoded stream.'},
|
||||
{'name': 'Transcode Video Codec', 'type': 'str', 'value': 'transcode_video_codec',
|
||||
'description': 'The video codec of the transcoded stream.'},
|
||||
{'name': 'Transcode Video Width', 'type': 'int', 'value': 'transcode_video_width',
|
||||
'description': 'The video width of the transcoded stream.'},
|
||||
{'name': 'Transcode Video Height', 'type': 'int', 'value': 'transcode_video_height',
|
||||
'description': 'The video height of the transcoded stream.'},
|
||||
{'name': 'Transcode Audio Codec', 'type': 'str', 'value': 'transcode_audio_codec',
|
||||
'description': 'The audio codec of the transcoded stream.'},
|
||||
{'name': 'Transcode Audio Channels', 'type': 'float', 'value': 'transcode_audio_channels',
|
||||
'description': 'The audio channels of the transcoded stream.'},
|
||||
{'name': 'Transcode HW Requested', 'type': 'int', 'value': 'transcode_hw_requested',
|
||||
'description': 'If hardware decoding/encoding was requested.', 'example': '0 or 1'},
|
||||
{'name': 'Transcode HW Decoding', 'type': 'int', 'value': 'transcode_hw_decoding',
|
||||
'description': 'If hardware decoding is used.', 'example': '0 or 1'},
|
||||
{'name': 'Transcode HW Decoding Codec', 'type': 'str', 'value': 'transcode_hw_decode',
|
||||
'description': 'The hardware decoding codec.'},
|
||||
{'name': 'Transcode HW Decoding Title', 'type': 'str', 'value': 'transcode_hw_decode_title',
|
||||
'description': 'The hardware decoding codec title.'},
|
||||
{'name': 'Transcode HW Encoding', 'type': 'int', 'value': 'transcode_hw_encoding',
|
||||
'description': 'If hardware encoding is used.', 'example': '0 or 1'},
|
||||
{'name': 'Transcode HW Encoding Codec', 'type': 'str', 'value': 'transcode_hw_encode',
|
||||
'description': 'The hardware encoding codec.'},
|
||||
{'name': 'Transcode HW Encoding Title', 'type': 'str', 'value': 'transcode_hw_encode_title',
|
||||
'description': 'The hardware encoding codec title.'},
|
||||
{'name': 'Session Key', 'type': 'str', 'value': 'session_key',
|
||||
'description': 'The unique identifier for the session.'},
|
||||
{'name': 'Transcode Key', 'type': 'str', 'value': 'transcode_key',
|
||||
'description': 'The unique identifier for the transcode session.'},
|
||||
{'name': 'Session ID', 'type': 'str', 'value': 'session_id',
|
||||
'description': 'The unique identifier for the stream.'},
|
||||
{'name': 'User ID', 'type': 'int', 'value': 'user_id',
|
||||
'description': 'The unique identifier for the user.'},
|
||||
{'name': 'Machine ID', 'type': 'str', 'value': 'machine_id',
|
||||
'description': 'The unique identifier for the player.'},
|
||||
]
|
||||
},
|
||||
{
|
||||
'category': 'Source Metadata Details',
|
||||
'parameters': [
|
||||
{'name': 'Media Type', 'type': 'str', 'value': 'media_type', 'description': 'The type of media.',
|
||||
'example': 'movie, show, season, episode, artist, album, track, clip'},
|
||||
{'name': 'Title', 'type': 'str', 'value': 'title', 'description': 'The full title of the item.'},
|
||||
{'name': 'Library Name', 'type': 'str', 'value': 'library_name',
|
||||
'description': 'The library name of the item.'},
|
||||
{'name': 'Show Name', 'type': 'str', 'value': 'show_name', 'description': 'The title of the TV series.'},
|
||||
{'name': 'Episode Name', 'type': 'str', 'value': 'episode_name',
|
||||
'description': 'The title of the episode.'},
|
||||
{'name': 'Artist Name', 'type': 'str', 'value': 'artist_name', 'description': 'The name of the artist.'},
|
||||
{'name': 'Album Name', 'type': 'str', 'value': 'album_name', 'description': 'The title of the album.'},
|
||||
{'name': 'Track Name', 'type': 'str', 'value': 'track_name', 'description': 'The title of the track.'},
|
||||
{'name': 'Track Artist', 'type': 'str', 'value': 'track_artist',
|
||||
'description': 'The name of the artist of the track.'},
|
||||
{'name': 'Season Number', 'type': 'int', 'value': 'season_num', 'description': 'The season number.',
|
||||
'example': 'e.g. 1, or 1-3'},
|
||||
{'name': 'Season Number 00', 'type': 'int', 'value': 'season_num00',
|
||||
'description': 'The two digit season number.', 'example': 'e.g. 01, or 01-03'},
|
||||
{'name': 'Episode Number', 'type': 'int', 'value': 'episode_num', 'description': 'The episode number.',
|
||||
'example': 'e.g. 6, or 6-10'},
|
||||
{'name': 'Episode Number 00', 'type': 'int', 'value': 'episode_num00',
|
||||
'description': 'The two digit episode number.', 'example': 'e.g. 06, or 06-10'},
|
||||
{'name': 'Track Number', 'type': 'int', 'value': 'track_num', 'description': 'The track number.',
|
||||
'example': 'e.g. 4, or 4-10'},
|
||||
{'name': 'Track Number 00', 'type': 'int', 'value': 'track_num00',
|
||||
'description': 'The two digit track number.', 'example': 'e.g. 04, or 04-10'},
|
||||
{'name': 'Season Count', 'type': 'int', 'value': 'season_count', 'description': 'The number of seasons.'},
|
||||
{'name': 'Episode Count', 'type': 'int', 'value': 'episode_count',
|
||||
'description': 'The number of episodes.'},
|
||||
{'name': 'Album Count', 'type': 'int', 'value': 'album_count', 'description': 'The number of albums.'},
|
||||
{'name': 'Track Count', 'type': 'int', 'value': 'track_count', 'description': 'The number of tracks.'},
|
||||
{'name': 'Year', 'type': 'int', 'value': 'year', 'description': 'The release year for the item.'},
|
||||
{'name': 'Release Date', 'type': 'str', 'value': 'release_date',
|
||||
'description': 'The release date (in date format) for the item.'},
|
||||
{'name': 'Air Date', 'type': 'str', 'value': 'air_date',
|
||||
'description': 'The air date (in date format) for the item.'},
|
||||
{'name': 'Added Date', 'type': 'str', 'value': 'added_date',
|
||||
'description': 'The date (in date format) the item was added to Plex.'},
|
||||
{'name': 'Updated Date', 'type': 'str', 'value': 'updated_date',
|
||||
'description': 'The date (in date format) the item was updated on Plex.'},
|
||||
{'name': 'Last Viewed Date', 'type': 'str', 'value': 'last_viewed_date',
|
||||
'description': 'The date (in date format) the item was last viewed on Plex.'},
|
||||
{'name': 'Studio', 'type': 'str', 'value': 'studio', 'description': 'The studio for the item.'},
|
||||
{'name': 'Content Rating', 'type': 'str', 'value': 'content_rating',
|
||||
'description': 'The content rating for the item.', 'example': 'e.g. TV-MA, TV-PG, etc.'},
|
||||
{'name': 'Directors', 'type': 'str', 'value': 'directors',
|
||||
'description': 'A list of directors for the item.'},
|
||||
{'name': 'Writers', 'type': 'str', 'value': 'writers', 'description': 'A list of writers for the item.'},
|
||||
{'name': 'Actors', 'type': 'str', 'value': 'actors', 'description': 'A list of actors for the item.'},
|
||||
{'name': 'Genres', 'type': 'str', 'value': 'genres', 'description': 'A list of genres for the item.'},
|
||||
{'name': 'Labels', 'type': 'str', 'value': 'labels', 'description': 'A list of labels for the item.'},
|
||||
{'name': 'Collections', 'type': 'str', 'value': 'collections',
|
||||
'description': 'A list of collections for the item.'},
|
||||
{'name': 'Summary', 'type': 'str', 'value': 'summary', 'description': 'A short plot summary for the item.'},
|
||||
{'name': 'Tagline', 'type': 'str', 'value': 'tagline', 'description': 'A tagline for the media item.'},
|
||||
{'name': 'Rating', 'type': 'float', 'value': 'rating',
|
||||
'description': 'The rating (out of 10) for the item.'},
|
||||
{'name': 'Critic Rating', 'type': 'int', 'value': 'critic_rating',
|
||||
'description': 'The critic rating (%) for the item.',
|
||||
'help_text': 'Ratings source must be Rotten Tomatoes for the Plex Movie agent'},
|
||||
{'name': 'Audience Rating', 'type': 'float', 'value': 'audience_rating',
|
||||
'description': 'The audience rating for the item.',
|
||||
'help_text': 'Rating out of 10 for IMDB, percentage (%) for Rotten Tomatoes and TMDB.'},
|
||||
{'name': 'User Rating', 'type': 'float', 'value': 'user_rating',
|
||||
'description': 'The user (star) rating (out of 10) for the item.'},
|
||||
{'name': 'Duration', 'type': 'int', 'value': 'duration',
|
||||
'description': 'The duration (in minutes) for the item.'},
|
||||
{'name': 'Poster URL', 'type': 'str', 'value': 'poster_url',
|
||||
'description': 'A URL for the movie, TV show, or album poster.'},
|
||||
{'name': 'Plex ID', 'type': 'str', 'value': 'plex_id', 'description': 'The Plex ID for the item.',
|
||||
'example': 'e.g. 5d7769a9594b2b001e6a6b7e'},
|
||||
{'name': 'Plex URL', 'type': 'str', 'value': 'plex_url',
|
||||
'description': 'The Plex URL to your server for the item.'},
|
||||
{'name': 'IMDB ID', 'type': 'str', 'value': 'imdb_id', 'description': 'The IMDB ID for the movie.',
|
||||
'example': 'e.g. tt2488496'},
|
||||
{'name': 'IMDB URL', 'type': 'str', 'value': 'imdb_url', 'description': 'The IMDB URL for the movie.'},
|
||||
{'name': 'TVDB ID', 'type': 'int', 'value': 'thetvdb_id', 'description': 'The TVDB ID for the TV show.',
|
||||
'example': 'e.g. 121361'},
|
||||
{'name': 'TVDB URL', 'type': 'str', 'value': 'thetvdb_url', 'description': 'The TVDB URL for the TV show.'},
|
||||
{'name': 'TMDB ID', 'type': 'int', 'value': 'themoviedb_id',
|
||||
'description': 'The TMDb ID for the movie or TV show.', 'example': 'e.g. 15260'},
|
||||
{'name': 'TMDB URL', 'type': 'str', 'value': 'themoviedb_url',
|
||||
'description': 'The TMDb URL for the movie or TV show.'},
|
||||
{'name': 'TVmaze ID', 'type': 'int', 'value': 'tvmaze_id', 'description': 'The TVmaze ID for the TV show.',
|
||||
'example': 'e.g. 290'},
|
||||
{'name': 'TVmaze URL', 'type': 'str', 'value': 'tvmaze_url',
|
||||
'description': 'The TVmaze URL for the TV show.'},
|
||||
{'name': 'MusicBrainz ID', 'type': 'str', 'value': 'musicbrainz_id',
|
||||
'description': 'The MusicBrainz ID for the artist, album, or track.',
|
||||
'example': 'e.g. b670dfcf-9824-4309-a57e-03595aaba286'},
|
||||
{'name': 'MusicBrainz URL', 'type': 'str', 'value': 'musicbrainz_url',
|
||||
'description': 'The MusicBrainz URL for the artist, album, or track.'},
|
||||
{'name': 'Last.fm URL', 'type': 'str', 'value': 'lastfm_url',
|
||||
'description': 'The Last.fm URL for the album.', 'help_text': 'Music library agent must be Last.fm'},
|
||||
{'name': 'Trakt.tv URL', 'type': 'str', 'value': 'trakt_url',
|
||||
'description': 'The trakt.tv URL for the movie or TV show.'},
|
||||
{'name': 'Container', 'type': 'str', 'value': 'container',
|
||||
'description': 'The media container of the original media.'},
|
||||
{'name': 'Bitrate', 'type': 'int', 'value': 'bitrate', 'description': 'The bitrate of the original media.'},
|
||||
{'name': 'Aspect Ratio', 'type': 'float', 'value': 'aspect_ratio',
|
||||
'description': 'The aspect ratio of the original media.'},
|
||||
{'name': 'Video Codec', 'type': 'str', 'value': 'video_codec',
|
||||
'description': 'The video codec of the original media.'},
|
||||
{'name': 'Video Codec Level', 'type': 'int', 'value': 'video_codec_level',
|
||||
'description': 'The video codec level of the original media.'},
|
||||
{'name': 'Video Bitrate', 'type': 'int', 'value': 'video_bitrate',
|
||||
'description': 'The video bitrate of the original media.'},
|
||||
{'name': 'Video Bit Depth', 'type': 'int', 'value': 'video_bit_depth',
|
||||
'description': 'The video bit depth of the original media.'},
|
||||
{'name': 'Video Chroma Subsampling', 'type': 'str', 'value': 'video_chroma_subsampling',
|
||||
'description': 'The video chroma subsampling of the original media.'},
|
||||
{'name': 'Video Color Primaries', 'type': 'str', 'value': 'video_color_primaries',
|
||||
'description': 'The video color primaries of the original media.'},
|
||||
{'name': 'Video Color Range', 'type': 'str', 'value': 'video_color_range',
|
||||
'description': 'The video color range of the original media.'},
|
||||
{'name': 'Video Color Space', 'type': 'str', 'value': 'video_color_space',
|
||||
'description': 'The video color space of the original media.'},
|
||||
{'name': 'Video Color Transfer Function', 'type': 'str', 'value': 'video_color_trc',
|
||||
'description': 'The video transfer function of the original media.'},
|
||||
{'name': 'Video Dynamic Range', 'type': 'str', 'value': 'video_dynamic_range',
|
||||
'description': 'The video dynamic range of the original media.', 'example': 'HDR or SDR'},
|
||||
{'name': 'Video Framerate', 'type': 'str', 'value': 'video_framerate',
|
||||
'description': 'The video framerate of the original media.'},
|
||||
{'name': 'Video Full Resolution', 'type': 'str', 'value': 'video_full_resolution',
|
||||
'description': 'The video resolution of the original media with scan type.'},
|
||||
{'name': 'Video Ref Frames', 'type': 'int', 'value': 'video_ref_frames',
|
||||
'description': 'The video reference frames of the original media.'},
|
||||
{'name': 'Video Resolution', 'type': 'str', 'value': 'video_resolution',
|
||||
'description': 'The video resolution of the original media.'},
|
||||
{'name': 'Video Scan Type', 'type': 'str', 'value': 'video_scan_type',
|
||||
'description': 'The video scan type of the original media.'},
|
||||
{'name': 'Video Height', 'type': 'int', 'value': 'video_height',
|
||||
'description': 'The video height of the original media.'},
|
||||
{'name': 'Video Width', 'type': 'int', 'value': 'video_width',
|
||||
'description': 'The video width of the original media.'},
|
||||
{'name': 'Video Language', 'type': 'str', 'value': 'video_language',
|
||||
'description': 'The video language of the original media.'},
|
||||
{'name': 'Video Language Code', 'type': 'str', 'value': 'video_language_code',
|
||||
'description': 'The video language code of the original media.'},
|
||||
{'name': 'Audio Bitrate', 'type': 'int', 'value': 'audio_bitrate',
|
||||
'description': 'The audio bitrate of the original media.'},
|
||||
{'name': 'Audio Bitrate Mode', 'type': 'str', 'value': 'audio_bitrate_mode',
|
||||
'description': 'The audio bitrate mode of the original media.', 'example': 'cbr or vbr'},
|
||||
{'name': 'Audio Codec', 'type': 'str', 'value': 'audio_codec',
|
||||
'description': 'The audio codec of the original media.'},
|
||||
{'name': 'Audio Channels', 'type': 'float', 'value': 'audio_channels',
|
||||
'description': 'The audio channels of the original media.'},
|
||||
{'name': 'Audio Channel Layout', 'type': 'str', 'value': 'audio_channel_layout',
|
||||
'description': 'The audio channel layout of the original media.'},
|
||||
{'name': 'Audio Sample Rate', 'type': 'int', 'value': 'audio_sample_rate',
|
||||
'description': 'The audio sample rate (in Hz) of the original media.'},
|
||||
{'name': 'Audio Language', 'type': 'str', 'value': 'audio_language',
|
||||
'description': 'The audio language of the original media.'},
|
||||
{'name': 'Audio Language Code', 'type': 'str', 'value': 'audio_language_code',
|
||||
'description': 'The audio language code of the original media.'},
|
||||
{'name': 'Subtitle Codec', 'type': 'str', 'value': 'subtitle_codec',
|
||||
'description': 'The subtitle codec of the original media.'},
|
||||
{'name': 'Subtitle Container', 'type': 'str', 'value': 'subtitle_container',
|
||||
'description': 'The subtitle container of the original media.'},
|
||||
{'name': 'Subtitle Format', 'type': 'str', 'value': 'subtitle_format',
|
||||
'description': 'The subtitle format of the original media.'},
|
||||
{'name': 'Subtitle Forced', 'type': 'int', 'value': 'subtitle_forced',
|
||||
'description': 'If the subtitles are forced.', 'example': '0 or 1'},
|
||||
{'name': 'Subtitle Location', 'type': 'str', 'value': 'subtitle_location',
|
||||
'description': 'The subtitle location of the original media.'},
|
||||
{'name': 'Subtitle Language', 'type': 'str', 'value': 'subtitle_language',
|
||||
'description': 'The subtitle language of the original media.'},
|
||||
{'name': 'Subtitle Language Code', 'type': 'str', 'value': 'subtitle_language_code',
|
||||
'description': 'The subtitle language code of the original media.'},
|
||||
{'name': 'File', 'type': 'str', 'value': 'file', 'description': 'The file path to the item.'},
|
||||
{'name': 'Filename', 'type': 'str', 'value': 'filename', 'description': 'The file name of the item.'},
|
||||
{'name': 'File Size', 'type': 'int', 'value': 'file_size', 'description': 'The file size of the item.'},
|
||||
{'name': 'Section ID', 'type': 'int', 'value': 'section_id',
|
||||
'description': 'The unique identifier for the library.'},
|
||||
{'name': 'Rating Key', 'type': 'int', 'value': 'rating_key',
|
||||
'description': 'The unique identifier for the movie, episode, or track.'},
|
||||
{'name': 'Parent Rating Key', 'type': 'int', 'value': 'parent_rating_key',
|
||||
'description': 'The unique identifier for the season or album.'},
|
||||
{'name': 'Grandparent Rating Key', 'type': 'int', 'value': 'grandparent_rating_key',
|
||||
'description': 'The unique identifier for the TV show or artist.'},
|
||||
{'name': 'Art', 'type': 'str', 'value': 'art', 'description': 'The Plex background art for the media.'},
|
||||
{'name': 'Thumb', 'type': 'str', 'value': 'thumb',
|
||||
'description': 'The Plex thumbnail for the movie or episode.'},
|
||||
{'name': 'Parent Thumb', 'type': 'str', 'value': 'parent_thumb',
|
||||
'description': 'The Plex thumbnail for the season or album.'},
|
||||
{'name': 'Grandparent Thumb', 'type': 'str', 'value': 'grandparent_thumb',
|
||||
'description': 'The Plex thumbnail for the TV show or artist.'},
|
||||
{'name': 'Poster Thumb', 'type': 'str', 'value': 'poster_thumb',
|
||||
'description': 'The Plex thumbnail for the poster image.'},
|
||||
{'name': 'Poster Title', 'type': 'str', 'value': 'poster_title',
|
||||
'description': 'The title for the poster image.'},
|
||||
{'name': 'Indexes', 'type': 'int', 'value': 'indexes',
|
||||
'description': 'If the media has video preview thumbnails.', 'example': '0 or 1'},
|
||||
]
},
{
'category': 'Plex Remote Access',
'parameters': [
{'name': 'Remote Access Mapping State', 'type': 'str', 'value': 'remote_access_mapping_state',
|
||||
'description': 'The mapping state of the Plex remote access port.'},
|
||||
{'name': 'Remote Access Mapping Error', 'type': 'str', 'value': 'remote_access_mapping_error',
|
||||
'description': 'The mapping error of the Plex remote access port.'},
|
||||
{'name': 'Remote Access Public IP Address', 'type': 'str', 'value': 'remote_access_public_address',
|
||||
'description': 'The Plex remote access public IP address.'},
|
||||
{'name': 'Remote Access Public Port', 'type': 'str', 'value': 'remote_access_public_port',
|
||||
'description': 'The Plex remote access public port.'},
|
||||
{'name': 'Remote Access Private IP Address', 'type': 'str', 'value': 'remote_access_private_address',
|
||||
'description': 'The Plex remote access private IP address.'},
|
||||
{'name': 'Remote Access Private Port', 'type': 'str', 'value': 'remote_access_private_port',
|
||||
'description': 'The Plex remote access private port.'},
|
||||
{'name': 'Remote Access Failure Reason', 'type': 'str', 'value': 'remote_access_reason',
|
||||
'description': 'The failure reason for Plex remote access going down.'},
|
||||
]
},
{
'category': 'Plex Update Available',
'parameters': [
{'name': 'Update Version', 'type': 'str', 'value': 'update_version',
|
||||
'description': 'The available update version for your Plex Server.'},
|
||||
{'name': 'Update Url', 'type': 'str', 'value': 'update_url',
|
||||
'description': 'The download URL for the available update.'},
|
||||
{'name': 'Update Release Date', 'type': 'str', 'value': 'update_release_date',
|
||||
'description': 'The release date of the available update.'},
|
||||
{'name': 'Update Channel', 'type': 'str', 'value': 'update_channel', 'description': 'The update channel.',
|
||||
'example': 'Public or Plex Pass'},
|
||||
{'name': 'Update Platform', 'type': 'str', 'value': 'update_platform',
|
||||
'description': 'The platform of your Plex Server.'},
|
||||
{'name': 'Update Distro', 'type': 'str', 'value': 'update_distro',
|
||||
'description': 'The distro of your Plex Server.'},
|
||||
{'name': 'Update Distro Build', 'type': 'str', 'value': 'update_distro_build',
|
||||
'description': 'The distro build of your Plex Server.'},
|
||||
{'name': 'Update Requirements', 'type': 'str', 'value': 'update_requirements',
|
||||
'description': 'The requirements for the available update.'},
|
||||
{'name': 'Update Extra Info', 'type': 'str', 'value': 'update_extra_info',
|
||||
'description': 'Any extra info for the available update.'},
|
||||
{'name': 'Update Changelog Added', 'type': 'str', 'value': 'update_changelog_added',
|
||||
'description': 'The added changelog for the available update.'},
|
||||
{'name': 'Update Changelog Fixed', 'type': 'str', 'value': 'update_changelog_fixed',
|
||||
'description': 'The fixed changelog for the available update.'},
|
||||
]
},
{
'category': 'Tautulli Update Available',
'parameters': [
{'name': 'Tautulli Update Version', 'type': 'str', 'value': 'tautulli_update_version',
|
||||
'description': 'The available update version for Tautulli.'},
|
||||
{'name': 'Tautulli Update Release URL', 'type': 'str', 'value': 'tautulli_update_release_url',
|
||||
'description': 'The release page URL on GitHub.'},
|
||||
{'name': 'Tautulli Update Tar', 'type': 'str', 'value': 'tautulli_update_tar',
|
||||
'description': 'The tar download URL for the available update.'},
|
||||
{'name': 'Tautulli Update Zip', 'type': 'str', 'value': 'tautulli_update_zip',
|
||||
'description': 'The zip download URL for the available update.'},
|
||||
{'name': 'Tautulli Update Commit', 'type': 'str', 'value': 'tautulli_update_commit',
|
||||
'description': 'The commit hash for the available update.'},
|
||||
{'name': 'Tautulli Update Behind', 'type': 'int', 'value': 'tautulli_update_behind',
|
||||
'description': 'The number of commits behind for the available update.'},
|
||||
{'name': 'Tautulli Update Changelog', 'type': 'str', 'value': 'tautulli_update_changelog',
'description': 'The changelog for the available update.'},
]
},
]

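The notification parameter table above is a plain list of category dicts, each parameter keyed by a machine-readable 'value', which makes it easy to flatten into a substitution map when a notification body is rendered. The sketch below is illustrative only and is not part of this changeset: the build_substitutions helper, the sample values, and the assumption that the list above is bound to a name like NOTIFICATION_PARAMETERS are all hypothetical.

# Minimal sketch: flatten the parameter definitions into {value: example}
# pairs and apply them to a str.format()-style template.
def build_substitutions(parameter_groups, values):
    subs = {}
    for group in parameter_groups:
        for param in group['parameters']:
            # Fall back to an empty string for parameters without a value.
            subs[param['value']] = values.get(param['value'], '')
    return subs

# Hypothetical usage with two of the parameters defined above,
# assuming the list is bound to NOTIFICATION_PARAMETERS.
values = {'title': 'Example Movie', 'user_id': 12345}
subs = build_substitutions(NOTIFICATION_PARAMETERS, values)
print('{title} started (user {user_id})'.format(**subs))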
NEWSLETTER_PARAMETERS = [
{
'category': 'Global',
'parameters': [
{'name': 'Server Name', 'type': 'str', 'value': 'server_name',
|
||||
'description': 'The name of your Plex Server.'},
|
||||
{'name': 'Start Date', 'type': 'str', 'value': 'start_date',
|
||||
'description': 'The start date of the newsletter.'},
|
||||
{'name': 'End Date', 'type': 'str', 'value': 'end_date', 'description': 'The end date of the newsletter.'},
|
||||
{'name': 'Current Year', 'type': 'int', 'value': 'current_year',
|
||||
'description': 'The year of the start date of the newsletter.'},
|
||||
{'name': 'Current Month', 'type': 'int', 'value': 'current_month',
|
||||
'description': 'The month of the start date of the newsletter.', 'example': '1 to 12'},
|
||||
{'name': 'Current Day', 'type': 'int', 'value': 'current_day',
|
||||
'description': 'The day of the start date of the newsletter.', 'example': '1 to 31'},
|
||||
{'name': 'Current Hour', 'type': 'int', 'value': 'current_hour',
|
||||
'description': 'The hour of the start date of the newsletter.', 'example': '0 to 23'},
|
||||
{'name': 'Current Minute', 'type': 'int', 'value': 'current_minute',
|
||||
'description': 'The minute of the start date of the newsletter.', 'example': '0 to 59'},
|
||||
{'name': 'Current Second', 'type': 'int', 'value': 'current_second',
|
||||
'description': 'The second of the start date of the newsletter.', 'example': '0 to 59'},
|
||||
{'name': 'Current Weekday', 'type': 'int', 'value': 'current_weekday',
|
||||
'description': 'The ISO weekday of the start date of the newsletter.', 'example': '1 (Mon) to 7 (Sun)'},
|
||||
{'name': 'Current Week', 'type': 'int', 'value': 'current_week',
|
||||
'description': 'The ISO week number of the start date of the newsletter.', 'example': '1 to 52'},
|
||||
{'name': 'Newsletter Time Frame', 'type': 'int', 'value': 'newsletter_time_frame',
|
||||
'description': 'The time frame included in the newsletter.'},
|
||||
{'name': 'Newsletter Time Frame Units', 'type': 'str', 'value': 'newsletter_time_frame_units',
|
||||
'description': 'The time frame units included in the newsletter.'},
|
||||
{'name': 'Newsletter URL', 'type': 'str', 'value': 'newsletter_url',
|
||||
'description': 'The self-hosted URL to the newsletter.'},
|
||||
{'name': 'Newsletter Static URL', 'type': 'str', 'value': 'newsletter_static_url',
|
||||
'description': 'The static self-hosted URL to the latest scheduled newsletter for the agent.'},
|
||||
{'name': 'Newsletter UUID', 'type': 'str', 'value': 'newsletter_uuid',
|
||||
'description': 'The unique identifier for the newsletter.'},
|
||||
{'name': 'Newsletter ID', 'type': 'int', 'value': 'newsletter_id',
|
||||
'description': 'The unique ID number for the newsletter agent.'},
|
||||
{'name': 'Newsletter ID Name', 'type': 'int', 'value': 'newsletter_id_name',
|
||||
'description': 'The unique ID name for the newsletter agent.'},
|
||||
{'name': 'Newsletter Password', 'type': 'str', 'value': 'newsletter_password',
|
||||
'description': 'The password required to view the newsletter if enabled.'},
|
||||
]
},
{
'category': 'Recently Added',
'parameters': [
{'name': 'Included Libraries', 'type': 'str', 'value': 'newsletter_libraries',
|
||||
'description': 'The list of libraries included in the newsletter.'},
|
||||
]
}
]
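The date and time parameters in the Global newsletter category all describe the newsletter's start date, so they can be read as views over a single datetime. A short sketch of that relationship, where the dict keys match the 'value' fields above and the date itself is only an example:

from datetime import datetime

start = datetime(2024, 3, 4, 18, 30, 0)  # example start date only
newsletter_date_params = {
    'current_year': start.year,
    'current_month': start.month,            # 1 to 12
    'current_day': start.day,                # 1 to 31
    'current_hour': start.hour,              # 0 to 23
    'current_minute': start.minute,          # 0 to 59
    'current_second': start.second,          # 0 to 59
    'current_weekday': start.isoweekday(),   # 1 (Mon) to 7 (Sun)
    'current_week': start.isocalendar()[1],  # ISO week number
}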
@@ -13,25 +13,17 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from future.builtins import object
from future.builtins import str

import os
import re
import shutil
import time
import threading
import time

from configobj import ConfigObj, ParseError

import plexpy
if plexpy.PYTHON2:
import helpers
import logger
else:
from plexpy import helpers
from plexpy import logger
import jellypy
from jellypy import helpers
from jellypy import logger


def bool_int(value):
|
||||
@@ -49,30 +41,28 @@ FILENAME = "config.ini"
|
||||
_CONFIG_DEFINITIONS = {
|
||||
'ALLOW_GUEST_ACCESS': (int, 'General', 0),
|
||||
'DATE_FORMAT': (str, 'General', 'YYYY-MM-DD'),
|
||||
'PMS_IDENTIFIER': (str, 'PMS', ''),
|
||||
'PMS_IP': (str, 'PMS', '127.0.0.1'),
|
||||
'PMS_IS_CLOUD': (int, 'PMS', 0),
|
||||
'PMS_IS_REMOTE': (int, 'PMS', 0),
|
||||
'PMS_LOGS_FOLDER': (str, 'PMS', ''),
|
||||
'PMS_LOGS_LINE_CAP': (int, 'PMS', 1000),
|
||||
'PMS_NAME': (str, 'PMS', ''),
|
||||
'PMS_PORT': (int, 'PMS', 32400),
|
||||
'PMS_TOKEN': (str, 'PMS', ''),
|
||||
'PMS_SSL': (int, 'PMS', 0),
|
||||
'PMS_URL': (str, 'PMS', ''),
|
||||
'PMS_URL_OVERRIDE': (str, 'PMS', ''),
|
||||
'PMS_URL_MANUAL': (int, 'PMS', 0),
|
||||
'PMS_USE_BIF': (int, 'PMS', 0),
|
||||
'PMS_UUID': (str, 'PMS', ''),
|
||||
'PMS_TIMEOUT': (int, 'Advanced', 15),
|
||||
'PMS_PLEXPASS': (int, 'PMS', 0),
|
||||
'PMS_PLATFORM': (str, 'PMS', ''),
|
||||
'PMS_VERSION': (str, 'PMS', ''),
|
||||
'PMS_UPDATE_CHANNEL': (str, 'PMS', 'plex'),
|
||||
'PMS_UPDATE_DISTRO': (str, 'PMS', ''),
|
||||
'PMS_UPDATE_DISTRO_BUILD': (str, 'PMS', ''),
|
||||
'PMS_UPDATE_CHECK_INTERVAL': (int, 'Advanced', 24),
|
||||
'PMS_WEB_URL': (str, 'PMS', 'https://app.plex.tv/desktop'),
|
||||
'JELLYFIN_IDENTIFIER': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_IP': (str, 'JELLYFIN', '127.0.0.1'),
|
||||
'JELLYFIN_IS_REMOTE': (int, 'JELLYFIN', 0),
|
||||
'JELLYFIN_LOGS_FOLDER': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_LOGS_LINE_CAP': (int, 'JELLYFIN', 1000),
|
||||
'JELLYFIN_NAME': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_PORT': (int, 'JELLYFIN', 8096),
|
||||
'JELLYFIN_TOKEN': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_SSL': (int, 'JELLYFIN', 0),
|
||||
'JELLYFIN_URL': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_URL_OVERRIDE': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_URL_MANUAL': (int, 'JELLYFIN', 0),
|
||||
'JELLYFIN_USE_BIF': (int, 'JELLYFIN', 0),
|
||||
'JELLYFIN_UUID': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_TIMEOUT': (int, 'Advanced', 15),
|
||||
'JELLYFIN_PLEXPASS': (int, 'JELLYFIN', 0),
|
||||
'JELLYFIN_PLATFORM': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_VERSION': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_UPDATE_DISTRO': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_UPDATE_DISTRO_BUILD': (str, 'JELLYFIN', ''),
|
||||
'JELLYFIN_UPDATE_CHECK_INTERVAL': (int, 'Advanced', 24),
|
||||
'JELLYFIN_CLIENT_UUID': (str, 'JELLYFIN', ''),
|
||||
'TIME_FORMAT': (str, 'General', 'HH:mm'),
|
||||
'ANON_REDIRECT': (str, 'General', 'https://www.nullrefer.com/?'),
|
||||
'API_ENABLED': (int, 'General', 1),
|
||||
@@ -86,7 +76,7 @@ _CONFIG_DEFINITIONS = {
|
||||
'CACHE_DIR': (str, 'General', ''),
|
||||
'CACHE_IMAGES': (int, 'General', 1),
|
||||
'CACHE_SIZEMB': (int, 'Advanced', 32),
|
||||
'CHECK_GITHUB': (int, 'General', 1),
|
||||
'CHECK_GITHUB': (int, 'General', 0),
|
||||
'CHECK_GITHUB_INTERVAL': (int, 'General', 360),
|
||||
'CHECK_GITHUB_ON_STARTUP': (int, 'General', 1),
|
||||
'CHECK_GITHUB_CACHE_SECONDS': (int, 'Advanced', 3600),
|
||||
@@ -114,7 +104,8 @@ _CONFIG_DEFINITIONS = {
|
||||
'HOME_SECTIONS': (list, 'General', ['current_activity', 'watch_stats', 'library_stats', 'recently_added']),
|
||||
'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']),
|
||||
'HOME_STATS_CARDS': (list, 'General', ['top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music',
|
||||
'popular_music', 'last_watched', 'top_users', 'top_platforms', 'most_concurrent']),
|
||||
'popular_music', 'last_watched', 'top_users', 'top_platforms',
|
||||
'most_concurrent']),
|
||||
'HOME_REFRESH_INTERVAL': (int, 'General', 10),
|
||||
'HTTPS_CREATE_CERT': (int, 'General', 1),
|
||||
'HTTPS_CERT': (str, 'General', ''),
|
||||
@@ -134,6 +125,9 @@ _CONFIG_DEFINITIONS = {
|
||||
'HTTP_USERNAME': (str, 'General', ''),
|
||||
'HTTP_PLEX_ADMIN': (int, 'General', 0),
|
||||
'HTTP_BASE_URL': (str, 'General', ''),
|
||||
'HTTP_RATE_LIMIT_ATTEMPTS': (int, 'General', 10),
|
||||
'HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL': (int, 'General', 300),
|
||||
'HTTP_RATE_LIMIT_LOCKOUT_TIME': (int, 'General', 300),
|
||||
'INTERFACE': (str, 'General', 'default'),
|
||||
'IMGUR_CLIENT_ID': (str, 'Monitoring', ''),
|
||||
'JOURNAL_MODE': (str, 'Advanced', 'WAL'),
|
||||
@@ -146,7 +140,7 @@ _CONFIG_DEFINITIONS = {
|
||||
'MOVIE_WATCHED_PERCENT': (int, 'Monitoring', 85),
|
||||
'MUSIC_WATCHED_PERCENT': (int, 'Monitoring', 85),
|
||||
'MUSICBRAINZ_LOOKUP': (int, 'General', 0),
|
||||
'MONITOR_PMS_UPDATES': (int, 'Monitoring', 0),
|
||||
'MONITOR_JELLYFIN_UPDATES': (int, 'Monitoring', 0),
|
||||
'MONITORING_INTERVAL': (int, 'Monitoring', 60),
|
||||
'NEWSLETTER_AUTH': (int, 'Newsletter', 0),
|
||||
'NEWSLETTER_PASSWORD': (str, 'Newsletter', ''),
|
||||
@@ -167,7 +161,8 @@ _CONFIG_DEFINITIONS = {
|
||||
'NOTIFY_REMOTE_ACCESS_THRESHOLD': (int, 'Monitoring', 60),
|
||||
'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0),
|
||||
'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
|
||||
'PLEXPY_AUTO_UPDATE': (int, 'General', 0),
|
||||
'NOTIFY_NEW_DEVICE_INITIAL_ONLY': (int, 'Monitoring', 1),
|
||||
'JELLYPY_AUTO_UPDATE': (int, 'General', 0),
|
||||
'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12),
|
||||
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
|
||||
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
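Each _CONFIG_DEFINITIONS entry maps a setting name to a (cast, section, default) tuple. How such a tuple might be resolved against the ConfigObj-backed config file is sketched below; check_setting, the config.ini path, and the key casing are all assumptions made for illustration and are not part of this changeset.

from configobj import ConfigObj

config = ConfigObj('config.ini')  # path and file contents are illustrative

def check_setting(config, cast, section, key, default):
    # Hypothetical helper: fetch section/key, cast it with the type from the
    # definition tuple, and fall back to the default when missing or invalid.
    try:
        return cast(config[section][key])
    except (KeyError, ValueError, TypeError):
        return default

# e.g. resolve JELLYFIN_PORT from its definition tuple (int, 'JELLYFIN', 8096)
cast, section, default = _CONFIG_DEFINITIONS['JELLYFIN_PORT']
jellyfin_port = check_setting(config, cast, section, 'JELLYFIN_PORT', default)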
@@ -197,15 +192,12 @@ _BLACKLIST_KEYS = ['_APITOKEN', '_TOKEN', '_KEY', '_SECRET', '_PASSWORD', '_APIK
_WHITELIST_KEYS = ['HTTPS_KEY']

_DO_NOT_IMPORT_KEYS = [
'FIRST_RUN_COMPLETE', 'GET_FILE_SIZES_HOLD', 'GIT_PATH', 'PMS_LOGS_FOLDER',
'FIRST_RUN_COMPLETE', 'GET_FILE_SIZES_HOLD', 'GIT_PATH', 'JELLYFIN_LOGS_FOLDER',
'BACKUP_DIR', 'CACHE_DIR', 'EXPORT_DIR', 'LOG_DIR', 'NEWSLETTER_DIR', 'NEWSLETTER_CUSTOM_DIR',
'HTTP_HOST', 'HTTP_PORT', 'HTTP_ROOT',
'HTTP_USERNAME', 'HTTP_PASSWORD', 'HTTP_HASH_PASSWORD', 'HTTP_HASHED_PASSWORD',
'ENABLE_HTTPS', 'HTTPS_CREATE_CERT', 'HTTPS_CERT', 'HTTPS_CERT_CHAIN', 'HTTPS_KEY'
]
_DO_NOT_IMPORT_KEYS_DOCKER = [
'PLEXPY_AUTO_UPDATE', 'GIT_REMOTE', 'GIT_BRANCH'
]

IS_IMPORTING = False
IMPORT_THREAD = None
@@ -253,20 +245,17 @@ def import_tautulli_config(config=None, backup=False):
# Remove keys that should not be imported
for key in _DO_NOT_IMPORT_KEYS:
delattr(imported_config, key)
if plexpy.DOCKER:
for key in _DO_NOT_IMPORT_KEYS_DOCKER:
delattr(imported_config, key)

# Merge the imported config file into the current config file
plexpy.CONFIG._config.merge(imported_config._config)
plexpy.CONFIG.write()
jellypy.CONFIG._config.merge(imported_config._config)
jellypy.CONFIG.write()

logger.info("Tautulli Config :: Tautulli config import complete.")
set_import_thread(None)
set_is_importing(False)

# Restart to apply changes
plexpy.SIGNAL = 'restart'
jellypy.SIGNAL = 'restart'


def make_backup(cleanup=False, scheduler=False):
|
||||
@@ -276,15 +265,15 @@ def make_backup(cleanup=False, scheduler=False):
|
||||
backup_file = 'config.backup-{}.sched.ini'.format(helpers.now())
|
||||
else:
|
||||
backup_file = 'config.backup-{}.ini'.format(helpers.now())
|
||||
backup_folder = plexpy.CONFIG.BACKUP_DIR
|
||||
backup_folder = jellypy.CONFIG.BACKUP_DIR
|
||||
backup_file_fp = os.path.join(backup_folder, backup_file)
|
||||
|
||||
# In case the user has deleted it manually
|
||||
if not os.path.exists(backup_folder):
|
||||
os.makedirs(backup_folder)
|
||||
|
||||
plexpy.CONFIG.write()
|
||||
shutil.copyfile(plexpy.CONFIG_FILE, backup_file_fp)
|
||||
jellypy.CONFIG.write()
|
||||
shutil.copyfile(jellypy.CONFIG_FILE, backup_file_fp)
|
||||
|
||||
if cleanup:
|
||||
now = time.time()
|
||||
@@ -292,17 +281,17 @@ def make_backup(cleanup=False, scheduler=False):
|
||||
for root, dirs, files in os.walk(backup_folder):
|
||||
ini_files = [os.path.join(root, f) for f in files if f.endswith('.sched.ini')]
|
||||
for file_ in ini_files:
|
||||
if os.stat(file_).st_mtime < now - plexpy.CONFIG.BACKUP_DAYS * 86400:
|
||||
if os.stat(file_).st_mtime < now - jellypy.CONFIG.BACKUP_DAYS * 86400:
|
||||
try:
|
||||
os.remove(file_)
|
||||
except OSError as e:
|
||||
logger.error("Tautulli Config :: Failed to delete %s from the backup folder: %s" % (file_, e))
|
||||
|
||||
if backup_file in os.listdir(backup_folder):
|
||||
logger.debug("Tautulli Config :: Successfully backed up %s to %s" % (plexpy.CONFIG_FILE, backup_file))
|
||||
logger.debug("Tautulli Config :: Successfully backed up %s to %s" % (jellypy.CONFIG_FILE, backup_file))
|
||||
return True
|
||||
else:
|
||||
logger.error("Tautulli Config :: Failed to backup %s to %s" % (plexpy.CONFIG_FILE, backup_file))
|
||||
logger.error("Tautulli Config :: Failed to backup %s to %s" % (jellypy.CONFIG_FILE, backup_file))
|
||||
return False
|
||||
|
||||
|
||||
@@ -333,7 +322,7 @@ class Config(object):
for key, subkeys in self._config.items():
for subkey, value in subkeys.items():
if isinstance(value, str) and len(value.strip()) > 5 and \
subkey.upper() not in _WHITELIST_KEYS and any(bk in subkey.upper() for bk in _BLACKLIST_KEYS):
subkey.upper() not in _WHITELIST_KEYS and any(bk in subkey.upper() for bk in _BLACKLIST_KEYS):
blacklist.add(value.strip())

logger._BLACKLIST_WORDS.update(blacklist)
@@ -445,94 +434,3 @@ class Config(object):
|
||||
"""
|
||||
if self.CONFIG_VERSION == 0:
|
||||
self.CONFIG_VERSION = 1
|
||||
|
||||
if self.CONFIG_VERSION == 1:
|
||||
# Change home_stats_cards to list
|
||||
if self.HOME_STATS_CARDS:
|
||||
home_stats_cards = ''.join(self.HOME_STATS_CARDS).split(', ')
|
||||
if 'watch_statistics' in home_stats_cards:
|
||||
home_stats_cards.remove('watch_statistics')
|
||||
self.HOME_STATS_CARDS = home_stats_cards
|
||||
# Change home_library_cards to list
|
||||
if self.HOME_LIBRARY_CARDS:
|
||||
home_library_cards = ''.join(self.HOME_LIBRARY_CARDS).split(', ')
|
||||
if 'library_statistics' in home_library_cards:
|
||||
home_library_cards.remove('library_statistics')
|
||||
self.HOME_LIBRARY_CARDS = home_library_cards
|
||||
|
||||
self.CONFIG_VERSION = 2
|
||||
|
||||
if self.CONFIG_VERSION == 2:
|
||||
self.CONFIG_VERSION = 3
|
||||
|
||||
if self.CONFIG_VERSION == 3:
|
||||
if self.HTTP_ROOT == '/':
|
||||
self.HTTP_ROOT = ''
|
||||
|
||||
self.CONFIG_VERSION = 4
|
||||
|
||||
if self.CONFIG_VERSION == 4:
|
||||
if not len(self.HOME_STATS_CARDS) and 'watch_stats' in self.HOME_SECTIONS:
|
||||
home_sections = self.HOME_SECTIONS
|
||||
home_sections.remove('watch_stats')
|
||||
self.HOME_SECTIONS = home_sections
|
||||
if not len(self.HOME_LIBRARY_CARDS) and 'library_stats' in self.HOME_SECTIONS:
|
||||
home_sections = self.HOME_SECTIONS
|
||||
home_sections.remove('library_stats')
|
||||
self.HOME_SECTIONS = home_sections
|
||||
|
||||
self.CONFIG_VERSION = 5
|
||||
|
||||
if self.CONFIG_VERSION == 5:
|
||||
self.MONITOR_PMS_UPDATES = 0
|
||||
|
||||
self.CONFIG_VERSION = 6
|
||||
|
||||
if self.CONFIG_VERSION == 6:
|
||||
if self.GIT_USER.lower() == 'drzoidberg33':
|
||||
self.GIT_USER = 'JonnyWong16'
|
||||
|
||||
self.CONFIG_VERSION = 7
|
||||
|
||||
if self.CONFIG_VERSION == 7:
|
||||
self.CONFIG_VERSION = 8
|
||||
|
||||
if self.CONFIG_VERSION == 8:
|
||||
self.CONFIG_VERSION = 9
|
||||
|
||||
if self.CONFIG_VERSION == 9:
|
||||
if self.PMS_UPDATE_CHANNEL == 'plexpass':
|
||||
self.PMS_UPDATE_CHANNEL = 'beta'
|
||||
|
||||
self.CONFIG_VERSION = 10
|
||||
|
||||
if self.CONFIG_VERSION == 10:
|
||||
self.GIT_USER = 'Tautulli'
|
||||
self.GIT_REPO = 'Tautulli'
|
||||
|
||||
self.CONFIG_VERSION = 11
|
||||
|
||||
if self.CONFIG_VERSION == 11:
|
||||
self.ANON_REDIRECT = self.ANON_REDIRECT.replace('http://www.nullrefer.com/?',
|
||||
'https://www.nullrefer.com/?')
|
||||
self.CONFIG_VERSION = 12
|
||||
|
||||
if self.CONFIG_VERSION == 12:
|
||||
self.BUFFER_THRESHOLD = max(self.BUFFER_THRESHOLD, 10)
|
||||
|
||||
self.CONFIG_VERSION = 13
|
||||
|
||||
if self.CONFIG_VERSION == 13:
|
||||
self.CONFIG_VERSION = 14
|
||||
|
||||
if self.CONFIG_VERSION == 14:
|
||||
if plexpy.DOCKER:
|
||||
self.PLEXPY_AUTO_UPDATE = 0
|
||||
|
||||
self.CONFIG_VERSION = 15
|
||||
|
||||
if self.CONFIG_VERSION == 15:
|
||||
if self.HTTP_ROOT and self.HTTP_ROOT != '/':
|
||||
self.JWT_UPDATE_SECRET = True
|
||||
|
||||
self.CONFIG_VERSION = 16
|
||||
@@ -13,24 +13,16 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
import shutil
|
||||
import sqlite3
|
||||
import threading
|
||||
import time
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import helpers
|
||||
import logger
|
||||
else:
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
import jellypy
|
||||
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
|
||||
FILENAME = "tautulli.db"
|
||||
db_lock = threading.Lock()
|
||||
@@ -228,7 +220,7 @@ def delete_rows_from_table(table, row_ids):
if row_ids:
logger.info("Tautulli Database :: Deleting row ids %s from %s database table", row_ids, table)

# SQlite verions prior to 3.32.0 (2020-05-22) have maximum variable limit of 999
# SQlite versions prior to 3.32.0 (2020-05-22) have maximum variable limit of 999
# https://sqlite.org/limits.html
sqlite_max_variable_number = 999

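The 999-variable ceiling noted in the comment is why row ids have to be deleted in batches. The batching loop itself falls outside this hunk, so the following is only a sketch of the usual pattern, with the helper name and the bare sqlite3 connection chosen for illustration:

import sqlite3

def delete_in_chunks(connection, table, row_ids, limit=999):
    # Sketch only: issue one parameterized DELETE per chunk so each
    # statement stays under SQLite's maximum variable limit.
    for i in range(0, len(row_ids), limit):
        chunk = row_ids[i:i + limit]
        placeholders = ', '.join('?' * len(chunk))
        connection.execute(
            'DELETE FROM %s WHERE id IN (%s)' % (table, placeholders), chunk)
    connection.commit()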
@@ -308,7 +300,7 @@ def optimize_db():
|
||||
def db_filename(filename=FILENAME):
|
||||
""" Returns the filepath to the db """
|
||||
|
||||
return os.path.join(plexpy.DATA_DIR, filename)
|
||||
return os.path.join(jellypy.DATA_DIR, filename)
|
||||
|
||||
|
||||
def make_backup(cleanup=False, scheduler=False):
|
||||
@@ -320,13 +312,13 @@ def make_backup(cleanup=False, scheduler=False):
|
||||
corrupt = ''
|
||||
if not integrity:
|
||||
corrupt = '.corrupt'
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})
|
||||
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})
|
||||
|
||||
if scheduler:
|
||||
backup_file = 'tautulli.backup-{}{}.sched.db'.format(helpers.now(), corrupt)
|
||||
else:
|
||||
backup_file = 'tautulli.backup-{}{}.db'.format(helpers.now(), corrupt)
|
||||
backup_folder = plexpy.CONFIG.BACKUP_DIR
|
||||
backup_folder = jellypy.CONFIG.BACKUP_DIR
|
||||
backup_file_fp = os.path.join(backup_folder, backup_file)
|
||||
|
||||
# In case the user has deleted it manually
|
||||
@@ -345,7 +337,7 @@ def make_backup(cleanup=False, scheduler=False):
|
||||
for root, dirs, files in os.walk(backup_folder):
|
||||
db_files = [os.path.join(root, f) for f in files if f.endswith('.sched.db')]
|
||||
for file_ in db_files:
|
||||
if os.stat(file_).st_mtime < now - plexpy.CONFIG.BACKUP_DAYS * 86400:
|
||||
if os.stat(file_).st_mtime < now - jellypy.CONFIG.BACKUP_DAYS * 86400:
|
||||
try:
|
||||
os.remove(file_)
|
||||
except OSError as e:
|
||||
@@ -361,10 +353,10 @@ def make_backup(cleanup=False, scheduler=False):
|
||||
|
||||
def get_cache_size():
|
||||
# This will protect against typecasting problems produced by empty string and None settings
|
||||
if not plexpy.CONFIG.CACHE_SIZEMB:
|
||||
if not jellypy.CONFIG.CACHE_SIZEMB:
|
||||
# sqlite will work with this (very slowly)
|
||||
return 0
|
||||
return int(plexpy.CONFIG.CACHE_SIZEMB)
|
||||
return int(jellypy.CONFIG.CACHE_SIZEMB)
|
||||
|
||||
|
||||
def dict_factory(cursor, row):
|
||||
@@ -381,9 +373,9 @@ class MonitorDatabase(object):
self.filename = filename
self.connection = sqlite3.connect(db_filename(filename), timeout=20)
# Set database synchronous mode (default NORMAL)
self.connection.execute("PRAGMA synchronous = %s" % plexpy.CONFIG.SYNCHRONOUS_MODE)
self.connection.execute("PRAGMA synchronous = %s" % jellypy.CONFIG.SYNCHRONOUS_MODE)
# Set database journal mode (default WAL)
self.connection.execute("PRAGMA journal_mode = %s" % plexpy.CONFIG.JOURNAL_MODE)
self.connection.execute("PRAGMA journal_mode = %s" % jellypy.CONFIG.JOURNAL_MODE)
# Set database cache size (default 32MB)
self.connection.execute("PRAGMA cache_size = -%s" % (get_cache_size() * 1024))
self.connection.row_factory = dict_factory
@@ -455,8 +447,9 @@ class MonitorDatabase(object):
if self.connection.total_changes == changes_before:
trans_type = 'insert'
insert_query = (
"INSERT INTO " + table_name + " (" + ", ".join(list(value_dict.keys()) + list(key_dict.keys())) + ")" +
" VALUES (" + ", ".join(["?"] * len(list(value_dict.keys()) + list(key_dict.keys()))) + ")"
"INSERT INTO " + table_name + " (" + ", ".join(
list(value_dict.keys()) + list(key_dict.keys())) + ")" +
" VALUES (" + ", ".join(["?"] * len(list(value_dict.keys()) + list(key_dict.keys()))) + ")"
)
try:
self.action(insert_query, list(value_dict.values()) + list(key_dict.values()))
@@ -470,4 +463,4 @@ class MonitorDatabase(object):
# Get the last insert row id
result = self.select_single(query='SELECT last_insert_rowid() AS last_id')
if result:
return result.get('last_id', None)
return result.get('last_id', None)
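The upsert shown above follows the classic update-then-insert pattern: try an UPDATE keyed on key_dict, and only fall back to an INSERT when total_changes shows nothing was updated. A hypothetical call site, reusing the recently_added table and columns that appear later in this diff, with the literal values invented for illustration:

db = MonitorDatabase()
db.upsert(table_name='recently_added',
          key_dict={'rating_key': 12345},
          value_dict={'added_at': 1700000000,
                      'media_type': 'movie'})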
@@ -15,32 +15,15 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import next
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
|
||||
import json
|
||||
from itertools import groupby
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import database
|
||||
import datatables
|
||||
import helpers
|
||||
import logger
|
||||
import pmsconnect
|
||||
import session
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import database
|
||||
from plexpy import datatables
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
from plexpy import pmsconnect
|
||||
from plexpy import session
|
||||
import jellypy
|
||||
from jellypy import common
|
||||
from jellypy import database
|
||||
from jellypy import datatables
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
from jellypy import session
|
||||
|
||||
|
||||
class DataFactory(object):
|
||||
@@ -58,10 +41,10 @@ class DataFactory(object):
|
||||
custom_where = []
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
if include_activity is None:
|
||||
include_activity = plexpy.CONFIG.HISTORY_TABLE_ACTIVITY
|
||||
include_activity = jellypy.CONFIG.HISTORY_TABLE_ACTIVITY
|
||||
|
||||
if session.get_session_user_id():
|
||||
session_user_id = str(session.get_session_user_id())
|
||||
@@ -99,6 +82,7 @@ class DataFactory(object):
|
||||
'product',
|
||||
'player',
|
||||
'ip_address',
|
||||
'machine_id',
|
||||
'session_history.media_type',
|
||||
'session_history_metadata.rating_key',
|
||||
'session_history_metadata.parent_rating_key',
|
||||
@@ -126,7 +110,7 @@ class DataFactory(object):
|
||||
'GROUP_CONCAT(session_history.id) AS group_ids',
|
||||
'NULL AS state',
|
||||
'NULL AS session_key'
|
||||
]
|
||||
]
|
||||
|
||||
if include_activity:
|
||||
table_name_union = 'sessions'
|
||||
@@ -151,6 +135,7 @@ class DataFactory(object):
|
||||
'product',
|
||||
'player',
|
||||
'ip_address',
|
||||
'machine_id',
|
||||
'media_type',
|
||||
'rating_key',
|
||||
'parent_rating_key',
|
||||
@@ -178,7 +163,7 @@ class DataFactory(object):
|
||||
'NULL AS group_ids',
|
||||
'state',
|
||||
'session_key'
|
||||
]
|
||||
]
|
||||
|
||||
else:
|
||||
table_name_union = None
|
||||
@@ -216,11 +201,11 @@ class DataFactory(object):
|
||||
filter_duration = 0
|
||||
total_duration = self.get_total_duration(custom_where=custom_where)
|
||||
|
||||
watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
|
||||
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
|
||||
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
|
||||
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
|
||||
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
|
||||
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT,
|
||||
'photo': 0,
|
||||
'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
|
||||
'clip': jellypy.CONFIG.TV_WATCHED_PERCENT
|
||||
}
|
||||
|
||||
rows = []
|
||||
@@ -263,6 +248,7 @@ class DataFactory(object):
|
||||
'player': item['player'],
|
||||
'ip_address': item['ip_address'],
|
||||
'live': item['live'],
|
||||
'machine_id': item['machine_id'],
|
||||
'media_type': item['media_type'],
|
||||
'rating_key': item['rating_key'],
|
||||
'parent_rating_key': item['parent_rating_key'],
|
||||
@@ -306,13 +292,13 @@ class DataFactory(object):
|
||||
if stat_id:
|
||||
stats_cards = [stat_id]
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
if stats_cards is None:
|
||||
stats_cards = plexpy.CONFIG.HOME_STATS_CARDS
|
||||
stats_cards = jellypy.CONFIG.HOME_STATS_CARDS
|
||||
|
||||
movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT
|
||||
tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT
|
||||
music_watched_percent = plexpy.CONFIG.MUSIC_WATCHED_PERCENT
|
||||
movie_watched_percent = jellypy.CONFIG.MOVIE_WATCHED_PERCENT
|
||||
tv_watched_percent = jellypy.CONFIG.TV_WATCHED_PERCENT
|
||||
music_watched_percent = jellypy.CONFIG.MUSIC_WATCHED_PERCENT
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
sort_type = 'total_duration' if stats_type == 'duration' else 'total_plays'
|
||||
@@ -340,7 +326,8 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -392,7 +379,8 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -441,7 +429,8 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -493,7 +482,8 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -543,7 +533,8 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -596,7 +587,8 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -646,7 +638,8 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -671,7 +664,7 @@ class DataFactory(object):
|
||||
'title': '',
|
||||
'platform': '',
|
||||
'row_id': ''
|
||||
}
|
||||
}
|
||||
top_users.append(row)
|
||||
|
||||
home_stats.append({'stat_id': stat,
|
||||
@@ -698,13 +691,15 @@ class DataFactory(object):
|
||||
'LIMIT %s OFFSET %s ' % (time_range, group_by, sort_type, stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
# Rename Mystery platform names
|
||||
platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
|
||||
platform_name = next((v for k, v in common.PLATFORM_NAMES.items() if k in platform.lower()), 'default')
|
||||
platform_name = next((v for k, v in common.PLATFORM_NAMES.items() if k in platform.lower()),
|
||||
'default')
|
||||
|
||||
row = {'total_plays': item['total_plays'],
|
||||
'total_duration': item['total_duration'],
|
||||
@@ -756,7 +751,8 @@ class DataFactory(object):
|
||||
stats_count, stats_start)
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: last_watched: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: last_watched: %s." % e)
|
||||
return None
|
||||
|
||||
for item in result:
|
||||
@@ -845,26 +841,27 @@ class DataFactory(object):
|
||||
|
||||
title = 'Concurrent Transcodes'
|
||||
query = base_query \
|
||||
+ 'AND session_history_media_info.transcode_decision = "transcode" '
|
||||
+ 'AND session_history_media_info.transcode_decision = "transcode" '
|
||||
result = monitor_db.select(query)
|
||||
if result:
|
||||
most_concurrent.append(calc_most_concurrent(title, result))
|
||||
|
||||
title = 'Concurrent Direct Streams'
|
||||
query = base_query \
|
||||
+ 'AND session_history_media_info.transcode_decision = "copy" '
|
||||
+ 'AND session_history_media_info.transcode_decision = "copy" '
|
||||
result = monitor_db.select(query)
|
||||
if result:
|
||||
most_concurrent.append(calc_most_concurrent(title, result))
|
||||
|
||||
title = 'Concurrent Direct Plays'
|
||||
query = base_query \
|
||||
+ 'AND session_history_media_info.transcode_decision = "direct play" '
|
||||
+ 'AND session_history_media_info.transcode_decision = "direct play" '
|
||||
result = monitor_db.select(query)
|
||||
if result:
|
||||
most_concurrent.append(calc_most_concurrent(title, result))
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: most_concurrent: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli DataFactory :: Unable to execute database query for get_home_stats: most_concurrent: %s." % e)
|
||||
return None
|
||||
|
||||
home_stats.append({'stat_id': stat,
|
||||
@@ -1574,14 +1571,14 @@ class DataFactory(object):
|
||||
|
||||
key = item['parent_media_index'] if match_type == 'index' else item['parent_title']
|
||||
parents.update({key:
|
||||
{'rating_key': item['parent_rating_key'],
|
||||
'children': children}
|
||||
{'rating_key': item['parent_rating_key'],
|
||||
'children': children}
|
||||
})
|
||||
|
||||
key = 0 if match_type == 'index' else item['grandparent_title']
|
||||
grandparents.update({key:
|
||||
{'rating_key': item['grandparent_rating_key'],
|
||||
'children': parents}
|
||||
{'rating_key': item['grandparent_rating_key'],
|
||||
'children': parents}
|
||||
})
|
||||
|
||||
key_list = grandparents
|
||||
@@ -1589,7 +1586,7 @@ class DataFactory(object):
|
||||
return key_list
|
||||
|
||||
def update_metadata(self, old_key_list='', new_key_list='', media_type=''):
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
# TODO: pms_connect = pmsconnect.PmsConnect()
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
# function to map rating keys pairs
|
||||
@@ -1608,33 +1605,38 @@ class DataFactory(object):
|
||||
if old_key_list and new_key_list:
|
||||
mapping = get_pairs(old_key_list, new_key_list)
|
||||
|
||||
if mapping:
|
||||
logger.info("Tautulli DataFactory :: Updating metadata in the database.")
|
||||
for old_key, new_key in mapping.items():
|
||||
metadata = pms_connect.get_metadata_details(new_key)
|
||||
|
||||
if metadata:
|
||||
if metadata['media_type'] == 'show' or metadata['media_type'] == 'artist':
|
||||
# check grandparent_rating_key (2 tables)
|
||||
monitor_db.action('UPDATE session_history SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
|
||||
[new_key, old_key])
|
||||
monitor_db.action('UPDATE session_history_metadata SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
|
||||
[new_key, old_key])
|
||||
elif metadata['media_type'] == 'season' or metadata['media_type'] == 'album':
|
||||
# check parent_rating_key (2 tables)
|
||||
monitor_db.action('UPDATE session_history SET parent_rating_key = ? WHERE parent_rating_key = ?',
|
||||
[new_key, old_key])
|
||||
monitor_db.action('UPDATE session_history_metadata SET parent_rating_key = ? WHERE parent_rating_key = ?',
|
||||
[new_key, old_key])
|
||||
else:
|
||||
# check rating_key (2 tables)
|
||||
monitor_db.action('UPDATE session_history SET rating_key = ? WHERE rating_key = ?',
|
||||
[new_key, old_key])
|
||||
monitor_db.action('UPDATE session_history_media_info SET rating_key = ? WHERE rating_key = ?',
|
||||
[new_key, old_key])
|
||||
|
||||
# update session_history_metadata table
|
||||
self.update_metadata_details(old_key, new_key, metadata)
|
||||
# TODO: Jellyfin
|
||||
# if mapping:
|
||||
# logger.info("Tautulli DataFactory :: Updating metadata in the database.")
|
||||
# for old_key, new_key in mapping.items():
|
||||
# metadata = pms_connect.get_metadata_details(new_key)
|
||||
#
|
||||
# if metadata:
|
||||
# if metadata['media_type'] == 'show' or metadata['media_type'] == 'artist':
|
||||
# # check grandparent_rating_key (2 tables)
|
||||
# monitor_db.action(
|
||||
# 'UPDATE session_history SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
|
||||
# [new_key, old_key])
|
||||
# monitor_db.action(
|
||||
# 'UPDATE session_history_metadata SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
|
||||
# [new_key, old_key])
|
||||
# elif metadata['media_type'] == 'season' or metadata['media_type'] == 'album':
|
||||
# # check parent_rating_key (2 tables)
|
||||
# monitor_db.action(
|
||||
# 'UPDATE session_history SET parent_rating_key = ? WHERE parent_rating_key = ?',
|
||||
# [new_key, old_key])
|
||||
# monitor_db.action(
|
||||
# 'UPDATE session_history_metadata SET parent_rating_key = ? WHERE parent_rating_key = ?',
|
||||
# [new_key, old_key])
|
||||
# else:
|
||||
# # check rating_key (2 tables)
|
||||
# monitor_db.action('UPDATE session_history SET rating_key = ? WHERE rating_key = ?',
|
||||
# [new_key, old_key])
|
||||
# monitor_db.action('UPDATE session_history_media_info SET rating_key = ? WHERE rating_key = ?',
|
||||
# [new_key, old_key])
|
||||
#
|
||||
# # update session_history_metadata table
|
||||
# self.update_metadata_details(old_key, new_key, metadata)
|
||||
|
||||
return 'Updated metadata in database.'
|
||||
else:
|
||||
@@ -1658,7 +1660,7 @@ class DataFactory(object):
genres = ";".join(metadata['genres'])
labels = ";".join(metadata['labels'])

#logger.info("Tautulli DataFactory :: Updating metadata in the database for rating key: %s." % new_rating_key)
# logger.info("Tautulli DataFactory :: Updating metadata in the database for rating key: %s." % new_rating_key)
monitor_db = database.MonitorDatabase()

# Update the session_history_metadata table
@@ -1838,12 +1840,22 @@ class DataFactory(object):
logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_newsletter_log: %s." % e)
return False

def get_user_devices(self, user_id=''):
def get_user_devices(self, user_id='', history_only=True):
monitor_db = database.MonitorDatabase()

if user_id:
if history_only:
query = 'SELECT machine_id FROM session_history ' \
'WHERE user_id = ? ' \
'GROUP BY machine_id'
else:
query = 'SELECT * FROM (' \
'SELECT user_id, machine_id FROM session_history ' \
'UNION SELECT user_id, machine_id from sessions_continued) ' \
'WHERE user_id = ? ' \
'GROUP BY machine_id'

try:
query = 'SELECT machine_id FROM session_history WHERE user_id = ? GROUP BY machine_id'
result = monitor_db.select(query=query, args=[user_id])
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_user_devices: %s." % e)
@@ -1861,7 +1873,8 @@ class DataFactory(object):
query = 'SELECT * FROM recently_added WHERE rating_key = ?'
result = monitor_db.select(query=query, args=[rating_key])
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_recently_added_item: %s." % e)
logger.warn(
"Tautulli DataFactory :: Unable to execute database query for get_recently_added_item: %s." % e)
return []
else:
return []
@@ -1871,23 +1884,24 @@ class DataFactory(object):
def set_recently_added_item(self, rating_key=''):
monitor_db = database.MonitorDatabase()

pms_connect = pmsconnect.PmsConnect()
metadata = pms_connect.get_metadata_details(rating_key)

keys = {'rating_key': metadata['rating_key']}

values = {'added_at': metadata['added_at'],
'section_id': metadata['section_id'],
'parent_rating_key': metadata['parent_rating_key'],
'grandparent_rating_key': metadata['grandparent_rating_key'],
'media_type': metadata['media_type'],
'media_info': json.dumps(metadata['media_info'])
}

try:
monitor_db.upsert(table_name='recently_added', key_dict=keys, value_dict=values)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for set_recently_added_item: %s." % e)
return False
# TODO: Jellyfin
# pms_connect = pmsconnect.PmsConnect()
# metadata = pms_connect.get_metadata_details(rating_key)
#
# keys = {'rating_key': metadata['rating_key']}
#
# values = {'added_at': metadata['added_at'],
#           'section_id': metadata['section_id'],
#           'parent_rating_key': metadata['parent_rating_key'],
#           'grandparent_rating_key': metadata['grandparent_rating_key'],
#           'media_type': metadata['media_type'],
#           'media_info': json.dumps(metadata['media_info'])
#           }
#
# try:
#     monitor_db.upsert(table_name='recently_added', key_dict=keys, value_dict=values)
# except Exception as e:
#     logger.warn("Tautulli DataFactory :: Unable to execute database query for set_recently_added_item: %s." % e)
#     return False

return True
@@ -13,20 +13,11 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from future.builtins import object

import re

import plexpy
if plexpy.PYTHON2:
import database
import helpers
import logger
else:
from plexpy import database
from plexpy import helpers
from plexpy import logger
from jellypy import database
from jellypy import helpers
from jellypy import logger


class DataTables(object):
@@ -224,7 +215,7 @@ class DataTables(object):
args.append('%' + search_param + '%')
if where:
where = 'WHERE ' + where.rstrip(' OR ')


return where, args

# This method extracts column data from our column list
@@ -13,10 +13,8 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals


class PlexPyException(Exception):
class JellyPyException(Exception):
"""
Generic Tautulli Exception - should never be thrown, only subclassed
"""
@@ -14,36 +14,24 @@
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from backports import csv
|
||||
|
||||
import csv
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
import shutil
|
||||
import threading
|
||||
|
||||
from functools import partial, reduce
|
||||
from io import open
|
||||
from multiprocessing.dummy import Pool as ThreadPool
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import database
|
||||
import datatables
|
||||
import helpers
|
||||
import logger
|
||||
import users
|
||||
from plex import Plex
|
||||
else:
|
||||
from plexpy import database
|
||||
from plexpy import datatables
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
from plexpy import users
|
||||
from plexpy.plex import Plex
|
||||
import requests
|
||||
|
||||
import jellypy
|
||||
from jellypy import database
|
||||
from jellypy import datatables
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
from jellypy import users
|
||||
from jellypy.jellyfin import Jellyfin
|
||||
|
||||
|
||||
class Export(object):
|
||||
@@ -92,6 +80,14 @@ class Export(object):
|
||||
'collection': ['children'],
|
||||
'playlist': ['item']
|
||||
}
|
||||
TREE_MEDIA_TYPES = [
|
||||
('episode', 'season', 'show'),
|
||||
('track', 'album', 'artist'),
|
||||
('photo', 'photoalbum'),
|
||||
('clip', 'photoalbum'),
|
||||
('children', 'collection'),
|
||||
('item', 'playlist')
|
||||
]
|
||||
METADATA_LEVELS = (0, 1, 2, 3, 9)
|
||||
MEDIA_INFO_LEVELS = (0, 1, 2, 3, 9)
|
||||
IMAGE_LEVELS = (0, 1, 2, 9)
|
||||
@@ -128,6 +124,7 @@ class Export(object):
|
||||
self.file_size = 0
|
||||
self.exported_thumb = False
|
||||
self.exported_art = False
|
||||
self._reload_check_files = False
|
||||
|
||||
self.total_items = 0
|
||||
self.exported_items = 0
|
||||
@@ -288,7 +285,6 @@ class Export(object):
|
||||
},
|
||||
'audioStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -305,15 +301,14 @@ class Export(object):
|
||||
'bitrate': None,
|
||||
'bitrateMode': None,
|
||||
'channels': None,
|
||||
'dialogNorm': None,
|
||||
'duration': None,
|
||||
'profile': None,
|
||||
'requiredBandwidths': None,
|
||||
'samplingRate': None
|
||||
'samplingRate': None,
|
||||
'streamIdentifier': None
|
||||
},
|
||||
'subtitleStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -329,7 +324,8 @@ class Export(object):
|
||||
'forced': None,
|
||||
'format': None,
|
||||
'headerCompression': None,
|
||||
'key': None
|
||||
'key': None,
|
||||
'transient': None
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -595,7 +591,6 @@ class Export(object):
|
||||
},
|
||||
'audioStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -612,15 +607,14 @@ class Export(object):
|
||||
'bitrate': None,
|
||||
'bitrateMode': None,
|
||||
'channels': None,
|
||||
'dialogNorm': None,
|
||||
'duration': None,
|
||||
'profile': None,
|
||||
'requiredBandwidths': None,
|
||||
'samplingRate': None
|
||||
'samplingRate': None,
|
||||
'streamIdentifier': None
|
||||
},
|
||||
'subtitleStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -636,7 +630,8 @@ class Export(object):
|
||||
'forced': None,
|
||||
'format': None,
|
||||
'headerCompression': None,
|
||||
'key': None
|
||||
'key': None,
|
||||
'transient': None
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -829,7 +824,6 @@ class Export(object):
|
||||
'syncState': None,
|
||||
'audioStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -857,7 +851,6 @@ class Export(object):
|
||||
},
|
||||
'lyricStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -1097,7 +1090,7 @@ class Export(object):
|
||||
'media.optimizedVersion', 'media.hdr'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.container', 'media.parts.indexes', 'media.parts.size', 'media.parts.sizeHuman',
|
||||
'media.parts.audioProfile', 'media.parts.videoProfile',
|
||||
'media.parts.optimizedForStreaming', 'media.parts.deepAnalysisVersion'
|
||||
@@ -1208,7 +1201,7 @@ class Export(object):
|
||||
'media.optimizedVersion', 'media.hdr'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.container', 'media.parts.indexes', 'media.parts.size', 'media.parts.sizeHuman',
|
||||
'media.parts.audioProfile', 'media.parts.videoProfile',
|
||||
'media.parts.optimizedForStreaming', 'media.parts.deepAnalysisVersion'
|
||||
@@ -1317,7 +1310,7 @@ class Export(object):
|
||||
'media.bitrate', 'media.container', 'media.duration'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.container', 'media.parts.size', 'media.parts.sizeHuman',
|
||||
'media.parts.audioProfile',
|
||||
'media.parts.deepAnalysisVersion', 'media.parts.hasThumbnail'
|
||||
@@ -1392,7 +1385,7 @@ class Export(object):
|
||||
'media.iso', 'media.lens', 'media.make', 'media.model'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file',
|
||||
'media.parts.file',
|
||||
'media.parts.container', 'media.parts.size', 'media.parts.sizeHuman'
|
||||
],
|
||||
3: [
|
||||
@@ -1529,9 +1522,9 @@ class Export(object):
|
||||
user_tokens = user_data.get_tokens(user_id=self.user_id)
|
||||
plex_token = user_tokens['server_token']
|
||||
else:
|
||||
plex_token = plexpy.CONFIG.PMS_TOKEN
|
||||
plex_token = jellypy.CONFIG.PMS_TOKEN
|
||||
|
||||
plex = Plex(plexpy.CONFIG.PMS_URL, plex_token)
|
||||
jf = Jellyfin(jellypy.CONFIG.PMS_URL, plex_token)
|
||||
|
||||
if self.rating_key:
|
||||
logger.debug(
|
||||
@@ -1541,7 +1534,7 @@ class Export(object):
|
||||
self.rating_key, self.metadata_level, self.media_info_level,
|
||||
self.thumb_level, self.art_level, self.file_format)
|
||||
|
||||
self.obj = plex.get_item(self.rating_key)
|
||||
self.obj = jf.get_item(self.rating_key)
|
||||
self.media_type = self._media_type(self.obj)
|
||||
|
||||
if self.media_type != 'playlist':
|
||||
@@ -1560,7 +1553,7 @@ class Export(object):
|
||||
self.user_id, self.metadata_level, self.media_info_level,
|
||||
self.thumb_level, self.art_level, self.export_type, self.file_format)
|
||||
|
||||
self.obj = plex.plex
|
||||
self.obj = jf.plex
|
||||
self.media_type = self.export_type
|
||||
|
||||
self.obj_title = user_info['username']
|
||||
@@ -1573,7 +1566,7 @@ class Export(object):
|
||||
self.section_id, self.metadata_level, self.media_info_level,
|
||||
self.thumb_level, self.art_level, self.export_type, self.file_format)
|
||||
|
||||
self.obj = plex.get_library(str(self.section_id))
|
||||
self.obj = jf.get_library(str(self.section_id))
|
||||
if self.export_type == 'all':
|
||||
self.media_type = self.obj.type
|
||||
else:
|
||||
@@ -1686,7 +1679,7 @@ class Export(object):
|
||||
self.total_items = len(items)
|
||||
logger.info("Tautulli Exporter :: Exporting %d item(s).", self.total_items)
|
||||
|
||||
pool = ThreadPool(processes=plexpy.CONFIG.EXPORT_THREADS)
|
||||
pool = ThreadPool(processes=jellypy.CONFIG.EXPORT_THREADS)
|
||||
items = [ExportObject(self, item) for item in items]
|
||||
|
||||
try:
|
||||
@@ -1822,6 +1815,15 @@ class Export(object):
|
||||
else:
|
||||
self._custom_fields[media_type] = {field}
|
||||
|
||||
for tree in self.TREE_MEDIA_TYPES:
|
||||
for child_media_type, parent_media_type in zip(tree[:-1], tree[1:]):
|
||||
if child_media_type in self._custom_fields:
|
||||
plural_child_media_type = self.PLURAL_MEDIA_TYPES[child_media_type]
|
||||
if parent_media_type in self._custom_fields:
|
||||
self._custom_fields[parent_media_type].add(plural_child_media_type)
|
||||
else:
|
||||
self._custom_fields[parent_media_type] = {plural_child_media_type}
|
||||
|
||||
def _parse_custom_field(self, media_type, field):
|
||||
for child_media_type in self.CHILD_MEDIA_TYPES.get(media_type, []):
|
||||
plural_key = self.PLURAL_MEDIA_TYPES[child_media_type]
|
||||
@@ -1871,6 +1873,10 @@ class Export(object):
|
||||
elif self.media_type == 'playlist' and 'item' in self._custom_fields:
|
||||
export_attrs_set.update(self._custom_fields['item'])
|
||||
|
||||
if 'media.parts.accessible' in export_attrs_set or 'media.parts.exists' in export_attrs_set or \
|
||||
self.media_info_level == 9:
|
||||
self._reload_check_files = True
|
||||
|
||||
for attr in export_attrs_set:
|
||||
try:
|
||||
value = helpers.get_dict_value_by_path(media_attrs, attr)
|
||||
@@ -1958,12 +1964,13 @@ class Export(object):
|
||||
pass
|
||||
|
||||
def export_obj(self, obj):
|
||||
# Reload ~plexapi.base.PlexPartialObject
|
||||
if hasattr(obj, 'isPartialObject') and obj.isPartialObject():
|
||||
obj = obj.reload()
|
||||
|
||||
media_type = self._media_type(obj)
|
||||
export_attrs = self._get_export_attrs(media_type)
|
||||
|
||||
# Reload ~plexapi.base.PlexPartialObject
|
||||
if hasattr(obj, 'isPartialObject') and obj.isPartialObject():
|
||||
obj = obj.reload(checkFiles=self._reload_check_files)
|
||||
|
||||
return helpers.get_attrs_to_dict(obj, attrs=export_attrs)
|
||||
|
||||
def get_any_hdr(self, item, media_type):
|
||||
@@ -2088,7 +2095,7 @@ def delete_export(export_id):
|
||||
def delete_all_exports():
|
||||
logger.info("Tautulli Exporter :: Deleting all exports from the export directory.")
|
||||
|
||||
export_dir = plexpy.CONFIG.EXPORT_DIR
|
||||
export_dir = jellypy.CONFIG.EXPORT_DIR
|
||||
try:
|
||||
shutil.rmtree(export_dir, ignore_errors=True)
|
||||
except OSError as e:
|
||||
@@ -2214,7 +2221,7 @@ def format_export_filename(title, file_format):
|
||||
def get_export_dirpath(title, timestamp=None, images_directory=None):
|
||||
if timestamp:
|
||||
title = format_export_directory(title, timestamp)
|
||||
dirpath = os.path.join(plexpy.CONFIG.EXPORT_DIR, title)
|
||||
dirpath = os.path.join(jellypy.CONFIG.EXPORT_DIR, title)
|
||||
if images_directory:
|
||||
dirpath = os.path.join(dirpath, '{}.images'.format(images_directory))
|
||||
return dirpath
|
||||
@@ -15,26 +15,15 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import range
|
||||
from future.builtins import object
|
||||
|
||||
import datetime
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import database
|
||||
import logger
|
||||
import libraries
|
||||
import session
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import database
|
||||
from plexpy import logger
|
||||
from plexpy import libraries
|
||||
from plexpy import session
|
||||
import jellypy
|
||||
|
||||
from jellypy import common
|
||||
from jellypy import database
|
||||
from jellypy import logger
|
||||
from jellypy import libraries
|
||||
from jellypy import session
|
||||
|
||||
|
||||
class Graphs(object):
|
||||
@@ -55,7 +44,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -169,7 +158,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -224,10 +213,11 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_dayofweek: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_dayofweek: %s." % e)
|
||||
return None
|
||||
|
||||
if plexpy.CONFIG.WEEK_START_MONDAY:
|
||||
if jellypy.CONFIG.WEEK_START_MONDAY:
|
||||
days_list = ['Monday', 'Tuesday', 'Wednesday',
|
||||
'Thursday', 'Friday', 'Saturday', 'Sunday']
|
||||
else:
|
||||
@@ -300,7 +290,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -339,7 +329,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_hourofday: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_hourofday: %s." % e)
|
||||
return None
|
||||
|
||||
hours_list = ['00', '01', '02', '03', '04', '05',
|
||||
@@ -413,7 +404,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -460,11 +451,11 @@ class Graphs(object):
|
||||
dt_today = datetime.date.today()
|
||||
dt = dt_today
|
||||
month_range = [dt]
|
||||
for n in range(int(time_range)-1):
|
||||
if not ((dt_today.month-n) % 12)-1:
|
||||
dt = datetime.date(dt.year-1, 12, 1)
|
||||
for n in range(int(time_range) - 1):
|
||||
if not ((dt_today.month - n) % 12) - 1:
|
||||
dt = datetime.date(dt.year - 1, 12, 1)
|
||||
else:
|
||||
dt = datetime.date(dt.year, dt.month-1, 1)
|
||||
dt = datetime.date(dt.year, dt.month - 1, 1)
|
||||
month_range.append(dt)
|
||||
|
||||
categories = []
|
||||
@@ -534,7 +525,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -578,7 +569,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_platforms: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_platforms: %s." % e)
|
||||
return None
|
||||
|
||||
categories = []
|
||||
@@ -630,7 +622,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -682,7 +674,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_users: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_users: %s." % e)
|
||||
return None
|
||||
|
||||
categories = []
|
||||
@@ -739,7 +732,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -787,7 +780,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_stream_type: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_stream_type: %s." % e)
|
||||
return None
|
||||
|
||||
# create our date range as some days may not have any data
|
||||
@@ -845,7 +839,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -893,7 +887,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_source_resolution: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_source_resolution: %s." % e)
|
||||
return None
|
||||
|
||||
categories = []
|
||||
@@ -931,7 +926,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -954,7 +949,7 @@ class Graphs(object):
|
||||
'THEN 1 ELSE 0 END) AS dp_count, ' \
|
||||
'SUM(CASE WHEN session_history_media_info.transcode_decision = "copy" ' \
|
||||
'THEN 1 ELSE 0 END) AS ds_count, ' \
|
||||
'SUM(CASE WHEN session_history_media_info.transcode_decision = "transcode" '\
|
||||
'SUM(CASE WHEN session_history_media_info.transcode_decision = "transcode" ' \
|
||||
'THEN 1 ELSE 0 END) AS tc_count, ' \
|
||||
'COUNT(session_history.id) AS total_count ' \
|
||||
'FROM (SELECT * FROM session_history GROUP BY %s) AS session_history ' \
|
||||
@@ -1003,7 +998,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_stream_resolution: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_stream_resolution: %s." % e)
|
||||
return None
|
||||
|
||||
categories = []
|
||||
@@ -1041,7 +1037,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -1092,7 +1088,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_platforms: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_platforms: %s." % e)
|
||||
return None
|
||||
|
||||
categories = []
|
||||
@@ -1131,7 +1128,7 @@ class Graphs(object):
|
||||
user_cond = 'AND session_history.user_id = %s ' % user_id
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
|
||||
|
||||
@@ -1190,7 +1187,8 @@ class Graphs(object):
|
||||
|
||||
result = monitor_db.select(query)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_users: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_users: %s." % e)
|
||||
return None
|
||||
|
||||
categories = []
|
||||
@@ -15,28 +15,10 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from future.builtins import zip
|
||||
from future.builtins import str
|
||||
|
||||
import arrow
|
||||
import base64
|
||||
import cloudinary
|
||||
from cloudinary.api import delete_resources_by_tag
|
||||
from cloudinary.uploader import upload
|
||||
from cloudinary.utils import cloudinary_url
|
||||
from collections import OrderedDict
|
||||
import datetime
|
||||
from functools import reduce, wraps
|
||||
import hashlib
|
||||
import imghdr
|
||||
from future.moves.itertools import islice, zip_longest
|
||||
import ipwhois
|
||||
import ipwhois.exceptions
|
||||
import ipwhois.utils
|
||||
from IPy import IP
|
||||
import json
|
||||
import math
|
||||
import operator
|
||||
@@ -48,21 +30,24 @@ import string
|
||||
import sys
|
||||
import time
|
||||
import unicodedata
|
||||
from future.moves.urllib.parse import urlencode
|
||||
from collections import OrderedDict
|
||||
from functools import reduce, wraps
|
||||
from itertools import zip_longest, islice
|
||||
from urllib.parse import urlencode
|
||||
from xml.dom import minidom
|
||||
import xmltodict
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import logger
|
||||
import request
|
||||
from api2 import API2
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import logger
|
||||
from plexpy import request
|
||||
from plexpy.api2 import API2
|
||||
import arrow
|
||||
import ipwhois
|
||||
import ipwhois.exceptions
|
||||
import ipwhois.utils
|
||||
import xmltodict
|
||||
from IPy import IP
|
||||
|
||||
import jellypy
|
||||
from jellypy import common
|
||||
from jellypy import logger
|
||||
from jellypy import request
|
||||
from jellypy.api2 import API2
|
||||
|
||||
|
||||
def addtoapi(*dargs, **dkwargs):
|
||||
@@ -77,6 +62,7 @@ def addtoapi(*dargs, **dkwargs):
|
||||
@addtoapi()
|
||||
|
||||
"""
|
||||
|
||||
def rd(function):
|
||||
@wraps(function)
|
||||
def wrapper(*args, **kwargs):
|
||||
@@ -107,7 +93,6 @@ def checked(variable):
|
||||
|
||||
|
||||
def radio(variable, pos):
|
||||
|
||||
if variable == pos:
|
||||
return 'Checked'
|
||||
else:
|
||||
@@ -161,7 +146,6 @@ def latinToAscii(unicrap, replace=False):
|
||||
|
||||
|
||||
def convert_milliseconds(ms):
|
||||
|
||||
seconds = ms // 1000
|
||||
gmtime = time.gmtime(seconds)
|
||||
if seconds > 3600:
|
||||
@@ -173,7 +157,6 @@ def convert_milliseconds(ms):
|
||||
|
||||
|
||||
def convert_milliseconds_to_minutes(ms):
|
||||
|
||||
if str(ms).isdigit():
|
||||
seconds = float(ms) / 1000
|
||||
minutes = round(seconds / 60, 0)
|
||||
@@ -184,7 +167,6 @@ def convert_milliseconds_to_minutes(ms):
|
||||
|
||||
|
||||
def convert_seconds(s):
|
||||
|
||||
gmtime = time.gmtime(s)
|
||||
if s > 3600:
|
||||
minutes = time.strftime("%H:%M:%S", gmtime)
|
||||
@@ -195,7 +177,6 @@ def convert_seconds(s):
|
||||
|
||||
|
||||
def convert_seconds_to_minutes(s):
|
||||
|
||||
if str(s).isdigit():
|
||||
minutes = round(float(s) / 60, 0)
|
||||
|
||||
@@ -302,7 +283,6 @@ def format_timedelta_Hms(td):
|
||||
|
||||
|
||||
def get_age(date):
|
||||
|
||||
try:
|
||||
split_date = date.split('-')
|
||||
except:
|
||||
@@ -317,7 +297,6 @@ def get_age(date):
|
||||
|
||||
|
||||
def bytes_to_mb(bytes):
|
||||
|
||||
mb = float(bytes) / 1048576
|
||||
size = '%.1f MB' % mb
|
||||
return size
|
||||
@@ -355,7 +334,6 @@ def piratesize(size):
|
||||
|
||||
|
||||
def replace_all(text, dic, normalize=False):
|
||||
|
||||
if not text:
|
||||
return ''
|
||||
|
||||
@@ -367,7 +345,7 @@ def replace_all(text, dic, normalize=False):
|
||||
else:
|
||||
j = unicodedata.normalize('NFC', j)
|
||||
except TypeError:
|
||||
j = unicodedata.normalize('NFC', j.decode(plexpy.SYS_ENCODING, 'replace'))
|
||||
j = unicodedata.normalize('NFC', j.decode(jellypy.SYS_ENCODING, 'replace'))
|
||||
text = text.replace(i, j)
|
||||
return text
|
||||
|
||||
@@ -382,7 +360,6 @@ def replace_illegal_chars(string, type="file"):
|
||||
|
||||
|
||||
def cleanName(string):
|
||||
|
||||
pass1 = latinToAscii(string).lower()
|
||||
out_string = re.sub('[\.\-\/\!\@\#\$\%\^\&\*\(\)\+\-\"\'\,\;\:\[\]\{\}\<\>\=\_]', '', pass1).encode('utf-8')
|
||||
|
||||
@@ -390,7 +367,6 @@ def cleanName(string):
|
||||
|
||||
|
||||
def cleanTitle(title):
|
||||
|
||||
title = re.sub('[\.\-\/\_]', ' ', title).lower()
|
||||
|
||||
# Strip out extra whitespace
|
||||
@@ -442,7 +418,8 @@ def split_path(f):
|
||||
|
||||
def extract_logline(s):
|
||||
# Default log format
|
||||
pattern = re.compile(r'(?P<timestamp>.*?)\s\-\s(?P<level>.*?)\s*\:\:\s(?P<thread>.*?)\s\:\s(?P<message>.*)', re.VERBOSE)
|
||||
pattern = re.compile(r'(?P<timestamp>.*?)\s\-\s(?P<level>.*?)\s*\:\:\s(?P<thread>.*?)\s\:\s(?P<message>.*)',
|
||||
re.VERBOSE)
|
||||
match = pattern.match(s)
|
||||
if match:
|
||||
timestamp = match.group("timestamp")
|
||||
@@ -479,8 +456,8 @@ def create_https_certificates(ssl_cert, ssl_key):
|
||||
serial = timestamp()
|
||||
not_before = 0
|
||||
not_after = 60 * 60 * 24 * 365 * 10 # ten years
|
||||
domains = ['DNS:' + d.strip() for d in plexpy.CONFIG.HTTPS_DOMAIN.split(',') if d]
|
||||
ips = ['IP:' + d.strip() for d in plexpy.CONFIG.HTTPS_IP.split(',') if d]
|
||||
domains = ['DNS:' + d.strip() for d in jellypy.CONFIG.HTTPS_DOMAIN.split(',') if d]
|
||||
ips = ['IP:' + d.strip() for d in jellypy.CONFIG.HTTPS_IP.split(',') if d]
|
||||
alt_names = ','.join(domains + ips).encode('utf-8')
|
||||
|
||||
# Create the self-signed Tautulli certificate
|
||||
@@ -526,7 +503,6 @@ def convert_xml_to_dict(xml):
|
||||
|
||||
|
||||
def get_percent(value1, value2):
|
||||
|
||||
value1 = cast_to_float(value1)
|
||||
value2 = cast_to_float(value2)
|
||||
|
||||
@@ -646,11 +622,14 @@ def sort_helper(k, sort_key, sort_keys):
|
||||
def sanitize_out(*dargs, **dkwargs):
|
||||
""" Helper decorator that sanitized the output
|
||||
"""
|
||||
|
||||
def rd(function):
|
||||
@wraps(function)
|
||||
def wrapper(*args, **kwargs):
|
||||
return sanitize(function(*args, **kwargs))
|
||||
|
||||
return wrapper
|
||||
|
||||
return rd
|
||||
|
||||
|
||||
@@ -674,19 +653,6 @@ def is_public_ip(host):
|
||||
return False
|
||||
|
||||
|
||||
def get_ip(host):
|
||||
ip_address = ''
|
||||
if is_valid_ip(host):
|
||||
return host
|
||||
elif not re.match(r'^[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})$', host):
|
||||
try:
|
||||
ip_address = socket.getaddrinfo(host, None)[0][4][0]
|
||||
logger.debug("IP Checker :: Resolved %s to %s." % (host, ip_address))
|
||||
except:
|
||||
logger.error("IP Checker :: Bad IP or hostname provided: %s." % host)
|
||||
return ip_address
|
||||
|
||||
|
||||
def is_valid_ip(address):
|
||||
try:
|
||||
return IP(address)
|
||||
@@ -697,7 +663,6 @@ def is_valid_ip(address):
|
||||
|
||||
|
||||
def whois_lookup(ip_address):
|
||||
|
||||
nets = []
|
||||
err = None
|
||||
try:
|
||||
@@ -708,7 +673,7 @@ def whois_lookup(ip_address):
|
||||
for net in nets:
|
||||
net['country'] = countries.get(net['country'])
|
||||
if net['postal_code']:
|
||||
net['postal_code'] = net['postal_code'].replace('-', ' ')
|
||||
net['postal_code'] = net['postal_code'].replace('-', ' ')
|
||||
except ValueError as e:
|
||||
err = 'Invalid IP address provided: %s.' % ip_address
|
||||
except ipwhois.exceptions.IPDefinedError as e:
|
||||
@@ -739,15 +704,15 @@ def anon_url(*url):
|
||||
"""
|
||||
Return a URL string consisting of the Anonymous redirect URL and an arbitrary number of values appended.
|
||||
"""
|
||||
return '' if None in url else '%s%s' % (plexpy.CONFIG.ANON_REDIRECT, ''.join(str(s) for s in url))
|
||||
return '' if None in url else '%s%s' % (jellypy.CONFIG.ANON_REDIRECT, ''.join(str(s) for s in url))
|
||||
|
||||
|
||||
def get_img_service(include_self=False):
|
||||
if plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 1:
|
||||
if jellypy.CONFIG.NOTIFY_UPLOAD_POSTERS == 1:
|
||||
return 'imgur'
|
||||
elif plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 2 and include_self:
|
||||
elif jellypy.CONFIG.NOTIFY_UPLOAD_POSTERS == 2 and include_self:
|
||||
return 'self-hosted'
|
||||
elif plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 3:
|
||||
elif jellypy.CONFIG.NOTIFY_UPLOAD_POSTERS == 3:
|
||||
return 'cloudinary'
|
||||
else:
|
||||
return None
|
||||
@@ -757,11 +722,11 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
|
||||
""" Uploads an image to Imgur """
|
||||
img_url = delete_hash = ''
|
||||
|
||||
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
|
||||
if not jellypy.CONFIG.IMGUR_CLIENT_ID:
|
||||
logger.error("Tautulli Helpers :: Cannot upload image to Imgur. No Imgur client id specified in the settings.")
|
||||
return img_url, delete_hash
|
||||
|
||||
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
|
||||
headers = {'Authorization': 'Client-ID %s' % jellypy.CONFIG.IMGUR_CLIENT_ID}
|
||||
data = {'image': base64.b64encode(img_data),
|
||||
'title': img_title.encode('utf-8'),
|
||||
'name': str(rating_key) + '.png',
|
||||
@@ -777,7 +742,8 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
|
||||
delete_hash = imgur_response_data.get('deletehash', '')
|
||||
else:
|
||||
if err_msg:
|
||||
logger.error("Tautulli Helpers :: Unable to upload image '{}' ({}) to Imgur: {}".format(img_title, fallback, err_msg))
|
||||
logger.error("Tautulli Helpers :: Unable to upload image '{}' ({}) to Imgur: {}".format(img_title, fallback,
|
||||
err_msg))
|
||||
else:
|
||||
logger.error("Tautulli Helpers :: Unable to upload image '{}' ({}) to Imgur.".format(img_title, fallback))
|
||||
|
||||
@@ -789,11 +755,12 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
|
||||
|
||||
def delete_from_imgur(delete_hash, img_title='', fallback=''):
|
||||
""" Deletes an image from Imgur """
|
||||
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
|
||||
logger.error("Tautulli Helpers :: Cannot delete image from Imgur. No Imgur client id specified in the settings.")
|
||||
if not jellypy.CONFIG.IMGUR_CLIENT_ID:
|
||||
logger.error(
|
||||
"Tautulli Helpers :: Cannot delete image from Imgur. No Imgur client id specified in the settings.")
|
||||
return False
|
||||
|
||||
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
|
||||
headers = {'Authorization': 'Client-ID %s' % jellypy.CONFIG.IMGUR_CLIENT_ID}
|
||||
|
||||
response, err_msg, req_msg = request.request_response2('https://api.imgur.com/3/image/%s' % delete_hash, 'DELETE',
|
||||
headers=headers)
|
||||
@@ -803,118 +770,21 @@ def delete_from_imgur(delete_hash, img_title='', fallback=''):
|
||||
return True
|
||||
else:
|
||||
if err_msg:
|
||||
logger.error("Tautulli Helpers :: Unable to delete image '{}' ({}) from Imgur: {}".format(img_title, fallback, err_msg))
|
||||
logger.error(
|
||||
"Tautulli Helpers :: Unable to delete image '{}' ({}) from Imgur: {}".format(img_title, fallback,
|
||||
err_msg))
|
||||
else:
|
||||
logger.error("Tautulli Helpers :: Unable to delete image '{}' ({}) from Imgur.".format(img_title, fallback))
|
||||
return False
|
||||
|
||||
|
||||
def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
|
||||
""" Uploads an image to Cloudinary """
|
||||
img_url = ''
|
||||
|
||||
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
|
||||
logger.error("Tautulli Helpers :: Cannot upload image to Cloudinary. Cloudinary settings not specified in the settings.")
|
||||
return img_url
|
||||
|
||||
cloudinary.config(
|
||||
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
|
||||
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
|
||||
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
|
||||
)
|
||||
|
||||
# Cloudinary library has very poor support for non-ASCII characters on Python 2
|
||||
if plexpy.PYTHON2:
|
||||
_img_title = latinToAscii(img_title, replace=True)
|
||||
else:
|
||||
_img_title = img_title
|
||||
|
||||
try:
|
||||
response = upload((img_title, img_data),
|
||||
public_id='{}_{}'.format(fallback, rating_key),
|
||||
tags=['tautulli', fallback, str(rating_key)],
|
||||
context={'title': _img_title, 'rating_key': str(rating_key), 'fallback': fallback})
|
||||
logger.debug("Tautulli Helpers :: Image '{}' ({}) uploaded to Cloudinary.".format(img_title, fallback))
|
||||
img_url = response.get('url', '')
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Helpers :: Unable to upload image '{}' ({}) to Cloudinary: {}".format(img_title, fallback, e))
|
||||
|
||||
return img_url
|
||||
|
||||
|
||||
def delete_from_cloudinary(rating_key=None, delete_all=False):
|
||||
""" Deletes an image from Cloudinary """
|
||||
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
|
||||
logger.error("Tautulli Helpers :: Cannot delete image from Cloudinary. Cloudinary settings not specified in the settings.")
|
||||
return False
|
||||
|
||||
cloudinary.config(
|
||||
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
|
||||
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
|
||||
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
|
||||
)
|
||||
|
||||
if delete_all:
|
||||
delete_resources_by_tag('tautulli')
|
||||
logger.debug("Tautulli Helpers :: Deleted all images from Cloudinary.")
|
||||
elif rating_key:
|
||||
delete_resources_by_tag(str(rating_key))
|
||||
logger.debug("Tautulli Helpers :: Deleted images from Cloudinary with rating_key {}.".format(rating_key))
|
||||
else:
|
||||
logger.debug("Tautulli Helpers :: Unable to delete images from Cloudinary: No rating_key provided.")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def cloudinary_transform(rating_key=None, width=1000, height=1500, opacity=100, background='000000', blur=0,
|
||||
img_format='png', img_title='', fallback=None):
|
||||
url = ''
|
||||
|
||||
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
|
||||
logger.error("Tautulli Helpers :: Cannot transform image on Cloudinary. Cloudinary settings not specified in the settings.")
|
||||
return url
|
||||
|
||||
cloudinary.config(
|
||||
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
|
||||
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
|
||||
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
|
||||
)
|
||||
|
||||
img_options = {'format': img_format,
|
||||
'fetch_format': 'auto',
|
||||
'quality': 'auto',
|
||||
'version': timestamp(),
|
||||
'secure': True}
|
||||
|
||||
if width != 1000:
|
||||
img_options['width'] = str(width)
|
||||
img_options['crop'] = 'fill'
|
||||
if height != 1500:
|
||||
img_options['height'] = str(height)
|
||||
img_options['crop'] = 'fill'
|
||||
if opacity != 100:
|
||||
img_options['opacity'] = opacity
|
||||
if background != '000000':
|
||||
img_options['background'] = 'rgb:{}'.format(background)
|
||||
if blur != 0:
|
||||
img_options['effect'] = 'blur:{}'.format(blur * 100)
|
||||
|
||||
try:
|
||||
url, options = cloudinary_url('{}_{}'.format(fallback, rating_key), **img_options)
|
||||
logger.debug("Tautulli Helpers :: Image '{}' ({}) transformed on Cloudinary.".format(img_title, fallback))
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Helpers :: Unable to transform image '{}' ({}) on Cloudinary: {}".format(img_title, fallback, e))
|
||||
|
||||
return url
|
||||
|
||||
|
||||
def cache_image(url, image=None):
|
||||
"""
|
||||
Saves an image to the cache directory.
|
||||
If no image is provided, tries to return the image from the cache directory.
|
||||
"""
|
||||
# Create image directory if it doesn't exist
|
||||
imgdir = os.path.join(plexpy.CONFIG.CACHE_DIR, 'images/')
|
||||
imgdir = os.path.join(jellypy.CONFIG.CACHE_DIR, 'images/')
|
||||
if not os.path.exists(imgdir):
|
||||
logger.debug("Tautulli Helpers :: Creating image cache directory at %s" % imgdir)
|
||||
os.makedirs(imgdir)
|
||||
@@ -987,7 +857,7 @@ def human_file_size(bytes, si=True):
|
||||
else:
|
||||
return bytes
|
||||
|
||||
#thresh = 1000 if si else 1024
|
||||
# thresh = 1000 if si else 1024
|
||||
thresh = 1024 # Always divide by 2^10 but display SI units
|
||||
if bytes < thresh:
|
||||
return str(bytes) + ' B'
|
||||
@@ -1064,7 +934,7 @@ def parse_condition_logic_string(s, num_cond=0):
|
||||
stack.pop()
|
||||
nest_and -= 1
|
||||
|
||||
elif bool_next and x == 'and' and i < len(tokens)-1:
|
||||
elif bool_next and x == 'and' and i < len(tokens) - 1:
|
||||
stack[-1].append([])
|
||||
stack.append(stack[-1][-1])
|
||||
stack[-1].append(stack[-2].pop(-2))
|
||||
@@ -1075,7 +945,7 @@ def parse_condition_logic_string(s, num_cond=0):
|
||||
close_bracket_next = False
|
||||
nest_and += 1
|
||||
|
||||
elif bool_next and x == 'or' and i < len(tokens)-1:
|
||||
elif bool_next and x == 'or' and i < len(tokens) - 1:
|
||||
stack[-1].append(x)
|
||||
cond_next = True
|
||||
bool_next = False
|
||||
@@ -1122,12 +992,12 @@ def eval_logic_groups_to_bool(logic_groups, eval_conds):
|
||||
|
||||
|
||||
def get_plexpy_url(hostname=None):
|
||||
if plexpy.CONFIG.ENABLE_HTTPS:
|
||||
if jellypy.CONFIG.ENABLE_HTTPS:
|
||||
scheme = 'https'
|
||||
else:
|
||||
scheme = 'http'
|
||||
|
||||
if hostname is None and plexpy.CONFIG.HTTP_HOST == '0.0.0.0':
|
||||
if hostname is None and jellypy.CONFIG.HTTP_HOST == '0.0.0.0':
|
||||
import socket
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
@@ -1142,18 +1012,18 @@ def get_plexpy_url(hostname=None):
|
||||
|
||||
if not hostname:
|
||||
hostname = 'localhost'
|
||||
elif hostname == 'localhost' and plexpy.CONFIG.HTTP_HOST != '0.0.0.0':
|
||||
hostname = plexpy.CONFIG.HTTP_HOST
|
||||
elif hostname == 'localhost' and jellypy.CONFIG.HTTP_HOST != '0.0.0.0':
|
||||
hostname = jellypy.CONFIG.HTTP_HOST
|
||||
else:
|
||||
hostname = hostname or plexpy.CONFIG.HTTP_HOST
|
||||
hostname = hostname or jellypy.CONFIG.HTTP_HOST
|
||||
|
||||
if plexpy.HTTP_PORT not in (80, 443):
|
||||
port = ':' + str(plexpy.HTTP_PORT)
|
||||
if jellypy.HTTP_PORT not in (80, 443):
|
||||
port = ':' + str(jellypy.HTTP_PORT)
|
||||
else:
|
||||
port = ''
|
||||
|
||||
if plexpy.HTTP_ROOT is not None and plexpy.HTTP_ROOT.strip('/'):
|
||||
root = '/' + plexpy.HTTP_ROOT.strip('/')
|
||||
if jellypy.HTTP_ROOT is not None and jellypy.HTTP_ROOT.strip('/'):
|
||||
root = '/' + jellypy.HTTP_ROOT.strip('/')
|
||||
else:
|
||||
root = ''
|
||||
|
||||
@@ -1202,10 +1072,10 @@ def split_args(args=None):
|
||||
if isinstance(args, list):
|
||||
return args
|
||||
elif isinstance(args, str):
|
||||
if plexpy.PYTHON2:
|
||||
if jellypy.PYTHON2:
|
||||
args = args.encode('utf-8')
|
||||
args = shlex.split(args)
|
||||
if plexpy.PYTHON2:
|
||||
if jellypy.PYTHON2:
|
||||
args = [a.decode('utf-8') for a in args]
|
||||
return args
|
||||
return []
|
||||
@@ -1395,7 +1265,7 @@ def dict_merge(a, b, path=None):
|
||||
return a
|
||||
|
||||
|
||||
#https://stackoverflow.com/a/26853961
|
||||
# https://stackoverflow.com/a/26853961
|
||||
def dict_update(*dict_args):
|
||||
"""
|
||||
Given any number of dictionaries, shallow copy and merge into a new dict,
|
||||
@@ -14,25 +14,16 @@
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import object
|
||||
from future.builtins import str
|
||||
|
||||
from functools import partial
|
||||
from multiprocessing.dummy import Pool as ThreadPool
|
||||
from future.moves.urllib.parse import urljoin
|
||||
from multiprocessing.pool import ThreadPool
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import certifi
|
||||
import requests
|
||||
import urllib3
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import helpers
|
||||
import logger
|
||||
else:
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
import jellypy
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
|
||||
|
||||
class HTTPHandler(object):
|
||||
@@ -41,6 +32,7 @@ class HTTPHandler(object):
|
||||
"""
|
||||
|
||||
def __init__(self, urls, headers=None, token=None, timeout=10, ssl_verify=True, silent=False):
|
||||
self._valid_request_types = {'GET', 'POST', 'PUT', 'DELETE'}
|
||||
self._silent = silent
|
||||
|
||||
if isinstance(urls, str):
|
||||
@@ -51,24 +43,34 @@ class HTTPHandler(object):
|
||||
if headers:
|
||||
self.headers = headers
|
||||
else:
|
||||
self.headers = {'X-Plex-Product': plexpy.common.PRODUCT,
|
||||
'X-Plex-Version': plexpy.common.RELEASE,
|
||||
'X-Plex-Client-Identifier': plexpy.CONFIG.PMS_UUID,
|
||||
'X-Plex-Platform': plexpy.common.PLATFORM,
|
||||
'X-Plex-Platform-Version': plexpy.common.PLATFORM_RELEASE,
|
||||
'X-Plex-Device': '{} {}'.format(plexpy.common.PLATFORM,
|
||||
plexpy.common.PLATFORM_RELEASE),
|
||||
'X-Plex-Device-Name': plexpy.common.PLATFORM_DEVICE_NAME
|
||||
}
|
||||
self.headers = {
|
||||
'X-Plex-Product': jellypy.common.PRODUCT,
|
||||
'X-Plex-Version': jellypy.common.RELEASE,
|
||||
'X-Plex-Client-Identifier': jellypy.CONFIG.PMS_UUID,
|
||||
'X-Plex-Platform': jellypy.common.PLATFORM,
|
||||
'X-Plex-Platform-Version': jellypy.common.PLATFORM_RELEASE,
|
||||
'X-Plex-Device': '{} {}'.format(jellypy.common.PLATFORM,
|
||||
jellypy.common.PLATFORM_RELEASE),
|
||||
'X-Plex-Device-Name': jellypy.common.PLATFORM_DEVICE_NAME
|
||||
}
|
||||
|
||||
self.token = token
|
||||
if self.token:
|
||||
self.headers['X-Plex-Token'] = self.token
|
||||
|
||||
self._session = requests.Session()
|
||||
self.timeout = timeout
|
||||
self.ssl_verify = ssl_verify
|
||||
self.ssl_verify = certifi.where() if ssl_verify else False
|
||||
if not self.ssl_verify:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
self.valid_request_types = ('GET', 'POST', 'PUT', 'DELETE')
|
||||
self.uri = None
|
||||
self.data = None
|
||||
self.request_type = 'GET'
|
||||
self.output_format = 'raw'
|
||||
self.return_type = False
|
||||
self.callback = None
|
||||
self.request_kwargs = {}
|
||||
|
||||
def make_request(self,
|
||||
uri=None,
|
||||
@@ -96,7 +98,7 @@ class HTTPHandler(object):
|
||||
self.timeout = timeout or self.timeout
|
||||
self.request_kwargs = request_kwargs
|
||||
|
||||
if self.request_type not in self.valid_request_types:
|
||||
if self.request_type not in self._valid_request_types:
|
||||
logger.debug("HTTP request made but unsupported request type given.")
|
||||
return None
|
||||
|
||||
@@ -115,7 +117,7 @@ class HTTPHandler(object):
|
||||
return responses[0]
|
||||
|
||||
else:
|
||||
logger.debug("HTTP request made but no enpoint given.")
|
||||
logger.debug("HTTP request made but no uri endpoint provided.")
|
||||
return None
|
||||
|
||||
def _http_requests_pool(self, urls, workers=10, chunk=None):
|
||||
@@ -128,20 +130,13 @@ class HTTPHandler(object):
|
||||
if len(urls) == 0:
|
||||
chunk = 0
|
||||
|
||||
if self.ssl_verify:
|
||||
session = urllib3.PoolManager(cert_reqs=2, ca_certs=certifi.where()) # ssl.CERT_REQUIRED = 2
|
||||
else:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
session = urllib3.PoolManager()
|
||||
part = partial(self._http_requests_urllib3, session=session)
|
||||
|
||||
if len(urls) == 1:
|
||||
yield part(urls[0])
|
||||
yield self._http_requests_single(urls[0])
|
||||
else:
|
||||
pool = ThreadPool(workers)
|
||||
|
||||
try:
|
||||
for work in pool.imap_unordered(part, urls, chunk):
|
||||
for work in pool.imap_unordered(self._http_requests_single, urls, chunk):
|
||||
yield work
|
||||
except Exception as e:
|
||||
if not self._silent:
|
||||
@@ -150,34 +145,40 @@ class HTTPHandler(object):
|
||||
pool.close()
|
||||
pool.join()
|
||||
|
||||
def _http_requests_urllib3(self, url, session):
|
||||
def _http_requests_single(self, url):
|
||||
"""Request the data from the url"""
|
||||
error_msg = "Failed to access uri endpoint %s. " % self.uri
|
||||
try:
|
||||
r = session.request(self.request_type, url, headers=self.headers, fields=self.data,
|
||||
timeout=self.timeout, **self.request_kwargs)
|
||||
except IOError as e:
|
||||
r = self._session.request(self.request_type, url, headers=self.headers, data=self.data,
|
||||
timeout=self.timeout, verify=self.ssl_verify, **self.request_kwargs)
|
||||
r.raise_for_status()
|
||||
except requests.exceptions.Timeout as e:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s with error %s" % (self.uri, e))
|
||||
logger.error(error_msg + "Request timed out: %s", e)
|
||||
return None
|
||||
except Exception as e:
|
||||
except requests.exceptions.SSLError as e:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (self.uri, e))
|
||||
logger.error(error_msg + "Is your server maybe accepting SSL connections only? %s", e)
|
||||
return None
|
||||
except:
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s with Uncaught exception." % self.uri)
|
||||
logger.error(error_msg + "Status code %s", e)
|
||||
return None
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
if not self._silent:
|
||||
logger.error(error_msg + "Connection error: %s", e)
|
||||
return None
|
||||
except requests.exceptions.RequestException as e:
|
||||
if not self._silent:
|
||||
logger.error(error_msg + "Uncaught exception: %s", e)
|
||||
return None
|
||||
|
||||
response_status = r.status
|
||||
response_content = r.data
|
||||
response_status = r.status_code
|
||||
response_content = r.content
|
||||
response_headers = r.headers
|
||||
|
||||
if response_status in (200, 201):
|
||||
return self._http_format_output(response_content, response_headers)
|
||||
else:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s. Status code %r" % (self.uri, response_status))
|
||||
return None
|
||||
|
||||
def _http_format_output(self, response_content, response_headers):
|
||||
"""Formats the request response to the desired type"""
|
||||
74
jellypy/jellyfin.py
Normal file
@@ -0,0 +1,74 @@
|
||||
# -*- coding: utf-8 -*-

# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
import pprint
import uuid

from jellyfin_apiclient_python import JellyfinClient

import jellypy
from jellypy.common import PRODUCT, RELEASE


class Jellyfin(object):
def __init__(self, url, token=None):
if not jellypy.CONFIG.JELLYFIN_CLIENT_UUID:
jellypy.CONFIG.JELLYFIN_CLIENT_UUID = uuid.uuid4()
jellypy.CONFIG.write()

self.jf = JellyfinClient()
self.jf.config.data["app.default"] = True
self.jf.config.app(
PRODUCT, RELEASE, PRODUCT, jellypy.CONFIG.JELLYFIN_CLIENT_UUID
)
self.jf.config.data["http.user_agent"] = PRODUCT
self.jf.config.data["auth.ssl"] = jellypy.CONFIG.JELLYFIN_SSL
self.url = url
self.id = None
self.token = token

if self.token:
self.login()

def get_library(self, section_id):
return self.jf.library.sectionByID(str(section_id))

def get_library_items(self, section_id):
return self.get_library(str(section_id)).all()

def get_item(self, rating_key):
return self.jf.fetchItem(rating_key)

def login(self, user=None, password=None) -> bool:
if user and password and self.url:
self.jf.auth.connect_to_address(self.url)
result = self.jf.auth.login(self.url, user, password)

if "AccessToken" in result:
credentials = self.jf.auth.credentials.get_credentials()
pprint.pprint(credentials)
self.id = credentials["Servers"][0]["Id"]
# jellypy.CONFIG.JELLYFIN_TOKEN =
#
# self._connect_client(server)
# self.credentials.append(server)
# self.save_credentials()
return True
if self.token and self.url:
# TODO: Add token auth
pass

return False
@@ -15,43 +15,25 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
from future.builtins import next
|
||||
from future.builtins import object
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import database
|
||||
import datatables
|
||||
import helpers
|
||||
import logger
|
||||
import plextv
|
||||
import pmsconnect
|
||||
import session
|
||||
import users
|
||||
from plex import Plex
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import database
|
||||
from plexpy import datatables
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
from plexpy import plextv
|
||||
from plexpy import pmsconnect
|
||||
from plexpy import session
|
||||
from plexpy import users
|
||||
from plexpy.plex import Plex
|
||||
import jellypy
|
||||
from jellypy import common
|
||||
from jellypy import database
|
||||
from jellypy import datatables
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
from jellypy import session
|
||||
from jellypy import users
|
||||
from jellypy.jellyfin import Jellyfin
|
||||
|
||||
|
||||
def refresh_libraries():
|
||||
logger.info("Tautulli Libraries :: Requesting libraries list refresh...")
|
||||
|
||||
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
server_id = jellypy.CONFIG.PMS_IDENTIFIER
|
||||
if not server_id:
|
||||
logger.error("Tautulli Libraries :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
|
||||
return
|
||||
@@ -96,15 +78,15 @@ def refresh_libraries():
|
||||
|
||||
query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR ' \
|
||||
'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids)))
|
||||
monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids)
|
||||
monitor_db.action(query=query, args=[jellypy.CONFIG.PMS_IDENTIFIER] + section_ids)
|
||||
|
||||
if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
|
||||
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
|
||||
plexpy.CONFIG.write()
|
||||
if jellypy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
|
||||
jellypy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
|
||||
jellypy.CONFIG.write()
|
||||
else:
|
||||
new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys
|
||||
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys)
|
||||
plexpy.CONFIG.write()
|
||||
new_keys = jellypy.CONFIG.HOME_LIBRARY_CARDS + new_keys
|
||||
jellypy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys)
|
||||
jellypy.CONFIG.write()
|
||||
|
||||
logger.info("Tautulli Libraries :: Libraries list refreshed.")
|
||||
return True
|
||||
@@ -117,7 +99,7 @@ def add_live_tv_library(refresh=False):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
result = monitor_db.select_single('SELECT * FROM library_sections '
|
||||
'WHERE section_id = ? and server_id = ?',
|
||||
[common.LIVE_TV_SECTION_ID, plexpy.CONFIG.PMS_IDENTIFIER])
|
||||
[common.LIVE_TV_SECTION_ID, jellypy.CONFIG.PMS_IDENTIFIER])
|
||||
|
||||
if result and not refresh or not result and refresh:
|
||||
return
|
||||
@@ -125,9 +107,9 @@ def add_live_tv_library(refresh=False):
|
||||
if not refresh:
|
||||
logger.info("Tautulli Libraries :: Adding Live TV library to the database.")
|
||||
|
||||
section_keys = {'server_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
section_keys = {'server_id': jellypy.CONFIG.PMS_IDENTIFIER,
|
||||
'section_id': common.LIVE_TV_SECTION_ID}
|
||||
section_values = {'server_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
section_values = {'server_id': jellypy.CONFIG.PMS_IDENTIFIER,
|
||||
'section_id': common.LIVE_TV_SECTION_ID,
|
||||
'section_name': common.LIVE_TV_SECTION_NAME,
|
||||
'section_type': 'live',
|
||||
@@ -148,8 +130,8 @@ def has_library_type(section_type):
|
||||
|
||||
|
||||
def get_collections(section_id=None):
|
||||
plex = Plex(plexpy.CONFIG.PMS_URL, session.get_session_user_token())
|
||||
library = plex.get_library(section_id)
|
||||
jf = Jellyfin(jellypy.CONFIG.PMS_URL, session.get_session_user_token())
|
||||
library = jf.get_library(section_id)
|
||||
|
||||
if library.type not in ('movie', 'show', 'artist'):
|
||||
return []
|
||||
@@ -246,12 +228,12 @@ def get_playlists(section_id=None, user_id=None):
|
||||
if not plex_token:
|
||||
return []
|
||||
|
||||
plex = Plex(plexpy.CONFIG.PMS_URL, plex_token)
|
||||
jf = Jellyfin(jellypy.CONFIG.PMS_URL, plex_token)
|
||||
|
||||
if user_id:
|
||||
playlists = plex.plex.playlists()
|
||||
playlists = jf.plex.playlists()
|
||||
else:
|
||||
library = plex.get_library(section_id)
|
||||
library = jf.get_library(section_id)
|
||||
playlists = library.playlist()
|
||||
|
||||
playlists_list = []
|
||||
@@ -321,7 +303,7 @@ class Libraries(object):
|
||||
custom_where = [['library_sections.deleted_section', 0]]
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
if session.get_session_shared_libraries():
|
||||
custom_where.append(['library_sections.section_id', session.get_session_shared_libraries()])
|
||||
@@ -378,9 +360,10 @@ class Libraries(object):
|
||||
join_tables=['session_history_metadata',
|
||||
'session_history',
|
||||
'session_history_media_info'],
|
||||
join_evals=[['session_history_metadata.section_id', 'library_sections.section_id'],
|
||||
['session_history_metadata.id', 'session_history.id'],
|
||||
['session_history_metadata.id', 'session_history_media_info.id']],
|
||||
join_evals=[
|
||||
['session_history_metadata.section_id', 'library_sections.section_id'],
|
||||
['session_history_metadata.id', 'session_history.id'],
|
||||
['session_history_metadata.id', 'session_history_media_info.id']],
|
||||
kwargs=kwargs)
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Libraries :: Unable to execute database query for get_list: %s." % e)
|
||||
@@ -452,7 +435,8 @@ class Libraries(object):
|
||||
|
||||
return dict
|
||||
|
||||
def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None):
|
||||
def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False,
|
||||
kwargs=None):
|
||||
default_return = {'recordsFiltered': 0,
|
||||
'recordsTotal': 0,
|
||||
'draw': 0,
|
||||
@@ -484,7 +468,7 @@ class Libraries(object):
|
||||
# Get play counts from the database
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
if plexpy.CONFIG.GROUP_HISTORY_TABLES:
|
||||
if jellypy.CONFIG.GROUP_HISTORY_TABLES:
|
||||
count_by = 'reference_id'
|
||||
else:
|
||||
count_by = 'id'
|
||||
@@ -505,7 +489,8 @@ class Libraries(object):
|
||||
'GROUP BY session_history.%s ' % (count_by, group_by)
|
||||
result = monitor_db.select(query, args=[section_id])
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
|
||||
return default_return
|
||||
|
||||
watched_list = {}
|
||||
@@ -517,23 +502,23 @@ class Libraries(object):
|
||||
# Import media info cache from json file
|
||||
if rating_key:
|
||||
try:
|
||||
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
with open(inFilePath, 'r') as inFile:
|
||||
rows = json.load(inFile)
|
||||
library_count = len(rows)
|
||||
except IOError as e:
|
||||
#logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
|
||||
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
|
||||
# logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
|
||||
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
|
||||
pass
|
||||
elif section_id:
|
||||
try:
|
||||
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
|
||||
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
|
||||
with open(inFilePath, 'r') as inFile:
|
||||
rows = json.load(inFile)
|
||||
library_count = len(rows)
|
||||
except IOError as e:
|
||||
#logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
|
||||
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
|
||||
# logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
|
||||
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
|
||||
pass
|
||||
|
||||
# If no cache was imported, get all library children items
|
||||
@@ -594,14 +579,15 @@ class Libraries(object):
|
||||
# Cache the media info to a json file
|
||||
if rating_key:
|
||||
try:
|
||||
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR,
|
||||
'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
with open(outFilePath, 'w') as outFile:
|
||||
json.dump(rows, outFile)
|
||||
except IOError as e:
|
||||
logger.debug("Tautulli Libraries :: Unable to create cache file for rating_key %s." % rating_key)
|
||||
elif section_id:
|
||||
try:
|
||||
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
|
||||
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
|
||||
with open(outFilePath, 'w') as outFile:
|
||||
json.dump(rows, outFile)
|
||||
except IOError as e:
|
||||
@@ -622,14 +608,14 @@ class Libraries(object):
|
||||
# Get datatables JSON data
|
||||
if kwargs.get('json_data'):
|
||||
json_data = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
|
||||
#print json_data
|
||||
# print json_data
|
||||
|
||||
# Search results
|
||||
search_value = json_data['search']['value'].lower()
|
||||
if search_value:
|
||||
searchable_columns = [d['data'] for d in json_data['columns'] if d['searchable']] + ['title']
|
||||
for row in rows:
|
||||
for k,v in row.items():
|
||||
for k, v in row.items():
|
||||
if k in searchable_columns and search_value in v.lower():
|
||||
results.append(row)
|
||||
break
|
||||
@@ -649,7 +635,9 @@ class Libraries(object):
|
||||
elif sort_key in ('file_size', 'bitrate', 'added_at', 'last_played', 'play_count'):
|
||||
results = sorted(results, key=lambda k: helpers.cast_to_int(k[sort_key]), reverse=reverse)
|
||||
elif sort_key == 'video_resolution':
|
||||
results = sorted(results, key=lambda k: helpers.cast_to_int(k[sort_key].replace('4k', '2160p').rstrip('p')), reverse=reverse)
|
||||
results = sorted(results,
|
||||
key=lambda k: helpers.cast_to_int(k[sort_key].replace('4k', '2160p').rstrip('p')),
|
||||
reverse=reverse)
|
||||
else:
|
||||
results = sorted(results, key=lambda k: k[sort_key].lower(), reverse=reverse)
|
||||
|
||||
@@ -692,24 +680,24 @@ class Libraries(object):
|
||||
rows = []
|
||||
# Import media info cache from json file
|
||||
if rating_key:
|
||||
#logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
|
||||
# logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
|
||||
try:
|
||||
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
with open(inFilePath, 'r') as inFile:
|
||||
rows = json.load(inFile)
|
||||
except IOError as e:
|
||||
#logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
|
||||
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
|
||||
# logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
|
||||
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
|
||||
pass
|
||||
elif section_id:
|
||||
logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." % section_id)
|
||||
try:
|
||||
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
|
||||
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
|
||||
with open(inFilePath, 'r') as inFile:
|
||||
rows = json.load(inFile)
|
||||
except IOError as e:
|
||||
#logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
|
||||
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
|
||||
# logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
|
||||
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
|
||||
pass
|
||||
|
||||
# Get the total file size for each item
|
||||
@@ -727,7 +715,7 @@ class Libraries(object):
|
||||
media_info = media_part_info = {}
|
||||
if 'media_info' in child_metadata and len(child_metadata['media_info']) > 0:
|
||||
media_info = child_metadata['media_info'][0]
|
||||
if 'parts' in media_info and len (media_info['parts']) > 0:
|
||||
if 'parts' in media_info and len(media_info['parts']) > 0:
|
||||
media_part_info = next((p for p in media_info['parts'] if p['selected']),
|
||||
media_info['parts'][0])
|
||||
|
||||
@@ -738,26 +726,29 @@ class Libraries(object):
|
||||
# Cache the media info to a json file
|
||||
if rating_key:
|
||||
try:
|
||||
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
|
||||
with open(outFilePath, 'w') as outFile:
|
||||
json.dump(rows, outFile)
|
||||
except IOError as e:
|
||||
logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
|
||||
logger.debug(
|
||||
"Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
|
||||
elif section_id:
|
||||
try:
|
||||
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
|
||||
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
|
||||
with open(outFilePath, 'w') as outFile:
|
||||
json.dump(rows, outFile)
|
||||
except IOError as e:
|
||||
logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)
|
||||
logger.debug(
|
||||
"Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)
|
||||
|
||||
if rating_key:
|
||||
#logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
|
||||
# logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
|
||||
pass
|
||||
elif section_id:
|
||||
logger.debug("Tautulli Libraries :: File sizes updated for section_id %s." % section_id)
|
||||
|
||||
return True
|
||||
|
||||
def set_config(self, section_id=None, custom_thumb='', custom_art='',
|
||||
do_notify=1, keep_history=1, do_notify_created=1):
|
||||
if section_id:
|
||||
@@ -796,7 +787,7 @@ class Libraries(object):
|
||||
return default_return
|
||||
|
||||
if server_id is None:
|
||||
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
server_id = jellypy.CONFIG.PMS_IDENTIFIER
|
||||
|
||||
def get_library_details(section_id=section_id, server_id=server_id):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
@@ -856,8 +847,9 @@ class Libraries(object):
|
||||
return library_details
|
||||
|
||||
else:
|
||||
logger.warn("Tautulli Libraries :: Unable to retrieve library %s from database. Requesting library list refresh."
|
||||
% section_id)
|
||||
logger.warn(
|
||||
"Tautulli Libraries :: Unable to retrieve library %s from database. Requesting library list refresh."
|
||||
% section_id)
|
||||
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
|
||||
refresh_libraries()
|
||||
|
||||
@@ -877,7 +869,7 @@ class Libraries(object):
|
||||
return []
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
if query_days and query_days is not None:
|
||||
query_days = map(helpers.cast_to_int, query_days.split(','))
|
||||
@@ -941,7 +933,7 @@ class Libraries(object):
|
||||
return []
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
@@ -1016,36 +1008,36 @@ class Libraries(object):
|
||||
result = []
|
||||
|
||||
for row in result:
|
||||
if row['media_type'] == 'episode' and row['parent_thumb']:
|
||||
thumb = row['parent_thumb']
|
||||
elif row['media_type'] == 'episode':
|
||||
thumb = row['grandparent_thumb']
|
||||
else:
|
||||
thumb = row['thumb']
|
||||
if row['media_type'] == 'episode' and row['parent_thumb']:
|
||||
thumb = row['parent_thumb']
|
||||
elif row['media_type'] == 'episode':
|
||||
thumb = row['grandparent_thumb']
|
||||
else:
|
||||
thumb = row['thumb']
|
||||
|
||||
recent_output = {'row_id': row['id'],
|
||||
'media_type': row['media_type'],
|
||||
'rating_key': row['rating_key'],
|
||||
'parent_rating_key': row['parent_rating_key'],
|
||||
'grandparent_rating_key': row['grandparent_rating_key'],
|
||||
'title': row['title'],
|
||||
'parent_title': row['parent_title'],
|
||||
'grandparent_title': row['grandparent_title'],
|
||||
'original_title': row['original_title'],
|
||||
'thumb': thumb,
|
||||
'media_index': row['media_index'],
|
||||
'parent_media_index': row['parent_media_index'],
|
||||
'year': row['year'],
|
||||
'originally_available_at': row['originally_available_at'],
|
||||
'live': row['live'],
|
||||
'guid': row['guid'],
|
||||
'time': row['started'],
|
||||
'user': row['user'],
|
||||
'section_id': row['section_id'],
|
||||
'content_rating': row['content_rating'],
|
||||
'labels': row['labels'].split(';') if row['labels'] else (),
|
||||
}
|
||||
recently_watched.append(recent_output)
|
||||
recent_output = {'row_id': row['id'],
|
||||
'media_type': row['media_type'],
|
||||
'rating_key': row['rating_key'],
|
||||
'parent_rating_key': row['parent_rating_key'],
|
||||
'grandparent_rating_key': row['grandparent_rating_key'],
|
||||
'title': row['title'],
|
||||
'parent_title': row['parent_title'],
|
||||
'grandparent_title': row['grandparent_title'],
|
||||
'original_title': row['original_title'],
|
||||
'thumb': thumb,
|
||||
'media_index': row['media_index'],
|
||||
'parent_media_index': row['parent_media_index'],
|
||||
'year': row['year'],
|
||||
'originally_available_at': row['originally_available_at'],
|
||||
'live': row['live'],
|
||||
'guid': row['guid'],
|
||||
'time': row['started'],
|
||||
'user': row['user'],
|
||||
'section_id': row['section_id'],
|
||||
'content_rating': row['content_rating'],
|
||||
'labels': row['labels'].split(';') if row['labels'] else (),
|
||||
}
|
||||
recently_watched.append(recent_output)
|
||||
|
||||
return session.mask_session_info(recently_watched)
|
||||
|
||||
@@ -1088,13 +1080,13 @@ class Libraries(object):
|
||||
return all(success)
|
||||
|
||||
elif str(section_id).isdigit():
|
||||
server_id = server_id or plexpy.CONFIG.PMS_IDENTIFIER
|
||||
if server_id == plexpy.CONFIG.PMS_IDENTIFIER:
|
||||
server_id = server_id or jellypy.CONFIG.PMS_IDENTIFIER
|
||||
if server_id == jellypy.CONFIG.PMS_IDENTIFIER:
|
||||
delete_success = database.delete_library_history(section_id=section_id)
|
||||
else:
|
||||
logger.warn("Tautulli Libraries :: Library history not deleted for library section_id %s "
|
||||
"because library server_id %s does not match Plex server identifier %s."
|
||||
% (section_id, server_id, plexpy.CONFIG.PMS_IDENTIFIER))
|
||||
% (section_id, server_id, jellypy.CONFIG.PMS_IDENTIFIER))
|
||||
delete_success = True
|
||||
|
||||
if purge_only:
|
||||
@@ -1151,7 +1143,7 @@ class Libraries(object):
|
||||
|
||||
try:
|
||||
if section_id.isdigit():
|
||||
[os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, f)) for f in os.listdir(plexpy.CONFIG.CACHE_DIR)
|
||||
[os.remove(os.path.join(jellypy.CONFIG.CACHE_DIR, f)) for f in os.listdir(jellypy.CONFIG.CACHE_DIR)
|
||||
if f.startswith('media_info_%s' % section_id) and f.endswith('.json')]
|
||||
|
||||
logger.debug("Tautulli Libraries :: Deleted media info table cache for section_id %s." % section_id)
|
||||
@@ -1165,9 +1157,9 @@ class Libraries(object):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
# Refresh the PMS_URL to make sure the server_id is updated
|
||||
plextv.get_server_resources()
|
||||
# TODO: plextv.get_server_resources()
|
||||
|
||||
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
server_id = jellypy.CONFIG.PMS_IDENTIFIER
|
||||
|
||||
try:
|
||||
logger.debug("Tautulli Libraries :: Deleting libraries where server_id does not match %s." % server_id)
|
||||
@@ -14,19 +14,11 @@
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from future.builtins import object

import future.moves.queue as queue
import time
import queue
import threading
import time

import plexpy
if plexpy.PYTHON2:
    import logger
else:
    from plexpy import logger
from jellypy import logger


class TimedLock(object):
@@ -15,28 +15,21 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
import os
from io import open

import os

import plexpy
if plexpy.PYTHON2:
    import helpers
    import logger
else:
    from plexpy import helpers
    from plexpy import logger
import jellypy
from jellypy import helpers
from jellypy import logger


def get_log_tail(window=20, parsed=True, log_type="server"):

    if plexpy.CONFIG.PMS_LOGS_FOLDER:
    if jellypy.CONFIG.PMS_LOGS_FOLDER:
        log_file = ""
        if log_type == "server":
            log_file = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Server.log')
            log_file = os.path.join(jellypy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Server.log')
        elif log_type == "scanner":
            log_file = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Scanner.log')
            log_file = os.path.join(jellypy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Scanner.log')
        else:
            return []

@@ -76,6 +69,7 @@ def get_log_tail(window=20, parsed=True, log_type="server"):

    return log_lines


# http://stackoverflow.com/a/13790289/2405162
def tail(f, lines=1, _buffer=4098):
    """Tail a file and get X lines from the end"""
@@ -105,4 +99,4 @@ def tail(f, lines=1, _buffer=4098):
        # next X bytes
        block_counter -= 1

    return lines_found[-lines:]
    return lines_found[-lines:]
@@ -15,13 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from future.builtins import str

from logutils.queue import QueueHandler, QueueListener
from logging import handlers

import cherrypy
import contextlib
import errno
import logging
@@ -31,15 +25,15 @@ import re
import sys
import threading
import traceback
from logging import handlers

import plexpy
if plexpy.PYTHON2:
    import helpers
    from config import _BLACKLIST_KEYS, _WHITELIST_KEYS
else:
    from plexpy import helpers
    from plexpy.config import _BLACKLIST_KEYS, _WHITELIST_KEYS
import cherrypy
from logutils.queue import QueueHandler, QueueListener

import jellypy

from jellypy import helpers
from jellypy.config import _BLACKLIST_KEYS, _WHITELIST_KEYS

# These settings are for file logging only
FILENAME = "tautulli.log"
@@ -67,8 +61,8 @@ def blacklist_config(config):
|
||||
|
||||
for key, value in config.items():
|
||||
if isinstance(value, str) and len(value.strip()) > 5 and \
|
||||
key.upper() not in _WHITELIST_KEYS and (key.upper() in blacklist_keys or
|
||||
any(bk in key.upper() for bk in _BLACKLIST_KEYS)):
|
||||
key.upper() not in _WHITELIST_KEYS and (key.upper() in blacklist_keys or
|
||||
any(bk in key.upper() for bk in _BLACKLIST_KEYS)):
|
||||
blacklist.add(value.strip())
|
||||
|
||||
_BLACKLIST_WORDS.update(blacklist)
|
||||
@@ -78,9 +72,10 @@ class NoThreadFilter(logging.Filter):
|
||||
"""
|
||||
Log filter for the current thread
|
||||
"""
|
||||
|
||||
def __init__(self, threadName):
|
||||
super(NoThreadFilter, self).__init__()
|
||||
|
||||
|
||||
self.threadName = threadName
|
||||
|
||||
def filter(self, record):
|
||||
@@ -92,22 +87,33 @@ class BlacklistFilter(logging.Filter):
|
||||
"""
|
||||
Log filter for blacklisted tokens and passwords
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(BlacklistFilter, self).__init__()
|
||||
|
||||
def filter(self, record):
|
||||
if not plexpy.CONFIG.LOG_BLACKLIST:
|
||||
if not jellypy.CONFIG.LOG_BLACKLIST:
|
||||
return True
|
||||
|
||||
for item in _BLACKLIST_WORDS:
|
||||
try:
|
||||
if item in record.msg:
|
||||
record.msg = record.msg.replace(item, 16 * '*')
|
||||
if any(item in str(arg) for arg in record.args):
|
||||
record.args = tuple(arg.replace(item, 16 * '*') if isinstance(arg, str) else arg
|
||||
for arg in record.args)
|
||||
|
||||
args = []
|
||||
for arg in record.args:
|
||||
try:
|
||||
arg_str = str(arg)
|
||||
if item in arg_str:
|
||||
arg_str = arg_str.replace(item, 16 * '*')
|
||||
arg = arg_str
|
||||
except:
|
||||
pass
|
||||
args.append(arg)
|
||||
record.args = tuple(args)
|
||||
except:
|
||||
pass
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -115,13 +121,14 @@ class RegexFilter(logging.Filter):
|
||||
"""
|
||||
Base class for regex log filter
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(RegexFilter, self).__init__()
|
||||
|
||||
self.regex = re.compile(r'')
|
||||
|
||||
def filter(self, record):
|
||||
if not plexpy.CONFIG.LOG_BLACKLIST:
|
||||
if not jellypy.CONFIG.LOG_BLACKLIST:
|
||||
return True
|
||||
|
||||
try:
|
||||
@@ -131,9 +138,15 @@ class RegexFilter(logging.Filter):
|
||||
|
||||
args = []
|
||||
for arg in record.args:
|
||||
matches = self.regex.findall(arg) if isinstance(arg, str) else []
|
||||
for match in matches:
|
||||
arg = self.replace(arg, match)
|
||||
try:
|
||||
arg_str = str(arg)
|
||||
matches = self.regex.findall(arg_str)
|
||||
if matches:
|
||||
for match in matches:
|
||||
arg_str = self.replace(arg_str, match)
|
||||
arg = arg_str
|
||||
except:
|
||||
pass
|
||||
args.append(arg)
|
||||
record.args = tuple(args)
|
||||
except:
|
||||
@@ -149,6 +162,7 @@ class PublicIPFilter(RegexFilter):
|
||||
"""
|
||||
Log filter for public IP addresses
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(PublicIPFilter, self).__init__()
|
||||
|
||||
@@ -166,6 +180,7 @@ class EmailFilter(RegexFilter):
|
||||
"""
|
||||
Log filter for email addresses
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(EmailFilter, self).__init__()
|
||||
|
||||
@@ -182,6 +197,7 @@ class PlexTokenFilter(RegexFilter):
|
||||
"""
|
||||
Log filter for X-Plex-Token
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(PlexTokenFilter, self).__init__()
|
||||
|
||||
@@ -293,11 +309,13 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
||||
|
||||
# Setup file logger
|
||||
if log_dir:
|
||||
file_formatter = logging.Formatter('%(asctime)s - %(levelname)-7s :: %(threadName)s : %(message)s', '%Y-%m-%d %H:%M:%S')
|
||||
file_formatter = logging.Formatter('%(asctime)s - %(levelname)-7s :: %(threadName)s : %(message)s',
|
||||
'%Y-%m-%d %H:%M:%S')
|
||||
|
||||
# Main Tautulli logger
|
||||
filename = os.path.join(log_dir, FILENAME)
|
||||
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES, encoding='utf-8')
|
||||
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES,
|
||||
encoding='utf-8')
|
||||
file_handler.setLevel(logging.DEBUG)
|
||||
file_handler.setFormatter(file_formatter)
|
||||
|
||||
@@ -306,7 +324,8 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
||||
|
||||
# Tautulli API logger
|
||||
filename = os.path.join(log_dir, FILENAME_API)
|
||||
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES, encoding='utf-8')
|
||||
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES,
|
||||
encoding='utf-8')
|
||||
file_handler.setLevel(logging.DEBUG)
|
||||
file_handler.setFormatter(file_formatter)
|
||||
|
||||
@@ -314,7 +333,8 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
||||
|
||||
# Tautulli websocket logger
|
||||
filename = os.path.join(log_dir, FILENAME_PLEX_WEBSOCKET)
|
||||
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES, encoding='utf-8')
|
||||
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES,
|
||||
encoding='utf-8')
|
||||
file_handler.setLevel(logging.DEBUG)
|
||||
file_handler.setFormatter(file_formatter)
|
||||
|
||||
@@ -322,7 +342,8 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
||||
|
||||
# Setup console logger
|
||||
if console:
|
||||
console_formatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(threadName)s : %(message)s', '%Y-%m-%d %H:%M:%S')
|
||||
console_formatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(threadName)s : %(message)s',
|
||||
'%Y-%m-%d %H:%M:%S')
|
||||
console_handler = logging.StreamHandler()
|
||||
console_handler.setFormatter(console_formatter)
|
||||
console_handler.setLevel(logging.DEBUG)
|
||||
@@ -333,7 +354,7 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
||||
# Add filters to log handlers
|
||||
# Only add filters after the config file has been initialized
|
||||
# Nothing prior to initialization should contain sensitive information
|
||||
if not plexpy.DEV and plexpy.CONFIG:
|
||||
if not jellypy.DEV and jellypy.CONFIG:
|
||||
log_handlers = logger.handlers + \
|
||||
logger_api.handlers + \
|
||||
logger_plex_websocket.handlers + \
|
||||
@@ -391,6 +412,7 @@ def initHooks(global_exceptions=True, thread_exceptions=True, pass_original=True
|
||||
raise
|
||||
except:
|
||||
excepthook(*sys.exc_info())
|
||||
|
||||
self.run = new_run
|
||||
|
||||
# Monkey patch the run() by monkey patching the __init__ method
|
||||
@@ -16,13 +16,14 @@
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

import os
import plistlib
import subprocess
import sys
import plistlib

try:
    import AppKit
    import Foundation

    HAS_PYOBJC = True
except ImportError:
    HAS_PYOBJC = False
@@ -30,25 +31,22 @@ except ImportError:
if HAS_PYOBJC:
    import rumps

import plexpy
if plexpy.PYTHON2:
    import common
    import logger
    import versioncheck
else:
    from plexpy import common
    from plexpy import logger
    from plexpy import versioncheck
import jellypy

from jellypy import common
from jellypy import logger
from jellypy import versioncheck


class MacOSSystemTray(object):
    def __init__(self):
        self.image_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/', plexpy.CONFIG.INTERFACE, 'images')
        self.image_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/', jellypy.CONFIG.INTERFACE, 'images')
        self.icon = os.path.join(self.image_dir, 'logo-flat-white.ico')

        if plexpy.UPDATE_AVAILABLE:
            self.icon = os.path.join(self.image_dir, 'logo-circle-update.ico')
        if jellypy.UPDATE_AVAILABLE:
            self.update_title = 'Check for Updates - Update Available!'
        else:
            self.icon = os.path.join(self.image_dir, 'logo-circle.ico')
            self.update_title = 'Check for Updates'

        self.menu = [
            rumps.MenuItem('Open Tautulli', callback=self.tray_open),
@@ -56,16 +54,17 @@ class MacOSSystemTray(object):
|
||||
rumps.MenuItem('Start Tautulli at Login', callback=self.tray_startup),
|
||||
rumps.MenuItem('Open Browser when Tautulli Starts', callback=self.tray_browser),
|
||||
None,
|
||||
rumps.MenuItem('Check for Updates', callback=self.tray_check_update),
|
||||
rumps.MenuItem(self.update_title, callback=self.tray_check_update),
|
||||
rumps.MenuItem('Restart', callback=self.tray_restart),
|
||||
rumps.MenuItem('Quit', callback=self.tray_quit)
|
||||
]
|
||||
if not plexpy.FROZEN:
|
||||
if not jellypy.FROZEN:
|
||||
self.menu.insert(6, rumps.MenuItem('Update', callback=self.tray_update))
|
||||
self.menu[2].state = plexpy.CONFIG.LAUNCH_STARTUP
|
||||
self.menu[3].state = plexpy.CONFIG.LAUNCH_BROWSER
|
||||
self.menu[2].state = jellypy.CONFIG.LAUNCH_STARTUP
|
||||
self.menu[3].state = jellypy.CONFIG.LAUNCH_BROWSER
|
||||
|
||||
self.tray_icon = rumps.App(common.PRODUCT, icon=self.icon, menu=self.menu, quit_button=None)
|
||||
self.tray_icon = rumps.App(common.PRODUCT, icon=self.icon, template=True,
|
||||
menu=self.menu, quit_button=None)
|
||||
|
||||
def start(self):
|
||||
logger.info("Launching MacOS menu bar icon.")
|
||||
@@ -82,47 +81,50 @@ class MacOSSystemTray(object):
|
||||
self.tray_icon.icon = kwargs['icon']
|
||||
|
||||
def tray_open(self, tray_icon):
|
||||
plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT, plexpy.HTTP_ROOT)
|
||||
jellypy.launch_browser(jellypy.CONFIG.HTTP_HOST, jellypy.HTTP_PORT, jellypy.HTTP_ROOT)
|
||||
|
||||
def tray_startup(self, tray_icon):
|
||||
plexpy.CONFIG.LAUNCH_STARTUP = not plexpy.CONFIG.LAUNCH_STARTUP
|
||||
jellypy.CONFIG.LAUNCH_STARTUP = not jellypy.CONFIG.LAUNCH_STARTUP
|
||||
set_startup()
|
||||
|
||||
def tray_browser(self, tray_icon):
|
||||
plexpy.CONFIG.LAUNCH_BROWSER = not plexpy.CONFIG.LAUNCH_BROWSER
|
||||
jellypy.CONFIG.LAUNCH_BROWSER = not jellypy.CONFIG.LAUNCH_BROWSER
|
||||
set_startup()
|
||||
|
||||
def tray_check_update(self, tray_icon):
|
||||
versioncheck.check_update()
|
||||
|
||||
def tray_update(self, tray_icon):
|
||||
if plexpy.UPDATE_AVAILABLE:
|
||||
plexpy.SIGNAL = 'update'
|
||||
if jellypy.UPDATE_AVAILABLE:
|
||||
jellypy.SIGNAL = 'update'
|
||||
else:
|
||||
self.update_title = 'Check for Updates - No Update Available'
|
||||
self.menu[5].title = self.update_title
|
||||
|
||||
def tray_restart(self, tray_icon):
|
||||
plexpy.SIGNAL = 'restart'
|
||||
jellypy.SIGNAL = 'restart'
|
||||
|
||||
def tray_quit(self, tray_icon):
|
||||
plexpy.SIGNAL = 'shutdown'
|
||||
jellypy.SIGNAL = 'shutdown'
|
||||
|
||||
def change_tray_update_icon(self):
|
||||
if plexpy.UPDATE_AVAILABLE:
|
||||
self.icon = os.path.join(self.image_dir, 'logo-circle-update.ico')
|
||||
if jellypy.UPDATE_AVAILABLE:
|
||||
self.update_title = 'Check for Updates - Update Available!'
|
||||
else:
|
||||
self.icon = os.path.join(self.image_dir, 'logo-circle.ico')
|
||||
self.update(icon=self.icon)
|
||||
self.update_title = 'Check for Updates'
|
||||
self.menu[5].title = self.update_title
|
||||
|
||||
def change_tray_icons(self):
|
||||
self.tray_icon.menu['Start Tautulli at Login'].state = plexpy.CONFIG.LAUNCH_STARTUP
|
||||
self.tray_icon.menu['Open Browser when Tautulli Starts'].state = plexpy.CONFIG.LAUNCH_BROWSER
|
||||
self.tray_icon.menu['Start Tautulli at Login'].state = jellypy.CONFIG.LAUNCH_STARTUP
|
||||
self.tray_icon.menu['Open Browser when Tautulli Starts'].state = jellypy.CONFIG.LAUNCH_BROWSER
|
||||
|
||||
|
||||
def set_startup():
|
||||
if plexpy.MAC_SYS_TRAY_ICON:
|
||||
plexpy.MAC_SYS_TRAY_ICON.change_tray_icons()
|
||||
if jellypy.MAC_SYS_TRAY_ICON:
|
||||
jellypy.MAC_SYS_TRAY_ICON.change_tray_icons()
|
||||
|
||||
if plexpy.INSTALL_TYPE == 'macos':
|
||||
if plexpy.CONFIG.LAUNCH_STARTUP:
|
||||
if jellypy.INSTALL_TYPE == 'macos':
|
||||
if jellypy.CONFIG.LAUNCH_STARTUP:
|
||||
try:
|
||||
subprocess.Popen(['osascript',
|
||||
'-e', 'tell application "System Events"',
|
||||
@@ -157,10 +159,11 @@ def set_startup():
|
||||
plist_file_path = os.path.join(launch_agents, plist_file)
|
||||
|
||||
exe = sys.executable
|
||||
if plexpy.FROZEN:
|
||||
args = [exe]
|
||||
run_args = [arg for arg in jellypy.ARGS if arg != '--nolaunch']
|
||||
if jellypy.FROZEN:
|
||||
args = [exe] + run_args
|
||||
else:
|
||||
args = [exe, plexpy.FULL_PATH]
|
||||
args = [exe, jellypy.FULL_PATH] + run_args
|
||||
|
||||
plist_dict = {
|
||||
'Label': common.PRODUCT,
|
||||
@@ -168,7 +171,7 @@ def set_startup():
|
||||
'RunAtLoad': True
|
||||
}
|
||||
|
||||
if plexpy.CONFIG.LAUNCH_STARTUP:
|
||||
if jellypy.CONFIG.LAUNCH_STARTUP:
|
||||
if not os.path.exists(launch_agents):
|
||||
try:
|
||||
os.makedirs(launch_agents)
|
||||
@@ -15,22 +15,13 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from future.builtins import str

import requests
import threading

import plexpy
if plexpy.PYTHON2:
    import database
    import helpers
    import logger
else:
    from plexpy import database
    from plexpy import helpers
    from plexpy import logger
import requests

from jellypy import database
from jellypy import helpers
from jellypy import logger

TEMP_DEVICE_TOKEN = None
INVALIDATE_TIMER = None
@@ -118,7 +109,8 @@ def get_mobile_device_config(mobile_device_id=None):
    if str(mobile_device_id).isdigit():
        mobile_device_id = int(mobile_device_id)
    else:
        logger.error("Tautulli MobileApp :: Unable to retrieve mobile device config: invalid mobile_device_id %s." % mobile_device_id)
        logger.error(
            "Tautulli MobileApp :: Unable to retrieve mobile device config: invalid mobile_device_id %s." % mobile_device_id)
        return None

    db = database.MonitorDatabase()
@@ -132,7 +124,8 @@ def set_mobile_device_config(mobile_device_id=None, **kwargs):
    if str(mobile_device_id).isdigit():
        mobile_device_id = int(mobile_device_id)
    else:
        logger.error("Tautulli MobileApp :: Unable to set exisiting mobile device: invalid mobile_device_id %s." % mobile_device_id)
        logger.error(
            "Tautulli MobileApp :: Unable to set exisiting mobile device: invalid mobile_device_id %s." % mobile_device_id)
        return False

    keys = {'id': mobile_device_id}
@@ -15,26 +15,17 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from io import open
import email.utils
import os
from io import open

from apscheduler.triggers.cron import CronTrigger
import email.utils

import plexpy
if plexpy.PYTHON2:
    import database
    import helpers
    import logger
    import newsletters
else:
    from plexpy import database
    from plexpy import helpers
    from plexpy import logger
    from plexpy import newsletters

import jellypy
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import newsletters

NEWSLETTER_SCHED = None
@@ -48,7 +39,7 @@ def add_newsletter_each(newsletter_id=None, notify_action=None, **kwargs):
|
||||
'newsletter_id': newsletter_id,
|
||||
'notify_action': notify_action}
|
||||
data.update(kwargs)
|
||||
plexpy.NOTIFY_QUEUE.put(data)
|
||||
jellypy.NOTIFY_QUEUE.put(data)
|
||||
|
||||
|
||||
def schedule_newsletters(newsletter_id=None):
|
||||
@@ -59,7 +50,8 @@ def schedule_newsletters(newsletter_id=None):
|
||||
|
||||
if newsletter['active']:
|
||||
schedule_newsletter_job('newsletter-{}'.format(newsletter['id']), name=newsletter_job_name,
|
||||
func=add_newsletter_each, args=[newsletter['id'], 'on_cron'], cron=newsletter['cron'])
|
||||
func=add_newsletter_each, args=[newsletter['id'], 'on_cron'],
|
||||
cron=newsletter['cron'])
|
||||
else:
|
||||
schedule_newsletter_job('newsletter-{}'.format(newsletter['id']), name=newsletter_job_name,
|
||||
remove_job=True)
|
||||
@@ -82,7 +74,8 @@ def schedule_newsletter_job(newsletter_job_id, name='', func=None, remove_job=Fa
|
||||
logger.info("Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
|
||||
elif not remove_job:
|
||||
NEWSLETTER_SCHED.add_job(
|
||||
func, args=args, id=newsletter_job_id, trigger=CronTrigger.from_crontab(cron))
|
||||
func, args=args, id=newsletter_job_id, trigger=CronTrigger.from_crontab(cron),
|
||||
misfire_grace_time=None)
|
||||
logger.info("Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)
|
||||
|
||||
|
||||
@@ -142,7 +135,6 @@ def notify(newsletter_id=None, notify_action=None, **kwargs):
|
||||
|
||||
def set_notify_state(newsletter, notify_action, subject, body, message, filename,
|
||||
start_date, end_date, start_time, end_time, newsletter_uuid, email_msg_id):
|
||||
|
||||
if newsletter and notify_action:
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
@@ -210,7 +202,7 @@ def get_newsletter(newsletter_uuid=None, newsletter_id_name=None):
|
||||
end_date.replace('-', ''),
|
||||
newsletter_uuid)
|
||||
|
||||
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR or os.path.join(plexpy.DATA_DIR, 'newsletters')
|
||||
newsletter_folder = jellypy.CONFIG.NEWSLETTER_DIR or os.path.join(jellypy.DATA_DIR, 'newsletters')
|
||||
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
|
||||
|
||||
if newsletter_file in os.listdir(newsletter_folder):
|
||||
@@ -219,6 +211,7 @@ def get_newsletter(newsletter_uuid=None, newsletter_id_name=None):
|
||||
newsletter = n_file.read()
|
||||
return newsletter
|
||||
except OSError as e:
|
||||
logger.error("Tautulli NewsletterHandler :: Failed to retrieve newsletter '%s': %s" % (newsletter_uuid, e))
|
||||
logger.error(
|
||||
"Tautulli NewsletterHandler :: Failed to retrieve newsletter '%s': %s" % (newsletter_uuid, e))
|
||||
else:
|
||||
logger.warn("Tautulli NewsletterHandler :: Newsletter file '%s' is missing." % newsletter_file)
|
||||
@@ -15,40 +15,24 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from future.builtins import next
from future.builtins import str
from future.builtins import object

import arrow
from collections import OrderedDict
import json
from itertools import groupby
from mako.lookup import TemplateLookup
from mako import exceptions
import os
import re
from collections import OrderedDict
from itertools import groupby

import plexpy
if plexpy.PYTHON2:
    import common
    import database
    import helpers
    import libraries
    import logger
    import newsletter_handler
    import pmsconnect
    from notifiers import send_notification, EMAIL
else:
    from plexpy import common
    from plexpy import database
    from plexpy import helpers
    from plexpy import libraries
    from plexpy import logger
    from plexpy import newsletter_handler
    from plexpy import pmsconnect
    from plexpy.notifiers import send_notification, EMAIL
import arrow
from mako import exceptions
from mako.lookup import TemplateLookup

import jellypy
from jellypy import common
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import newsletter_handler
from jellypy.notifiers import send_notification, EMAIL

AGENT_IDS = {
    'recently_added': 0
@@ -319,14 +303,14 @@ def blacklist_logger():
|
||||
|
||||
|
||||
def serve_template(templatename, **kwargs):
|
||||
if plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR:
|
||||
if jellypy.CONFIG.NEWSLETTER_CUSTOM_DIR:
|
||||
logger.info("Tautulli Newsletters :: Using custom newsletter template directory.")
|
||||
template_dir = plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR
|
||||
template_dir = jellypy.CONFIG.NEWSLETTER_CUSTOM_DIR
|
||||
else:
|
||||
interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
|
||||
template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.NEWSLETTER_TEMPLATES)
|
||||
interface_dir = os.path.join(str(jellypy.PROG_DIR), 'data/interfaces/')
|
||||
template_dir = os.path.join(str(interface_dir), jellypy.CONFIG.NEWSLETTER_TEMPLATES)
|
||||
|
||||
if not plexpy.CONFIG.NEWSLETTER_INLINE_STYLES:
|
||||
if not jellypy.CONFIG.NEWSLETTER_INLINE_STYLES:
|
||||
templatename = templatename.replace('.html', '.internal.html')
|
||||
|
||||
_hplookup = TemplateLookup(directories=[template_dir], default_filters=['unicode', 'h'])
|
||||
@@ -344,7 +328,7 @@ def generate_newsletter_uuid():
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
while not uuid or uuid_exists:
|
||||
uuid = plexpy.generate_uuid()[:8]
|
||||
uuid = jellypy.generate_uuid()[:8]
|
||||
result = db.select_single(
|
||||
'SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?) as uuid_exists', [uuid])
|
||||
uuid_exists = result['uuid_exists']
|
||||
@@ -512,7 +496,8 @@ class Newsletter(object):
|
||||
self.newsletter = self.generate_newsletter()
|
||||
|
||||
if self.template_error:
|
||||
logger.error("Tautulli Newsletters :: %s newsletter failed to render template. Newsletter not sent." % self.NAME)
|
||||
logger.error(
|
||||
"Tautulli Newsletters :: %s newsletter failed to render template. Newsletter not sent." % self.NAME)
|
||||
return False
|
||||
|
||||
if not self._has_data():
|
||||
@@ -528,7 +513,7 @@ class Newsletter(object):
|
||||
|
||||
def _save(self):
|
||||
newsletter_file = self.filename_formatted
|
||||
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR or os.path.join(plexpy.DATA_DIR, 'newsletters')
|
||||
newsletter_folder = jellypy.CONFIG.NEWSLETTER_DIR or os.path.join(jellypy.DATA_DIR, 'newsletters')
|
||||
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
|
||||
|
||||
# In case the user has deleted it manually
|
||||
@@ -540,7 +525,7 @@ class Newsletter(object):
|
||||
for line in self.newsletter.splitlines():
|
||||
if '<!-- IGNORE SAVE -->' not in line:
|
||||
n_file.write((line + '\r\n').encode('utf-8'))
|
||||
#n_file.write(line.strip())
|
||||
# n_file.write(line.strip())
|
||||
|
||||
logger.info("Tautulli Newsletters :: %s newsletter saved to '%s'" % (self.NAME, newsletter_file))
|
||||
except OSError as e:
|
||||
@@ -552,7 +537,7 @@ class Newsletter(object):
|
||||
newsletter_stripped = ''.join(l.strip() for l in self.newsletter.splitlines())
|
||||
|
||||
plaintext = 'HTML email support is required to view the newsletter.\n'
|
||||
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and jellypy.CONFIG.HTTP_BASE_URL:
|
||||
plaintext += self._DEFAULT_BODY.format(**self.parameters)
|
||||
|
||||
email_reply_msg_id = self.email_reply_msg_id if self.config['threaded'] else None
|
||||
@@ -578,10 +563,10 @@ class Newsletter(object):
|
||||
)
|
||||
elif self.config['notifier_id']:
|
||||
return send_notification(
|
||||
notifier_id=self.config['notifier_id'],
|
||||
subject=self.subject_formatted,
|
||||
body=self.body_formatted
|
||||
)
|
||||
notifier_id=self.config['notifier_id'],
|
||||
subject=self.subject_formatted,
|
||||
body=self.body_formatted
|
||||
)
|
||||
|
||||
def build_params(self):
|
||||
parameters = self._build_params()
|
||||
@@ -589,15 +574,15 @@ class Newsletter(object):
|
||||
return parameters
|
||||
|
||||
def _build_params(self):
|
||||
date_format = helpers.momentjs_to_arrow(plexpy.CONFIG.DATE_FORMAT)
|
||||
date_format = helpers.momentjs_to_arrow(jellypy.CONFIG.DATE_FORMAT)
|
||||
|
||||
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
|
||||
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and jellypy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'newsletter/'
|
||||
else:
|
||||
base_url = helpers.get_plexpy_url() + '/newsletter/'
|
||||
|
||||
parameters = {
|
||||
'server_name': plexpy.CONFIG.PMS_NAME,
|
||||
'server_name': jellypy.CONFIG.PMS_NAME,
|
||||
'start_date': self.start_date.format(date_format),
|
||||
'end_date': self.end_date.format(date_format),
|
||||
'current_year': self.start_date.year,
|
||||
@@ -616,19 +601,20 @@ class Newsletter(object):
|
||||
'newsletter_uuid': self.uuid,
|
||||
'newsletter_id': self.newsletter_id,
|
||||
'newsletter_id_name': self.newsletter_id_name,
|
||||
'newsletter_password': plexpy.CONFIG.NEWSLETTER_PASSWORD
|
||||
'newsletter_password': jellypy.CONFIG.NEWSLETTER_PASSWORD
|
||||
}
|
||||
|
||||
return parameters
|
||||
|
||||
def build_text(self):
|
||||
from plexpy.notification_handler import CustomFormatter
|
||||
from jellypy.notification_handler import CustomFormatter
|
||||
custom_formatter = CustomFormatter()
|
||||
|
||||
try:
|
||||
subject = custom_formatter.format(str(self.subject), **self.parameters)
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter subject. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli Newsletter :: Unable to parse parameter %s in newsletter subject. Using fallback." % e)
|
||||
subject = str(self._DEFAULT_SUBJECT).format(**self.parameters)
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
|
||||
@@ -646,7 +632,8 @@ class Newsletter(object):
|
||||
try:
|
||||
message = custom_formatter.format(str(self.message), **self.parameters)
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter message. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli Newsletter :: Unable to parse parameter %s in newsletter message. Using fallback." % e)
|
||||
message = str(self._DEFAULT_MESSAGE).format(**self.parameters)
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter message: %s. Using fallback." % e)
|
||||
@@ -655,13 +642,14 @@ class Newsletter(object):
|
||||
return subject, body, message
|
||||
|
||||
def build_filename(self):
|
||||
from plexpy.notification_handler import CustomFormatter
|
||||
from jellypy.notification_handler import CustomFormatter
|
||||
custom_formatter = CustomFormatter()
|
||||
|
||||
try:
|
||||
filename = custom_formatter.format(str(self.filename), **self.parameters)
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter filename. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli Newsletter :: Unable to parse parameter %s in newsletter filename. Using fallback." % e)
|
||||
filename = str(self._DEFAULT_FILENAME).format(**self.parameters)
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
|
||||
@@ -702,9 +690,10 @@ class RecentlyAdded(Newsletter):
|
||||
_TEMPLATE = 'recently_added.html'
|
||||
|
||||
def _get_recently_added(self, media_type=None):
|
||||
from plexpy.notification_handler import format_group_index
|
||||
from jellypy.notification_handler import format_group_index
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
|
||||
recently_added = []
|
||||
done = False
|
||||
@@ -818,10 +807,11 @@ class RecentlyAdded(Newsletter):
|
||||
return recently_added
|
||||
|
||||
def retrieve_data(self):
|
||||
from plexpy.notification_handler import get_img_info, set_hash_image_info
|
||||
from jellypy.notification_handler import get_img_info, set_hash_image_info
|
||||
|
||||
if not self.config['incl_libraries']:
|
||||
logger.warn("Tautulli Newsletters :: Failed to retrieve %s newsletter data: no libraries selected." % self.NAME)
|
||||
logger.warn(
|
||||
"Tautulli Newsletters :: Failed to retrieve %s newsletter data: no libraries selected." % self.NAME)
|
||||
|
||||
media_types = set()
|
||||
for s in self._get_sections():
|
||||
@@ -948,8 +938,8 @@ class RecentlyAdded(Newsletter):
|
||||
newsletter_libraries.append(s['section_name'])
|
||||
|
||||
parameters['newsletter_libraries'] = ', '.join(sorted(newsletter_libraries))
|
||||
parameters['pms_identifier'] = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
parameters['pms_web_url'] = plexpy.CONFIG.PMS_WEB_URL
|
||||
parameters['pms_identifier'] = jellypy.CONFIG.PMS_IDENTIFIER
|
||||
parameters['pms_web_url'] = jellypy.CONFIG.PMS_WEB_URL
|
||||
|
||||
return parameters
|
||||
|
||||
@@ -15,60 +15,39 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.


from __future__ import division
from __future__ import unicode_literals
from future.builtins import next
from future.builtins import map
from future.builtins import str
from future.builtins import range
import hashlib
import json
import os
import re
import threading
import time
from collections import Counter, defaultdict
from functools import partial
from itertools import groupby
from operator import itemgetter
from string import Formatter

import arrow
import bleach
from collections import Counter, defaultdict
from functools import partial
import hashlib
from itertools import groupby
import json
from operator import itemgetter
import os
import re
from string import Formatter
import threading
import time

import musicbrainzngs

import plexpy
if plexpy.PYTHON2:
    import activity_processor
    import common
    import database
    import datafactory
    import logger
    import helpers
    import notifiers
    import pmsconnect
    import request
    from newsletter_handler import notify as notify_newsletter
else:
    from plexpy import activity_processor
    from plexpy import common
    from plexpy import database
    from plexpy import datafactory
    from plexpy import logger
    from plexpy import helpers
    from plexpy import notifiers
    from plexpy import pmsconnect
    from plexpy import request
    from plexpy.newsletter_handler import notify as notify_newsletter
import jellypy
from jellypy import activity_processor
from jellypy import common
from jellypy import database
from jellypy import datafactory
from jellypy import logger
from jellypy import helpers
from jellypy import notifiers
from jellypy import request
from jellypy.newsletter_handler import notify as notify_newsletter


def process_queue():
queue = plexpy.NOTIFY_QUEUE
|
||||
queue = jellypy.NOTIFY_QUEUE
|
||||
while True:
|
||||
params = queue.get()
|
||||
|
||||
|
||||
if params is None:
|
||||
break
|
||||
elif params:
|
||||
@@ -88,14 +67,16 @@ def process_queue():
|
||||
|
||||
|
||||
def start_threads(num_threads=1):
|
||||
logger.info("Tautulli NotificationHandler :: Starting background notification handler ({} threads).".format(num_threads))
|
||||
logger.info(
|
||||
"Tautulli NotificationHandler :: Starting background notification handler ({} threads).".format(num_threads))
|
||||
for x in range(num_threads):
|
||||
thread = threading.Thread(target=process_queue)
|
||||
thread.daemon = True
|
||||
thread.start()
|
||||
|
||||
|
||||
def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, timeline_data=None, manual_trigger=False, **kwargs):
|
||||
def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, timeline_data=None, manual_trigger=False,
|
||||
**kwargs):
|
||||
if not notify_action:
|
||||
logger.debug("Tautulli NotificationHandler :: Notify called but no action received.")
|
||||
return
|
||||
@@ -119,7 +100,8 @@ def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, ti
|
||||
|
||||
if notifiers_enabled and (manual_trigger or conditions):
|
||||
if manual_trigger:
|
||||
logger.debug("Tautulli NotificationHandler :: Notifiers enabled for notify_action '%s' (manual trigger)." % notify_action)
|
||||
logger.debug(
|
||||
"Tautulli NotificationHandler :: Notifiers enabled for notify_action '%s' (manual trigger)." % notify_action)
|
||||
|
||||
if stream_data or timeline_data:
|
||||
# Build the notification parameters
|
||||
@@ -148,14 +130,16 @@ def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, ti
|
||||
'timeline_data': timeline_data,
|
||||
'parameters': parameters}
|
||||
data.update(kwargs)
|
||||
plexpy.NOTIFY_QUEUE.put(data)
|
||||
jellypy.NOTIFY_QUEUE.put(data)
|
||||
else:
|
||||
logger.debug("Tautulli NotificationHandler :: Custom notification conditions not satisfied, skipping notifier_id %s." % notifier['id'])
|
||||
logger.debug(
|
||||
"Tautulli NotificationHandler :: Custom notification conditions not satisfied, skipping notifier_id %s." %
|
||||
notifier['id'])
|
||||
|
||||
# Add on_concurrent and on_newdevice to queue if action is on_play
|
||||
if notify_action == 'on_play':
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_concurrent'})
|
||||
plexpy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_newdevice'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_concurrent'})
|
||||
jellypy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_newdevice'})
|
||||
|
||||
|
||||
def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):
|
||||
@@ -179,33 +163,39 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):
|
||||
# return False
|
||||
|
||||
if notify_action == 'on_concurrent':
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
result = pms_connect.get_current_activity()
|
||||
|
||||
user_sessions = []
|
||||
if result:
|
||||
user_sessions = [s for s in result['sessions'] if s['user_id'] == stream_data['user_id']]
|
||||
|
||||
if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP:
|
||||
evaluated = len(Counter(s['ip_address'] for s in user_sessions)) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
|
||||
else:
|
||||
evaluated = len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
|
||||
pass
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# result = pms_connect.get_current_activity()
|
||||
#
|
||||
# user_sessions = []
|
||||
# if result:
|
||||
# user_sessions = [s for s in result['sessions'] if s['user_id'] == stream_data['user_id']]
|
||||
#
|
||||
# if jellypy.CONFIG.NOTIFY_CONCURRENT_BY_IP:
|
||||
# evaluated = len(
|
||||
# Counter(s['ip_address'] for s in user_sessions)) >= jellypy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
|
||||
# else:
|
||||
# evaluated = len(user_sessions) >= jellypy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
|
||||
|
||||
elif notify_action == 'on_newdevice':
|
||||
data_factory = datafactory.DataFactory()
|
||||
user_devices = data_factory.get_user_devices(user_id=stream_data['user_id'])
|
||||
user_devices = data_factory.get_user_devices(user_id=stream_data['user_id'],
|
||||
history_only=not jellypy.CONFIG.NOTIFY_NEW_DEVICE_INITIAL_ONLY)
|
||||
evaluated = stream_data['machine_id'] not in user_devices
|
||||
|
||||
elif stream_data['media_type'] in ('movie', 'episode', 'clip'):
|
||||
progress_percent = helpers.get_percent(stream_data['view_offset'], stream_data['duration'])
|
||||
|
||||
if notify_action == 'on_stop':
|
||||
evaluated = (plexpy.CONFIG.NOTIFY_CONSECUTIVE or
|
||||
(stream_data['media_type'] == 'movie' and progress_percent < plexpy.CONFIG.MOVIE_WATCHED_PERCENT) or
|
||||
(stream_data['media_type'] == 'episode' and progress_percent < plexpy.CONFIG.TV_WATCHED_PERCENT))
|
||||
evaluated = (jellypy.CONFIG.NOTIFY_CONSECUTIVE or
|
||||
(stream_data[
|
||||
'media_type'] == 'movie' and progress_percent < jellypy.CONFIG.MOVIE_WATCHED_PERCENT) or
|
||||
(stream_data[
|
||||
'media_type'] == 'episode' and progress_percent < jellypy.CONFIG.TV_WATCHED_PERCENT))
|
||||
|
||||
elif notify_action == 'on_resume':
|
||||
evaluated = plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99
|
||||
evaluated = jellypy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99
|
||||
|
||||
# All other activity notify actions
|
||||
else:
|
||||
@@ -217,7 +207,8 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):
|
||||
else:
|
||||
evaluated = False
|
||||
|
||||
logger.debug("Tautulli NotificationHandler :: Global notification conditions evaluated to '{}'.".format(evaluated))
|
||||
logger.debug(
|
||||
"Tautulli NotificationHandler :: Global notification conditions evaluated to '{}'.".format(evaluated))
|
||||
# Recently Added notifications
|
||||
elif timeline_data:
|
||||
|
||||
@@ -271,7 +262,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
|
||||
if not parameter or not operator or not values:
|
||||
evaluated = True
|
||||
evaluated_conditions.append(evaluated)
|
||||
logger.debug("Tautulli NotificationHandler :: {%s} Blank condition > %s" % (i+1, evaluated))
|
||||
logger.debug("Tautulli NotificationHandler :: {%s} Blank condition > %s" % (i + 1, evaluated))
|
||||
continue
|
||||
|
||||
# Make sure the condition values is in a list
|
||||
@@ -290,8 +281,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
|
||||
values = [helpers.cast_to_float(v) for v in values]
|
||||
|
||||
except ValueError as e:
|
||||
logger.error("Tautulli NotificationHandler :: {%s} Unable to cast condition '%s', values '%s', to type '%s'."
|
||||
% (i+1, parameter, values, parameter_type))
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: {%s} Unable to cast condition '%s', values '%s', to type '%s'."
|
||||
% (i + 1, parameter, values, parameter_type))
|
||||
return False
|
||||
|
||||
# Cast the parameter value to the correct type
|
||||
@@ -306,8 +298,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
|
||||
parameter_value = helpers.cast_to_float(parameter_value)
|
||||
|
||||
except ValueError as e:
|
||||
logger.error("Tautulli NotificationHandler :: {%s} Unable to cast parameter '%s', value '%s', to type '%s'."
|
||||
% (i+1, parameter, parameter_value, parameter_type))
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: {%s} Unable to cast parameter '%s', value '%s', to type '%s'."
|
||||
% (i + 1, parameter, parameter_value, parameter_type))
|
||||
return False
|
||||
|
||||
# Check each condition
|
||||
@@ -338,11 +331,12 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
|
||||
else:
|
||||
evaluated = None
|
||||
logger.warn("Tautulli NotificationHandler :: {%s} Invalid condition operator '%s' > %s."
|
||||
% (i+1, operator, evaluated))
|
||||
% (i + 1, operator, evaluated))
|
||||
|
||||
evaluated_conditions.append(evaluated)
|
||||
logger.debug("Tautulli NotificationHandler :: {%s} %s | %s | %s > '%s' > %s"
|
||||
% (i+1, parameter, operator, ' or '.join(["'%s'" % v for v in values]), parameter_value, evaluated))
|
||||
% (i + 1, parameter, operator, ' or '.join(["'%s'" % v for v in values]), parameter_value,
|
||||
evaluated))
|
||||
|
||||
if logic_groups:
|
||||
# Format and evaluate the logic string
|
||||
@@ -356,7 +350,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
|
||||
else:
|
||||
evaluated_logic = all(evaluated_conditions[1:])
|
||||
logger.debug("Tautulli NotificationHandler :: Condition logic [blank]: %s > %s"
|
||||
% (' and '.join(['{%s}' % (i+1) for i in range(len(custom_conditions))]), evaluated_logic))
|
||||
% (' and '.join(['{%s}' % (i + 1) for i in range(len(custom_conditions))]), evaluated_logic))
|
||||
|
||||
logger.debug("Tautulli NotificationHandler :: Custom conditions evaluated to '{}'. Conditions: {}.".format(
|
||||
evaluated_logic, evaluated_conditions[1:]))
|
||||
@@ -456,7 +450,6 @@ def get_notify_state_enabled(session, notify_action, notified=True):
|
||||
|
||||
|
||||
def set_notify_state(notifier, notify_action, subject='', body='', script_args='', session=None):
|
||||
|
||||
if notifier and notify_action:
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
@@ -496,9 +489,9 @@ def set_notify_success(notification_id):
|
||||
|
||||
def build_media_notify_params(notify_action=None, session=None, timeline=None, manual_trigger=False, **kwargs):
|
||||
# Get time formats
|
||||
date_format = helpers.momentjs_to_arrow(plexpy.CONFIG.DATE_FORMAT)
|
||||
time_format = helpers.momentjs_to_arrow(plexpy.CONFIG.TIME_FORMAT)
|
||||
duration_format = helpers.momentjs_to_arrow(plexpy.CONFIG.TIME_FORMAT, duration=True)
|
||||
date_format = helpers.momentjs_to_arrow(jellypy.CONFIG.DATE_FORMAT)
|
||||
time_format = helpers.momentjs_to_arrow(jellypy.CONFIG.TIME_FORMAT)
|
||||
duration_format = helpers.momentjs_to_arrow(jellypy.CONFIG.TIME_FORMAT, duration=True)
|
||||
|
||||
# Get metadata for the item
|
||||
if session:
|
||||
@@ -544,10 +537,11 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
notify_params.update(media_part_info)
|
||||
|
||||
child_metadata = grandchild_metadata = []
|
||||
for key in kwargs.pop('child_keys', []):
|
||||
child_metadata.append(pmsconnect.PmsConnect().get_metadata_details(rating_key=key))
|
||||
for key in kwargs.pop('grandchild_keys', []):
|
||||
grandchild_metadata.append(pmsconnect.PmsConnect().get_metadata_details(rating_key=key))
|
||||
# TODO: Jellyfin
|
||||
# for key in kwargs.pop('child_keys', []):
|
||||
# child_metadata.append(pmsconnect.PmsConnect().get_metadata_details(rating_key=key))
|
||||
# for key in kwargs.pop('grandchild_keys', []):
|
||||
# grandchild_metadata.append(pmsconnect.PmsConnect().get_metadata_details(rating_key=key))
|
||||
|
||||
# Session values
|
||||
session = session or {}
|
||||
@@ -570,7 +564,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
total_bandwidth = lan_bandwidth + wan_bandwidth
|
||||
|
||||
# Generate a combined transcode decision value
|
||||
if session.get('stream_video_decision', '') == 'transcode' or session.get('stream_audio_decision', '') == 'transcode':
|
||||
if session.get('stream_video_decision', '') == 'transcode' or session.get('stream_audio_decision',
|
||||
'') == 'transcode':
|
||||
transcode_decision = 'Transcode'
|
||||
elif session.get('stream_video_decision', '') == 'copy' or session.get('stream_audio_decision', '') == 'copy':
|
||||
transcode_decision = 'Direct Stream'
|
||||
@@ -596,9 +591,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
else:
|
||||
plex_web_rating_key = notify_params['rating_key']
|
||||
|
||||
notify_params['plex_url'] = '{web_url}#!/server/{pms_identifier}/details?key=%2Flibrary%2Fmetadata%2F{rating_key}'.format(
|
||||
web_url=plexpy.CONFIG.PMS_WEB_URL,
|
||||
pms_identifier=plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
notify_params[
|
||||
'plex_url'] = '{web_url}#!/server/{pms_identifier}/details?key=%2Flibrary%2Fmetadata%2F{rating_key}'.format(
|
||||
web_url=jellypy.CONFIG.PMS_WEB_URL,
|
||||
pms_identifier=jellypy.CONFIG.PMS_IDENTIFIER,
|
||||
rating_key=plex_web_rating_key)
|
||||
|
||||
# Check external guids
|
||||
@@ -620,7 +616,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/imdb/' + notify_params['imdb_id']
|
||||
|
||||
if 'thetvdb://' in notify_params['guid'] or notify_params['thetvdb_id']:
|
||||
notify_params['thetvdb_id'] = notify_params['thetvdb_id'] or notify_params['guid'].split('thetvdb://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['thetvdb_id'] = notify_params['thetvdb_id'] or \
|
||||
notify_params['guid'].split('thetvdb://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['thetvdb_url'] = 'https://thetvdb.com/?tab=series&id=' + notify_params['thetvdb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/' + notify_params['thetvdb_id'] + '?type=show'
|
||||
|
||||
@@ -631,12 +628,15 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
|
||||
if 'themoviedb://' in notify_params['guid'] or notify_params['themoviedb_id']:
|
||||
if notify_params['media_type'] == 'movie':
|
||||
notify_params['themoviedb_id'] = notify_params['themoviedb_id'] or notify_params['guid'].split('themoviedb://')[1].split('?')[0]
|
||||
notify_params['themoviedb_id'] = notify_params['themoviedb_id'] or \
|
||||
notify_params['guid'].split('themoviedb://')[1].split('?')[0]
|
||||
notify_params['themoviedb_url'] = 'https://www.themoviedb.org/movie/' + notify_params['themoviedb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params['themoviedb_id'] + '?type=movie'
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params[
|
||||
'themoviedb_id'] + '?type=movie'
|
||||
|
||||
elif notify_params['media_type'] in ('show', 'season', 'episode'):
|
||||
notify_params['themoviedb_id'] = notify_params['themoviedb_id'] or notify_params['guid'].split('themoviedb://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['themoviedb_id'] = notify_params['themoviedb_id'] or \
|
||||
notify_params['guid'].split('themoviedb://')[1].split('/')[0].split('?')[0]
|
||||
notify_params['themoviedb_url'] = 'https://www.themoviedb.org/tv/' + notify_params['themoviedb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params['themoviedb_id'] + '?type=show'
|
||||
|
||||
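Aside: the `split()` chains being rewrapped in these hunks all do the same job — pull the numeric id out of an agent guid. A standalone sketch of that parsing; the guid values below are made up for illustration:

```python
def extract_agent_id(guid, scheme):
    """Return the id embedded in an agent guid, e.g. 'thetvdb://121361/1/1?lang=en' -> '121361'."""
    if scheme + '://' not in guid:
        return ''
    # Everything after the scheme, up to the first '/' or '?' separator.
    return guid.split(scheme + '://')[1].split('/')[0].split('?')[0]


print(extract_agent_id('thetvdb://121361/1/1?lang=en', 'thetvdb'))    # 121361
print(extract_agent_id('themoviedb://603?lang=en', 'themoviedb'))     # 603
```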
@@ -645,7 +645,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
notify_params['lastfm_url'] = 'https://www.last.fm/music/' + notify_params['lastfm_id']
|
||||
|
||||
# Get TheMovieDB info (for movies and tv only)
|
||||
if plexpy.CONFIG.THEMOVIEDB_LOOKUP and notify_params['media_type'] in ('movie', 'show', 'season', 'episode'):
|
||||
if jellypy.CONFIG.THEMOVIEDB_LOOKUP and notify_params['media_type'] in ('movie', 'show', 'season', 'episode'):
|
||||
if notify_params.get('themoviedb_id'):
|
||||
themoveidb_json = get_themoviedb_info(rating_key=rating_key,
|
||||
media_type=notify_params['media_type'],
|
||||
@@ -688,7 +688,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
notify_params['themoviedb_id'], 'show' if lookup_media_type == 'tv' else 'movie')
|
||||
|
||||
# Get TVmaze info (for tv shows only)
|
||||
if plexpy.CONFIG.TVMAZE_LOOKUP and notify_params['media_type'] in ('show', 'season', 'episode'):
|
||||
if jellypy.CONFIG.TVMAZE_LOOKUP and notify_params['media_type'] in ('show', 'season', 'episode'):
|
||||
if notify_params.get('thetvdb_id') or notify_params.get('imdb_id') or notify_params.get('plex_id'):
|
||||
if notify_params['media_type'] == 'episode':
|
||||
lookup_key = notify_params['grandparent_rating_key']
|
||||
@@ -709,13 +709,14 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
|
||||
if tvmaze_info.get('thetvdb_id'):
|
||||
notify_params['thetvdb_url'] = 'https://thetvdb.com/?tab=series&id=' + str(tvmaze_info['thetvdb_id'])
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/{}' + str(notify_params['thetvdb_id']) + '?type=show'
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/tvdb/{}' + str(
|
||||
notify_params['thetvdb_id']) + '?type=show'
|
||||
if tvmaze_info.get('imdb_id'):
|
||||
notify_params['imdb_url'] = 'https://www.imdb.com/title/' + tvmaze_info['imdb_id']
|
||||
notify_params['trakt_url'] = 'https://trakt.tv/search/imdb/' + notify_params['imdb_id']
|
||||
|
||||
# Get MusicBrainz info (for music only)
|
||||
if plexpy.CONFIG.MUSICBRAINZ_LOOKUP and notify_params['media_type'] in ('artist', 'album', 'track'):
|
||||
if jellypy.CONFIG.MUSICBRAINZ_LOOKUP and notify_params['media_type'] in ('artist', 'album', 'track'):
|
||||
artist = release = recording = tracks = tnum = None
|
||||
if notify_params['media_type'] == 'artist':
|
||||
musicbrainz_type = 'artist'
|
||||
@@ -771,14 +772,14 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
img_info = get_img_info(img=poster_thumb, rating_key=poster_key, title=poster_title, fallback=fallback)
|
||||
poster_info = {'poster_title': img_info['img_title'], 'poster_url': img_info['img_url']}
|
||||
notify_params.update(poster_info)
|
||||
elif img_service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
elif img_service == 'self-hosted' and jellypy.CONFIG.HTTP_BASE_URL:
|
||||
img_hash = set_hash_image_info(img=poster_thumb, fallback=fallback)
|
||||
poster_info = {'poster_title': poster_title,
|
||||
'poster_url': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'image/' + img_hash}
|
||||
'poster_url': jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'image/' + img_hash}
|
||||
notify_params.update(poster_info)
|
||||
|
||||
if ((manual_trigger or plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT)
|
||||
and notify_params['media_type'] in ('show', 'artist')):
|
||||
if ((manual_trigger or jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT)
|
||||
and notify_params['media_type'] in ('show', 'artist')):
|
||||
show_name = notify_params['title']
|
||||
episode_name = ''
|
||||
artist_name = notify_params['title']
|
||||
@@ -796,7 +797,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
child_count = len(child_num)
|
||||
grandchild_count = ''
|
||||
|
||||
elif ((manual_trigger or plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT)
|
||||
elif ((manual_trigger or jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT)
|
||||
and notify_params['media_type'] in ('season', 'album')):
|
||||
show_name = notify_params['parent_title']
|
||||
episode_name = ''
|
||||
@@ -849,16 +850,16 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
available_params = {
|
||||
# Global paramaters
|
||||
'tautulli_version': common.RELEASE,
|
||||
'tautulli_remote': plexpy.CONFIG.GIT_REMOTE,
|
||||
'tautulli_branch': plexpy.CONFIG.GIT_BRANCH,
|
||||
'tautulli_commit': plexpy.CURRENT_VERSION,
|
||||
'server_name': plexpy.CONFIG.PMS_NAME,
|
||||
'server_ip': plexpy.CONFIG.PMS_IP,
|
||||
'server_port': plexpy.CONFIG.PMS_PORT,
|
||||
'server_url': plexpy.CONFIG.PMS_URL,
|
||||
'server_machine_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'server_platform': plexpy.CONFIG.PMS_PLATFORM,
|
||||
'server_version': plexpy.CONFIG.PMS_VERSION,
|
||||
'tautulli_remote': jellypy.CONFIG.GIT_REMOTE,
|
||||
'tautulli_branch': jellypy.CONFIG.GIT_BRANCH,
|
||||
'tautulli_commit': jellypy.CURRENT_VERSION,
|
||||
'server_name': jellypy.CONFIG.PMS_NAME,
|
||||
'server_ip': jellypy.CONFIG.PMS_IP,
|
||||
'server_port': jellypy.CONFIG.PMS_PORT,
|
||||
'server_url': jellypy.CONFIG.PMS_URL,
|
||||
'server_machine_id': jellypy.CONFIG.PMS_IDENTIFIER,
|
||||
'server_platform': jellypy.CONFIG.PMS_PLATFORM,
|
||||
'server_version': jellypy.CONFIG.PMS_VERSION,
|
||||
'action': notify_action.split('on_')[-1],
|
||||
'current_year': now.year,
|
||||
'current_month': now.month,
|
||||
@@ -998,15 +999,15 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
'track_count': grandchild_count,
|
||||
'year': notify_params['year'],
|
||||
'release_date': arrow.get(notify_params['originally_available_at']).format(date_format)
|
||||
if notify_params['originally_available_at'] else '',
|
||||
if notify_params['originally_available_at'] else '',
|
||||
'air_date': arrow.get(notify_params['originally_available_at']).format(date_format)
|
||||
if notify_params['originally_available_at'] else '',
|
||||
if notify_params['originally_available_at'] else '',
|
||||
'added_date': arrow.get(notify_params['added_at']).format(date_format)
|
||||
if notify_params['added_at'] else '',
|
||||
if notify_params['added_at'] else '',
|
||||
'updated_date': arrow.get(notify_params['updated_at']).format(date_format)
|
||||
if notify_params['updated_at'] else '',
|
||||
if notify_params['updated_at'] else '',
|
||||
'last_viewed_date': arrow.get(notify_params['last_viewed_at']).format(date_format)
|
||||
if notify_params['last_viewed_at'] else '',
|
||||
if notify_params['last_viewed_at'] else '',
|
||||
'studio': notify_params['studio'],
|
||||
'content_rating': notify_params['content_rating'],
|
||||
'directors': ', '.join(notify_params['directors']),
|
||||
@@ -1018,7 +1019,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
'summary': notify_params['summary'],
|
||||
'tagline': notify_params['tagline'],
|
||||
'rating': rating,
|
||||
'critic_rating': critic_rating,
|
||||
'critic_rating': critic_rating,
|
||||
'audience_rating': audience_rating,
|
||||
'user_rating': notify_params['user_rating'],
|
||||
'duration': duration,
|
||||
@@ -1088,17 +1089,18 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
'parent_thumb': notify_params['parent_thumb'],
|
||||
'grandparent_thumb': notify_params['grandparent_thumb'],
|
||||
'poster_thumb': poster_thumb
|
||||
}
|
||||
}
|
||||
|
||||
return available_params
|
||||
|
||||
|
||||
def build_server_notify_params(notify_action=None, **kwargs):
|
||||
# Get time formats
|
||||
date_format = plexpy.CONFIG.DATE_FORMAT.replace('Do','')
|
||||
time_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','')
|
||||
date_format = jellypy.CONFIG.DATE_FORMAT.replace('Do', '')
|
||||
time_format = jellypy.CONFIG.TIME_FORMAT.replace('Do', '')
|
||||
|
||||
update_channel = pmsconnect.PmsConnect().get_server_update_channel()
|
||||
# TODO: Jellyfin
|
||||
# update_channel = pmsconnect.PmsConnect().get_server_update_channel()
|
||||
|
||||
pms_download_info = defaultdict(str, kwargs.pop('pms_download_info', {}))
|
||||
plexpy_download_info = defaultdict(str, kwargs.pop('plexpy_download_info', {}))
|
||||
@@ -1110,16 +1112,16 @@ def build_server_notify_params(notify_action=None, **kwargs):
|
||||
available_params = {
|
||||
# Global paramaters
|
||||
'tautulli_version': common.RELEASE,
|
||||
'tautulli_remote': plexpy.CONFIG.GIT_REMOTE,
|
||||
'tautulli_branch': plexpy.CONFIG.GIT_BRANCH,
|
||||
'tautulli_commit': plexpy.CURRENT_VERSION,
|
||||
'server_name': plexpy.CONFIG.PMS_NAME,
|
||||
'server_ip': plexpy.CONFIG.PMS_IP,
|
||||
'server_port': plexpy.CONFIG.PMS_PORT,
|
||||
'server_url': plexpy.CONFIG.PMS_URL,
|
||||
'server_platform': plexpy.CONFIG.PMS_PLATFORM,
|
||||
'server_version': plexpy.CONFIG.PMS_VERSION,
|
||||
'server_machine_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'tautulli_remote': jellypy.CONFIG.GIT_REMOTE,
|
||||
'tautulli_branch': jellypy.CONFIG.GIT_BRANCH,
|
||||
'tautulli_commit': jellypy.CURRENT_VERSION,
|
||||
'server_name': jellypy.CONFIG.PMS_NAME,
|
||||
'server_ip': jellypy.CONFIG.PMS_IP,
|
||||
'server_port': jellypy.CONFIG.PMS_PORT,
|
||||
'server_url': jellypy.CONFIG.PMS_URL,
|
||||
'server_platform': jellypy.CONFIG.PMS_PLATFORM,
|
||||
'server_version': jellypy.CONFIG.PMS_VERSION,
|
||||
'server_machine_id': jellypy.CONFIG.PMS_IDENTIFIER,
|
||||
'action': notify_action.split('on_')[-1],
|
||||
'current_year': now.year,
|
||||
'current_month': now.month,
|
||||
@@ -1146,8 +1148,9 @@ def build_server_notify_params(notify_action=None, **kwargs):
|
||||
'update_version': pms_download_info['version'],
|
||||
'update_url': pms_download_info['download_url'],
|
||||
'update_release_date': arrow.get(pms_download_info['release_date']).format(date_format)
|
||||
if pms_download_info['release_date'] else '',
|
||||
'update_channel': 'Beta' if update_channel == 'beta' else 'Public',
|
||||
if pms_download_info['release_date'] else '',
|
||||
# TODO: Jellyfin
|
||||
# 'update_channel': 'Beta' if update_channel == 'beta' else 'Public',
|
||||
'update_platform': pms_download_info['platform'],
|
||||
'update_distro': pms_download_info['distro'],
|
||||
'update_distro_build': pms_download_info['build'],
|
||||
@@ -1163,7 +1166,7 @@ def build_server_notify_params(notify_action=None, **kwargs):
|
||||
'tautulli_update_commit': kwargs.pop('plexpy_update_commit', ''),
|
||||
'tautulli_update_behind': kwargs.pop('plexpy_update_behind', ''),
|
||||
'tautulli_update_changelog': plexpy_download_info['body']
|
||||
}
|
||||
}
|
||||
|
||||
return available_params
|
||||
|
||||
@@ -1188,8 +1191,8 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
|
||||
media_type = parameters.get('media_type')
|
||||
|
||||
all_tags = r'<movie>.*?</movie>|' \
|
||||
'<show>.*?</show>|<season>.*?</season>|<episode>.*?</episode>|' \
|
||||
'<artist>.*?</artist>|<album>.*?</album>|<track>.*?</track>'
|
||||
'<show>.*?</show>|<season>.*?</season>|<episode>.*?</episode>|' \
|
||||
'<artist>.*?</artist>|<album>.*?</album>|<track>.*?</track>'
|
||||
|
||||
# Check for exclusion tags
|
||||
if media_type == 'movie':
|
||||
@@ -1199,7 +1202,8 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
|
||||
elif media_type == 'season':
|
||||
pattern = re.compile(all_tags.replace('<season>.*?</season>', '<season>|</season>'), re.IGNORECASE | re.DOTALL)
|
||||
elif media_type == 'episode':
|
||||
pattern = re.compile(all_tags.replace('<episode>.*?</episode>', '<episode>|</episode>'), re.IGNORECASE | re.DOTALL)
|
||||
pattern = re.compile(all_tags.replace('<episode>.*?</episode>', '<episode>|</episode>'),
|
||||
re.IGNORECASE | re.DOTALL)
|
||||
elif media_type == 'artist':
|
||||
pattern = re.compile(all_tags.replace('<artist>.*?</artist>', '<artist>|</artist>'), re.IGNORECASE | re.DOTALL)
|
||||
elif media_type == 'album':
|
||||
@@ -1223,10 +1227,12 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
|
||||
try:
|
||||
script_args = [str_formatter(arg) for arg in helpers.split_args(subject)]
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in script argument. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: Unable to parse parameter %s in script argument. Using fallback." % e)
|
||||
script_args = []
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli NotificationHandler :: Unable to parse custom script arguments: %s. Using fallback." % e)
|
||||
logger.exception(
|
||||
"Tautulli NotificationHandler :: Unable to parse custom script arguments: %s. Using fallback." % e)
|
||||
script_args = []
|
||||
|
||||
elif agent_id == 25:
|
||||
@@ -1234,51 +1240,61 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
|
||||
try:
|
||||
subject = json.loads(subject)
|
||||
except ValueError as e:
|
||||
logger.error("Tautulli NotificationHandler :: Unable to parse custom webhook json header data: %s. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: Unable to parse custom webhook json header data: %s. Using fallback." % e)
|
||||
subject = ''
|
||||
if subject:
|
||||
try:
|
||||
subject = json.dumps(helpers.traverse_map(subject, str_formatter))
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in webhook header data. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: Unable to parse parameter %s in webhook header data. Using fallback." % e)
|
||||
subject = ''
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli NotificationHandler :: Unable to parse custom webhook header data: %s. Using fallback." % e)
|
||||
logger.exception(
|
||||
"Tautulli NotificationHandler :: Unable to parse custom webhook header data: %s. Using fallback." % e)
|
||||
subject = ''
|
||||
|
||||
if body:
|
||||
try:
|
||||
body = json.loads(body)
|
||||
except ValueError as e:
|
||||
logger.error("Tautulli NotificationHandler :: Unable to parse custom webhook json body data: %s. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: Unable to parse custom webhook json body data: %s. Using fallback." % e)
|
||||
body = ''
|
||||
if body:
|
||||
try:
|
||||
body = json.dumps(helpers.traverse_map(body, str_formatter))
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in webhook body data. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: Unable to parse parameter %s in webhook body data. Using fallback." % e)
|
||||
body = ''
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli NotificationHandler :: Unable to parse custom webhook body data: %s. Using fallback." % e)
|
||||
logger.exception(
|
||||
"Tautulli NotificationHandler :: Unable to parse custom webhook body data: %s. Using fallback." % e)
|
||||
body = ''
|
||||
|
||||
else:
|
||||
try:
|
||||
subject = str_formatter(subject)
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in notification subject. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: Unable to parse parameter %s in notification subject. Using fallback." % e)
|
||||
subject = str(default_subject).format(**parameters)
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli NotificationHandler :: Unable to parse custom notification subject: %s. Using fallback." % e)
|
||||
logger.exception(
|
||||
"Tautulli NotificationHandler :: Unable to parse custom notification subject: %s. Using fallback." % e)
|
||||
subject = str(default_subject).format(**parameters)
|
||||
|
||||
try:
|
||||
body = str_formatter(body)
|
||||
except LookupError as e:
|
||||
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in notification body. Using fallback." % e)
|
||||
logger.error(
|
||||
"Tautulli NotificationHandler :: Unable to parse parameter %s in notification body. Using fallback." % e)
|
||||
body = str(default_body).format(**parameters)
|
||||
except Exception as e:
|
||||
logger.exception("Tautulli NotificationHandler :: Unable to parse custom notification body: %s. Using fallback." % e)
|
||||
logger.exception(
|
||||
"Tautulli NotificationHandler :: Unable to parse custom notification body: %s. Using fallback." % e)
|
||||
body = str(default_body).format(**parameters)
|
||||
|
||||
return subject, body, script_args
|
@@ -1324,7 +1340,7 @@ def format_group_index(group_keys):
    num = []
    num00 = []

    for k, g in groupby(enumerate(group_keys), lambda i_x: i_x[0]-i_x[1]):
    for k, g in groupby(enumerate(group_keys), lambda i_x: i_x[0] - i_x[1]):
        group = list(map(itemgetter(1), g))
        g_min, g_max = min(group), max(group)

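Aside: the `groupby` call this hunk touches (a whitespace-only change) collapses consecutive indexes into ranges by keying on position minus value. A self-contained sketch of the idea; the real `format_group_index` additionally builds the zero-padded variants:

```python
from itertools import groupby
from operator import itemgetter


def consecutive_ranges(keys):
    """Collapse sorted integers into (min, max) runs, e.g. [1, 2, 3, 6, 7] -> [(1, 3), (6, 7)]."""
    runs = []
    # Consecutive values share the same difference between their position and their value.
    for _, g in groupby(enumerate(keys), lambda i_x: i_x[0] - i_x[1]):
        group = list(map(itemgetter(1), g))
        runs.append((min(group), max(group)))
    return runs


print(consecutive_ranges([1, 2, 3, 6, 7, 10]))  # [(1, 3), (6, 7), (10, 10)]
```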
||||
@@ -1398,32 +1414,34 @@ def get_img_info(img=None, rating_key=None, title='', width=1000, height=1500,
|
||||
img_info = database_img_info[0]
|
||||
|
||||
elif not database_img_info and img:
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
result = pms_connect.get_image(refresh=True, **image_info)
|
||||
|
||||
if result and result[0]:
|
||||
img_url = delete_hash = ''
|
||||
|
||||
if service == 'imgur':
|
||||
img_url, delete_hash = helpers.upload_to_imgur(img_data=result[0],
|
||||
img_title=title,
|
||||
rating_key=rating_key,
|
||||
fallback=fallback)
|
||||
elif service == 'cloudinary':
|
||||
img_url = helpers.upload_to_cloudinary(img_data=result[0],
|
||||
img_title=title,
|
||||
rating_key=rating_key,
|
||||
fallback=fallback)
|
||||
|
||||
if img_url:
|
||||
img_hash = set_hash_image_info(**image_info)
|
||||
data_factory.set_img_info(img_hash=img_hash,
|
||||
img_title=title,
|
||||
img_url=img_url,
|
||||
delete_hash=delete_hash,
|
||||
service=service)
|
||||
|
||||
img_info = {'img_title': title, 'img_url': img_url}
|
||||
pass
|
||||
# TODO: Jellyfin
|
||||
# pms_connect = pmsconnect.PmsConnect()
|
||||
# result = pms_connect.get_image(refresh=True, **image_info)
|
||||
#
|
||||
# if result and result[0]:
|
||||
# img_url = delete_hash = ''
|
||||
#
|
||||
# if service == 'imgur':
|
||||
# img_url, delete_hash = helpers.upload_to_imgur(img_data=result[0],
|
||||
# img_title=title,
|
||||
# rating_key=rating_key,
|
||||
# fallback=fallback)
|
||||
# elif service == 'cloudinary':
|
||||
# img_url = helpers.upload_to_cloudinary(img_data=result[0],
|
||||
# img_title=title,
|
||||
# rating_key=rating_key,
|
||||
# fallback=fallback)
|
||||
#
|
||||
# if img_url:
|
||||
# img_hash = set_hash_image_info(**image_info)
|
||||
# data_factory.set_img_info(img_hash=img_hash,
|
||||
# img_title=title,
|
||||
# img_url=img_url,
|
||||
# delete_hash=delete_hash,
|
||||
# service=service)
|
||||
#
|
||||
# img_info = {'img_title': title, 'img_url': img_url}
|
||||
|
||||
if img_info['img_url'] and service == 'cloudinary':
|
||||
# Transform image using Cloudinary
|
||||
@@ -1463,7 +1481,7 @@ def set_hash_image_info(img=None, rating_key=None, width=750, height=1000,
|
||||
rating_key = img_rating_key
|
||||
|
||||
img_string = '{}.{}.{}.{}.{}.{}.{}.{}'.format(
|
||||
plexpy.CONFIG.PMS_UUID, img, rating_key, width, height, opacity, background, blur, fallback)
|
||||
jellypy.CONFIG.PMS_UUID, img, rating_key, width, height, opacity, background, blur, fallback)
|
||||
img_hash = hashlib.sha256(img_string.encode('utf-8')).hexdigest()
|
||||
|
||||
if add_to_db:
|
||||
@@ -1498,14 +1516,16 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, title=No
|
||||
'WHERE rating_key = ?'
|
||||
tvmaze_info = db.select_single(query, args=[rating_key])
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_tvmaze_by_tvdb_id: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli NotificationHandler :: Unable to execute database query for lookup_tvmaze_by_tvdb_id: %s." % e)
|
||||
return {}
|
||||
|
||||
if not tvmaze_info:
|
||||
tvmaze_info = {}
|
||||
|
||||
if thetvdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up TVmaze info for thetvdb_id '{}'.".format(thetvdb_id))
|
||||
logger.debug(
|
||||
"Tautulli NotificationHandler :: Looking up TVmaze info for thetvdb_id '{}'.".format(thetvdb_id))
|
||||
elif imdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up TVmaze info for imdb_id '{}'.".format(imdb_id))
|
||||
else:
|
||||
@@ -1558,20 +1578,25 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, titl
|
||||
'WHERE rating_key = ?'
|
||||
themoviedb_info = db.select_single(query, args=[rating_key])
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_themoviedb_by_imdb_id: %s." % e)
|
||||
logger.warn(
|
||||
"Tautulli NotificationHandler :: Unable to execute database query for lookup_themoviedb_by_imdb_id: %s." % e)
|
||||
return {}
|
||||
|
||||
if not themoviedb_info:
|
||||
themoviedb_info = {}
|
||||
|
||||
if thetvdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for thetvdb_id '{}'.".format(thetvdb_id))
|
||||
logger.debug(
|
||||
"Tautulli NotificationHandler :: Looking up The Movie Database info for thetvdb_id '{}'.".format(
|
||||
thetvdb_id))
|
||||
elif imdb_id:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for imdb_id '{}'.".format(imdb_id))
|
||||
logger.debug(
|
||||
"Tautulli NotificationHandler :: Looking up The Movie Database info for imdb_id '{}'.".format(imdb_id))
|
||||
else:
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for '{} ({})'.".format(title, year))
|
||||
logger.debug(
|
||||
"Tautulli NotificationHandler :: Looking up The Movie Database info for '{} ({})'.".format(title, year))
|
||||
|
||||
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY}
|
||||
params = {'api_key': jellypy.CONFIG.THEMOVIEDB_APIKEY}
|
||||
|
||||
if thetvdb_id or imdb_id:
|
||||
params['external_source'] = 'tvdb_id' if thetvdb_id else 'imdb_id'
|
||||
@@ -1647,10 +1672,12 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None):
|
||||
|
||||
themoviedb_json = {}
|
||||
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for themoviedb_id '{}'.".format(themoviedb_id))
|
||||
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for themoviedb_id '{}'.".format(
|
||||
themoviedb_id))
|
||||
|
||||
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY}
|
||||
response, err_msg, req_msg = request.request_response2('https://api.themoviedb.org/3/{}/{}'.format(media_type, themoviedb_id), params=params)
|
||||
params = {'api_key': jellypy.CONFIG.THEMOVIEDB_APIKEY}
|
||||
response, err_msg, req_msg = request.request_response2(
|
||||
'https://api.themoviedb.org/3/{}/{}'.format(media_type, themoviedb_id), params=params)
|
||||
|
||||
if response and not err_msg:
|
||||
themoviedb_json = response.json()
|
||||
@@ -1869,13 +1896,13 @@ class CustomFormatter(Formatter):
|
||||
obj = self.convert_field(obj, conversion)
|
||||
|
||||
# expand the format spec, if needed
|
||||
if plexpy.PYTHON2:
|
||||
if jellypy.PYTHON2:
|
||||
format_spec = self._vformat(format_spec, args, kwargs,
|
||||
used_args, recursion_depth - 1)
|
||||
else:
|
||||
format_spec, auto_arg_index = self._vformat(
|
||||
format_spec, args, kwargs,
|
||||
used_args, recursion_depth-1,
|
||||
used_args, recursion_depth - 1,
|
||||
auto_arg_index=auto_arg_index)
|
||||
|
||||
# format the object and append to the result
|
||||
@@ -1888,7 +1915,7 @@ class CustomFormatter(Formatter):
|
||||
result.append(suffix)
|
||||
# result.append(self.format_field(obj, format_spec))
|
||||
|
||||
if plexpy.PYTHON2:
|
||||
if jellypy.PYTHON2:
|
||||
return ''.join(result)
|
||||
else:
|
||||
return ''.join(result), auto_arg_index
|
@@ -15,33 +15,31 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from future.builtins import str
from future.builtins import object

import base64
import bleach
import json
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import email.utils
from paho.mqtt.publish import single
import json
import os
import re
import requests
import smtplib
import subprocess
import sys
import threading
import time
from future.moves.urllib.parse import urlencode
from future.moves.urllib.parse import urlparse
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from urllib.parse import urlencode, urlparse

import bleach
import requests
from paho.mqtt.publish import single

try:
    from Cryptodome.Protocol.KDF import PBKDF2
    from Cryptodome.Cipher import AES
    from Cryptodome.Random import get_random_bytes
    from Cryptodome.Hash import HMAC, SHA1

    CRYPTODOME = True
except ImportError:
    try:
@@ -49,34 +47,22 @@ except ImportError:
        from Crypto.Cipher import AES
        from Crypto.Random import get_random_bytes
        from Crypto.Hash import HMAC, SHA1

        CRYPTODOME = True
    except ImportError:
        CRYPTODOME = False

import gntp.notifier
import facebook
import twitter

import plexpy
if plexpy.PYTHON2:
    import common
    import database
    import helpers
    import logger
    import mobile_app
    import pmsconnect
    import request
    import users
else:
    from plexpy import common
    from plexpy import database
    from plexpy import helpers
    from plexpy import logger
    from plexpy import mobile_app
    from plexpy import pmsconnect
    from plexpy import request
    from plexpy import users

import jellypy
from jellypy import common
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import mobile_app
from jellypy import request
from jellypy import users

BROWSER_NOTIFIERS = {}

||||
@@ -140,12 +126,6 @@ def available_notification_agents():
|
||||
'class': EMAIL,
|
||||
'action_types': ('all',)
|
||||
},
|
||||
{'label': 'Facebook',
|
||||
'name': 'facebook',
|
||||
'id': AGENT_IDS['facebook'],
|
||||
'class': FACEBOOK,
|
||||
'action_types': ('all',)
|
||||
},
|
||||
{'label': 'GroupMe',
|
||||
'name': 'groupme',
|
||||
'id': AGENT_IDS['groupme'],
|
||||
@@ -793,7 +773,7 @@ class PrettyMetadata(object):
|
||||
@staticmethod
|
||||
def get_parameters():
|
||||
parameters = {param['value']: param['name']
|
||||
for category in common.NOTIFICATION_PARAMETERS for param in category['parameters']}
|
||||
for category in common.NOTIFICATION_PARAMETERS for param in category['parameters']}
|
||||
parameters[''] = ''
|
||||
return parameters
|
||||
|
||||
@@ -854,10 +834,11 @@ class Notifier(object):
|
||||
|
||||
else:
|
||||
verify_msg = ""
|
||||
if response is not None and response.status_code >= 400 and response.status_code < 500:
|
||||
verify_msg = " Verify you notification agent settings are correct."
|
||||
if response is not None and 400 <= response.status_code < 500:
|
||||
verify_msg = " Verify your notification agent settings are correct."
|
||||
|
||||
logger.error("Tautulli Notifiers :: {name} notification failed.{msg}".format(msg=verify_msg, name=self.NAME))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: {name} notification failed.{msg}".format(msg=verify_msg, name=self.NAME))
|
||||
|
||||
if err_msg:
|
||||
logger.error("Tautulli Notifiers :: {}".format(err_msg))
|
||||
@@ -912,7 +893,7 @@ class ANDROIDAPP(Notifier):
|
||||
'rating_key': pretty_metadata.parameters.get('rating_key', ''),
|
||||
'poster_thumb': pretty_metadata.parameters.get('poster_thumb', '')}
|
||||
|
||||
#logger.debug("Plaintext data: {}".format(plaintext_data))
|
||||
# logger.debug("Plaintext data: {}".format(plaintext_data))
|
||||
|
||||
if CRYPTODOME:
|
||||
# Key generation
|
||||
@@ -923,7 +904,7 @@ class ANDROIDAPP(Notifier):
|
||||
key = PBKDF2(passphrase, salt, dkLen=key_length, count=iterations,
|
||||
prf=lambda p, s: HMAC.new(p, s, SHA1).digest())
|
||||
|
||||
#logger.debug("Encryption key (base64): {}".format(base64.b64encode(key)))
|
||||
# logger.debug("Encryption key (base64): {}".format(base64.b64encode(key)))
|
||||
|
||||
# Encrypt using AES GCM
|
||||
nonce = get_random_bytes(16)
|
||||
@@ -931,10 +912,10 @@ class ANDROIDAPP(Notifier):
|
||||
encrypted_data, gcm_tag = cipher.encrypt_and_digest(json.dumps(plaintext_data).encode('utf-8'))
|
||||
encrypted_data += gcm_tag
|
||||
|
||||
#logger.debug("Encrypted data (base64): {}".format(base64.b64encode(encrypted_data)))
|
||||
#logger.debug("GCM tag (base64): {}".format(base64.b64encode(gcm_tag)))
|
||||
#logger.debug("Nonce (base64): {}".format(base64.b64encode(nonce)))
|
||||
#logger.debug("Salt (base64): {}".format(base64.b64encode(salt)))
|
||||
# logger.debug("Encrypted data (base64): {}".format(base64.b64encode(encrypted_data)))
|
||||
# logger.debug("GCM tag (base64): {}".format(base64.b64encode(gcm_tag)))
|
||||
# logger.debug("Nonce (base64): {}".format(base64.b64encode(nonce)))
|
||||
# logger.debug("Salt (base64): {}".format(base64.b64encode(salt)))
|
||||
|
||||
payload = {'app_id': mobile_app._ONESIGNAL_APP_ID,
|
||||
'include_player_ids': [device['onesignal_id']],
|
||||
@@ -943,7 +924,7 @@ class ANDROIDAPP(Notifier):
|
||||
'cipher_text': base64.b64encode(encrypted_data),
|
||||
'nonce': base64.b64encode(nonce),
|
||||
'salt': base64.b64encode(salt),
|
||||
'server_id': plexpy.CONFIG.PMS_UUID}
|
||||
'server_id': jellypy.CONFIG.PMS_UUID}
|
||||
}
|
||||
else:
|
||||
logger.warn("Tautulli Notifiers :: PyCryptodome library is missing. "
|
||||
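Aside: the hunks above keep the app's encryption path intact apart from the config module rename and comment spacing — a PBKDF2-derived key and AES-GCM over the notification JSON, using the PyCryptodome calls already imported at the top of the file. A minimal sketch under assumed parameters (the key length, iteration count, and salt size here are illustrative, not the app's actual values):

```python
import base64
import json

from Cryptodome.Cipher import AES
from Cryptodome.Hash import HMAC, SHA1
from Cryptodome.Protocol.KDF import PBKDF2
from Cryptodome.Random import get_random_bytes


def encrypt_payload(plaintext_data, passphrase, key_length=32, iterations=1000):
    # Derive the AES key from the device passphrase (HMAC-SHA1 PRF, as in the handler).
    salt = get_random_bytes(16)
    key = PBKDF2(passphrase, salt, dkLen=key_length, count=iterations,
                 prf=lambda p, s: HMAC.new(p, s, SHA1).digest())

    # AES-GCM: append the tag to the ciphertext so the receiving app can verify integrity.
    nonce = get_random_bytes(16)
    cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
    encrypted_data, gcm_tag = cipher.encrypt_and_digest(json.dumps(plaintext_data).encode('utf-8'))
    encrypted_data += gcm_tag

    return {'cipher_text': base64.b64encode(encrypted_data),
            'nonce': base64.b64encode(nonce),
            'salt': base64.b64encode(salt)}


print(encrypt_payload({'subject': 'Tautulli', 'body': 'Playback started'}, b'device-token'))
```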
@@ -955,10 +936,10 @@ class ANDROIDAPP(Notifier):
|
||||
'contents': {'en': 'Tautulli Notification'},
|
||||
'data': {'encrypted': False,
|
||||
'plain_text': plaintext_data,
|
||||
'server_id': plexpy.CONFIG.PMS_UUID}
|
||||
'server_id': jellypy.CONFIG.PMS_UUID}
|
||||
}
|
||||
|
||||
#logger.debug("OneSignal payload: {}".format(payload))
|
||||
# logger.debug("OneSignal payload: {}".format(payload))
|
||||
|
||||
headers = {'Content-Type': 'application/json'}
|
||||
|
||||
@@ -995,24 +976,25 @@ class ANDROIDAPP(Notifier):
|
||||
'Please install the library to encrypt the notification contents. '
|
||||
'Instructions can be found in the '
|
||||
'<a href="' + helpers.anon_url(
|
||||
'https://github.com/%s/%s-Wiki/wiki/Frequently-Asked-Questions#notifications-pycryptodome'
|
||||
% (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO)) + '" target="_blank">FAQ</a>.' ,
|
||||
'https://github.com/%s/%s-Wiki/wiki/Frequently-Asked-Questions#notifications-pycryptodome'
|
||||
% (jellypy.CONFIG.GIT_USER, jellypy.CONFIG.GIT_REPO)) + '" target="_blank">FAQ</a>.',
|
||||
'input_type': 'help'
|
||||
})
|
||||
})
|
||||
else:
|
||||
config_option.append({
|
||||
'label': 'Note',
|
||||
'description': 'The PyCryptodome library was found. '
|
||||
'The content of your notifications will be sent encrypted!',
|
||||
'input_type': 'help'
|
||||
})
|
||||
})
|
||||
|
||||
config_option[-1]['description'] += '<br><br>Notifications are sent using the ' \
|
||||
'<a href="' + helpers.anon_url('https://onesignal.com') + '" target="_blank">' \
|
||||
'OneSignal</a>. Some user data is collected and cannot be encrypted. ' \
|
||||
'Please read the <a href="' + helpers.anon_url(
|
||||
'https://onesignal.com/privacy_policy') + '" target="_blank">' \
|
||||
'OneSignal Privacy Policy</a> for more details.'
|
||||
'<a href="' + helpers.anon_url(
|
||||
'https://onesignal.com') + '" target="_blank">' \
|
||||
'OneSignal</a>. Some user data is collected and cannot be encrypted. ' \
|
||||
'Please read the <a href="' + helpers.anon_url(
|
||||
'https://onesignal.com/privacy_policy') + '" target="_blank">' \
|
||||
'OneSignal Privacy Policy</a> for more details.'
|
||||
|
||||
devices = self.get_devices()
|
||||
|
||||
@@ -1023,7 +1005,7 @@ class ANDROIDAPP(Notifier):
|
||||
'<a data-tab-destination="android_app" data-toggle="tab" data-dismiss="modal">'
|
||||
'Get the Android App</a> and register a device.',
|
||||
'input_type': 'help'
|
||||
})
|
||||
})
|
||||
else:
|
||||
config_option.append({
|
||||
'label': 'Device',
|
||||
@@ -1034,7 +1016,7 @@ class ANDROIDAPP(Notifier):
|
||||
'register a new device</a> with Tautulli.',
|
||||
'input_type': 'select',
|
||||
'select_options': devices
|
||||
})
|
||||
})
|
||||
|
||||
config_option.append({
|
||||
'label': 'Priority',
|
||||
@@ -1043,7 +1025,7 @@ class ANDROIDAPP(Notifier):
|
||||
'description': 'Set the notification priority.',
|
||||
'input_type': 'select',
|
||||
'select_options': {1: 'Minimum', 2: 'Low', 3: 'Normal', 4: 'High'}
|
||||
})
|
||||
})
|
||||
|
||||
return config_option
|
||||
|
||||
@@ -1086,7 +1068,7 @@ class BOXCAR(Notifier):
|
||||
'flourish': 'Flourish',
|
||||
'harp': 'Harp',
|
||||
'light': 'Light',
|
||||
'magic-chime':'Magic Chime',
|
||||
'magic-chime': 'Magic Chime',
|
||||
'magic-coin': 'Magic Coin',
|
||||
'no-sound': 'No Sound',
|
||||
'notifier-1': 'Notifier (1)',
|
||||
@@ -1512,191 +1494,6 @@ class EMAIL(Notifier):
|
||||
return config_option
|
||||
|
||||
|
||||
class FACEBOOK(Notifier):
|
||||
"""
|
||||
Facebook notifications
|
||||
"""
|
||||
NAME = 'Facebook'
|
||||
_DEFAULT_CONFIG = {'redirect_uri': '',
|
||||
'access_token': '',
|
||||
'app_id': '',
|
||||
'app_secret': '',
|
||||
'group_id': '',
|
||||
'incl_subject': 1,
|
||||
'incl_card': 0,
|
||||
'movie_provider': '',
|
||||
'tv_provider': '',
|
||||
'music_provider': ''
|
||||
}
|
||||
|
||||
def _get_authorization(self, app_id='', app_secret='', redirect_uri=''):
|
||||
# Temporarily store settings in the config so we can retrieve them in Facebook step 2.
|
||||
# Assume the user won't be requesting authorization for multiple Facebook notifiers at the same time.
|
||||
plexpy.CONFIG.FACEBOOK_APP_ID = app_id
|
||||
plexpy.CONFIG.FACEBOOK_APP_SECRET = app_secret
|
||||
plexpy.CONFIG.FACEBOOK_REDIRECT_URI = redirect_uri
|
||||
plexpy.CONFIG.FACEBOOK_TOKEN = 'temp'
|
||||
|
||||
return facebook.auth_url(app_id=app_id,
|
||||
canvas_url=redirect_uri,
|
||||
perms=['publish_to_groups'])
|
||||
|
||||
def _get_credentials(self, code=''):
|
||||
logger.info("Tautulli Notifiers :: Requesting access token from {name}.".format(name=self.NAME))
|
||||
|
||||
app_id = plexpy.CONFIG.FACEBOOK_APP_ID
|
||||
app_secret = plexpy.CONFIG.FACEBOOK_APP_SECRET
|
||||
redirect_uri = plexpy.CONFIG.FACEBOOK_REDIRECT_URI
|
||||
|
||||
try:
|
||||
# Request user access token
|
||||
api = facebook.GraphAPI(version='2.12')
|
||||
response = api.get_access_token_from_code(code=code,
|
||||
redirect_uri=redirect_uri,
|
||||
app_id=app_id,
|
||||
app_secret=app_secret)
|
||||
access_token = response['access_token']
|
||||
|
||||
# Request extended user access token
|
||||
api = facebook.GraphAPI(access_token=access_token, version='2.12')
|
||||
response = api.extend_access_token(app_id=app_id,
|
||||
app_secret=app_secret)
|
||||
|
||||
plexpy.CONFIG.FACEBOOK_TOKEN = response['access_token']
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Notifiers :: Error requesting {name} access token: {e}".format(name=self.NAME, e=e))
|
||||
plexpy.CONFIG.FACEBOOK_TOKEN = ''
|
||||
|
||||
# Clear out temporary config values
|
||||
plexpy.CONFIG.FACEBOOK_APP_ID = ''
|
||||
plexpy.CONFIG.FACEBOOK_APP_SECRET = ''
|
||||
plexpy.CONFIG.FACEBOOK_REDIRECT_URI = ''
|
||||
|
||||
return plexpy.CONFIG.FACEBOOK_TOKEN
|
||||
|
||||
def _post_facebook(self, **data):
|
||||
if self.config['group_id']:
|
||||
api = facebook.GraphAPI(access_token=self.config['access_token'], version='2.12')
|
||||
|
||||
try:
|
||||
api.put_object(parent_object=self.config['group_id'], connection_name='feed', **data)
|
||||
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Notifiers :: Error sending {name} post: {e}".format(name=self.NAME, e=e))
|
||||
return False
|
||||
|
||||
else:
|
||||
logger.error("Tautulli Notifiers :: Error sending {name} post: No {name} Group ID provided.".format(name=self.NAME))
|
||||
return False
|
||||
|
||||
def agent_notify(self, subject='', body='', action='', **kwargs):
|
||||
if self.config['incl_subject']:
|
||||
text = subject + '\r\n' + body
|
||||
else:
|
||||
text = body
|
||||
|
||||
data = {'message': text}
|
||||
|
||||
if self.config['incl_card'] and kwargs.get('parameters', {}).get('media_type'):
|
||||
# Grab formatted metadata
|
||||
pretty_metadata = PrettyMetadata(kwargs['parameters'])
|
||||
|
||||
if pretty_metadata.media_type == 'movie':
|
||||
provider = self.config['movie_provider']
|
||||
elif pretty_metadata.media_type in ('show', 'season', 'episode'):
|
||||
provider = self.config['tv_provider']
|
||||
elif pretty_metadata.media_type in ('artist', 'album', 'track'):
|
||||
provider = self.config['music_provider']
|
||||
else:
|
||||
provider = None
|
||||
|
||||
data['link'] = pretty_metadata.get_provider_link(provider)
|
||||
|
||||
return self._post_facebook(**data)
|
||||
|
||||
def _return_config_options(self):
|
||||
config_option = [{'label': 'OAuth Redirect URI',
|
||||
'value': self.config['redirect_uri'],
|
||||
'name': 'facebook_redirect_uri',
|
||||
'description': 'Fill in this address for the "Valid OAuth redirect URIs" '
|
||||
'in your Facebook App.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Facebook App ID',
|
||||
'value': self.config['app_id'],
|
||||
'name': 'facebook_app_id',
|
||||
'description': 'Your Facebook app ID.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Facebook App Secret',
|
||||
'value': self.config['app_secret'],
|
||||
'name': 'facebook_app_secret',
|
||||
'description': 'Your Facebook app secret.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Request Authorization',
|
||||
'value': 'Request Authorization',
|
||||
'name': 'facebook_facebook_auth',
|
||||
'description': 'Request Facebook authorization. (Ensure you allow the browser pop-up).',
|
||||
'input_type': 'button'
|
||||
},
|
||||
{'label': 'Facebook Access Token',
|
||||
'value': self.config['access_token'],
|
||||
'name': 'facebook_access_token',
|
||||
'description': 'Your Facebook access token. '
|
||||
'Automatically filled in after requesting authorization.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Facebook Group ID',
|
||||
'value': self.config['group_id'],
|
||||
'name': 'facebook_group_id',
|
||||
'description': 'Your Facebook Group ID.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Include Subject Line',
|
||||
'value': self.config['incl_subject'],
|
||||
'name': 'facebook_incl_subject',
|
||||
'description': 'Include the subject line with the notifications.',
|
||||
'input_type': 'checkbox'
|
||||
},
|
||||
{'label': 'Include Rich Metadata Info',
|
||||
'value': self.config['incl_card'],
|
||||
'name': 'facebook_incl_card',
|
||||
'description': 'Include an info card with a poster and metadata with the notifications.<br>'
|
||||
'Note: <a data-tab-destination="3rd_party_apis" data-dismiss="modal" '
|
||||
'data-target="notify_upload_posters">Image Hosting</a> '
|
||||
'must be enabled under the notifications settings tab.',
|
||||
'input_type': 'checkbox'
|
||||
},
|
||||
{'label': 'Movie Link Source',
|
||||
'value': self.config['movie_provider'],
|
||||
'name': 'facebook_movie_provider',
|
||||
'description': 'Select the source for movie links on the info cards. Leave blank to disable.<br>'
|
||||
'Note: 3rd party API lookup may need to be enabled under the notifications settings tab.',
|
||||
'input_type': 'select',
|
||||
'select_options': PrettyMetadata().get_movie_providers()
|
||||
},
|
||||
{'label': 'TV Show Link Source',
|
||||
'value': self.config['tv_provider'],
|
||||
'name': 'facebook_tv_provider',
|
||||
'description': 'Select the source for tv show links on the info cards. Leave blank to disable.<br>'
|
||||
'Note: 3rd party API lookup may need to be enabled under the notifications settings tab.',
|
||||
'input_type': 'select',
|
||||
'select_options': PrettyMetadata().get_tv_providers()
|
||||
},
|
||||
{'label': 'Music Link Source',
|
||||
'value': self.config['music_provider'],
|
||||
'name': 'facebook_music_provider',
|
||||
'description': 'Select the source for music links on the info cards. Leave blank to disable.',
|
||||
'input_type': 'select',
|
||||
'select_options': PrettyMetadata().get_music_providers()
|
||||
}
|
||||
]
|
||||
|
||||
return config_option
|
||||
|
||||
|
||||
class GROUPME(Notifier):
|
||||
"""
|
||||
GroupMe notifications
|
||||
@@ -1719,29 +1516,30 @@ class GROUPME(Notifier):
|
||||
if self.config['incl_poster'] and kwargs.get('parameters'):
|
||||
pretty_metadata = PrettyMetadata(kwargs.get('parameters'))
|
||||
|
||||
# Retrieve the poster from Plex
|
||||
result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb',''))
|
||||
if result and result[0]:
|
||||
poster_content = result[0]
|
||||
else:
|
||||
poster_content = ''
|
||||
logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
|
||||
|
||||
if poster_content:
|
||||
headers = {'X-Access-Token': self.config['access_token'],
|
||||
'Content-Type': 'image/png'}
|
||||
|
||||
r = requests.post('https://image.groupme.com/pictures', headers=headers, data=poster_content)
|
||||
|
||||
if r.status_code == 200:
|
||||
logger.info("Tautulli Notifiers :: {name} poster sent.".format(name=self.NAME))
|
||||
r_content = r.json()
|
||||
data['attachments'] = [{'type': 'image',
|
||||
'url': r_content['payload']['picture_url']}]
|
||||
else:
|
||||
logger.error("Tautulli Notifiers :: {name} poster failed: "
|
||||
"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
|
||||
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
|
||||
# TODO: Jellyfin
|
||||
# # Retrieve the poster from Plex
|
||||
# result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb', ''))
|
||||
# if result and result[0]:
|
||||
# poster_content = result[0]
|
||||
# else:
|
||||
# poster_content = ''
|
||||
# logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
|
||||
#
|
||||
# if poster_content:
|
||||
# headers = {'X-Access-Token': self.config['access_token'],
|
||||
# 'Content-Type': 'image/png'}
|
||||
#
|
||||
# r = requests.post('https://image.groupme.com/pictures', headers=headers, data=poster_content)
|
||||
#
|
||||
# if r.status_code == 200:
|
||||
# logger.info("Tautulli Notifiers :: {name} poster sent.".format(name=self.NAME))
|
||||
# r_content = r.json()
|
||||
# data['attachments'] = [{'type': 'image',
|
||||
# 'url': r_content['payload']['picture_url']}]
|
||||
# else:
|
||||
# logger.error("Tautulli Notifiers :: {name} poster failed: "
|
||||
# "[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
|
||||
# logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
|
||||
|
||||
return self.make_request('https://api.groupme.com/v3/bots/post', json=data)
|
||||
|
||||
@@ -1816,12 +1614,13 @@ class GROWL(Notifier):
|
||||
logger.error("Tautulli Notifiers :: {name} notification failed: network error".format(name=self.NAME))
|
||||
return False
|
||||
except gntp.notifier.errors.AuthError:
|
||||
logger.error("Tautulli Notifiers :: {name} notification failed: authentication error".format(name=self.NAME))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: {name} notification failed: authentication error".format(name=self.NAME))
|
||||
return False
|
||||
|
||||
# Send it, including an image
|
||||
image_file = os.path.join(str(plexpy.PROG_DIR),
|
||||
"data/interfaces/default/images/logo-circle.png")
|
||||
image_file = os.path.join(str(jellypy.PROG_DIR),
|
||||
"data/interfaces/default/images/logo-circle.png")
|
||||
|
||||
with open(image_file, 'rb') as f:
|
||||
image = f.read()
|
||||
@@ -1887,7 +1686,8 @@ class IFTTT(Notifier):
|
||||
'value': self.config['key'],
|
||||
'name': 'ifttt_key',
|
||||
'description': 'Your IFTTT webhook key. You can get a key from'
|
||||
' <a href="' + helpers.anon_url('https://ifttt.com/maker_webhooks') + '" target="_blank">here</a>.',
|
||||
' <a href="' + helpers.anon_url(
|
||||
'https://ifttt.com/maker_webhooks') + '" target="_blank">here</a>.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'IFTTT Event',
|
||||
@@ -1966,10 +1766,13 @@ class JOIN(Notifier):
|
||||
return True
|
||||
else:
|
||||
error_msg = response_data.get('errorMessage')
|
||||
logger.error("Tautulli Notifiers :: {name} notification failed: {msg}".format(name=self.NAME, msg=error_msg))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: {name} notification failed: {msg}".format(name=self.NAME, msg=error_msg))
|
||||
return False
|
||||
else:
|
||||
logger.error("Tautulli Notifiers :: {name} notification failed: [{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: {name} notification failed: [{r.status_code}] {r.reason}".format(name=self.NAME,
|
||||
r=r))
|
||||
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
|
||||
return False
|
||||
|
||||
@@ -1990,14 +1793,19 @@ class JOIN(Notifier):
|
||||
devices.update({d['deviceName']: d['deviceName'] for d in response_devices})
|
||||
else:
|
||||
error_msg = response_data.get('errorMessage')
|
||||
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=error_msg))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME,
|
||||
msg=error_msg))
|
||||
|
||||
else:
|
||||
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: [{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: Unable to retrieve {name} devices list: [{r.status_code}] {r.reason}".format(
|
||||
name=self.NAME, r=r))
|
||||
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
|
||||
|
||||
return devices
|
||||
|
||||
@@ -2097,7 +1905,8 @@ class MQTT(Notifier):
|
||||
if self.config['password']:
|
||||
auth['password'] = self.config['password']
|
||||
|
||||
single(self.config['topic'], payload=json.dumps(data), qos=self.config['qos'], retain=bool(self.config['retain']),
|
||||
single(self.config['topic'], payload=json.dumps(data), qos=self.config['qos'],
|
||||
retain=bool(self.config['retain']),
|
||||
hostname=self.config['broker'], port=self.config['port'], client_id=self.config['clientid'],
|
||||
keepalive=self.config['keep_alive'], auth=auth or None, protocol=self.config['protocol'])
|
||||
|
||||
@@ -2207,6 +2016,7 @@ class OSX(Notifier):
|
||||
|
||||
def wrapper(self, *args, **kwargs):
|
||||
return func(self, old_IMP, *args, **kwargs)
|
||||
|
||||
new_IMP = self.objc.selector(wrapper, selector=old_IMP.selector,
|
||||
signature=old_IMP.signature)
|
||||
self.objc.classAddMethod(cls, SEL, new_IMP)
|
||||
@@ -2222,8 +2032,8 @@ class OSX(Notifier):
|
||||
|
||||
try:
|
||||
self._swizzle(self.objc.lookUpClass('NSBundle'),
|
||||
b'bundleIdentifier',
|
||||
self._swizzled_bundleIdentifier)
|
||||
b'bundleIdentifier',
|
||||
self._swizzled_bundleIdentifier)
|
||||
|
||||
NSUserNotification = self.objc.lookUpClass('NSUserNotification')
|
||||
NSUserNotificationCenter = self.objc.lookUpClass('NSUserNotificationCenter')
|
||||
@@ -2325,12 +2135,14 @@ class PLEX(Notifier):
|
||||
if self.config['image']:
|
||||
image = self.config['image']
|
||||
else:
|
||||
image = os.path.join(plexpy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
|
||||
image = os.path.join(jellypy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
|
||||
|
||||
for host in hosts:
|
||||
logger.info("Tautulli Notifiers :: Sending notification command to {name} @ {host}".format(name=self.NAME, host=host))
|
||||
logger.info("Tautulli Notifiers :: Sending notification command to {name} @ {host}".format(name=self.NAME,
|
||||
host=host))
|
||||
try:
|
||||
version = self._sendjson(host, 'Application.GetProperties', {'properties': ['version']})['version']['major']
|
||||
version = self._sendjson(host, 'Application.GetProperties', {'properties': ['version']})['version'][
|
||||
'major']
|
||||
|
||||
if version < 12: # Eden
|
||||
notification = subject + "," + body + "," + str(display_time)
|
||||
@@ -2415,7 +2227,7 @@ class PLEXMOBILEAPP(Notifier):
|
||||
if action == 'test':
|
||||
tests = []
|
||||
for configuration in self.configurations:
|
||||
tests.append(self.agent_notify(subject=subject, body=body, action='test_'+configuration))
|
||||
tests.append(self.agent_notify(subject=subject, body=body, action='test_' + configuration))
|
||||
return all(tests)
|
||||
|
||||
configuration_action = action.split('test_')[-1]
|
||||
@@ -2430,8 +2242,8 @@ class PLEXMOBILEAPP(Notifier):
|
||||
'to': self.config['user_ids'],
|
||||
'data': {
|
||||
'provider': {
|
||||
'identifier': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'title': plexpy.CONFIG.PMS_NAME
|
||||
'identifier': jellypy.CONFIG.PMS_IDENTIFIER,
|
||||
'title': jellypy.CONFIG.PMS_NAME
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2536,11 +2348,11 @@ class PLEXMOBILEAPP(Notifier):
|
||||
|
||||
data['metadata'] = metadata
|
||||
data['uri'] = 'server://{}/com.plexapp.plugins.library/library/metadata/{}'.format(
|
||||
plexpy.CONFIG.PMS_IDENTIFIER, uri_rating_key or pretty_metadata.parameters['rating_key']
|
||||
jellypy.CONFIG.PMS_IDENTIFIER, uri_rating_key or pretty_metadata.parameters['rating_key']
|
||||
)
|
||||
data['play'] = self.config['tap_action'] == 'play'
|
||||
|
||||
headers = {'X-Plex-Token': plexpy.CONFIG.PMS_TOKEN}
|
||||
headers = {'X-Plex-Token': jellypy.CONFIG.PMS_TOKEN}
|
||||
|
||||
return self.make_request(self.NOTIFICATION_URL, headers=headers, json=data)
|
||||
|
||||
@@ -2694,7 +2506,8 @@ class PUSHBULLET(Notifier):
|
||||
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
|
||||
logger.error(
|
||||
"Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
|
||||
|
||||
return devices
|
||||
|
||||
@@ -2977,7 +2790,7 @@ class SCRIPTS(Notifier):
|
||||
'.php': 'php',
|
||||
'.pl': 'perl',
|
||||
'.ps1': 'powershell -executionPolicy bypass -file',
|
||||
'.py': 'python' if plexpy.FROZEN else sys.executable,
|
||||
'.py': 'python' if jellypy.FROZEN else sys.executable,
|
||||
'.pyw': 'pythonw',
|
||||
'.rb': 'ruby',
|
||||
'.sh': ''
|
||||
@@ -3013,24 +2826,24 @@ class SCRIPTS(Notifier):
|
||||
def run_script(self, script, user_id):
|
||||
# Common environment variables
|
||||
custom_env = {
|
||||
'PLEX_URL': plexpy.CONFIG.PMS_URL,
|
||||
'PLEX_TOKEN': plexpy.CONFIG.PMS_TOKEN,
|
||||
'PLEX_URL': jellypy.CONFIG.PMS_URL,
|
||||
'PLEX_TOKEN': jellypy.CONFIG.PMS_TOKEN,
|
||||
'PLEX_USER_TOKEN': '',
|
||||
'TAUTULLI_URL': helpers.get_plexpy_url(hostname='localhost'),
|
||||
'TAUTULLI_PUBLIC_URL': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT,
|
||||
'TAUTULLI_APIKEY': plexpy.CONFIG.API_KEY,
|
||||
'TAUTULLI_ENCODING': plexpy.SYS_ENCODING,
|
||||
'TAUTULLI_PUBLIC_URL': jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT,
|
||||
'TAUTULLI_APIKEY': jellypy.CONFIG.API_KEY,
|
||||
'TAUTULLI_ENCODING': jellypy.SYS_ENCODING,
|
||||
'TAUTULLI_PYTHON_VERSION': common.PYTHON_VERSION
|
||||
}
|
||||
}
|
||||
|
||||
if user_id:
|
||||
user_tokens = users.Users().get_tokens(user_id=user_id)
|
||||
custom_env['PLEX_USER_TOKEN'] = str(user_tokens['server_token'])
|
||||
|
||||
if self.pythonpath and plexpy.INSTALL_TYPE not in ('windows', 'macos'):
|
||||
if self.pythonpath and jellypy.INSTALL_TYPE not in ('windows', 'macos'):
|
||||
custom_env['PYTHONPATH'] = os.pathsep.join([p for p in sys.path if p])
|
||||
|
||||
if plexpy.PYTHON2:
|
||||
if jellypy.PYTHON2:
|
||||
custom_env = {k.encode('utf-8'): v.encode('utf-8') for k, v in custom_env.items()}
|
||||
|
||||
env = os.environ.copy()
|
||||
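The hunk above only builds the environment dictionary for notification scripts; as a rough sketch (the real runner, its threading, and the script command line are outside this hunk, and the helper name and example values below are hypothetical), the merged variables would be handed to the child process roughly like this:

```python
import os
import subprocess

def launch_script(script_cmd, custom_env):
    # Layer the PLEX_*/TAUTULLI_* variables on top of the parent environment.
    env = os.environ.copy()
    env.update(custom_env)
    return subprocess.Popen(script_cmd, env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

# Hypothetical call:
# launch_script(['python', '/scripts/on_play.py'], {'TAUTULLI_APIKEY': 'abc123'})
```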
@@ -3137,8 +2950,8 @@ class SCRIPTS(Notifier):
|
||||
|
||||
script.extend(script_args)
|
||||
|
||||
if plexpy.PYTHON2:
|
||||
script = [s.encode(plexpy.SYS_ENCODING, 'ignore') for s in script]
|
||||
if jellypy.PYTHON2:
|
||||
script = [s.encode(jellypy.SYS_ENCODING, 'ignore') for s in script]
|
||||
|
||||
logger.debug("Tautulli Notifiers :: Full script is: %s" % script)
|
||||
logger.debug("Tautulli Notifiers :: Executing script in a new thread.")
|
||||
@@ -3149,7 +2962,7 @@ class SCRIPTS(Notifier):
|
||||
def _return_config_options(self):
|
||||
config_option = [{'label': 'Supported File Types',
|
||||
'description': '<span class="inline-pre">' + \
|
||||
', '.join(self.script_exts) + '</span>',
|
||||
', '.join(self.script_exts) + '</span>',
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'Script Folder',
|
||||
@@ -3523,7 +3336,7 @@ class TWITTER(Notifier):
|
||||
poster_url = ''
|
||||
if self.config['incl_poster'] and kwargs.get('parameters'):
|
||||
parameters = kwargs['parameters']
|
||||
poster_url = parameters.get('poster_url','')
|
||||
poster_url = parameters.get('poster_url', '')
|
||||
|
||||
# Hack to add media type to attachment
|
||||
if poster_url and not helpers.get_img_service():
|
||||
@@ -3688,12 +3501,13 @@ class XBMC(Notifier):
|
||||
if self.config['image']:
|
||||
image = self.config['image']
|
||||
else:
|
||||
image = os.path.join(plexpy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
|
||||
image = os.path.join(jellypy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
|
||||
|
||||
for host in hosts:
|
||||
logger.info("Tautulli Notifiers :: Sending notification command to XMBC @ " + host)
|
||||
try:
|
||||
version = self._sendjson(host, 'Application.GetProperties', {'properties': ['version']})['version']['major']
|
||||
version = self._sendjson(host, 'Application.GetProperties', {'properties': ['version']})['version'][
|
||||
'major']
|
||||
|
||||
if version < 12: # Eden
|
||||
notification = subject + "," + body + "," + str(display_time)
|
||||
22 jellypy/password.py (new file)
@@ -0,0 +1,22 @@
import binascii
import hashlib
import os


def make_hash(password):
    salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')
    pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'),
                                  salt, 100000)
    pwdhash = binascii.hexlify(pwdhash)
    return (salt + pwdhash).decode('ascii')


def check_hash(password, stored_pw):
    salt = stored_pw[:64]
    stored_password = stored_pw[64:]
    pwdhash = hashlib.pbkdf2_hmac('sha512',
                                  password.encode('utf-8'),
                                  salt.encode('ascii'),
                                  100000)
    pwdhash = binascii.hexlify(pwdhash).decode('ascii')
    return pwdhash == stored_password
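The new jellypy/password.py helpers above wrap PBKDF2-HMAC-SHA512 with a per-password salt; a quick round-trip shows how callers such as webauth's check_credentials are expected to use them (the literal password here is only an example):

```python
from jellypy.password import make_hash, check_hash

stored = make_hash('s3cret')            # 64-char hex salt + hex PBKDF2-SHA512 digest
assert check_hash('s3cret', stored)     # the correct password verifies against the stored string
assert not check_hash('wrong', stored)  # any other password fails
```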
@@ -15,24 +15,16 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
import collections
|
||||
from xml.dom import minidom
|
||||
|
||||
import collections
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from requests.packages import urllib3
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import lock
|
||||
import logger
|
||||
else:
|
||||
from plexpy import lock
|
||||
from plexpy import logger
|
||||
|
||||
import jellypy
|
||||
from jellypy import lock
|
||||
from jellypy import logger
|
||||
|
||||
# Dictionary with last request times, for rate limiting.
|
||||
last_requests = collections.defaultdict(int)
|
||||
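The module keeps last_requests purely as a timestamp map; a minimal sketch of the throttling it enables (the interval constant and helper name are assumptions, not code from this diff):

```python
import collections
import time

last_requests = collections.defaultdict(int)  # url -> unix time of the last request
MIN_INTERVAL = 1.0                            # assumed minimum spacing in seconds

def wait_for_slot(url):
    # Block until the URL may be hit again, then record the new request time.
    elapsed = time.time() - last_requests[url]
    if elapsed < MIN_INTERVAL:
        time.sleep(MIN_INTERVAL - elapsed)
    last_requests[url] = time.time()
```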
@@ -59,7 +51,7 @@ def request_response(url, method="get", auto_raise=True,
|
||||
|
||||
# Disable verification of SSL certificates if requested. Note: this could
|
||||
# pose a security issue!
|
||||
kwargs["verify"] = bool(plexpy.CONFIG.VERIFY_SSL_CERT)
|
||||
kwargs["verify"] = bool(jellypy.CONFIG.VERIFY_SSL_CERT)
|
||||
if not kwargs['verify']:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
@@ -123,7 +115,7 @@ def request_response(url, method="get", auto_raise=True,
|
||||
e.response.status_code, cause)
|
||||
|
||||
# Debug response
|
||||
if plexpy.VERBOSE:
|
||||
if jellypy.VERBOSE:
|
||||
server_message(e.response)
|
||||
else:
|
||||
logger.error("Request raised HTTP error.")
|
||||
@@ -151,7 +143,7 @@ def request_response2(url, method="get", auto_raise=True,
|
||||
|
||||
# Disable verification of SSL certificates if requested. Note: this could
|
||||
# pose a security issue!
|
||||
kwargs['verify'] = bool(plexpy.CONFIG.VERIFY_SSL_CERT)
|
||||
kwargs['verify'] = bool(jellypy.CONFIG.VERIFY_SSL_CERT)
|
||||
if not kwargs['verify']:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
@@ -182,7 +174,7 @@ def request_response2(url, method="get", auto_raise=True,
|
||||
err_msg = "Unable to connect to remote host because of a SSL error."
|
||||
else:
|
||||
err_msg = "Unable to connect to remote host because of a SSL error, " \
|
||||
"with certificate verification turned off: {}".format(e)
|
||||
"with certificate verification turned off: {}".format(e)
|
||||
|
||||
except requests.ConnectionError:
|
||||
err_msg = "Unable to connect to remote host. Check if the remote host is up and running."
|
||||
@@ -203,7 +195,7 @@ def request_response2(url, method="get", auto_raise=True,
|
||||
|
||||
err_msg = "Request raised a HTTP error: {}".format(http_err)
|
||||
|
||||
if plexpy.VERBOSE:
|
||||
if jellypy.VERBOSE:
|
||||
req_msg = server_message(e.response, return_msg=True)
|
||||
|
||||
else:
|
||||
@@ -264,7 +256,7 @@ def request_json(url, **kwargs):
|
||||
logger.error("Response returned invalid JSON data")
|
||||
|
||||
# Debug response
|
||||
if plexpy.VERBOSE:
|
||||
if jellypy.VERBOSE:
|
||||
server_message(response)
|
||||
|
||||
|
||||
@@ -14,19 +14,11 @@
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
|
||||
import cherrypy
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import users
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import users
|
||||
import jellypy
|
||||
from jellypy import common
|
||||
from jellypy import users
|
||||
|
||||
|
||||
def get_session_info():
|
||||
@@ -43,6 +35,7 @@ def get_session_info():
|
||||
|
||||
return _session
|
||||
|
||||
|
||||
def get_session_user():
|
||||
"""
|
||||
Returns the user_id for the current logged in session
|
||||
@@ -50,6 +43,7 @@ def get_session_user():
|
||||
_session = get_session_info()
|
||||
return _session['user'] if _session['user_group'] == 'guest' and _session['user'] else None
|
||||
|
||||
|
||||
def get_session_user_id():
|
||||
"""
|
||||
Returns the user_id for the current logged in session
|
||||
@@ -68,7 +62,7 @@ def get_session_user_token():
|
||||
session_user_tokens = users.Users().get_tokens(_session['user_id'])
|
||||
user_token = session_user_tokens['server_token']
|
||||
else:
|
||||
user_token = plexpy.CONFIG.PMS_TOKEN
|
||||
user_token = jellypy.CONFIG.PMS_TOKEN
|
||||
|
||||
return user_token
|
||||
|
||||
@@ -80,6 +74,7 @@ def get_session_shared_libraries():
|
||||
user_details = users.Users().get_details(user_id=get_session_user_id())
|
||||
return tuple(str(s) for s in user_details['shared_libraries'])
|
||||
|
||||
|
||||
def get_session_library_filters():
|
||||
"""
|
||||
Returns a dict of library filters for the current logged in session
|
||||
@@ -91,6 +86,7 @@ def get_session_library_filters():
|
||||
filters = users.Users().get_filters(user_id=get_session_user_id())
|
||||
return filters
|
||||
|
||||
|
||||
def get_session_library_filters_type(filters, media_type=None):
|
||||
"""
|
||||
Returns a dict of library filters for the current logged in session
|
||||
@@ -115,6 +111,7 @@ def get_session_library_filters_type(filters, media_type=None):
|
||||
|
||||
return content_rating, tuple(f.lower() for f in labels)
|
||||
|
||||
|
||||
def allow_session_user(user_id):
|
||||
"""
|
||||
Returns True or False if the user_id is allowed for the current logged in session
|
||||
@@ -124,6 +121,7 @@ def allow_session_user(user_id):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def allow_session_library(section_id):
|
||||
"""
|
||||
Returns True or False if the section_id is allowed for the current logged in session
|
||||
@@ -133,13 +131,14 @@ def allow_session_library(section_id):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def friendly_name_to_username(list_of_dicts):
|
||||
"""
|
||||
Reverts the friendly name back to the username of the current logged in session
|
||||
"""
|
||||
session_user = get_session_user()
|
||||
session_user_id = get_session_user_id()
|
||||
|
||||
|
||||
if session_user_id:
|
||||
for d in list_of_dicts:
|
||||
if 'friendly_name' in d and d['friendly_name'] != session_user:
|
||||
@@ -147,12 +146,13 @@ def friendly_name_to_username(list_of_dicts):
|
||||
|
||||
return list_of_dicts
|
||||
|
||||
|
||||
def filter_session_info(list_of_dicts, filter_key=None):
|
||||
"""
|
||||
Filters a list of dictionary items to only return the info for the current logged in session
|
||||
"""
|
||||
session_user_id = get_session_user_id()
|
||||
|
||||
|
||||
if not session_user_id:
|
||||
return list_of_dicts
|
||||
|
||||
@@ -162,13 +162,13 @@ def filter_session_info(list_of_dicts, filter_key=None):
|
||||
list_of_dicts = friendly_name_to_username(list_of_dicts)
|
||||
|
||||
if filter_key == 'user_id' and session_user_id:
|
||||
return [d for d in list_of_dicts if str(d.get('user_id','')) == session_user_id]
|
||||
return [d for d in list_of_dicts if str(d.get('user_id', '')) == session_user_id]
|
||||
|
||||
elif filter_key == 'section_id' and session_library_ids:
|
||||
new_list_of_dicts = []
|
||||
|
||||
for d in list_of_dicts:
|
||||
if str(d.get('section_id','')) not in session_library_ids:
|
||||
if str(d.get('section_id', '')) not in session_library_ids:
|
||||
continue
|
||||
|
||||
if d.get('media_type'):
|
||||
@@ -198,6 +198,7 @@ def filter_session_info(list_of_dicts, filter_key=None):
|
||||
|
||||
return list_of_dicts
|
||||
|
||||
|
||||
def mask_session_info(list_of_dicts, mask_metadata=True):
|
||||
"""
|
||||
Masks user info in a list of dictionary items to only display info for the current logged in session
|
||||
@@ -213,6 +214,7 @@ def mask_session_info(list_of_dicts, mask_metadata=True):
|
||||
|
||||
keys_to_mask = {'user_id': '',
|
||||
'user': 'Plex User',
|
||||
'username': 'Plex User',
|
||||
'friendly_name': 'Plex User',
|
||||
'user_thumb': common.DEFAULT_USER_THUMB,
|
||||
'ip_address': 'N/A',
|
||||
@@ -248,7 +250,7 @@ def mask_session_info(list_of_dicts, mask_metadata=True):
|
||||
if not mask_metadata:
|
||||
continue
|
||||
|
||||
if str(d.get('section_id','')) not in session_library_ids:
|
||||
if str(d.get('section_id', '')) not in session_library_ids:
|
||||
for k, v in metadata_to_mask.items():
|
||||
if k in d: d[k] = metadata_to_mask[k]
|
||||
continue
|
||||
@@ -256,7 +258,7 @@ def mask_session_info(list_of_dicts, mask_metadata=True):
|
||||
media_type = d.get('media_type')
|
||||
if media_type:
|
||||
f_content_rating, f_labels = get_session_library_filters_type(session_library_filters,
|
||||
media_type=d['media_type'])
|
||||
media_type=d['media_type'])
|
||||
|
||||
d_content_rating = d.get('content_rating', '')
|
||||
d_labels = tuple(f.lower() for f in d.get('labels', ()))
|
||||
@@ -276,4 +278,4 @@ def mask_session_info(list_of_dicts, mask_metadata=True):
|
||||
for k, v in metadata_to_mask.items():
|
||||
if k in d: d[k] = metadata_to_mask[k]
|
||||
|
||||
return list_of_dicts
|
||||
return list_of_dicts
|
||||
@@ -14,41 +14,25 @@
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import next
|
||||
from future.builtins import str
|
||||
from future.builtins import object
|
||||
from future.moves.urllib.parse import parse_qsl
|
||||
from urllib.parse import parse_qsl
|
||||
|
||||
import httpagentparser
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import database
|
||||
import datatables
|
||||
import helpers
|
||||
import libraries
|
||||
import logger
|
||||
import plextv
|
||||
import session
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import database
|
||||
from plexpy import datatables
|
||||
from plexpy import helpers
|
||||
from plexpy import libraries
|
||||
from plexpy import logger
|
||||
from plexpy import plextv
|
||||
from plexpy import session
|
||||
import jellypy
|
||||
from jellypy import common
|
||||
from jellypy import database
|
||||
from jellypy import datatables
|
||||
from jellypy import helpers
|
||||
from jellypy import libraries
|
||||
from jellypy import logger
|
||||
from jellypy import session
|
||||
|
||||
|
||||
def refresh_users():
|
||||
logger.info("Tautulli Users :: Requesting users list refresh...")
|
||||
result = plextv.PlexTV().get_full_users_list()
|
||||
|
||||
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
server_id = jellypy.CONFIG.PMS_IDENTIFIER
|
||||
if not server_id:
|
||||
logger.error("Tautulli Users :: No PMS identifier, cannot refresh users. Verify server in settings.")
|
||||
return
|
||||
@@ -111,7 +95,7 @@ class Users(object):
|
||||
custom_where = [['users.deleted_user', 0]]
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
if session.get_session_user_id():
|
||||
custom_where.append(['users.user_id', session.get_session_user_id()])
|
||||
@@ -486,7 +470,7 @@ class Users(object):
|
||||
return []
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
if query_days and query_days is not None:
|
||||
query_days = map(helpers.cast_to_int, query_days.split(','))
|
||||
@@ -548,7 +532,7 @@ class Users(object):
|
||||
return []
|
||||
|
||||
if grouping is None:
|
||||
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
|
||||
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
|
||||
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
@@ -847,7 +831,8 @@ class Users(object):
|
||||
|
||||
return filters_list
|
||||
|
||||
def set_user_login(self, user_id=None, user=None, user_group=None, ip_address=None, host=None, user_agent=None, success=0):
|
||||
def set_user_login(self, user_id=None, user=None, user_group=None, ip_address=None, host=None, user_agent=None,
|
||||
success=0):
|
||||
|
||||
if user_id is None or str(user_id).isdigit():
|
||||
monitor_db = database.MonitorDatabase()
|
||||
@@ -947,4 +932,4 @@ class Users(object):
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli Users :: Unable to execute database query for delete_login_log: %s." % e)
|
||||
return False
|
||||
return False
|
||||
@@ -15,7 +15,5 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
PLEXPY_BRANCH = "master"
|
||||
PLEXPY_RELEASE_VERSION = "v2.6.1"
|
||||
JELLYPY_BRANCH = "master"
|
||||
JELLYPY_VERSION = "1.0.0-alpha"
|
||||
@@ -15,11 +15,6 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import division
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import next
|
||||
from future.builtins import str
|
||||
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
@@ -27,23 +22,16 @@ import re
|
||||
import subprocess
|
||||
import tarfile
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
import helpers
|
||||
import logger
|
||||
import request
|
||||
else:
|
||||
from plexpy import common
|
||||
from plexpy import helpers
|
||||
from plexpy import logger
|
||||
from plexpy import request
|
||||
import jellypy
|
||||
from jellypy import common
|
||||
from jellypy import helpers
|
||||
from jellypy import logger
|
||||
from jellypy import request
|
||||
|
||||
|
||||
def runGit(args):
|
||||
|
||||
if plexpy.CONFIG.GIT_PATH:
|
||||
git_locations = ['"' + plexpy.CONFIG.GIT_PATH + '"']
|
||||
if jellypy.CONFIG.GIT_PATH:
|
||||
git_locations = ['"' + jellypy.CONFIG.GIT_PATH + '"']
|
||||
else:
|
||||
git_locations = ['git']
|
||||
|
||||
@@ -56,8 +44,9 @@ def runGit(args):
|
||||
cmd = cur_git + ' ' + args
|
||||
|
||||
try:
|
||||
logger.debug('Trying to execute: "' + cmd + '" with shell in ' + plexpy.PROG_DIR)
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=plexpy.PROG_DIR)
|
||||
logger.debug('Trying to execute: "' + cmd + '" with shell in ' + jellypy.PROG_DIR)
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True,
|
||||
cwd=jellypy.PROG_DIR)
|
||||
output, err = p.communicate()
|
||||
output = output.strip().decode()
|
||||
|
||||
@@ -79,19 +68,18 @@ def runGit(args):
|
||||
|
||||
|
||||
def get_version():
|
||||
|
||||
if plexpy.FROZEN and common.PLATFORM == 'Windows':
|
||||
plexpy.INSTALL_TYPE = 'windows'
|
||||
if jellypy.FROZEN and common.PLATFORM == 'Windows':
|
||||
jellypy.INSTALL_TYPE = 'windows'
|
||||
current_version, current_branch = get_version_from_file()
|
||||
return current_version, 'origin', current_branch
|
||||
|
||||
elif plexpy.FROZEN and common.PLATFORM == 'Darwin':
|
||||
plexpy.INSTALL_TYPE = 'macos'
|
||||
elif jellypy.FROZEN and common.PLATFORM == 'Darwin':
|
||||
jellypy.INSTALL_TYPE = 'macos'
|
||||
current_version, current_branch = get_version_from_file()
|
||||
return current_version, 'origin', current_branch
|
||||
|
||||
elif os.path.isdir(os.path.join(plexpy.PROG_DIR, '.git')):
|
||||
plexpy.INSTALL_TYPE = 'git'
|
||||
elif os.path.isdir(os.path.join(jellypy.PROG_DIR, '.git')):
|
||||
jellypy.INSTALL_TYPE = 'git'
|
||||
output, err = runGit('rev-parse HEAD')
|
||||
|
||||
if not output:
|
||||
@@ -104,9 +92,9 @@ def get_version():
|
||||
logger.error('Output does not look like a hash, not using it.')
|
||||
cur_commit_hash = None
|
||||
|
||||
if plexpy.CONFIG.DO_NOT_OVERRIDE_GIT_BRANCH and plexpy.CONFIG.GIT_BRANCH:
|
||||
if jellypy.CONFIG.DO_NOT_OVERRIDE_GIT_BRANCH and jellypy.CONFIG.GIT_BRANCH:
|
||||
remote_name = None
|
||||
branch_name = plexpy.CONFIG.GIT_BRANCH
|
||||
branch_name = jellypy.CONFIG.GIT_BRANCH
|
||||
|
||||
else:
|
||||
remote_branch, err = runGit('rev-parse --abbrev-ref --symbolic-full-name @{u}')
|
||||
@@ -116,16 +104,16 @@ def get_version():
|
||||
else:
|
||||
remote_name = branch_name = None
|
||||
|
||||
if not remote_name and plexpy.CONFIG.GIT_REMOTE:
|
||||
logger.error('Could not retrieve remote name from git. Falling back to %s.' % plexpy.CONFIG.GIT_REMOTE)
|
||||
remote_name = plexpy.CONFIG.GIT_REMOTE
|
||||
if not remote_name and jellypy.CONFIG.GIT_REMOTE:
|
||||
logger.error('Could not retrieve remote name from git. Falling back to %s.' % jellypy.CONFIG.GIT_REMOTE)
|
||||
remote_name = jellypy.CONFIG.GIT_REMOTE
|
||||
if not remote_name:
|
||||
logger.error('Could not retrieve remote name from git. Defaulting to origin.')
|
||||
branch_name = 'origin'
|
||||
|
||||
if not branch_name and plexpy.CONFIG.GIT_BRANCH:
|
||||
logger.error('Could not retrieve branch name from git. Falling back to %s.' % plexpy.CONFIG.GIT_BRANCH)
|
||||
branch_name = plexpy.CONFIG.GIT_BRANCH
|
||||
if not branch_name and jellypy.CONFIG.GIT_BRANCH:
|
||||
logger.error('Could not retrieve branch name from git. Falling back to %s.' % jellypy.CONFIG.GIT_BRANCH)
|
||||
branch_name = jellypy.CONFIG.GIT_BRANCH
|
||||
if not branch_name:
|
||||
logger.error('Could not retrieve branch name from git. Defaulting to master.')
|
||||
branch_name = 'master'
|
||||
@@ -133,14 +121,20 @@ def get_version():
|
||||
return cur_commit_hash, remote_name, branch_name
|
||||
|
||||
else:
|
||||
plexpy.INSTALL_TYPE = 'docker' if plexpy.DOCKER else 'source'
|
||||
if jellypy.DOCKER:
|
||||
jellypy.INSTALL_TYPE = 'docker'
|
||||
elif jellypy.SNAP:
|
||||
jellypy.INSTALL_TYPE = 'snap'
|
||||
else:
|
||||
jellypy.INSTALL_TYPE = 'source'
|
||||
|
||||
current_version, current_branch = get_version_from_file()
|
||||
return current_version, 'origin', current_branch
|
||||
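For readability, the install-type branching spread across get_version() above condenses to the following sketch (parameter names are illustrative; the real code reads module globals such as jellypy.FROZEN, jellypy.DOCKER and jellypy.SNAP):

```python
import os

def detect_install_type(frozen, platform, prog_dir, docker, snap):
    # Mirrors the ordering in get_version(): frozen builds first, then git, then containers.
    if frozen and platform == 'Windows':
        return 'windows'
    if frozen and platform == 'Darwin':
        return 'macos'
    if os.path.isdir(os.path.join(prog_dir, '.git')):
        return 'git'
    if docker:
        return 'docker'
    if snap:
        return 'snap'
    return 'source'
```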
|
||||
|
||||
def get_version_from_file():
|
||||
version_file = os.path.join(plexpy.PROG_DIR, 'version.txt')
|
||||
branch_file = os.path.join(plexpy.PROG_DIR, 'branch.txt')
|
||||
version_file = os.path.join(jellypy.PROG_DIR, 'version.txt')
|
||||
branch_file = os.path.join(jellypy.PROG_DIR, 'branch.txt')
|
||||
|
||||
if os.path.isfile(version_file):
|
||||
with open(version_file, 'r') as f:
|
||||
@@ -160,27 +154,30 @@ def get_version_from_file():
|
||||
def check_update(scheduler=False, notify=False, use_cache=False):
|
||||
check_github(scheduler=scheduler, notify=notify, use_cache=use_cache)
|
||||
|
||||
if not plexpy.CURRENT_VERSION:
|
||||
plexpy.UPDATE_AVAILABLE = None
|
||||
elif plexpy.COMMITS_BEHIND > 0 and (plexpy.common.BRANCH in ('master', 'beta') or plexpy.FROZEN) and \
|
||||
plexpy.common.RELEASE != plexpy.LATEST_RELEASE:
|
||||
plexpy.UPDATE_AVAILABLE = 'release'
|
||||
elif plexpy.COMMITS_BEHIND > 0 and plexpy.CURRENT_VERSION != plexpy.LATEST_VERSION and not plexpy.FROZEN:
|
||||
plexpy.UPDATE_AVAILABLE = 'commit'
|
||||
if not jellypy.CURRENT_VERSION:
|
||||
jellypy.UPDATE_AVAILABLE = None
|
||||
elif jellypy.COMMITS_BEHIND > 0 and \
|
||||
(jellypy.common.BRANCH in ('master', 'beta') or jellypy.SNAP or jellypy.FROZEN) and \
|
||||
jellypy.common.RELEASE != jellypy.LATEST_RELEASE:
|
||||
jellypy.UPDATE_AVAILABLE = 'release'
|
||||
elif jellypy.COMMITS_BEHIND > 0 and \
|
||||
not jellypy.SNAP and not jellypy.FROZEN and \
|
||||
jellypy.CURRENT_VERSION != jellypy.LATEST_VERSION:
|
||||
jellypy.UPDATE_AVAILABLE = 'commit'
|
||||
else:
|
||||
plexpy.UPDATE_AVAILABLE = False
|
||||
jellypy.UPDATE_AVAILABLE = False
|
||||
|
||||
if plexpy.WIN_SYS_TRAY_ICON:
|
||||
plexpy.WIN_SYS_TRAY_ICON.change_tray_update_icon()
|
||||
elif plexpy.MAC_SYS_TRAY_ICON:
|
||||
plexpy.MAC_SYS_TRAY_ICON.change_tray_update_icon()
|
||||
if jellypy.WIN_SYS_TRAY_ICON:
|
||||
jellypy.WIN_SYS_TRAY_ICON.change_tray_update_icon()
|
||||
elif jellypy.MAC_SYS_TRAY_ICON:
|
||||
jellypy.MAC_SYS_TRAY_ICON.change_tray_update_icon()
|
||||
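The update flag set by check_update() follows a small decision table; restated as a standalone sketch (argument names are illustrative, the real code works on jellypy module globals):

```python
def update_available(current, latest, commits_behind, branch,
                     release, latest_release, snap=False, frozen=False):
    if not current:
        return None          # version unknown, nothing to compare against
    if commits_behind > 0 and (branch in ('master', 'beta') or snap or frozen) \
            and release != latest_release:
        return 'release'     # packaged or stable-branch installs track tagged releases
    if commits_behind > 0 and not snap and not frozen and current != latest:
        return 'commit'      # git/source installs track individual commits
    return False             # up to date
```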
|
||||
|
||||
def check_github(scheduler=False, notify=False, use_cache=False):
|
||||
plexpy.COMMITS_BEHIND = 0
|
||||
jellypy.COMMITS_BEHIND = 0
|
||||
|
||||
if plexpy.CONFIG.GIT_TOKEN:
|
||||
headers = {'Authorization': 'token {}'.format(plexpy.CONFIG.GIT_TOKEN)}
|
||||
if jellypy.CONFIG.GIT_TOKEN:
|
||||
headers = {'Authorization': 'token {}'.format(jellypy.CONFIG.GIT_TOKEN)}
|
||||
else:
|
||||
headers = {}
|
||||
|
||||
@@ -188,103 +185,118 @@ def check_github(scheduler=False, notify=False, use_cache=False):
|
||||
if not version:
|
||||
# Get the latest version available from github
|
||||
logger.info('Retrieving latest version information from GitHub')
|
||||
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (plexpy.CONFIG.GIT_USER,
|
||||
plexpy.CONFIG.GIT_REPO,
|
||||
plexpy.CONFIG.GIT_BRANCH)
|
||||
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (jellypy.CONFIG.GIT_USER,
|
||||
jellypy.CONFIG.GIT_REPO,
|
||||
jellypy.CONFIG.GIT_BRANCH)
|
||||
version = request.request_json(url, headers=headers, timeout=20,
|
||||
validator=lambda x: type(x) == dict)
|
||||
github_cache('version', github_data=version)
|
||||
|
||||
if version is None:
|
||||
logger.warn('Could not get the latest version from GitHub. Are you running a local development version?')
|
||||
return plexpy.CURRENT_VERSION
|
||||
return jellypy.CURRENT_VERSION
|
||||
|
||||
plexpy.LATEST_VERSION = version['sha']
|
||||
logger.debug("Latest version is %s", plexpy.LATEST_VERSION)
|
||||
jellypy.LATEST_VERSION = version['sha']
|
||||
logger.debug("Latest version is %s", jellypy.LATEST_VERSION)
|
||||
|
||||
# See how many commits behind we are
|
||||
if not plexpy.CURRENT_VERSION:
|
||||
if not jellypy.CURRENT_VERSION:
|
||||
logger.info('You are running an unknown version of Tautulli. Run the updater to identify your version')
|
||||
return plexpy.LATEST_VERSION
|
||||
return jellypy.LATEST_VERSION
|
||||
|
||||
if plexpy.LATEST_VERSION == plexpy.CURRENT_VERSION:
|
||||
if jellypy.LATEST_VERSION == jellypy.CURRENT_VERSION:
|
||||
logger.info('Tautulli is up to date')
|
||||
return plexpy.LATEST_VERSION
|
||||
return jellypy.LATEST_VERSION
|
||||
|
||||
commits = github_cache('commits', use_cache=use_cache)
|
||||
if not commits:
|
||||
logger.info('Comparing currently installed version with latest GitHub version')
|
||||
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (plexpy.CONFIG.GIT_USER,
|
||||
plexpy.CONFIG.GIT_REPO,
|
||||
plexpy.LATEST_VERSION,
|
||||
plexpy.CURRENT_VERSION)
|
||||
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (jellypy.CONFIG.GIT_USER,
|
||||
jellypy.CONFIG.GIT_REPO,
|
||||
jellypy.LATEST_VERSION,
|
||||
jellypy.CURRENT_VERSION)
|
||||
commits = request.request_json(url, headers=headers, timeout=20, whitelist_status_code=404,
|
||||
validator=lambda x: type(x) == dict)
|
||||
github_cache('commits', github_data=commits)
|
||||
|
||||
if commits is None:
|
||||
logger.warn('Could not get commits behind from GitHub.')
|
||||
return plexpy.LATEST_VERSION
|
||||
return jellypy.LATEST_VERSION
|
||||
|
||||
try:
|
||||
plexpy.COMMITS_BEHIND = int(commits['behind_by'])
|
||||
logger.debug("In total, %d commits behind", plexpy.COMMITS_BEHIND)
|
||||
jellypy.COMMITS_BEHIND = int(commits['behind_by'])
|
||||
logger.debug("In total, %d commits behind", jellypy.COMMITS_BEHIND)
|
||||
except KeyError:
|
||||
logger.info('Cannot compare versions. Are you running a local development version?')
|
||||
plexpy.COMMITS_BEHIND = 0
|
||||
jellypy.COMMITS_BEHIND = 0
|
||||
|
||||
if plexpy.COMMITS_BEHIND > 0:
|
||||
logger.info('New version is available. You are %s commits behind' % plexpy.COMMITS_BEHIND)
|
||||
if jellypy.COMMITS_BEHIND > 0:
|
||||
logger.info('New version is available. You are %s commits behind' % jellypy.COMMITS_BEHIND)
|
||||
|
||||
releases = github_cache('releases', use_cache=use_cache)
|
||||
if not releases:
|
||||
url = 'https://api.github.com/repos/%s/%s/releases' % (plexpy.CONFIG.GIT_USER,
|
||||
plexpy.CONFIG.GIT_REPO)
|
||||
url = 'https://api.github.com/repos/%s/%s/releases' % (jellypy.CONFIG.GIT_USER,
|
||||
jellypy.CONFIG.GIT_REPO)
|
||||
releases = request.request_json(url, timeout=20, whitelist_status_code=404,
|
||||
validator=lambda x: type(x) == list)
|
||||
github_cache('releases', github_data=releases)
|
||||
|
||||
if releases is None:
|
||||
logger.warn('Could not get releases from GitHub.')
|
||||
return plexpy.LATEST_VERSION
|
||||
return jellypy.LATEST_VERSION
|
||||
|
||||
if plexpy.CONFIG.GIT_BRANCH == 'master':
|
||||
if jellypy.CONFIG.GIT_BRANCH == 'master':
|
||||
release = next((r for r in releases if not r['prerelease']), releases[0])
|
||||
elif plexpy.CONFIG.GIT_BRANCH == 'beta':
|
||||
elif jellypy.CONFIG.GIT_BRANCH == 'beta':
|
||||
release = next((r for r in releases if not r['tag_name'].endswith('-nightly')), releases[0])
|
||||
elif plexpy.CONFIG.GIT_BRANCH == 'nightly':
|
||||
elif jellypy.CONFIG.GIT_BRANCH == 'nightly':
|
||||
release = next((r for r in releases), releases[0])
|
||||
else:
|
||||
release = releases[0]
|
||||
|
||||
plexpy.LATEST_RELEASE = release['tag_name']
|
||||
jellypy.LATEST_RELEASE = release['tag_name']
|
||||
|
||||
if notify:
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate',
|
||||
'plexpy_download_info': release,
|
||||
'plexpy_update_commit': plexpy.LATEST_VERSION,
|
||||
'plexpy_update_behind': plexpy.COMMITS_BEHIND})
|
||||
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate',
|
||||
'plexpy_download_info': release,
|
||||
'plexpy_update_commit': jellypy.LATEST_VERSION,
|
||||
'plexpy_update_behind': jellypy.COMMITS_BEHIND})
|
||||
|
||||
if scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and not plexpy.DOCKER and not plexpy.FROZEN:
|
||||
if jellypy.PYTHON2:
|
||||
logger.warn('Tautulli is running using Python 2. Unable to run automatic update.')
|
||||
|
||||
elif scheduler and jellypy.CONFIG.PLEXPY_AUTO_UPDATE and \
|
||||
not jellypy.DOCKER and not jellypy.SNAP and \
|
||||
not (jellypy.FROZEN and common.PLATFORM == 'Darwin'):
|
||||
logger.info('Running automatic update.')
|
||||
plexpy.shutdown(restart=True, update=True)
|
||||
jellypy.shutdown(restart=True, update=True)
|
||||
|
||||
elif plexpy.COMMITS_BEHIND == 0:
|
||||
elif jellypy.COMMITS_BEHIND == 0:
|
||||
logger.info('Tautulli is up to date')
|
||||
|
||||
return plexpy.LATEST_VERSION
|
||||
return jellypy.LATEST_VERSION
|
||||
|
||||
|
||||
def update():
|
||||
if not plexpy.UPDATE_AVAILABLE:
|
||||
if jellypy.PYTHON2:
|
||||
logger.warn('Tautulli is running using Python 2. Unable to update.')
|
||||
return
|
||||
|
||||
if plexpy.INSTALL_TYPE in ('docker', 'windows', 'macos'):
|
||||
if not jellypy.UPDATE_AVAILABLE:
|
||||
return
|
||||
|
||||
elif plexpy.INSTALL_TYPE == 'git':
|
||||
output, err = runGit('pull --ff-only {} {}'.format(plexpy.CONFIG.GIT_REMOTE,
|
||||
plexpy.CONFIG.GIT_BRANCH))
|
||||
if jellypy.INSTALL_TYPE in ('docker', 'snap', 'macos'):
|
||||
return
|
||||
|
||||
elif jellypy.INSTALL_TYPE == 'windows':
|
||||
logger.info('Calling Windows scheduled task to update Tautulli')
|
||||
CREATE_NO_WINDOW = 0x08000000
|
||||
subprocess.Popen(['SCHTASKS', '/Run', '/TN', 'TautulliUpdateTask'],
|
||||
creationflags=CREATE_NO_WINDOW)
|
||||
|
||||
elif jellypy.INSTALL_TYPE == 'git':
|
||||
output, err = runGit('pull --ff-only {} {}'.format(jellypy.CONFIG.GIT_REMOTE,
|
||||
jellypy.CONFIG.GIT_BRANCH))
|
||||
|
||||
if not output:
|
||||
logger.error('Unable to download latest version')
|
||||
@@ -296,12 +308,12 @@ def update():
|
||||
elif line.endswith(('Aborting', 'Aborting.')):
|
||||
logger.error('Unable to update from git: ' + line)
|
||||
|
||||
elif plexpy.INSTALL_TYPE == 'source':
|
||||
tar_download_url = 'https://github.com/{}/{}/tarball/{}'.format(plexpy.CONFIG.GIT_USER,
|
||||
plexpy.CONFIG.GIT_REPO,
|
||||
plexpy.CONFIG.GIT_BRANCH)
|
||||
update_dir = os.path.join(plexpy.DATA_DIR, 'update')
|
||||
version_path = os.path.join(plexpy.PROG_DIR, 'version.txt')
|
||||
elif jellypy.INSTALL_TYPE == 'source':
|
||||
tar_download_url = 'https://github.com/{}/{}/tarball/{}'.format(jellypy.CONFIG.GIT_USER,
|
||||
jellypy.CONFIG.GIT_REPO,
|
||||
jellypy.CONFIG.GIT_BRANCH)
|
||||
update_dir = os.path.join(jellypy.DATA_DIR, 'update')
|
||||
version_path = os.path.join(jellypy.PROG_DIR, 'version.txt')
|
||||
|
||||
logger.info('Downloading update from: ' + tar_download_url)
|
||||
data = request.request_content(tar_download_url)
|
||||
@@ -310,8 +322,8 @@ def update():
|
||||
logger.error("Unable to retrieve new version from '%s', can't update", tar_download_url)
|
||||
return
|
||||
|
||||
download_name = plexpy.CONFIG.GIT_BRANCH + '-github'
|
||||
tar_download_path = os.path.join(plexpy.DATA_DIR, download_name)
|
||||
download_name = jellypy.CONFIG.GIT_BRANCH + '-github'
|
||||
tar_download_path = os.path.join(jellypy.DATA_DIR, download_name)
|
||||
|
||||
# Save tar to disk
|
||||
with open(tar_download_path, 'wb') as f:
|
||||
@@ -339,7 +351,7 @@ def update():
|
||||
dirname = dirname[len(content_dir) + 1:]
|
||||
for curfile in filenames:
|
||||
old_path = os.path.join(content_dir, dirname, curfile)
|
||||
new_path = os.path.join(plexpy.PROG_DIR, dirname, curfile)
|
||||
new_path = os.path.join(jellypy.PROG_DIR, dirname, curfile)
|
||||
|
||||
if os.path.isfile(new_path):
|
||||
os.remove(new_path)
|
||||
@@ -348,7 +360,7 @@ def update():
|
||||
# Update version.txt
|
||||
try:
|
||||
with open(version_path, 'w') as f:
|
||||
f.write(str(plexpy.LATEST_VERSION))
|
||||
f.write(str(jellypy.LATEST_VERSION))
|
||||
except IOError as e:
|
||||
logger.error(
|
||||
"Unable to write current version to version.txt, update not complete: %s",
|
||||
@@ -358,18 +370,18 @@ def update():
|
||||
|
||||
|
||||
def reset_git_install():
|
||||
if plexpy.INSTALL_TYPE == 'git':
|
||||
logger.info('Attempting to reset git install to "{}/{}/{}"'.format(plexpy.CONFIG.GIT_REMOTE,
|
||||
plexpy.CONFIG.GIT_BRANCH,
|
||||
if jellypy.INSTALL_TYPE == 'git':
|
||||
logger.info('Attempting to reset git install to "{}/{}/{}"'.format(jellypy.CONFIG.GIT_REMOTE,
|
||||
jellypy.CONFIG.GIT_BRANCH,
|
||||
common.RELEASE))
|
||||
|
||||
output, err = runGit('remote set-url {} https://github.com/{}/{}.git'.format(plexpy.CONFIG.GIT_REMOTE,
|
||||
plexpy.CONFIG.GIT_USER,
|
||||
plexpy.CONFIG.GIT_REPO))
|
||||
output, err = runGit('fetch {}'.format(plexpy.CONFIG.GIT_REMOTE))
|
||||
output, err = runGit('checkout {}'.format(plexpy.CONFIG.GIT_BRANCH))
|
||||
output, err = runGit('branch -u {}/{}'.format(plexpy.CONFIG.GIT_REMOTE,
|
||||
plexpy.CONFIG.GIT_BRANCH))
|
||||
output, err = runGit('remote set-url {} https://github.com/{}/{}.git'.format(jellypy.CONFIG.GIT_REMOTE,
|
||||
jellypy.CONFIG.GIT_USER,
|
||||
jellypy.CONFIG.GIT_REPO))
|
||||
output, err = runGit('fetch {}'.format(jellypy.CONFIG.GIT_REMOTE))
|
||||
output, err = runGit('checkout {}'.format(jellypy.CONFIG.GIT_BRANCH))
|
||||
output, err = runGit('branch -u {}/{}'.format(jellypy.CONFIG.GIT_REMOTE,
|
||||
jellypy.CONFIG.GIT_BRANCH))
|
||||
output, err = runGit('reset --hard {}'.format(common.RELEASE))
|
||||
|
||||
if not output:
|
||||
@@ -386,12 +398,12 @@ def reset_git_install():
|
||||
|
||||
|
||||
def checkout_git_branch():
|
||||
if plexpy.INSTALL_TYPE == 'git':
|
||||
logger.info('Attempting to checkout git branch "{}/{}"'.format(plexpy.CONFIG.GIT_REMOTE,
|
||||
plexpy.CONFIG.GIT_BRANCH))
|
||||
if jellypy.INSTALL_TYPE == 'git':
|
||||
logger.info('Attempting to checkout git branch "{}/{}"'.format(jellypy.CONFIG.GIT_REMOTE,
|
||||
jellypy.CONFIG.GIT_BRANCH))
|
||||
|
||||
output, err = runGit('fetch {}'.format(plexpy.CONFIG.GIT_REMOTE))
|
||||
output, err = runGit('checkout {}'.format(plexpy.CONFIG.GIT_BRANCH))
|
||||
output, err = runGit('fetch {}'.format(jellypy.CONFIG.GIT_REMOTE))
|
||||
output, err = runGit('checkout {}'.format(jellypy.CONFIG.GIT_BRANCH))
|
||||
|
||||
if not output:
|
||||
logger.error('Unable to change git branch.')
|
||||
@@ -402,13 +414,13 @@ def checkout_git_branch():
|
||||
logger.error('Unable to checkout from git: ' + line)
|
||||
return
|
||||
|
||||
output, err = runGit('pull {} {}'.format(plexpy.CONFIG.GIT_REMOTE,
|
||||
plexpy.CONFIG.GIT_BRANCH))
|
||||
output, err = runGit('pull {} {}'.format(jellypy.CONFIG.GIT_REMOTE,
|
||||
jellypy.CONFIG.GIT_BRANCH))
|
||||
|
||||
|
||||
def github_cache(cache, github_data=None, use_cache=True):
|
||||
timestamp = helpers.timestamp()
|
||||
cache_filepath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'github_{}.json'.format(cache))
|
||||
cache_filepath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'github_{}.json'.format(cache))
|
||||
|
||||
if github_data:
|
||||
cache_data = {'github_data': github_data, '_cache_time': timestamp}
|
||||
@@ -423,7 +435,7 @@ def github_cache(cache, github_data=None, use_cache=True):
|
||||
try:
|
||||
with open(cache_filepath, 'r', encoding='utf-8') as cache_file:
|
||||
cache_data = json.load(cache_file)
|
||||
if timestamp - cache_data['_cache_time'] < plexpy.CONFIG.CHECK_GITHUB_CACHE_SECONDS:
|
||||
if timestamp - cache_data['_cache_time'] < jellypy.CONFIG.CHECK_GITHUB_CACHE_SECONDS:
|
||||
logger.debug('Using cached GitHub %s data', cache)
|
||||
return cache_data['github_data']
|
||||
except:
|
||||
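github_cache() above is a simple timestamped read-through cache for GitHub API responses; the same pattern stripped of the Tautulli config plumbing (function name, arguments, and the fetch callback are illustrative):

```python
import json
import os
import time

def cached_github(cache_dir, name, max_age, fetch):
    path = os.path.join(cache_dir, 'github_{}.json'.format(name))
    try:
        with open(path, 'r', encoding='utf-8') as f:
            cached = json.load(f)
        if time.time() - cached['_cache_time'] < max_age:
            return cached['github_data']   # fresh enough, skip the API call
    except (OSError, ValueError, KeyError):
        pass                               # missing or corrupt cache falls through
    data = fetch()                         # e.g. a request to api.github.com
    with open(path, 'w', encoding='utf-8') as f:
        json.dump({'github_data': data, '_cache_time': time.time()}, f)
    return data
```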
@@ -431,7 +443,7 @@ def github_cache(cache, github_data=None, use_cache=True):
|
||||
|
||||
|
||||
def read_changelog(latest_only=False, since_prev_release=False):
|
||||
changelog_file = os.path.join(plexpy.PROG_DIR, 'CHANGELOG.md')
|
||||
changelog_file = os.path.join(jellypy.PROG_DIR, 'CHANGELOG.md')
|
||||
|
||||
if not os.path.isfile(changelog_file):
|
||||
return '<h4>Missing changelog file</h4>'
|
||||
@@ -446,7 +458,7 @@ def read_changelog(latest_only=False, since_prev_release=False):
|
||||
list_pattern = re.compile(r'(^[ \t]*\*\s)(.+)')
|
||||
|
||||
beta_release = False
|
||||
prev_release = str(plexpy.PREV_RELEASE)
|
||||
prev_release = str(jellypy.PREV_RELEASE)
|
||||
|
||||
with open(changelog_file, "r") as logfile:
|
||||
for line in logfile:
|
||||
@@ -17,9 +17,6 @@
|
||||
|
||||
# Mostly borrowed from https://github.com/trakt/Plex-Trakt-Scrobbler
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future.builtins import str
|
||||
|
||||
import json
|
||||
import ssl
|
||||
import threading
|
||||
@@ -28,20 +25,12 @@ import time
|
||||
import certifi
|
||||
import websocket
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import activity_handler
|
||||
import activity_pinger
|
||||
import activity_processor
|
||||
import database
|
||||
import logger
|
||||
else:
|
||||
from plexpy import activity_handler
|
||||
from plexpy import activity_pinger
|
||||
from plexpy import activity_processor
|
||||
from plexpy import database
|
||||
from plexpy import logger
|
||||
|
||||
import jellypy
|
||||
from jellypy import activity_handler
|
||||
from jellypy import activity_pinger
|
||||
from jellypy import activity_processor
|
||||
from jellypy import database
|
||||
from jellypy import logger
|
||||
|
||||
name = 'websocket'
|
||||
opcode_data = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)
|
||||
@@ -66,30 +55,30 @@ def start_thread():
|
||||
|
||||
|
||||
def on_connect():
|
||||
if plexpy.PLEX_SERVER_UP is None:
|
||||
plexpy.PLEX_SERVER_UP = True
|
||||
if jellypy.PLEX_SERVER_UP is None:
|
||||
jellypy.PLEX_SERVER_UP = True
|
||||
|
||||
if not plexpy.PLEX_SERVER_UP:
|
||||
if not jellypy.PLEX_SERVER_UP:
|
||||
logger.info("Tautulli WebSocket :: The Plex Media Server is back up.")
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_intup'})
|
||||
plexpy.PLEX_SERVER_UP = True
|
||||
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_intup'})
|
||||
jellypy.PLEX_SERVER_UP = True
|
||||
|
||||
plexpy.initialize_scheduler()
|
||||
if plexpy.CONFIG.WEBSOCKET_MONITOR_PING_PONG:
|
||||
jellypy.initialize_scheduler()
|
||||
if jellypy.CONFIG.WEBSOCKET_MONITOR_PING_PONG:
|
||||
send_ping()
|
||||
|
||||
|
||||
def on_disconnect():
|
||||
if plexpy.PLEX_SERVER_UP is None:
|
||||
plexpy.PLEX_SERVER_UP = False
|
||||
if jellypy.PLEX_SERVER_UP is None:
|
||||
jellypy.PLEX_SERVER_UP = False
|
||||
|
||||
if plexpy.PLEX_SERVER_UP:
|
||||
if jellypy.PLEX_SERVER_UP:
|
||||
logger.info("Tautulli WebSocket :: Unable to get a response from the server, Plex server is down.")
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_intdown'})
|
||||
plexpy.PLEX_SERVER_UP = False
|
||||
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_intdown'})
|
||||
jellypy.PLEX_SERVER_UP = False
|
||||
|
||||
activity_processor.ActivityProcessor().set_temp_stopped()
|
||||
plexpy.initialize_scheduler()
|
||||
jellypy.initialize_scheduler()
|
||||
|
||||
|
||||
def reconnect():
|
||||
@@ -106,14 +95,14 @@ def shutdown():
|
||||
|
||||
def close():
|
||||
logger.info("Tautulli WebSocket :: Disconnecting websocket...")
|
||||
plexpy.WEBSOCKET.close()
|
||||
plexpy.WS_CONNECTED = False
|
||||
jellypy.WEBSOCKET.close()
|
||||
jellypy.WS_CONNECTED = False
|
||||
|
||||
|
||||
def send_ping():
|
||||
if plexpy.WS_CONNECTED:
|
||||
if jellypy.WS_CONNECTED:
|
||||
# logger.debug("Tautulli WebSocket :: Sending ping.")
|
||||
plexpy.WEBSOCKET.ping("Hi?")
|
||||
jellypy.WEBSOCKET.ping("Hi?")
|
||||
|
||||
global pong_timer
|
||||
pong_timer = threading.Timer(5.0, wait_pong)
|
||||
@@ -127,7 +116,7 @@ def wait_pong():
|
||||
|
||||
logger.warn("Tautulli WebSocket :: Failed to receive pong from websocket, ping attempt %s." % str(pong_count))
|
||||
|
||||
if pong_count >= plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
|
||||
if pong_count >= jellypy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
|
||||
pong_count = 0
|
||||
close()
|
||||
|
||||
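send_ping() and wait_pong() above form a small keepalive watchdog: each ping arms a 5-second timer, missed pongs are counted, and the socket is closed once WEBSOCKET_CONNECTION_ATTEMPTS pings go unanswered. A self-contained sketch of the same idea (class and parameter names are illustrative; the real code uses module-level globals):

```python
import threading

class PingWatchdog(object):
    def __init__(self, ws, max_missed=3):
        self.ws = ws
        self.max_missed = max_missed
        self.missed = 0
        self.timer = None

    def send_ping(self):
        self.ws.ping("Hi?")
        self.timer = threading.Timer(5.0, self.on_timeout)  # expect a pong within 5 s
        self.timer.start()

    def on_timeout(self):
        self.missed += 1
        if self.missed >= self.max_missed:
            self.missed = 0
            self.ws.close()   # give up; the run() loop handles reconnecting

    def on_pong(self):
        self.missed = 0       # any pong resets the counter
        if self.timer:
            self.timer.cancel()
```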
@@ -144,24 +133,24 @@ def receive_pong():
|
||||
def run():
|
||||
from websocket import create_connection
|
||||
|
||||
if plexpy.CONFIG.PMS_SSL and plexpy.CONFIG.PMS_URL[:5] == 'https':
|
||||
uri = plexpy.CONFIG.PMS_URL.replace('https://', 'wss://') + '/:/websockets/notifications'
|
||||
if jellypy.CONFIG.PMS_SSL and jellypy.CONFIG.PMS_URL[:5] == 'https':
|
||||
uri = jellypy.CONFIG.PMS_URL.replace('https://', 'wss://') + '/:/websockets/notifications'
|
||||
secure = 'secure '
|
||||
if plexpy.CONFIG.VERIFY_SSL_CERT:
|
||||
if jellypy.CONFIG.VERIFY_SSL_CERT:
|
||||
sslopt = {'ca_certs': certifi.where()}
|
||||
else:
|
||||
sslopt = {'cert_reqs': ssl.CERT_NONE}
|
||||
else:
|
||||
uri = 'ws://%s:%s/:/websockets/notifications' % (
|
||||
plexpy.CONFIG.PMS_IP,
|
||||
plexpy.CONFIG.PMS_PORT
|
||||
jellypy.CONFIG.PMS_IP,
|
||||
jellypy.CONFIG.PMS_PORT
|
||||
)
|
||||
secure = ''
|
||||
sslopt = None
|
||||
|
||||
# Set authentication token (if one is available)
|
||||
if plexpy.CONFIG.PMS_TOKEN:
|
||||
header = ["X-Plex-Token: %s" % plexpy.CONFIG.PMS_TOKEN]
|
||||
if jellypy.CONFIG.PMS_TOKEN:
|
||||
header = ["X-Plex-Token: %s" % jellypy.CONFIG.PMS_TOKEN]
|
||||
else:
|
||||
header = []
|
||||
|
||||
@@ -172,18 +161,18 @@ def run():
|
||||
# Try to open the websocket connection
|
||||
logger.info("Tautulli WebSocket :: Opening %swebsocket." % secure)
|
||||
try:
|
||||
plexpy.WEBSOCKET = create_connection(uri, header=header, sslopt=sslopt)
|
||||
jellypy.WEBSOCKET = create_connection(uri, header=header, sslopt=sslopt)
|
||||
logger.info("Tautulli WebSocket :: Ready")
|
||||
plexpy.WS_CONNECTED = True
|
||||
jellypy.WS_CONNECTED = True
|
||||
except (websocket.WebSocketException, IOError, Exception) as e:
|
||||
logger.error("Tautulli WebSocket :: %s.", e)
|
||||
|
||||
if plexpy.WS_CONNECTED:
|
||||
if jellypy.WS_CONNECTED:
|
||||
on_connect()
|
||||
|
||||
while plexpy.WS_CONNECTED:
|
||||
while jellypy.WS_CONNECTED:
|
||||
try:
|
||||
process(*receive(plexpy.WEBSOCKET))
|
||||
process(*receive(jellypy.WEBSOCKET))
|
||||
|
||||
# successfully received data, reset reconnects counter
|
||||
reconnects = 0
|
||||
@@ -195,19 +184,19 @@ def run():
|
||||
if reconnects == 0:
|
||||
logger.warn("Tautulli WebSocket :: Connection has closed.")
|
||||
|
||||
if not plexpy.CONFIG.PMS_IS_CLOUD and reconnects < plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
|
||||
if not jellypy.CONFIG.PMS_IS_CLOUD and reconnects < jellypy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
|
||||
reconnects += 1
|
||||
|
||||
# Sleep 5 between connection attempts
|
||||
if reconnects > 1:
|
||||
time.sleep(plexpy.CONFIG.WEBSOCKET_CONNECTION_TIMEOUT)
|
||||
time.sleep(jellypy.CONFIG.WEBSOCKET_CONNECTION_TIMEOUT)
|
||||
|
||||
logger.warn("Tautulli WebSocket :: Reconnection attempt %s." % str(reconnects))
|
||||
|
||||
try:
|
||||
plexpy.WEBSOCKET = create_connection(uri, header=header)
|
||||
jellypy.WEBSOCKET = create_connection(uri, header=header)
|
||||
logger.info("Tautulli WebSocket :: Ready")
|
||||
plexpy.WS_CONNECTED = True
|
||||
jellypy.WS_CONNECTED = True
|
||||
except (websocket.WebSocketException, IOError, Exception) as e:
|
||||
logger.error("Tautulli WebSocket :: %s.", e)
|
||||
|
||||
@@ -223,7 +212,7 @@ def run():
|
||||
close()
|
||||
break
|
||||
|
||||
if not plexpy.WS_CONNECTED and not ws_shutdown:
|
||||
if not jellypy.WS_CONNECTED and not ws_shutdown:
|
||||
on_disconnect()
|
||||
|
||||
logger.debug("Tautulli WebSocket :: Leaving thread.")
|
||||
@@ -16,30 +16,23 @@
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
# http://tools.cherrypy.org/wiki/AuthenticationAndAccessRestrictions
|
||||
# https://github.com/cherrypy/tools/blob/master/AuthenticationAndAccessRestrictions
|
||||
# Form based authentication for CherryPy. Requires the
|
||||
# Session tool to be loaded.
|
||||
|
||||
from future.builtins import object
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from future.moves.urllib.parse import quote, unquote
|
||||
from urllib.parse import quote, unquote
|
||||
|
||||
import cherrypy
|
||||
from hashing_passwords import check_hash
|
||||
import jwt
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import logger
|
||||
from database import MonitorDatabase
|
||||
from users import Users, refresh_users
|
||||
from plextv import PlexTV
|
||||
else:
|
||||
from plexpy import logger
|
||||
from plexpy.database import MonitorDatabase
|
||||
from plexpy.users import Users, refresh_users
|
||||
from plexpy.plextv import PlexTV
|
||||
import jellypy
|
||||
from jellypy import logger
|
||||
from jellypy.database import MonitorDatabase
|
||||
from jellypy.helpers import timestamp
|
||||
from jellypy.password import check_hash
|
||||
from jellypy.users import Users, refresh_users
|
||||
|
||||
# Monkey patch SameSite support into cookies.
|
||||
# https://stackoverflow.com/a/50813092
|
||||
@@ -50,28 +43,29 @@ except ImportError:
|
||||
Morsel._reserved[str('samesite')] = str('SameSite')
|
||||
|
||||
JWT_ALGORITHM = 'HS256'
|
||||
JWT_COOKIE_NAME = 'tautulli_token_'
|
||||
JWT_COOKIE_NAME = 'jellypy_token_'
|
||||
|
||||
|
||||
def plex_user_login(username=None, password=None, token=None, headers=None):
|
||||
user_token = None
|
||||
user_id = None
|
||||
|
||||
# Try to login to Plex.tv to check if the user has a valid account
|
||||
if username and password:
|
||||
plex_tv = PlexTV(username=username, password=password, headers=headers)
|
||||
plex_user = plex_tv.get_token()
|
||||
if plex_user:
|
||||
user_token = plex_user['auth_token']
|
||||
user_id = plex_user['user_id']
|
||||
elif token:
|
||||
plex_tv = PlexTV(token=token, headers=headers)
|
||||
plex_user = plex_tv.get_plex_account_details()
|
||||
if plex_user:
|
||||
user_token = token
|
||||
user_id = plex_user['user_id']
|
||||
else:
|
||||
return None
|
||||
# TODO: Jellyfin
|
||||
# # Try to login to Plex.tv to check if the user has a vaild account
|
||||
# if username and password:
|
||||
# plex_tv = PlexTV(username=username, password=password, headers=headers)
|
||||
# plex_user = plex_tv.get_token()
|
||||
# if plex_user:
|
||||
# user_token = plex_user['auth_token']
|
||||
# user_id = plex_user['user_id']
|
||||
# elif token:
|
||||
# plex_tv = PlexTV(token=token, headers=headers)
|
||||
# plex_user = plex_tv.get_plex_account_details()
|
||||
# if plex_user:
|
||||
# user_token = token
|
||||
# user_id = plex_user['user_id']
|
||||
# else:
|
||||
# return None
|
||||
|
||||
if user_token and user_id:
|
||||
# Try to retrieve the user from the database.
|
||||
@@ -81,7 +75,7 @@ def plex_user_login(username=None, password=None, token=None, headers=None):
|
||||
if user_id != str(user_details['user_id']):
|
||||
# The user is not in the database.
|
||||
return None
|
||||
elif plexpy.CONFIG.HTTP_PLEX_ADMIN and user_details['is_admin']:
|
||||
elif jellypy.CONFIG.HTTP_PLEX_ADMIN and user_details['is_admin']:
|
||||
# Plex admin login
|
||||
return user_details, 'admin'
|
||||
elif not user_details['allow_guest'] or user_details['deleted_user']:
|
||||
@@ -89,13 +83,15 @@ def plex_user_login(username=None, password=None, token=None, headers=None):
|
||||
return None
|
||||
|
||||
# Stop here if guest access is not enabled
|
||||
if not plexpy.CONFIG.ALLOW_GUEST_ACCESS:
|
||||
if not jellypy.CONFIG.ALLOW_GUEST_ACCESS:
|
||||
return None
|
||||
|
||||
# The user is in the database, and guest access is enabled, so try to retrieve a server token.
|
||||
# If a server token is returned, then the user is a valid friend of the server.
|
||||
plex_tv = PlexTV(token=user_token, headers=headers)
|
||||
server_token = plex_tv.get_server_token()
|
||||
# TODO: Jellyfin
|
||||
# # The user is in the database, and guest access is enabled, so try to retrieve a server token.
|
||||
# # If a server token is returned, then the user is a valid friend of the server.
|
||||
# plex_tv = PlexTV(token=user_token, headers=headers)
|
||||
# server_token = plex_tv.get_server_token()
|
||||
server_token = None
|
||||
if server_token:
|
||||
|
||||
# Register the new user / update the access tokens.
|
||||
@@ -137,17 +133,17 @@ def check_credentials(username=None, password=None, token=None, admin_login='0',
|
||||
Returns True and the user group on success or False and no user group"""
|
||||
|
||||
if username and password:
|
||||
if plexpy.CONFIG.HTTP_PASSWORD:
|
||||
if jellypy.CONFIG.HTTP_PASSWORD:
|
||||
user_details = {'user_id': None, 'username': username}
|
||||
|
||||
if plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == plexpy.CONFIG.HTTP_USERNAME and check_hash(password, plexpy.CONFIG.HTTP_PASSWORD):
|
||||
if jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == jellypy.CONFIG.HTTP_USERNAME and check_hash(password, jellypy.CONFIG.HTTP_PASSWORD):
|
||||
return True, user_details, 'admin'
|
||||
elif not plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
|
||||
elif not jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
|
||||
username == jellypy.CONFIG.HTTP_USERNAME and password == jellypy.CONFIG.HTTP_PASSWORD:
|
||||
return True, user_details, 'admin'
|
||||
|
||||
if plexpy.CONFIG.HTTP_PLEX_ADMIN or (not admin_login == '1' and plexpy.CONFIG.ALLOW_GUEST_ACCESS):
|
||||
if jellypy.CONFIG.HTTP_PLEX_ADMIN or (not admin_login == '1' and jellypy.CONFIG.ALLOW_GUEST_ACCESS):
|
||||
plex_login = plex_user_login(token=token, headers=headers)
|
||||
if plex_login is not None:
|
||||
return True, plex_login[0], plex_login[1]
|
||||
@@ -156,13 +152,13 @@ def check_credentials(username=None, password=None, token=None, admin_login='0',
|
||||
|
||||
|
||||
def check_jwt_token():
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
|
||||
jwt_token = cherrypy.request.cookie.get(jwt_cookie)
|
||||
|
||||
if jwt_token:
|
||||
try:
|
||||
payload = jwt.decode(
|
||||
jwt_token.value, plexpy.CONFIG.JWT_SECRET, leeway=timedelta(seconds=10), algorithms=[JWT_ALGORITHM]
|
||||
jwt_token.value, jellypy.CONFIG.JWT_SECRET, leeway=timedelta(seconds=10), algorithms=[JWT_ALGORITHM]
|
||||
)
|
||||
except (jwt.DecodeError, jwt.ExpiredSignatureError):
|
||||
return None
|
||||
@@ -184,19 +180,20 @@ def check_auth(*args, **kwargs):
|
||||
for condition in conditions:
|
||||
# A condition is just a callable that returns true or false
|
||||
if not condition():
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT)
|
||||
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT)
|
||||
|
||||
else:
|
||||
redirect_uri = cherrypy.request.wsgi_environ['REQUEST_URI']
|
||||
if redirect_uri:
|
||||
redirect_uri = '?redirect_uri=' + quote(redirect_uri)
|
||||
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/logout" + redirect_uri)
|
||||
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/logout" + redirect_uri)
|
||||
|
||||
|
||||
def requireAuth(*conditions):
|
||||
"""A decorator that appends conditions to the auth.require config
|
||||
variable."""
|
||||
|
||||
def decorate(f):
|
||||
if not hasattr(f, '_cp_config'):
|
||||
f._cp_config = dict()
|
||||
@@ -204,6 +201,7 @@ def requireAuth(*conditions):
|
||||
f._cp_config['auth.require'] = []
|
||||
f._cp_config['auth.require'].extend(conditions)
|
||||
return f
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
@@ -226,11 +224,13 @@ def name_is(user_name):
|
||||
|
||||
def any_of(*conditions):
|
||||
"""Returns True if any of the conditions match"""
|
||||
|
||||
def check():
|
||||
for c in conditions:
|
||||
if c():
|
||||
return True
|
||||
return False
|
||||
|
||||
return check
|
||||
|
||||
|
||||
@@ -238,22 +238,51 @@ def any_of(*conditions):
# needed if you want to use it inside of an any_of(...) condition
def all_of(*conditions):
"""Returns True if all of the conditions match"""

def check():
for c in conditions:
if not c():
return False
return True

return check


def check_rate_limit(ip_address):
monitor_db = MonitorDatabase()
result = monitor_db.select('SELECT timestamp, success FROM user_login '
'WHERE ip_address = ? '
'AND timestamp >= ( '
'SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END '
'FROM user_login WHERE ip_address = ? AND success = 1) '
'ORDER BY timestamp DESC',
[ip_address, ip_address])

try:
last_timestamp = result[0]['timestamp']
except IndexError:
last_timestamp = 0

try:
last_success = max(login['timestamp'] for login in result if login['success'])
except ValueError:
last_success = 0

max_timestamp = max(last_success, last_timestamp - jellypy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL)
attempts = [login for login in result if login['timestamp'] >= max_timestamp and not login['success']]

if len(attempts) >= jellypy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS:
return max(last_timestamp - (timestamp() - jellypy.CONFIG.HTTP_RATE_LIMIT_LOCKOUT_TIME), 0)

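To make the lockout arithmetic in check_rate_limit above easier to follow, here is a self-contained sketch of the same window logic over plain data (an editor's illustration only; the constants stand in for the jellypy.CONFIG.HTTP_RATE_LIMIT_* settings referenced in the hunk, and the values are made up):

```python
# Minimal sketch of the rate-limit window shown above, with placeholder settings.
import time

ATTEMPTS = 3             # stand-in for HTTP_RATE_LIMIT_ATTEMPTS
ATTEMPTS_INTERVAL = 300  # stand-in for HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL (seconds)
LOCKOUT_TIME = 600       # stand-in for HTTP_RATE_LIMIT_LOCKOUT_TIME (seconds)

def lockout_remaining(logins, now=None):
    """logins: list of {'timestamp': int, 'success': bool}, newest first."""
    now = now or int(time.time())
    if not logins:
        return 0
    last_timestamp = logins[0]['timestamp']
    last_success = max((l['timestamp'] for l in logins if l['success']), default=0)
    window_start = max(last_success, last_timestamp - ATTEMPTS_INTERVAL)
    failures = [l for l in logins if l['timestamp'] >= window_start and not l['success']]
    if len(failures) >= ATTEMPTS:
        # Seconds left before this client may try again.
        return max(last_timestamp - (now - LOCKOUT_TIME), 0)
    return 0

# Three failures in quick succession trip the lockout for roughly LOCKOUT_TIME seconds.
now = int(time.time())
print(lockout_remaining([{'timestamp': now - t, 'success': False} for t in (5, 10, 15)], now))  # ~595
```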
# Controller to provide login and logout actions
|
||||
|
||||
class AuthController(object):
|
||||
|
||||
def check_auth_enabled(self):
|
||||
if not plexpy.CONFIG.HTTP_BASIC_AUTH and plexpy.CONFIG.HTTP_PASSWORD:
|
||||
if not jellypy.CONFIG.HTTP_BASIC_AUTH and jellypy.CONFIG.HTTP_PASSWORD:
|
||||
return
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT)
|
||||
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT)
|
||||
|
||||
def on_login(self, username=None, user_id=None, user_group=None, success=False, oauth=False):
|
||||
"""Called on successful login"""
|
||||
@@ -281,12 +310,12 @@ class AuthController(object):
|
||||
logger.debug("Tautulli WebAuth :: %s user '%s' logged out of Tautulli." % (user_group.capitalize(), username))
|
||||
|
||||
def get_loginform(self, redirect_uri=''):
|
||||
from plexpy.webserve import serve_template
|
||||
from jellypy.webserve import serve_template
|
||||
return serve_template(templatename="login.html", title="Login", redirect_uri=unquote(redirect_uri))
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, *args, **kwargs):
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/login")
|
||||
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/login")
|
||||
|
||||
@cherrypy.expose
|
||||
def login(self, redirect_uri='', *args, **kwargs):
|
||||
@@ -302,12 +331,12 @@ class AuthController(object):
|
||||
if payload:
|
||||
self.on_logout(payload['user'], payload['user_group'])
|
||||
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
|
||||
cherrypy.response.cookie[jwt_cookie] = ''
|
||||
cherrypy.response.cookie[jwt_cookie]['expires'] = 0
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = plexpy.HTTP_ROOT.rstrip('/') or '/'
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = jellypy.HTTP_ROOT.rstrip('/') or '/'
|
||||
|
||||
if plexpy.HTTP_ROOT != '/':
|
||||
if jellypy.HTTP_ROOT != '/':
|
||||
# Also expire the JWT on the root path
|
||||
cherrypy.response.headers['Set-Cookie'] = jwt_cookie + '=""; expires=Thu, 01 Jan 1970 12:00:00 GMT; path=/'
|
||||
|
||||
@@ -316,7 +345,7 @@ class AuthController(object):
|
||||
if redirect_uri:
|
||||
redirect_uri = '?redirect_uri=' + redirect_uri
|
||||
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/login" + redirect_uri)
|
||||
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/login" + redirect_uri)
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -325,6 +354,16 @@ class AuthController(object):
|
||||
cherrypy.response.status = 405
|
||||
return {'status': 'error', 'message': 'Sign in using POST.'}
|
||||
|
||||
ip_address = cherrypy.request.remote.ip
|
||||
rate_limit = check_rate_limit(ip_address)
|
||||
|
||||
if rate_limit:
|
||||
logger.debug("Tautulli WebAuth :: Too many incorrect login attempts from '%s'." % ip_address)
|
||||
error_message = {'status': 'error', 'message': 'Too many login attempts.'}
|
||||
cherrypy.response.status = 429
|
||||
cherrypy.response.headers['Retry-After'] = rate_limit
|
||||
return error_message
|
||||
|
||||
error_message = {'status': 'error', 'message': 'Invalid credentials.'}
|
||||
|
||||
valid_login, user_details, user_group = check_credentials(username=username,
|
||||
@@ -344,7 +383,7 @@ class AuthController(object):
|
||||
'exp': expiry
|
||||
}
|
||||
|
||||
jwt_token = jwt.encode(payload, plexpy.CONFIG.JWT_SECRET, algorithm=JWT_ALGORITHM).decode('utf-8')
|
||||
jwt_token = jwt.encode(payload, jellypy.CONFIG.JWT_SECRET, algorithm=JWT_ALGORITHM).decode('utf-8')
|
||||
|
||||
self.on_login(username=user_details['username'],
|
||||
user_id=user_details['user_id'],
|
||||
@@ -352,16 +391,16 @@ class AuthController(object):
|
||||
success=True,
|
||||
oauth=bool(token))
|
||||
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
|
||||
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
|
||||
cherrypy.response.cookie[jwt_cookie] = jwt_token
|
||||
cherrypy.response.cookie[jwt_cookie]['expires'] = int(time_delta.total_seconds())
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = plexpy.HTTP_ROOT.rstrip('/') or '/'
|
||||
cherrypy.response.cookie[jwt_cookie]['path'] = jellypy.HTTP_ROOT.rstrip('/') or '/'
|
||||
cherrypy.response.cookie[jwt_cookie]['httponly'] = True
|
||||
cherrypy.response.cookie[jwt_cookie]['samesite'] = 'lax'
|
||||
|
||||
cherrypy.request.login = payload
|
||||
cherrypy.response.status = 200
|
||||
return {'status': 'success', 'token': jwt_token, 'uuid': plexpy.CONFIG.PMS_UUID}
|
||||
return {'status': 'success', 'token': jwt_token, 'uuid': jellypy.CONFIG.PMS_UUID}
|
||||
|
||||
elif admin_login == '1' and username:
|
||||
self.on_login(username=username)
|
||||
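The sign-in and check_jwt_token hunks above both go through PyJWT, and the cookie carrying the token is renamed from tautulli_token_&lt;uuid&gt; to jellypy_token_&lt;uuid&gt;, so existing sessions will effectively require a fresh login. A minimal sketch of the encode/decode round trip (editor's illustration only; the secret and claims are placeholders, and the bytes/str check covers the PyJWT 1.x vs 2.x difference behind the .decode('utf-8') in the diff):

```python
# Hedged sketch of the JWT round trip used by the webauth hunks above.
from datetime import datetime, timedelta
import jwt

JWT_ALGORITHM = 'HS256'
secret = 'not-a-real-secret'  # stand-in for jellypy.CONFIG.JWT_SECRET

time_delta = timedelta(days=30)
payload = {
    'user_id': 1,
    'user': 'admin',
    'user_group': 'admin',
    'exp': datetime.utcnow() + time_delta,
}

token = jwt.encode(payload, secret, algorithm=JWT_ALGORITHM)
if isinstance(token, bytes):  # PyJWT 1.x returns bytes, 2.x returns str
    token = token.decode('utf-8')

decoded = jwt.decode(token, secret, leeway=timedelta(seconds=10), algorithms=[JWT_ALGORITHM])
print(decoded['user'], decoded['user_group'])
```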
File diff suppressed because it is too large
@@ -20,34 +20,28 @@ import sys
|
||||
|
||||
import cherrypy
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import logger
|
||||
import webauth
|
||||
from helpers import create_https_certificates
|
||||
from webserve import WebInterface, BaseRedirect
|
||||
else:
|
||||
from plexpy import logger
|
||||
from plexpy import webauth
|
||||
from plexpy.helpers import create_https_certificates
|
||||
from plexpy.webserve import WebInterface, BaseRedirect
|
||||
import jellypy
|
||||
from jellypy import logger
|
||||
from jellypy import webauth
|
||||
from jellypy.helpers import create_https_certificates
|
||||
from jellypy.webserve import WebInterface, BaseRedirect
|
||||
|
||||
|
||||
def start():
|
||||
logger.info("Tautulli WebStart :: Initializing Tautulli web server...")
|
||||
web_config = {
|
||||
'http_port': plexpy.HTTP_PORT,
|
||||
'http_host': plexpy.CONFIG.HTTP_HOST,
|
||||
'http_root': plexpy.CONFIG.HTTP_ROOT,
|
||||
'http_environment': plexpy.CONFIG.HTTP_ENVIRONMENT,
|
||||
'http_proxy': plexpy.CONFIG.HTTP_PROXY,
|
||||
'enable_https': plexpy.CONFIG.ENABLE_HTTPS,
|
||||
'https_cert': plexpy.CONFIG.HTTPS_CERT,
|
||||
'https_cert_chain': plexpy.CONFIG.HTTPS_CERT_CHAIN,
|
||||
'https_key': plexpy.CONFIG.HTTPS_KEY,
|
||||
'http_username': plexpy.CONFIG.HTTP_USERNAME,
|
||||
'http_password': plexpy.CONFIG.HTTP_PASSWORD,
|
||||
'http_basic_auth': plexpy.CONFIG.HTTP_BASIC_AUTH
|
||||
'http_port': jellypy.HTTP_PORT,
|
||||
'http_host': jellypy.CONFIG.HTTP_HOST,
|
||||
'http_root': jellypy.CONFIG.HTTP_ROOT,
|
||||
'http_environment': jellypy.CONFIG.HTTP_ENVIRONMENT,
|
||||
'http_proxy': jellypy.CONFIG.HTTP_PROXY,
|
||||
'enable_https': jellypy.CONFIG.ENABLE_HTTPS,
|
||||
'https_cert': jellypy.CONFIG.HTTPS_CERT,
|
||||
'https_cert_chain': jellypy.CONFIG.HTTPS_CERT_CHAIN,
|
||||
'https_key': jellypy.CONFIG.HTTPS_KEY,
|
||||
'http_username': jellypy.CONFIG.HTTP_USERNAME,
|
||||
'http_password': jellypy.CONFIG.HTTP_PASSWORD,
|
||||
'http_basic_auth': jellypy.CONFIG.HTTP_BASIC_AUTH
|
||||
}
|
||||
initialize(web_config)
|
||||
|
||||
@@ -64,7 +58,6 @@ def restart():
|
||||
|
||||
|
||||
def initialize(options):
|
||||
|
||||
# HTTPS stuff stolen from sickbeard
|
||||
enable_https = options['enable_https']
|
||||
https_cert = options['https_cert']
|
||||
@@ -73,7 +66,7 @@ def initialize(options):

if enable_https:
# If either the HTTPS certificate or key do not exist, try to make self-signed ones.
if plexpy.CONFIG.HTTPS_CREATE_CERT and \
if jellypy.CONFIG.HTTPS_CREATE_CERT and \
(not (https_cert and os.path.exists(https_cert)) or
not (https_key and os.path.exists(https_key))):
if not create_https_certificates(https_cert, https_key):
@@ -96,7 +89,7 @@ def initialize(options):
|
||||
'tools.decode.on': True
|
||||
}
|
||||
|
||||
if plexpy.DEV:
|
||||
if jellypy.DEV:
|
||||
options_dict['environment'] = "test_suite"
|
||||
options_dict['engine.autoreload.on'] = True
|
||||
|
||||
@@ -114,39 +107,39 @@ def initialize(options):
|
||||
|
||||
if options['http_password']:
|
||||
login_allowed = ["Tautulli admin (username is '%s')" % options['http_username']]
|
||||
if plexpy.CONFIG.HTTP_PLEX_ADMIN:
|
||||
if jellypy.CONFIG.HTTP_PLEX_ADMIN:
|
||||
login_allowed.append("Plex admin")
|
||||
|
||||
logger.info("Tautulli WebStart :: Web server authentication is enabled: %s.", ' and '.join(login_allowed))
|
||||
|
||||
if options['http_basic_auth']:
|
||||
plexpy.AUTH_ENABLED = False
|
||||
jellypy.AUTH_ENABLED = False
|
||||
basic_auth_enabled = True
|
||||
else:
|
||||
plexpy.AUTH_ENABLED = True
|
||||
jellypy.AUTH_ENABLED = True
|
||||
basic_auth_enabled = False
|
||||
cherrypy.tools.auth = cherrypy.Tool('before_handler', webauth.check_auth, priority=2)
|
||||
else:
|
||||
plexpy.AUTH_ENABLED = False
|
||||
jellypy.AUTH_ENABLED = False
|
||||
basic_auth_enabled = False
|
||||
|
||||
if options['http_root'].strip('/'):
|
||||
plexpy.HTTP_ROOT = options['http_root'] = '/' + str(options['http_root'].strip('/')) + '/'
|
||||
jellypy.HTTP_ROOT = options['http_root'] = '/' + str(options['http_root'].strip('/')) + '/'
|
||||
else:
|
||||
plexpy.HTTP_ROOT = options['http_root'] = '/'
|
||||
jellypy.HTTP_ROOT = options['http_root'] = '/'
|
||||
|
||||
cherrypy.config.update(options_dict)
|
||||
|
||||
conf = {
|
||||
'/': {
|
||||
'engine.timeout_monitor.on': False,
|
||||
'tools.staticdir.root': os.path.join(plexpy.PROG_DIR, 'data'),
|
||||
'tools.staticdir.root': os.path.join(jellypy.PROG_DIR, 'data'),
|
||||
'tools.proxy.on': bool(options['http_proxy']),
|
||||
'tools.gzip.on': True,
|
||||
'tools.gzip.mime_types': ['text/html', 'text/plain', 'text/css',
|
||||
'text/javascript', 'application/json',
|
||||
'application/javascript'],
|
||||
'tools.auth.on': plexpy.AUTH_ENABLED,
|
||||
'tools.auth.on': jellypy.AUTH_ENABLED,
|
||||
'tools.auth_basic.on': basic_auth_enabled,
|
||||
'tools.auth_basic.realm': 'Tautulli web server',
|
||||
'tools.auth_basic.checkpassword': cherrypy.lib.auth_basic.checkpassword_dict({
|
||||
@@ -216,7 +209,7 @@ def initialize(options):
|
||||
},
|
||||
'/cache': {
|
||||
'tools.staticdir.on': True,
|
||||
'tools.staticdir.dir': plexpy.CONFIG.CACHE_DIR,
|
||||
'tools.staticdir.dir': jellypy.CONFIG.CACHE_DIR,
|
||||
'tools.caching.on': True,
|
||||
'tools.caching.force': True,
|
||||
'tools.caching.delay': 0,
|
||||
@@ -225,9 +218,9 @@ def initialize(options):
|
||||
'tools.sessions.on': False,
|
||||
'tools.auth.on': False
|
||||
},
|
||||
#'/pms_image_proxy': {
|
||||
# '/pms_image_proxy': {
|
||||
# 'tools.staticdir.on': True,
|
||||
# 'tools.staticdir.dir': os.path.join(plexpy.CONFIG.CACHE_DIR, 'images'),
|
||||
# 'tools.staticdir.dir': os.path.join(jellypy.CONFIG.CACHE_DIR, 'images'),
|
||||
# 'tools.caching.on': True,
|
||||
# 'tools.caching.force': True,
|
||||
# 'tools.caching.delay': 0,
|
||||
@@ -235,10 +228,11 @@ def initialize(options):
|
||||
# 'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
|
||||
# 'tools.auth.on': False,
|
||||
# 'tools.sessions.on': False
|
||||
#},
|
||||
# },
|
||||
'/favicon.ico': {
|
||||
'tools.staticfile.on': True,
|
||||
'tools.staticfile.filename': os.path.abspath(os.path.join(plexpy.PROG_DIR, 'data/interfaces/default/images/favicon/favicon.ico')),
|
||||
'tools.staticfile.filename': os.path.abspath(
|
||||
os.path.join(jellypy.PROG_DIR, 'data/interfaces/default/images/favicon/favicon.ico')),
|
||||
'tools.caching.on': True,
|
||||
'tools.caching.force': True,
|
||||
'tools.caching.delay': 0,
|
||||
@@ -250,14 +244,14 @@ def initialize(options):
|
||||
}
|
||||
|
||||
cherrypy.tree.mount(WebInterface(), options['http_root'], config=conf)
|
||||
if plexpy.HTTP_ROOT != '/':
|
||||
if jellypy.HTTP_ROOT != '/':
|
||||
cherrypy.tree.mount(BaseRedirect(), '/')
|
||||
|
||||
try:
|
||||
logger.info("Tautulli WebStart :: Starting Tautulli web server on %s://%s:%d%s", protocol,
|
||||
options['http_host'], options['http_port'], options['http_root'])
|
||||
#cherrypy.process.servers.check_port(str(options['http_host']), options['http_port'])
|
||||
if not plexpy.DEV:
|
||||
# cherrypy.process.servers.check_port(str(options['http_host']), options['http_port'])
|
||||
if not jellypy.DEV:
|
||||
cherrypy.server.start()
|
||||
else:
|
||||
cherrypy.engine.signals.subscribe()
|
||||
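For context on the webstart changes above, a stripped-down sketch of the CherryPy mount-and-config pattern they revolve around (an editor's illustration, not the project's code; the class, paths, and port are placeholders):

```python
# Hedged sketch of a CherryPy mount with a per-path config dict, as in initialize() above.
import os
import cherrypy

class Root(object):
    @cherrypy.expose
    def index(self):
        return "Hello from a minimal JellyPy-style mount"

conf = {
    '/': {
        'tools.gzip.on': True,
        'tools.staticdir.root': os.path.abspath('data'),  # placeholder for jellypy.PROG_DIR/data
    },
    '/images': {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': 'images',  # served relative to staticdir.root
    },
}

cherrypy.config.update({'server.socket_host': '0.0.0.0', 'server.socket_port': 8181})
cherrypy.tree.mount(Root(), '/', config=conf)

if __name__ == '__main__':
    cherrypy.engine.start()
    cherrypy.engine.block()
```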
1652 lib/IPy.py
File diff suppressed because it is too large
@@ -1,121 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
###############################################################################
|
||||
# Formatting filter for urllib2's HTTPHandler(debuglevel=1) output
|
||||
# Copyright (c) 2013, Analytics Pros
|
||||
#
|
||||
# This project is free software, distributed under the BSD license.
|
||||
# Analytics Pros offers consulting and integration services if your firm needs
|
||||
# assistance in strategy, implementation, or auditing existing work.
|
||||
###############################################################################
|
||||
|
||||
|
||||
import sys, re, os
|
||||
from io import StringIO
|
||||
|
||||
|
||||
|
||||
class BufferTranslator(object):
|
||||
""" Provides a buffer-compatible interface for filtering buffer content.
|
||||
"""
|
||||
parsers = []
|
||||
|
||||
def __init__(self, output):
|
||||
self.output = output
|
||||
self.encoding = getattr(output, 'encoding', None)
|
||||
|
||||
def write(self, content):
|
||||
content = self.translate(content)
|
||||
self.output.write(content)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def stripslashes(content):
|
||||
return content.decode('string_escape')
|
||||
|
||||
@staticmethod
|
||||
def addslashes(content):
|
||||
return content.encode('string_escape')
|
||||
|
||||
def translate(self, line):
|
||||
for pattern, method in self.parsers:
|
||||
match = pattern.match(line)
|
||||
if match:
|
||||
return method(match)
|
||||
|
||||
return line
|
||||
|
||||
|
||||
|
||||
class LineBufferTranslator(BufferTranslator):
|
||||
""" Line buffer implementation supports translation of line-format input
|
||||
even when input is not already line-buffered. Caches input until newlines
|
||||
occur, and then dispatches translated input to output buffer.
|
||||
"""
|
||||
def __init__(self, *a, **kw):
|
||||
self._linepending = []
|
||||
super(LineBufferTranslator, self).__init__(*a, **kw)
|
||||
|
||||
def write(self, _input):
|
||||
lines = _input.splitlines(True)
|
||||
for i in range(0, len(lines)):
|
||||
last = i
|
||||
if lines[i].endswith('\n'):
|
||||
prefix = len(self._linepending) and ''.join(self._linepending) or ''
|
||||
self.output.write(self.translate(prefix + lines[i]))
|
||||
del self._linepending[0:]
|
||||
last = -1
|
||||
|
||||
if last >= 0:
|
||||
self._linepending.append(lines[ last ])
|
||||
|
||||
|
||||
def __del__(self):
|
||||
if len(self._linepending):
|
||||
self.output.write(self.translate(''.join(self._linepending)))
|
||||
|
||||
|
||||
class HTTPTranslator(LineBufferTranslator):
|
||||
""" Translates output from |urllib2| HTTPHandler(debuglevel = 1) into
|
||||
HTTP-compatible, readible text structures for human analysis.
|
||||
"""
|
||||
|
||||
RE_LINE_PARSER = re.compile(r'^(?:([a-z]+):)\s*(\'?)([^\r\n]*)\2(?:[\r\n]*)$')
|
||||
RE_LINE_BREAK = re.compile(r'(\r?\n|(?:\\r)?\\n)')
|
||||
RE_HTTP_METHOD = re.compile(r'^(POST|GET|HEAD|DELETE|PUT|TRACE|OPTIONS)')
|
||||
RE_PARAMETER_SPACER = re.compile(r'&([a-z0-9]+)=')
|
||||
|
||||
@classmethod
|
||||
def spacer(cls, line):
|
||||
return cls.RE_PARAMETER_SPACER.sub(r' &\1= ', line)
|
||||
|
||||
def translate(self, line):
|
||||
|
||||
parsed = self.RE_LINE_PARSER.match(line)
|
||||
|
||||
if parsed:
|
||||
value = parsed.group(3)
|
||||
stage = parsed.group(1)
|
||||
|
||||
if stage == 'send': # query string is rendered here
|
||||
return '\n# HTTP Request:\n' + self.stripslashes(value)
|
||||
elif stage == 'reply':
|
||||
return '\n\n# HTTP Response:\n' + self.stripslashes(value)
|
||||
elif stage == 'header':
|
||||
return value + '\n'
|
||||
else:
|
||||
return value
|
||||
|
||||
|
||||
return line
|
||||
|
||||
|
||||
def consume(outbuffer = None): # Capture standard output
|
||||
sys.stdout = HTTPTranslator(outbuffer or sys.stdout)
|
||||
return sys.stdout
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
consume(sys.stdout).write(sys.stdin.read())
|
||||
print('\n')
|
||||
|
||||
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4
|
||||
@@ -1,424 +0,0 @@
|
||||
from future.moves.urllib.request import urlopen, build_opener, install_opener
|
||||
from future.moves.urllib.request import Request, HTTPSHandler
|
||||
from future.moves.urllib.error import URLError, HTTPError
|
||||
from future.moves.urllib.parse import urlencode
|
||||
|
||||
import random
|
||||
import datetime
|
||||
import time
|
||||
import uuid
|
||||
import hashlib
|
||||
import socket
|
||||
|
||||
|
||||
def generate_uuid(basedata=None):
|
||||
""" Provides a _random_ UUID with no input, or a UUID4-format MD5 checksum of any input data provided """
|
||||
if basedata is None:
|
||||
return str(uuid.uuid4())
|
||||
elif isinstance(basedata, str):
|
||||
checksum = hashlib.md5(str(basedata).encode('utf-8')).hexdigest()
|
||||
return '%8s-%4s-%4s-%4s-%12s' % (
|
||||
checksum[0:8], checksum[8:12], checksum[12:16], checksum[16:20], checksum[20:32])
|
||||
|
||||
|
||||
class Time(datetime.datetime):
|
||||
""" Wrappers and convenience methods for processing various time representations """
|
||||
|
||||
@classmethod
|
||||
def from_unix(cls, seconds, milliseconds=0):
|
||||
""" Produce a full |datetime.datetime| object from a Unix timestamp """
|
||||
base = list(time.gmtime(seconds))[0:6]
|
||||
base.append(milliseconds * 1000) # microseconds
|
||||
return cls(*base)
|
||||
|
||||
@classmethod
|
||||
def to_unix(cls, timestamp):
|
||||
""" Wrapper over time module to produce Unix epoch time as a float """
|
||||
if not isinstance(timestamp, datetime.datetime):
|
||||
raise TypeError('Time.milliseconds expects a datetime object')
|
||||
base = time.mktime(timestamp.timetuple())
|
||||
return base
|
||||
|
||||
@classmethod
|
||||
def milliseconds_offset(cls, timestamp, now=None):
|
||||
""" Offset time (in milliseconds) from a |datetime.datetime| object to now """
|
||||
if isinstance(timestamp, (int, float)):
|
||||
base = timestamp
|
||||
else:
|
||||
base = cls.to_unix(timestamp)
|
||||
base = base + (timestamp.microsecond / 1000000)
|
||||
if now is None:
|
||||
now = time.time()
|
||||
return (now - base) * 1000
|
||||
|
||||
|
||||
class HTTPRequest(object):
|
||||
""" URL Construction and request handling abstraction.
|
||||
This is not intended to be used outside this module.
|
||||
|
||||
Automates mapping of persistent state (i.e. query parameters)
|
||||
onto transcient datasets for each query.
|
||||
"""
|
||||
|
||||
endpoint = 'https://www.google-analytics.com/collect'
|
||||
|
||||
@staticmethod
|
||||
def debug():
|
||||
""" Activate debugging on urllib2 """
|
||||
handler = HTTPSHandler(debuglevel=1)
|
||||
opener = build_opener(handler)
|
||||
install_opener(opener)
|
||||
|
||||
# Store properties for all requests
|
||||
def __init__(self, user_agent=None, *args, **opts):
|
||||
self.user_agent = user_agent or 'Analytics Pros - Universal Analytics (Python)'
|
||||
|
||||
@classmethod
|
||||
def fixUTF8(cls, data): # Ensure proper encoding for UA's servers...
|
||||
""" Convert all strings to UTF-8 """
|
||||
for key in data:
|
||||
if isinstance(data[key], str):
|
||||
data[key] = data[key].encode('utf-8')
|
||||
return data
|
||||
|
||||
# Apply stored properties to the given dataset & POST to the configured endpoint
|
||||
def send(self, data):
|
||||
request = Request(
|
||||
self.endpoint + '?' + urlencode(self.fixUTF8(data)).encode('utf-8'),
|
||||
headers={
|
||||
'User-Agent': self.user_agent
|
||||
}
|
||||
)
|
||||
self.open(request)
|
||||
|
||||
def open(self, request):
|
||||
try:
|
||||
return urlopen(request)
|
||||
except HTTPError as e:
|
||||
return False
|
||||
except URLError as e:
|
||||
self.cache_request(request)
|
||||
return False
|
||||
|
||||
def cache_request(self, request):
|
||||
# TODO: implement a proper caching mechanism here for re-transmitting hits
|
||||
# record = (Time.now(), request.get_full_url(), request.get_data(), request.headers)
|
||||
pass
|
||||
|
||||
|
||||
class HTTPPost(HTTPRequest):
|
||||
|
||||
# Apply stored properties to the given dataset & POST to the configured endpoint
|
||||
def send(self, data):
|
||||
request = Request(
|
||||
self.endpoint,
|
||||
data=urlencode(self.fixUTF8(data)).encode('utf-8'),
|
||||
headers={
|
||||
'User-Agent': self.user_agent
|
||||
}
|
||||
)
|
||||
self.open(request)
|
||||
|
||||
|
||||
class Tracker(object):
|
||||
""" Primary tracking interface for Universal Analytics """
|
||||
params = None
|
||||
parameter_alias = {}
|
||||
valid_hittypes = ('pageview', 'event', 'social', 'screenview', 'transaction', 'item', 'exception', 'timing')
|
||||
|
||||
@classmethod
|
||||
def alias(cls, typemap, base, *names):
|
||||
""" Declare an alternate (humane) name for a measurement protocol parameter """
|
||||
cls.parameter_alias[base] = (typemap, base)
|
||||
for i in names:
|
||||
cls.parameter_alias[i] = (typemap, base)
|
||||
|
||||
@classmethod
|
||||
def coerceParameter(cls, name, value=None):
|
||||
if isinstance(name, str) and name[0] == '&':
|
||||
return name[1:], str(value)
|
||||
elif name in cls.parameter_alias:
|
||||
typecast, param_name = cls.parameter_alias.get(name)
|
||||
return param_name, typecast(value)
|
||||
else:
|
||||
raise KeyError('Parameter "{0}" is not recognized'.format(name))
|
||||
|
||||
def payload(self, data):
|
||||
for key, value in data.items():
|
||||
try:
|
||||
yield self.coerceParameter(key, value)
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
option_sequence = {
|
||||
'pageview': [(str, 'dp')],
|
||||
'event': [(str, 'ec'), (str, 'ea'), (str, 'el'), (int, 'ev')],
|
||||
'social': [(str, 'sn'), (str, 'sa'), (str, 'st')],
|
||||
'timing': [(str, 'utc'), (str, 'utv'), (str, 'utt'), (str, 'utl')]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def consume_options(cls, data, hittype, args):
|
||||
""" Interpret sequential arguments related to known hittypes based on declared structures """
|
||||
opt_position = 0
|
||||
data['t'] = hittype # integrate hit type parameter
|
||||
if hittype in cls.option_sequence:
|
||||
for expected_type, optname in cls.option_sequence[hittype]:
|
||||
if opt_position < len(args) and isinstance(args[opt_position], expected_type):
|
||||
data[optname] = args[opt_position]
|
||||
opt_position += 1
|
||||
|
||||
@classmethod
|
||||
def hittime(cls, timestamp=None, age=None, milliseconds=None):
|
||||
""" Returns an integer represeting the milliseconds offset for a given hit (relative to now) """
|
||||
if isinstance(timestamp, (int, float)):
|
||||
return int(Time.milliseconds_offset(Time.from_unix(timestamp, milliseconds=milliseconds)))
|
||||
if isinstance(timestamp, datetime.datetime):
|
||||
return int(Time.milliseconds_offset(timestamp))
|
||||
if isinstance(age, (int, float)):
|
||||
return int(age * 1000) + (milliseconds or 0)
|
||||
|
||||
@property
|
||||
def account(self):
|
||||
return self.params.get('tid', None)
|
||||
|
||||
def __init__(self, account, name=None, client_id=None, hash_client_id=False, user_id=None, user_agent=None,
|
||||
use_post=True):
|
||||
|
||||
if use_post is False:
|
||||
self.http = HTTPRequest(user_agent=user_agent)
|
||||
else:
|
||||
self.http = HTTPPost(user_agent=user_agent)
|
||||
|
||||
self.params = {'v': 1, 'tid': account}
|
||||
|
||||
if client_id is None:
|
||||
client_id = generate_uuid()
|
||||
|
||||
self.params['cid'] = client_id
|
||||
|
||||
self.hash_client_id = hash_client_id
|
||||
|
||||
if user_id is not None:
|
||||
self.params['uid'] = user_id
|
||||
|
||||
def set_timestamp(self, data):
|
||||
""" Interpret time-related options, apply queue-time parameter as needed """
|
||||
if 'hittime' in data: # an absolute timestamp
|
||||
data['qt'] = self.hittime(timestamp=data.pop('hittime', None))
|
||||
if 'hitage' in data: # a relative age (in seconds)
|
||||
data['qt'] = self.hittime(age=data.pop('hitage', None))
|
||||
|
||||
def send(self, hittype, *args, **data):
|
||||
""" Transmit HTTP requests to Google Analytics using the measurement protocol """
|
||||
|
||||
if hittype not in self.valid_hittypes:
|
||||
raise KeyError('Unsupported Universal Analytics Hit Type: {0}'.format(repr(hittype)))
|
||||
|
||||
self.set_timestamp(data)
|
||||
self.consume_options(data, hittype, args)
|
||||
|
||||
for item in args: # process dictionary-object arguments of transcient data
|
||||
if isinstance(item, dict):
|
||||
for key, val in self.payload(item):
|
||||
data[key] = val
|
||||
|
||||
for k, v in self.params.items(): # update only absent parameters
|
||||
if k not in data:
|
||||
data[k] = v
|
||||
|
||||
data = dict(self.payload(data))
|
||||
|
||||
if self.hash_client_id:
|
||||
data['cid'] = generate_uuid(data['cid'])
|
||||
|
||||
# Transmit the hit to Google...
|
||||
self.http.send(data)
|
||||
|
||||
# Setting persistent attibutes of the session/hit/etc (inc. custom dimensions/metrics)
|
||||
def set(self, name, value=None):
|
||||
if isinstance(name, dict):
|
||||
for key, value in name.items():
|
||||
try:
|
||||
param, value = self.coerceParameter(key, value)
|
||||
self.params[param] = value
|
||||
except KeyError:
|
||||
pass
|
||||
elif isinstance(name, str):
|
||||
try:
|
||||
param, value = self.coerceParameter(name, value)
|
||||
self.params[param] = value
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def __getitem__(self, name):
|
||||
param, value = self.coerceParameter(name, None)
|
||||
return self.params.get(param, None)
|
||||
|
||||
def __setitem__(self, name, value):
|
||||
param, value = self.coerceParameter(name, value)
|
||||
self.params[param] = value
|
||||
|
||||
def __delitem__(self, name):
|
||||
param, value = self.coerceParameter(name, None)
|
||||
if param in self.params:
|
||||
del self.params[param]
|
||||
|
||||
|
||||
def safe_unicode(obj):
|
||||
""" Safe convertion to the Unicode string version of the object """
|
||||
try:
|
||||
return str(obj)
|
||||
except UnicodeDecodeError:
|
||||
return obj.decode('utf-8')
|
||||
|
||||
|
||||
# Declaring name mappings for Measurement Protocol parameters
|
||||
MAX_CUSTOM_DEFINITIONS = 200
|
||||
MAX_EC_LISTS = 11 # 1-based index
|
||||
MAX_EC_PRODUCTS = 11 # 1-based index
|
||||
MAX_EC_PROMOTIONS = 11 # 1-based index
|
||||
|
||||
Tracker.alias(int, 'v', 'protocol-version')
|
||||
Tracker.alias(safe_unicode, 'cid', 'client-id', 'clientId', 'clientid')
|
||||
Tracker.alias(safe_unicode, 'tid', 'trackingId', 'account')
|
||||
Tracker.alias(safe_unicode, 'uid', 'user-id', 'userId', 'userid')
|
||||
Tracker.alias(safe_unicode, 'uip', 'user-ip', 'userIp', 'ipaddr')
|
||||
Tracker.alias(safe_unicode, 'ua', 'userAgent', 'userAgentOverride', 'user-agent')
|
||||
Tracker.alias(safe_unicode, 'dp', 'page', 'path')
|
||||
Tracker.alias(safe_unicode, 'dt', 'title', 'pagetitle', 'pageTitle' 'page-title')
|
||||
Tracker.alias(safe_unicode, 'dl', 'location')
|
||||
Tracker.alias(safe_unicode, 'dh', 'hostname')
|
||||
Tracker.alias(safe_unicode, 'sc', 'sessioncontrol', 'session-control', 'sessionControl')
|
||||
Tracker.alias(safe_unicode, 'dr', 'referrer', 'referer')
|
||||
Tracker.alias(int, 'qt', 'queueTime', 'queue-time')
|
||||
Tracker.alias(safe_unicode, 't', 'hitType', 'hittype')
|
||||
Tracker.alias(int, 'aip', 'anonymizeIp', 'anonIp', 'anonymize-ip')
|
||||
Tracker.alias(safe_unicode, 'ds', 'dataSource', 'data-source')
|
||||
|
||||
# Campaign attribution
|
||||
Tracker.alias(safe_unicode, 'cn', 'campaign', 'campaignName', 'campaign-name')
|
||||
Tracker.alias(safe_unicode, 'cs', 'source', 'campaignSource', 'campaign-source')
|
||||
Tracker.alias(safe_unicode, 'cm', 'medium', 'campaignMedium', 'campaign-medium')
|
||||
Tracker.alias(safe_unicode, 'ck', 'keyword', 'campaignKeyword', 'campaign-keyword')
|
||||
Tracker.alias(safe_unicode, 'cc', 'content', 'campaignContent', 'campaign-content')
|
||||
Tracker.alias(safe_unicode, 'ci', 'campaignId', 'campaignID', 'campaign-id')
|
||||
|
||||
# Technical specs
|
||||
Tracker.alias(safe_unicode, 'sr', 'screenResolution', 'screen-resolution', 'resolution')
|
||||
Tracker.alias(safe_unicode, 'vp', 'viewport', 'viewportSize', 'viewport-size')
|
||||
Tracker.alias(safe_unicode, 'de', 'encoding', 'documentEncoding', 'document-encoding')
|
||||
Tracker.alias(int, 'sd', 'colors', 'screenColors', 'screen-colors')
|
||||
Tracker.alias(safe_unicode, 'ul', 'language', 'user-language', 'userLanguage')
|
||||
|
||||
# Mobile app
|
||||
Tracker.alias(safe_unicode, 'an', 'appName', 'app-name', 'app')
|
||||
Tracker.alias(safe_unicode, 'cd', 'contentDescription', 'screenName', 'screen-name', 'content-description')
|
||||
Tracker.alias(safe_unicode, 'av', 'appVersion', 'app-version', 'version')
|
||||
Tracker.alias(safe_unicode, 'aid', 'appID', 'appId', 'application-id', 'app-id', 'applicationId')
|
||||
Tracker.alias(safe_unicode, 'aiid', 'appInstallerId', 'app-installer-id')
|
||||
|
||||
# Ecommerce
|
||||
Tracker.alias(safe_unicode, 'ta', 'affiliation', 'transactionAffiliation', 'transaction-affiliation')
|
||||
Tracker.alias(safe_unicode, 'ti', 'transaction', 'transactionId', 'transaction-id')
|
||||
Tracker.alias(float, 'tr', 'revenue', 'transactionRevenue', 'transaction-revenue')
|
||||
Tracker.alias(float, 'ts', 'shipping', 'transactionShipping', 'transaction-shipping')
|
||||
Tracker.alias(float, 'tt', 'tax', 'transactionTax', 'transaction-tax')
|
||||
Tracker.alias(safe_unicode, 'cu', 'currency', 'transactionCurrency',
|
||||
'transaction-currency') # Currency code, e.g. USD, EUR
|
||||
Tracker.alias(safe_unicode, 'in', 'item-name', 'itemName')
|
||||
Tracker.alias(float, 'ip', 'item-price', 'itemPrice')
|
||||
Tracker.alias(float, 'iq', 'item-quantity', 'itemQuantity')
|
||||
Tracker.alias(safe_unicode, 'ic', 'item-code', 'sku', 'itemCode')
|
||||
Tracker.alias(safe_unicode, 'iv', 'item-variation', 'item-category', 'itemCategory', 'itemVariation')
|
||||
|
||||
# Events
|
||||
Tracker.alias(safe_unicode, 'ec', 'event-category', 'eventCategory', 'category')
|
||||
Tracker.alias(safe_unicode, 'ea', 'event-action', 'eventAction', 'action')
|
||||
Tracker.alias(safe_unicode, 'el', 'event-label', 'eventLabel', 'label')
|
||||
Tracker.alias(int, 'ev', 'event-value', 'eventValue', 'value')
|
||||
Tracker.alias(int, 'ni', 'noninteractive', 'nonInteractive', 'noninteraction', 'nonInteraction')
|
||||
|
||||
# Social
|
||||
Tracker.alias(safe_unicode, 'sa', 'social-action', 'socialAction')
|
||||
Tracker.alias(safe_unicode, 'sn', 'social-network', 'socialNetwork')
|
||||
Tracker.alias(safe_unicode, 'st', 'social-target', 'socialTarget')
|
||||
|
||||
# Exceptions
|
||||
Tracker.alias(safe_unicode, 'exd', 'exception-description', 'exceptionDescription', 'exDescription')
|
||||
Tracker.alias(int, 'exf', 'exception-fatal', 'exceptionFatal', 'exFatal')
|
||||
|
||||
# User Timing
|
||||
Tracker.alias(safe_unicode, 'utc', 'timingCategory', 'timing-category')
|
||||
Tracker.alias(safe_unicode, 'utv', 'timingVariable', 'timing-variable')
|
||||
Tracker.alias(float, 'utt', 'time', 'timingTime', 'timing-time')
|
||||
Tracker.alias(safe_unicode, 'utl', 'timingLabel', 'timing-label')
|
||||
Tracker.alias(float, 'dns', 'timingDNS', 'timing-dns')
|
||||
Tracker.alias(float, 'pdt', 'timingPageLoad', 'timing-page-load')
|
||||
Tracker.alias(float, 'rrt', 'timingRedirect', 'timing-redirect')
|
||||
Tracker.alias(safe_unicode, 'tcp', 'timingTCPConnect', 'timing-tcp-connect')
|
||||
Tracker.alias(safe_unicode, 'srt', 'timingServerResponse', 'timing-server-response')
|
||||
|
||||
# Custom dimensions and metrics
|
||||
for i in range(0, 200):
|
||||
Tracker.alias(safe_unicode, 'cd{0}'.format(i), 'dimension{0}'.format(i))
|
||||
Tracker.alias(int, 'cm{0}'.format(i), 'metric{0}'.format(i))
|
||||
|
||||
# Content groups
|
||||
for i in range(0, 5):
|
||||
Tracker.alias(safe_unicode, 'cg{0}'.format(i), 'contentGroup{0}'.format(i))
|
||||
|
||||
# Enhanced Ecommerce
|
||||
Tracker.alias(str, 'pa') # Product action
|
||||
Tracker.alias(str, 'tcc') # Coupon code
|
||||
Tracker.alias(str, 'pal') # Product action list
|
||||
Tracker.alias(int, 'cos') # Checkout step
|
||||
Tracker.alias(str, 'col') # Checkout step option
|
||||
|
||||
Tracker.alias(str, 'promoa') # Promotion action
|
||||
|
||||
for product_index in range(1, MAX_EC_PRODUCTS):
|
||||
Tracker.alias(str, 'pr{0}id'.format(product_index)) # Product SKU
|
||||
Tracker.alias(str, 'pr{0}nm'.format(product_index)) # Product name
|
||||
Tracker.alias(str, 'pr{0}br'.format(product_index)) # Product brand
|
||||
Tracker.alias(str, 'pr{0}ca'.format(product_index)) # Product category
|
||||
Tracker.alias(str, 'pr{0}va'.format(product_index)) # Product variant
|
||||
Tracker.alias(str, 'pr{0}pr'.format(product_index)) # Product price
|
||||
Tracker.alias(int, 'pr{0}qt'.format(product_index)) # Product quantity
|
||||
Tracker.alias(str, 'pr{0}cc'.format(product_index)) # Product coupon code
|
||||
Tracker.alias(int, 'pr{0}ps'.format(product_index)) # Product position
|
||||
|
||||
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
|
||||
Tracker.alias(str, 'pr{0}cd{1}'.format(product_index, custom_index)) # Product custom dimension
|
||||
Tracker.alias(int, 'pr{0}cm{1}'.format(product_index, custom_index)) # Product custom metric
|
||||
|
||||
for list_index in range(1, MAX_EC_LISTS):
|
||||
Tracker.alias(str, 'il{0}pi{1}id'.format(list_index, product_index)) # Product impression SKU
|
||||
Tracker.alias(str, 'il{0}pi{1}nm'.format(list_index, product_index)) # Product impression name
|
||||
Tracker.alias(str, 'il{0}pi{1}br'.format(list_index, product_index)) # Product impression brand
|
||||
Tracker.alias(str, 'il{0}pi{1}ca'.format(list_index, product_index)) # Product impression category
|
||||
Tracker.alias(str, 'il{0}pi{1}va'.format(list_index, product_index)) # Product impression variant
|
||||
Tracker.alias(int, 'il{0}pi{1}ps'.format(list_index, product_index)) # Product impression position
|
||||
Tracker.alias(int, 'il{0}pi{1}pr'.format(list_index, product_index)) # Product impression price
|
||||
|
||||
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
|
||||
Tracker.alias(str, 'il{0}pi{1}cd{2}'.format(list_index, product_index,
|
||||
custom_index)) # Product impression custom dimension
|
||||
Tracker.alias(int, 'il{0}pi{1}cm{2}'.format(list_index, product_index,
|
||||
custom_index)) # Product impression custom metric
|
||||
|
||||
for list_index in range(1, MAX_EC_LISTS):
|
||||
Tracker.alias(str, 'il{0}nm'.format(list_index)) # Product impression list name
|
||||
|
||||
for promotion_index in range(1, MAX_EC_PROMOTIONS):
|
||||
Tracker.alias(str, 'promo{0}id'.format(promotion_index)) # Promotion ID
|
||||
Tracker.alias(str, 'promo{0}nm'.format(promotion_index)) # Promotion name
|
||||
Tracker.alias(str, 'promo{0}cr'.format(promotion_index)) # Promotion creative
|
||||
Tracker.alias(str, 'promo{0}ps'.format(promotion_index)) # Promotion position
|
||||
|
||||
|
||||
# Shortcut for creating trackers
|
||||
def create(account, *args, **kwargs):
|
||||
return Tracker(account, *args, **kwargs)
|
||||
|
||||
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4
|
||||
@@ -1 +0,0 @@
|
||||
from . import Tracker
|
||||
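The two files removed above are Tautulli's vendored Universal Analytics client. For the record, a usage sketch of the Tracker interface being deleted, based only on the class shown above (editor's illustration; the import path is assumed from the deleted __init__.py, the tracking ID is a placeholder, and send() performs a real HTTP POST to Google's collect endpoint):

```python
# Hedged sketch of the removed UniversalAnalytics Tracker API.
from UniversalAnalytics import Tracker  # package layout assumed, not shown in this diff

tracker = Tracker.create('UA-000000-0', client_id='anonymous-client', user_agent='JellyPy')
tracker.set('dimension1', 'v0.0.1')                    # persistent custom dimension (cd1)
tracker.send('event', 'Install', 'fresh', 'linux', 1)  # ec, ea, el, ev per option_sequence
tracker.send('pageview', '/settings')                  # dp per option_sequence
```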
608 lib/appdirs.py
@@ -1,608 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2005-2010 ActiveState Software Inc.
|
||||
# Copyright (c) 2013 Eddy Petrișor
|
||||
|
||||
"""Utilities for determining application-specific dirs.
|
||||
|
||||
See <http://github.com/ActiveState/appdirs> for details and usage.
|
||||
"""
|
||||
# Dev Notes:
|
||||
# - MSDN on where to store app data files:
|
||||
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
|
||||
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
|
||||
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
|
||||
|
||||
__version_info__ = (1, 4, 3)
|
||||
__version__ = '.'.join(map(str, __version_info__))
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
unicode = str
|
||||
|
||||
if sys.platform.startswith('java'):
|
||||
import platform
|
||||
os_name = platform.java_ver()[3][0]
|
||||
if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
|
||||
system = 'win32'
|
||||
elif os_name.startswith('Mac'): # "Mac OS X", etc.
|
||||
system = 'darwin'
|
||||
else: # "Linux", "SunOS", "FreeBSD", etc.
|
||||
# Setting this to "linux2" is not ideal, but only Windows or Mac
|
||||
# are actually checked for and the rest of the module expects
|
||||
# *sys.platform* style strings.
|
||||
system = 'linux2'
|
||||
else:
|
||||
system = sys.platform
|
||||
|
||||
|
||||
|
||||
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
|
||||
r"""Return full path to the user-specific data dir for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
If None, just the system directory is returned.
|
||||
"appauthor" (only used on Windows) is the name of the
|
||||
appauthor or distributing body for this application. Typically
|
||||
it is the owning company name. This falls back to appname. You may
|
||||
pass False to disable it.
|
||||
"version" is an optional version path element to append to the
|
||||
path. You might want to use this if you want multiple versions
|
||||
of your app to be able to run independently. If used, this
|
||||
would typically be "<major>.<minor>".
|
||||
Only applied when appname is present.
|
||||
"roaming" (boolean, default False) can be set True to use the Windows
|
||||
roaming appdata directory. That means that for users on a Windows
|
||||
network setup for roaming profiles, this user data will be
|
||||
sync'd on login. See
|
||||
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
|
||||
for a discussion of issues.
|
||||
|
||||
Typical user data directories are:
|
||||
Mac OS X: ~/Library/Application Support/<AppName>
|
||||
Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
|
||||
Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
|
||||
Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
|
||||
Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
|
||||
Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
|
||||
|
||||
For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
|
||||
That means, by default "~/.local/share/<AppName>".
|
||||
"""
|
||||
if system == "win32":
|
||||
if appauthor is None:
|
||||
appauthor = appname
|
||||
const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
|
||||
path = os.path.normpath(_get_win_folder(const))
|
||||
if appname:
|
||||
if appauthor is not False:
|
||||
path = os.path.join(path, appauthor, appname)
|
||||
else:
|
||||
path = os.path.join(path, appname)
|
||||
elif system == 'darwin':
|
||||
path = os.path.expanduser('~/Library/Application Support/')
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
else:
|
||||
path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
if appname and version:
|
||||
path = os.path.join(path, version)
|
||||
return path
|
||||
|
||||
|
||||
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
|
||||
r"""Return full path to the user-shared data dir for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
If None, just the system directory is returned.
|
||||
"appauthor" (only used on Windows) is the name of the
|
||||
appauthor or distributing body for this application. Typically
|
||||
it is the owning company name. This falls back to appname. You may
|
||||
pass False to disable it.
|
||||
"version" is an optional version path element to append to the
|
||||
path. You might want to use this if you want multiple versions
|
||||
of your app to be able to run independently. If used, this
|
||||
would typically be "<major>.<minor>".
|
||||
Only applied when appname is present.
|
||||
"multipath" is an optional parameter only applicable to *nix
|
||||
which indicates that the entire list of data dirs should be
|
||||
returned. By default, the first item from XDG_DATA_DIRS is
|
||||
returned, or '/usr/local/share/<AppName>',
|
||||
if XDG_DATA_DIRS is not set
|
||||
|
||||
Typical site data directories are:
|
||||
Mac OS X: /Library/Application Support/<AppName>
|
||||
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
|
||||
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
|
||||
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
|
||||
Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
|
||||
|
||||
For Unix, this is using the $XDG_DATA_DIRS[0] default.
|
||||
|
||||
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
|
||||
"""
|
||||
if system == "win32":
|
||||
if appauthor is None:
|
||||
appauthor = appname
|
||||
path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
|
||||
if appname:
|
||||
if appauthor is not False:
|
||||
path = os.path.join(path, appauthor, appname)
|
||||
else:
|
||||
path = os.path.join(path, appname)
|
||||
elif system == 'darwin':
|
||||
path = os.path.expanduser('/Library/Application Support')
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
else:
|
||||
# XDG default for $XDG_DATA_DIRS
|
||||
# only first, if multipath is False
|
||||
path = os.getenv('XDG_DATA_DIRS',
|
||||
os.pathsep.join(['/usr/local/share', '/usr/share']))
|
||||
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
|
||||
if appname:
|
||||
if version:
|
||||
appname = os.path.join(appname, version)
|
||||
pathlist = [os.sep.join([x, appname]) for x in pathlist]
|
||||
|
||||
if multipath:
|
||||
path = os.pathsep.join(pathlist)
|
||||
else:
|
||||
path = pathlist[0]
|
||||
return path
|
||||
|
||||
if appname and version:
|
||||
path = os.path.join(path, version)
|
||||
return path
|
||||
|
||||
|
||||
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
|
||||
r"""Return full path to the user-specific config dir for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
If None, just the system directory is returned.
|
||||
"appauthor" (only used on Windows) is the name of the
|
||||
appauthor or distributing body for this application. Typically
|
||||
it is the owning company name. This falls back to appname. You may
|
||||
pass False to disable it.
|
||||
"version" is an optional version path element to append to the
|
||||
path. You might want to use this if you want multiple versions
|
||||
of your app to be able to run independently. If used, this
|
||||
would typically be "<major>.<minor>".
|
||||
Only applied when appname is present.
|
||||
"roaming" (boolean, default False) can be set True to use the Windows
|
||||
roaming appdata directory. That means that for users on a Windows
|
||||
network setup for roaming profiles, this user data will be
|
||||
sync'd on login. See
|
||||
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
|
||||
for a discussion of issues.
|
||||
|
||||
Typical user config directories are:
|
||||
Mac OS X: same as user_data_dir
|
||||
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
|
||||
Win *: same as user_data_dir
|
||||
|
||||
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
|
||||
That means, by default "~/.config/<AppName>".
|
||||
"""
|
||||
if system in ["win32", "darwin"]:
|
||||
path = user_data_dir(appname, appauthor, None, roaming)
|
||||
else:
|
||||
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
if appname and version:
|
||||
path = os.path.join(path, version)
|
||||
return path
|
||||
|
||||
|
||||
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
|
||||
r"""Return full path to the user-shared data dir for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
If None, just the system directory is returned.
|
||||
"appauthor" (only used on Windows) is the name of the
|
||||
appauthor or distributing body for this application. Typically
|
||||
it is the owning company name. This falls back to appname. You may
|
||||
pass False to disable it.
|
||||
"version" is an optional version path element to append to the
|
||||
path. You might want to use this if you want multiple versions
|
||||
of your app to be able to run independently. If used, this
|
||||
would typically be "<major>.<minor>".
|
||||
Only applied when appname is present.
|
||||
"multipath" is an optional parameter only applicable to *nix
|
||||
which indicates that the entire list of config dirs should be
|
||||
returned. By default, the first item from XDG_CONFIG_DIRS is
|
||||
returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
|
||||
|
||||
Typical site config directories are:
|
||||
Mac OS X: same as site_data_dir
|
||||
Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
|
||||
$XDG_CONFIG_DIRS
|
||||
Win *: same as site_data_dir
|
||||
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
|
||||
|
||||
For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
|
||||
|
||||
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
|
||||
"""
|
||||
if system in ["win32", "darwin"]:
|
||||
path = site_data_dir(appname, appauthor)
|
||||
if appname and version:
|
||||
path = os.path.join(path, version)
|
||||
else:
|
||||
# XDG default for $XDG_CONFIG_DIRS
|
||||
# only first, if multipath is False
|
||||
path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
|
||||
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
|
||||
if appname:
|
||||
if version:
|
||||
appname = os.path.join(appname, version)
|
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific state dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user state directories are:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.local/state/<AppName>   # or in $XDG_STATE_HOME, if defined
        Win *:      same as user_data_dir

    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    to extend the XDG spec and support $XDG_STATE_HOME.

    That means, by default "~/.local/state/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""
    def __init__(self, appname=None, appauthor=None, version=None,
                 roaming=False, multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value

def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir

if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
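A minimal usage sketch of the AppDirs wrapper defined above; the app name "MyApp" and the commented paths are illustrative only, since the actual results depend on the platform and XDG environment variables.

from appdirs import AppDirs  # assumes the module above is importable as "appdirs"

dirs = AppDirs("MyApp", appauthor=False, version="1.0")
print(dirs.user_data_dir)   # e.g. ~/.local/share/MyApp/1.0 on Linux
print(dirs.user_cache_dir)  # e.g. ~/.cache/MyApp/1.0
print(dirs.user_log_dir)    # e.g. ~/.cache/MyApp/1.0/log (the opinionated "log" suffix)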
@@ -1,10 +0,0 @@
from pkg_resources import get_distribution, DistributionNotFound

try:
    release = get_distribution('APScheduler').version.split('-')[0]
except DistributionNotFound:
    release = '3.5.0'

version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.'))
version = __version__ = '.'.join(str(x) for x in version_info[:3])
del get_distribution, DistributionNotFound
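As a quick illustration of the parsing above, a hypothetical release string of '3.6.3' would produce:

release = '3.6.3'  # assumed value; the real one comes from pkg_resources
version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.'))  # (3, 6, 3)
version = '.'.join(str(x) for x in version_info[:3])                            # '3.6.3'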
@@ -1,94 +0,0 @@
__all__ = ('EVENT_SCHEDULER_STARTED', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_SCHEDULER_PAUSED',
           'EVENT_SCHEDULER_RESUMED', 'EVENT_EXECUTOR_ADDED', 'EVENT_EXECUTOR_REMOVED',
           'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', 'EVENT_ALL_JOBS_REMOVED',
           'EVENT_JOB_ADDED', 'EVENT_JOB_REMOVED', 'EVENT_JOB_MODIFIED', 'EVENT_JOB_EXECUTED',
           'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', 'EVENT_JOB_SUBMITTED', 'EVENT_JOB_MAX_INSTANCES',
           'SchedulerEvent', 'JobEvent', 'JobExecutionEvent', 'JobSubmissionEvent')


EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0
EVENT_SCHEDULER_SHUTDOWN = 2 ** 1
EVENT_SCHEDULER_PAUSED = 2 ** 2
EVENT_SCHEDULER_RESUMED = 2 ** 3
EVENT_EXECUTOR_ADDED = 2 ** 4
EVENT_EXECUTOR_REMOVED = 2 ** 5
EVENT_JOBSTORE_ADDED = 2 ** 6
EVENT_JOBSTORE_REMOVED = 2 ** 7
EVENT_ALL_JOBS_REMOVED = 2 ** 8
EVENT_JOB_ADDED = 2 ** 9
EVENT_JOB_REMOVED = 2 ** 10
EVENT_JOB_MODIFIED = 2 ** 11
EVENT_JOB_EXECUTED = 2 ** 12
EVENT_JOB_ERROR = 2 ** 13
EVENT_JOB_MISSED = 2 ** 14
EVENT_JOB_SUBMITTED = 2 ** 15
EVENT_JOB_MAX_INSTANCES = 2 ** 16
EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED |
             EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED |
             EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED |
             EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED |
             EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES)


class SchedulerEvent(object):
    """
    An event that concerns the scheduler itself.

    :ivar code: the type code of this event
    :ivar alias: alias of the job store or executor that was added or removed (if applicable)
    """

    def __init__(self, code, alias=None):
        super(SchedulerEvent, self).__init__()
        self.code = code
        self.alias = alias

    def __repr__(self):
        return '<%s (code=%d)>' % (self.__class__.__name__, self.code)


class JobEvent(SchedulerEvent):
    """
    An event that concerns a job.

    :ivar code: the type code of this event
    :ivar job_id: identifier of the job in question
    :ivar jobstore: alias of the job store containing the job in question
    """

    def __init__(self, code, job_id, jobstore):
        super(JobEvent, self).__init__(code)
        self.code = code
        self.job_id = job_id
        self.jobstore = jobstore


class JobSubmissionEvent(JobEvent):
    """
    An event that concerns the submission of a job to its executor.

    :ivar scheduled_run_times: a list of datetimes when the job was intended to run
    """

    def __init__(self, code, job_id, jobstore, scheduled_run_times):
        super(JobSubmissionEvent, self).__init__(code, job_id, jobstore)
        self.scheduled_run_times = scheduled_run_times


class JobExecutionEvent(JobEvent):
    """
    An event that concerns the running of a job within its executor.

    :ivar scheduled_run_time: the time when the job was scheduled to be run
    :ivar retval: the return value of the successfully executed job
    :ivar exception: the exception raised by the job
    :ivar traceback: a formatted traceback for the exception
    """

    def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None,
                 traceback=None):
        super(JobExecutionEvent, self).__init__(code, job_id, jobstore)
        self.scheduled_run_time = scheduled_run_time
        self.retval = retval
        self.exception = exception
        self.traceback = traceback
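A hedged sketch of how these event constants and classes are typically consumed through the scheduler's listener API; the BackgroundScheduler instance and the print statements are illustrative only.

from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
from apscheduler.schedulers.background import BackgroundScheduler

def job_listener(event):
    # For these two masks the event is a JobExecutionEvent.
    if event.exception:
        print('Job %s raised %r' % (event.job_id, event.exception))
    else:
        print('Job %s returned %r' % (event.job_id, event.retval))

scheduler = BackgroundScheduler()
scheduler.add_listener(job_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)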
@@ -1,59 +0,0 @@
from __future__ import absolute_import

import sys

from apscheduler.executors.base import BaseExecutor, run_job
from apscheduler.util import iscoroutinefunction_partial

try:
    from apscheduler.executors.base_py3 import run_coroutine_job
except ImportError:
    run_coroutine_job = None


class AsyncIOExecutor(BaseExecutor):
    """
    Runs jobs in the default executor of the event loop.

    If the job function is a native coroutine function, it is scheduled to be run directly in the
    event loop as soon as possible. All other functions are run in the event loop's default
    executor which is usually a thread pool.

    Plugin alias: ``asyncio``
    """

    def start(self, scheduler, alias):
        super(AsyncIOExecutor, self).start(scheduler, alias)
        self._eventloop = scheduler._eventloop
        self._pending_futures = set()

    def shutdown(self, wait=True):
        # There is no way to honor wait=True without converting this method into a coroutine method
        for f in self._pending_futures:
            if not f.done():
                f.cancel()

        self._pending_futures.clear()

    def _do_submit_job(self, job, run_times):
        def callback(f):
            self._pending_futures.discard(f)
            try:
                events = f.result()
            except BaseException:
                self._run_job_error(job.id, *sys.exc_info()[1:])
            else:
                self._run_job_success(job.id, events)

        if iscoroutinefunction_partial(job.func):
            if run_coroutine_job is not None:
                coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
                f = self._eventloop.create_task(coro)
            else:
                raise Exception('Executing coroutine based jobs is not supported with Trollius')
        else:
            f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times,
                                                self._logger.name)

        f.add_done_callback(callback)
        self._pending_futures.add(f)
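A hedged sketch of this executor in use: the AsyncIOScheduler employs it by default, so a native coroutine job is scheduled straight onto the event loop while plain callables go to the loop's default executor. The job and interval are illustrative only.

import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler

async def tick():
    print('tick')  # native coroutine: run directly on the event loop

scheduler = AsyncIOScheduler()
scheduler.add_job(tick, 'interval', seconds=5)
scheduler.start()
asyncio.get_event_loop().run_forever()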
@@ -1,146 +0,0 @@
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from datetime import datetime, timedelta
from traceback import format_tb
import logging
import sys

from pytz import utc
import six

from apscheduler.events import (
    JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)


class MaxInstancesReachedError(Exception):
    def __init__(self, job):
        super(MaxInstancesReachedError, self).__init__(
            'Job "%s" has already reached its maximum number of instances (%d)' %
            (job.id, job.max_instances))


class BaseExecutor(six.with_metaclass(ABCMeta, object)):
    """Abstract base class that defines the interface that every executor must implement."""

    _scheduler = None
    _lock = None
    _logger = logging.getLogger('apscheduler.executors')

    def __init__(self):
        super(BaseExecutor, self).__init__()
        self._instances = defaultdict(lambda: 0)

    def start(self, scheduler, alias):
        """
        Called by the scheduler when the scheduler is being started or when the executor is being
        added to an already running scheduler.

        :param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting
            this executor
        :param str|unicode alias: alias of this executor as it was assigned to the scheduler

        """
        self._scheduler = scheduler
        self._lock = scheduler._create_lock()
        self._logger = logging.getLogger('apscheduler.executors.%s' % alias)

    def shutdown(self, wait=True):
        """
        Shuts down this executor.

        :param bool wait: ``True`` to wait until all submitted jobs
            have been executed
        """

    def submit_job(self, job, run_times):
        """
        Submits job for execution.

        :param Job job: job to execute
        :param list[datetime] run_times: list of datetimes specifying
            when the job should have been run
        :raises MaxInstancesReachedError: if the maximum number of
            allowed instances for this job has been reached

        """
        assert self._lock is not None, 'This executor has not been started yet'
        with self._lock:
            if self._instances[job.id] >= job.max_instances:
                raise MaxInstancesReachedError(job)

            self._do_submit_job(job, run_times)
            self._instances[job.id] += 1

    @abstractmethod
    def _do_submit_job(self, job, run_times):
        """Performs the actual task of scheduling `run_job` to be called."""

    def _run_job_success(self, job_id, events):
        """
        Called by the executor with the list of generated events when :func:`run_job` has been
        successfully called.

        """
        with self._lock:
            self._instances[job_id] -= 1
            if self._instances[job_id] == 0:
                del self._instances[job_id]

        for event in events:
            self._scheduler._dispatch_event(event)

    def _run_job_error(self, job_id, exc, traceback=None):
        """Called by the executor with the exception if there is an error calling `run_job`."""
        with self._lock:
            self._instances[job_id] -= 1
            if self._instances[job_id] == 0:
                del self._instances[job_id]

        exc_info = (exc.__class__, exc, traceback)
        self._logger.error('Error running job %s', job_id, exc_info=exc_info)


def run_job(job, jobstore_alias, run_times, logger_name):
    """
    Called by executors to run the job. Returns a list of scheduler events to be dispatched by the
    scheduler.

    """
    events = []
    logger = logging.getLogger(logger_name)
    for run_time in run_times:
        # See if the job missed its run time window, and handle
        # possible misfires accordingly
        if job.misfire_grace_time is not None:
            difference = datetime.now(utc) - run_time
            grace_time = timedelta(seconds=job.misfire_grace_time)
            if difference > grace_time:
                events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
                                                run_time))
                logger.warning('Run time of job "%s" was missed by %s', job, difference)
                continue

        logger.info('Running job "%s" (scheduled at %s)', job, run_time)
        try:
            retval = job.func(*job.args, **job.kwargs)
        except BaseException:
            exc, tb = sys.exc_info()[1:]
            formatted_tb = ''.join(format_tb(tb))
            events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
                                            exception=exc, traceback=formatted_tb))
            logger.exception('Job "%s" raised an exception', job)

            # This is to prevent cyclic references that would lead to memory leaks
            if six.PY2:
                sys.exc_clear()
                del tb
            else:
                import traceback
                traceback.clear_frames(tb)
                del tb
        else:
            events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
                                            retval=retval))
            logger.info('Job "%s" executed successfully', job)

    return events
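For illustration only, the smallest executor the contract above allows: a hypothetical InlineExecutor that runs jobs synchronously in the calling thread and reports back through the _run_job_* hooks (essentially what the DebugExecutor further down in this diff does).

import sys
from apscheduler.executors.base import BaseExecutor, run_job

class InlineExecutor(BaseExecutor):
    """Hypothetical executor: runs jobs directly in the calling thread."""

    def _do_submit_job(self, job, run_times):
        try:
            events = run_job(job, job._jobstore_alias, run_times, self._logger.name)
        except BaseException:
            self._run_job_error(job.id, *sys.exc_info()[1:])
        else:
            self._run_job_success(job.id, events)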
@@ -1,41 +0,0 @@
import logging
import sys
from datetime import datetime, timedelta
from traceback import format_tb

from pytz import utc

from apscheduler.events import (
    JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)


async def run_coroutine_job(job, jobstore_alias, run_times, logger_name):
    """Coroutine version of run_job()."""
    events = []
    logger = logging.getLogger(logger_name)
    for run_time in run_times:
        # See if the job missed its run time window, and handle possible misfires accordingly
        if job.misfire_grace_time is not None:
            difference = datetime.now(utc) - run_time
            grace_time = timedelta(seconds=job.misfire_grace_time)
            if difference > grace_time:
                events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
                                                run_time))
                logger.warning('Run time of job "%s" was missed by %s', job, difference)
                continue

        logger.info('Running job "%s" (scheduled at %s)', job, run_time)
        try:
            retval = await job.func(*job.args, **job.kwargs)
        except BaseException:
            exc, tb = sys.exc_info()[1:]
            formatted_tb = ''.join(format_tb(tb))
            events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
                                            exception=exc, traceback=formatted_tb))
            logger.exception('Job "%s" raised an exception', job)
        else:
            events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
                                            retval=retval))
            logger.info('Job "%s" executed successfully', job)

    return events
@@ -1,20 +0,0 @@
import sys

from apscheduler.executors.base import BaseExecutor, run_job


class DebugExecutor(BaseExecutor):
    """
    A special executor that executes the target callable directly instead of deferring it to a
    thread or process.

    Plugin alias: ``debug``
    """

    def _do_submit_job(self, job, run_times):
        try:
            events = run_job(job, job._jobstore_alias, run_times, self._logger.name)
        except BaseException:
            self._run_job_error(job.id, *sys.exc_info()[1:])
        else:
            self._run_job_success(job.id, events)
@@ -1,30 +0,0 @@
from __future__ import absolute_import
import sys

from apscheduler.executors.base import BaseExecutor, run_job


try:
    import gevent
except ImportError:  # pragma: nocover
    raise ImportError('GeventExecutor requires gevent installed')


class GeventExecutor(BaseExecutor):
    """
    Runs jobs as greenlets.

    Plugin alias: ``gevent``
    """

    def _do_submit_job(self, job, run_times):
        def callback(greenlet):
            try:
                events = greenlet.get()
            except BaseException:
                self._run_job_error(job.id, *sys.exc_info()[1:])
            else:
                self._run_job_success(job.id, events)

        gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\
            link(callback)
@@ -1,54 +0,0 @@
from abc import abstractmethod
import concurrent.futures

from apscheduler.executors.base import BaseExecutor, run_job


class BasePoolExecutor(BaseExecutor):
    @abstractmethod
    def __init__(self, pool):
        super(BasePoolExecutor, self).__init__()
        self._pool = pool

    def _do_submit_job(self, job, run_times):
        def callback(f):
            exc, tb = (f.exception_info() if hasattr(f, 'exception_info') else
                       (f.exception(), getattr(f.exception(), '__traceback__', None)))
            if exc:
                self._run_job_error(job.id, exc, tb)
            else:
                self._run_job_success(job.id, f.result())

        f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name)
        f.add_done_callback(callback)

    def shutdown(self, wait=True):
        self._pool.shutdown(wait)


class ThreadPoolExecutor(BasePoolExecutor):
    """
    An executor that runs jobs in a concurrent.futures thread pool.

    Plugin alias: ``threadpool``

    :param max_workers: the maximum number of spawned threads.
    """

    def __init__(self, max_workers=10):
        pool = concurrent.futures.ThreadPoolExecutor(int(max_workers))
        super(ThreadPoolExecutor, self).__init__(pool)


class ProcessPoolExecutor(BasePoolExecutor):
    """
    An executor that runs jobs in a concurrent.futures process pool.

    Plugin alias: ``processpool``

    :param max_workers: the maximum number of spawned processes.
    """

    def __init__(self, max_workers=10):
        pool = concurrent.futures.ProcessPoolExecutor(int(max_workers))
        super(ProcessPoolExecutor, self).__init__(pool)
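A hedged sketch of wiring these pool executors into a scheduler; the executor aliases and worker counts are arbitrary examples.

from apscheduler.executors.pool import ProcessPoolExecutor, ThreadPoolExecutor
from apscheduler.schedulers.background import BackgroundScheduler

executors = {
    'default': ThreadPoolExecutor(max_workers=20),
    'processpool': ProcessPoolExecutor(max_workers=5),
}
scheduler = BackgroundScheduler(executors=executors)
scheduler.add_job(print, 'interval', seconds=30, args=['still alive'], executor='default')
scheduler.start()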
@@ -1,54 +0,0 @@
from __future__ import absolute_import

import sys
from concurrent.futures import ThreadPoolExecutor

from tornado.gen import convert_yielded

from apscheduler.executors.base import BaseExecutor, run_job

try:
    from apscheduler.executors.base_py3 import run_coroutine_job
    from apscheduler.util import iscoroutinefunction_partial
except ImportError:
    def iscoroutinefunction_partial(func):
        return False


class TornadoExecutor(BaseExecutor):
    """
    Runs jobs either in a thread pool or directly on the I/O loop.

    If the job function is a native coroutine function, it is scheduled to be run directly in the
    I/O loop as soon as possible. All other functions are run in a thread pool.

    Plugin alias: ``tornado``

    :param int max_workers: maximum number of worker threads in the thread pool
    """

    def __init__(self, max_workers=10):
        super(TornadoExecutor, self).__init__()
        self.executor = ThreadPoolExecutor(max_workers)

    def start(self, scheduler, alias):
        super(TornadoExecutor, self).start(scheduler, alias)
        self._ioloop = scheduler._ioloop

    def _do_submit_job(self, job, run_times):
        def callback(f):
            try:
                events = f.result()
            except BaseException:
                self._run_job_error(job.id, *sys.exc_info()[1:])
            else:
                self._run_job_success(job.id, events)

        if iscoroutinefunction_partial(job.func):
            f = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
        else:
            f = self.executor.submit(run_job, job, job._jobstore_alias, run_times,
                                     self._logger.name)

        f = convert_yielded(f)
        f.add_done_callback(callback)
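A hedged sketch of this executor in context: the TornadoScheduler uses it as its default executor, so blocking callables like the one below go to the thread pool while coroutine jobs run on the I/O loop. The job body and interval are illustrative only.

from tornado.ioloop import IOLoop
from apscheduler.schedulers.tornado import TornadoScheduler

def heartbeat():
    print('heartbeat')  # plain callable: runs in the executor's thread pool

scheduler = TornadoScheduler()
scheduler.add_job(heartbeat, 'interval', seconds=10)
scheduler.start()
IOLoop.current().start()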
@@ -1,25 +0,0 @@
from __future__ import absolute_import

from apscheduler.executors.base import BaseExecutor, run_job


class TwistedExecutor(BaseExecutor):
    """
    Runs jobs in the reactor's thread pool.

    Plugin alias: ``twisted``
    """

    def start(self, scheduler, alias):
        super(TwistedExecutor, self).start(scheduler, alias)
        self._reactor = scheduler._reactor

    def _do_submit_job(self, job, run_times):
        def callback(success, result):
            if success:
                self._run_job_success(job.id, result)
            else:
                self._run_job_error(job.id, result.value, result.tb)

        self._reactor.getThreadPool().callInThreadWithCallback(
            callback, run_job, job, job._jobstore_alias, run_times, self._logger.name)