Rename to JellyPy; remove bundled dependencies from the repo

2021-02-05 16:30:46 +01:00
parent 97f80adf0b
commit b867dc9be2
1680 changed files with 1603 additions and 290357 deletions


@@ -1,11 +0,0 @@
.git
.github
.gitignore
contrib
init-scripts
package
pylintrc
snap
*.md
!CHANGELOG*.md
start.bat

.github/FUNDING.yml vendored

@@ -1,3 +0,0 @@
github: JonnyWong16
patreon: Tautulli
custom: ["https://bit.ly/2InPp15", "https://bit.ly/2WTq83m"]


@@ -1,20 +0,0 @@
## Description
Please include a summary of the change and which issue is fixed.
Fixes Tautulli/Tautulli-Issues#(issue)
## Type of change
Please delete options that are not relevant.
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
## Checklist:
- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have added or updated the docstring for new or existing methods


@@ -1,115 +0,0 @@
name: Publish Docker
on:
push:
branches: [master, beta, nightly]
tags: [v*]
pull_request: ~
jobs:
build-docker:
name: Build Docker Image
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Prepare
id: prepare
run: |
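# Derive the image tag and source branch from the Git ref: version tags publish
# under the tag name (branch "master", or "beta" for -beta tags), the master
# branch publishes as "latest", and any other branch uses its own name.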
if [[ $GITHUB_REF == refs/tags/* ]]; then
echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
elif [[ $GITHUB_REF == refs/heads/master ]]; then
echo ::set-output name=tag::latest
else
echo ::set-output name=tag::${GITHUB_REF#refs/heads/}
fi
if [[ $GITHUB_REF == refs/tags/*-beta ]]; then
echo ::set-output name=branch::beta
elif [[ $GITHUB_REF == refs/tags/* ]]; then
echo ::set-output name=branch::master
else
echo ::set-output name=branch::${GITHUB_REF#refs/heads/}
fi
echo ::set-output name=commit::${GITHUB_SHA}
echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
echo ::set-output name=docker_platforms::linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6
echo ::set-output name=docker_image::${{ secrets.DOCKER_REPO }}/tautulli
- name: Set Up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
id: buildx
with:
version: latest
- name: Cache Docker Layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
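# restore-keys falls back to the most recent buildx cache when no exact key match exists.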
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
uses: docker/login-action@v1
if: success() && github.event_name != 'pull_request'
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
if: success() && github.event_name != 'pull_request'
with:
registry: ghcr.io
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Docker Build and Push
uses: docker/build-push-action@v2
if: success()
with:
context: .
file: ./Dockerfile
push: ${{ github.event_name != 'pull_request' }}
platforms: ${{ steps.prepare.outputs.docker_platforms }}
build-args: |
TAG=${{ steps.prepare.outputs.tag }}
BRANCH=${{ steps.prepare.outputs.branch }}
COMMIT=${{ steps.prepare.outputs.commit }}
BUILD_DATE=${{ steps.prepare.outputs.build_date }}
tags: |
${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
discord:
name: Discord Notification
needs: build-docker
if: always() && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v1
- name: Combine Job Status
id: status
run: |
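# Report any of these workflow conclusions to Discord as a failure.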
failures=(neutral skipped timed_out action_required)
if [[ ${failures[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
fi
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true


@@ -1,194 +0,0 @@
name: Publish Installers
on:
push:
branches: [master, beta, nightly]
tags: [v*]
pull_request: ~
jobs:
build-installer:
name: Build ${{ matrix.os_upper }} Installer
runs-on: ${{ matrix.os }}-latest
strategy:
fail-fast: false
matrix:
include:
- os: 'windows'
os_upper: 'Windows'
ext: 'exe'
- os: 'macos'
os_upper: 'MacOS'
ext: 'pkg'
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Set Release Version
id: get_version
shell: bash
run: |
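# NSIS needs a four-part numeric version: for example, "v2.6.1" becomes "2.6.1.1",
# while "v2.6.1-beta" becomes "2.6.1-beta.1" and is rewritten to "2.6.1.0" below.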
if [[ $GITHUB_REF == refs/tags/* ]]; then
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV
VERSION_NSIS=${GITHUB_REF#refs/tags/v}.1
echo ::set-output name=VERSION_NSIS::${VERSION_NSIS/%-beta.1/.0}
echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/v}
echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
else
echo "VERSION=0.0.0" >> $GITHUB_ENV
echo ::set-output name=VERSION_NSIS::0.0.0.0
echo ::set-output name=VERSION::0.0.0
echo ::set-output name=RELEASE_VERSION::${GITHUB_SHA::7}
fi
if [[ $GITHUB_REF == refs/tags/*-beta ]]; then
echo "beta" > branch.txt
elif [[ $GITHUB_REF == refs/tags/* ]]; then
echo "master" > branch.txt
else
echo ${GITHUB_REF#refs/heads/} > branch.txt
fi
echo $GITHUB_SHA > version.txt
- name: Set Up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Cache Dependencies
uses: actions/cache@v2
with:
path: ~\AppData\Local\pip\Cache
key: ${{ runner.os }}-pip-${{ hashFiles(format('package/requirements-{0}.txt', matrix.os)) }}
restore-keys: ${{ runner.os }}-pip-
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r package/requirements-${{ matrix.os }}.txt
- name: Build Package
run: |
pyinstaller -y ./package/Tautulli-${{ matrix.os }}.spec
- name: Move Windows Updater Files
if: matrix.os == 'windows'
run: |
Move-Item dist\updater\* dist\Tautulli\ -Force
- name: Create Windows Installer
uses: joncloud/makensis-action@v3.4
if: matrix.os == 'windows'
with:
script-file: ./package/Tautulli.nsi
arguments: >
/DVERSION=${{ steps.get_version.outputs.VERSION_NSIS }}
/DINSTALLER_NAME=..\Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
additional-plugin-paths: package/nsis-plugins
- name: Create MacOS Installer
if: matrix.os == 'macos'
run: |
sudo pkgbuild \
--install-location /Applications \
--version ${{ steps.get_version.outputs.VERSION }} \
--component ./dist/Tautulli.app \
--scripts ./package/macos-scripts \
Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
- name: Upload Installer
uses: actions/upload-artifact@v2
with:
name: Tautulli-${{ matrix.os }}-installer
path: Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.${{ matrix.ext }}
release:
name: Release Installers
needs: build-installer
if: always() && startsWith(github.ref, 'refs/tags/') && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v1
- name: Checkout Code
uses: actions/checkout@v2
- name: Set Release Version
id: get_version
run: |
echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
- name: Download Installers
if: env.WORKFLOW_CONCLUSION == 'success'
uses: actions/download-artifact@v2
- name: Get Changelog
id: get_changelog
run: |
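# Print from the first "## " heading through the next one, trim the trailing
# three lines, then encode newlines as %0A so the multi-line value survives set-output.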
echo ::set-output name=CHANGELOG::"$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md \
| sed '$d' | sed '$d' | sed '$d' | sed ':a;N;$!ba;s/\n/%0A/g' )"
- name: Create Release
uses: actions/create-release@v1
id: create_release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ steps.get_version.outputs.RELEASE_VERSION }}
release_name: Tautulli ${{ steps.get_version.outputs.RELEASE_VERSION }}
body: |
## Changelog
##${{ steps.get_changelog.outputs.CHANGELOG }}
draft: false
prerelease: ${{ endsWith(steps.get_version.outputs.RELEASE_VERSION, '-beta') }}
- name: Upload Windows Installer
uses: actions/upload-release-asset@v1
if: env.WORKFLOW_CONCLUSION == 'success'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: Tautulli-windows-installer/Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
asset_name: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Upload MacOS Installer
uses: actions/upload-release-asset@v1
if: env.WORKFLOW_CONCLUSION == 'success'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
asset_name: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
asset_content_type: application/vnd.apple.installer+xml
discord:
name: Discord Notification
needs: [build-installer, release]
if: always() && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v1
- name: Combine Job Status
id: status
run: |
failures=(neutral skipped timed_out action_required)
if [[ ${failures[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
fi
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true


@@ -1,94 +0,0 @@
name: Publish Snap
on:
push:
branches: [master, beta, nightly]
tags: [v*]
pull_request: ~
jobs:
build-snap:
name: Build Snap Package (${{ matrix.architecture }})
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
architecture:
- i386
- amd64
- arm64
- armhf
- ppc64el
#- s390x # broken at the moment
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Prepare
id: prepare
run: |
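# Fetch the full history and tags, then map the Git ref to a Snap Store channel:
# beta tags and the beta branch go to "beta", release tags and master go to
# "stable", and anything else goes to "edge".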
git fetch --prune --unshallow --tags
if [[ $GITHUB_REF == refs/tags/*-beta || $GITHUB_REF == refs/heads/beta ]]; then
echo ::set-output name=RELEASE::beta
elif [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
echo ::set-output name=RELEASE::stable
else
echo ::set-output name=RELEASE::edge
fi
- name: Set Up QEMU
uses: docker/setup-qemu-action@v1
- name: Build Snap Package
uses: diddlesnaps/snapcraft-multiarch-action@v1
id: build
with:
architecture: ${{ matrix.architecture }}
- name: Upload Snap Package
uses: actions/upload-artifact@v2
with:
name: Tautulli-snap-package-${{ matrix.architecture }}
path: ${{ steps.build.outputs.snap }}
- name: Review Snap Package
uses: diddlesnaps/snapcraft-review-tools-action@v1
with:
snap: ${{ steps.build.outputs.snap }}
- name: Publish Snap Package
uses: snapcore/action-publish@v1
if: >
github.event_name != 'pull_request' &&
(startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/nightly')
with:
store_login: ${{ secrets.SNAP_LOGIN }}
snap: ${{ steps.build.outputs.snap }}
release: ${{ steps.prepare.outputs.RELEASE }}
discord:
name: Discord Notification
needs: build-snap
if: always() && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v1
- name: Combine Job Status
id: status
run: |
failures=(neutral skipped timed_out action_required)
if [[ ${failures[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
fi
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true


@@ -1,28 +0,0 @@
name: Pull Requests
on:
pull_request_target:
types: [opened, synchronize, edited, reopened]
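# pull_request_target runs in the base repository's context, giving the job
# permission to comment on pull requests opened from forks.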
jobs:
check-branch:
name: Check Pull Request
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Comment on Pull Request
uses: mshick/add-pr-comment@v1
if: github.base_ref != 'nightly'
with:
message: Pull requests must be made to the `nightly` branch. Thanks.
repo-token: ${{ secrets.GITHUB_TOKEN }}
repo-token-user-login: 'github-actions[bot]'
- name: Fail Workflow
if: github.base_ref != 'nightly'
run: |
echo Base: ${{ github.base_ref }}
echo Head: ${{ github.head_ref }}
exit 1

.gitignore vendored

@@ -1,92 +1,275 @@
# Compiled source #
###################
__pycache__
*.pyc
*.py~
*.pyproj
*.sln
# Created by https://www.toptal.com/developers/gitignore/api/pycharm+all,python,linux,windows
# Edit at https://www.toptal.com/developers/gitignore?templates=pycharm+all,python,linux,windows
# PlexPy files #
######################
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### PyCharm+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### PyCharm+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
.idea/
# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
*.iml
modules.xml
.idea/misc.xml
*.ipr
# Sonarlint plugin
.idea/sonarlint
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a Python script from a template
# before PyInstaller builds the exe, so as to inject dates and other info into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
pytestdebug.log
# Translations
*.mo
*.pot
# Django stuff:
*.log
*.db*
*.db-journal
*.ini
release.lock
version.lock
logs/*
backups/*
cache/*
exports/*
newsletters/*
*.mmdb
version.txt
branch.txt
local_settings.py
db.sqlite3
db.sqlite3-journal
# HTTPS Cert/Key #
##################
/*.crt
/*.key
/*.csr
/*.pem
# Flask stuff:
instance/
.webassets-cache
# Mergetool
*.orig
# Scrapy stuff:
.scrapy
# OS generated files #
######################
.DS_Store?
.DS_Store
ehthumbs.db
Icon?
# Sphinx documentation
docs/_build/
doc/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
pythonenv*
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# profiling data
.prof
### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
#Ignore files generated by PyCharm
*.idea/*
# Dump file
*.stackdump
#Ignore files generated by vi
*.swp
# Folder config file
[Dd]esktop.ini
# Ignore files built by Visual Studio
*.obj
*.exe
*.pdb
*.user
*.aps
*.pch
*.vspscc
*_i.c
*_p.c
*.ncb
*.suo
*.tlb
*.tlh
*.bak
*.cache
*.ilk
[Bb]in
[Dd]ebug*/
*.lib
*.sbr
obj/
[Rr]elease*/
_ReSharper*/
[Tt]est[Rr]esult*
/cache
/logs
.project
.pydevproject
# Recycle Bin used on file shares
$RECYCLE.BIN/
#Ignore files generated by pyinstaller
/build
/dist
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
#snapcraft specifics
/parts/
/stage/
/prime/
*.snap
.snapcraft
*_source.tar.bz2
snap/.snapcraft
# Windows shortcuts
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/pycharm+all,python,linux,windows


@@ -1,26 +0,0 @@
FROM tautulli/tautulli-baseimage:python3
LABEL maintainer="Tautulli"
ARG BRANCH
ARG COMMIT
ENV TAUTULLI_DOCKER=True
ENV TZ=UTC
WORKDIR /app
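# Create an unprivileged tautulli user and record the build branch and commit
# so the app can report its version without a Git checkout.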
RUN \
groupadd -g 1000 tautulli && \
useradd -u 1000 -g 1000 tautulli && \
echo ${BRANCH} > /app/branch.txt && \
echo ${COMMIT} > /app/version.txt
COPY . /app
CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
ENTRYPOINT [ "./start.sh" ]
VOLUME /config
EXPOSE 8181
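# Healthy when the status endpoint answers over HTTP, or over HTTPS with
# certificate verification disabled (-k) to allow self-signed certificates.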
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1


@@ -20,11 +20,6 @@
import os
import sys
# Ensure lib added to path, before any other imports
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))
from future.builtins import str
import appdirs
import argparse
import datetime
@@ -36,16 +31,16 @@ import time
import threading
import tzlocal
import plexpy
from plexpy import common, config, database, helpers, logger, webstart
import jellypy
from jellypy import common, config, database, helpers, logger, webstart
if common.PLATFORM == 'Windows':
from plexpy import windows
from jellypy import windows
elif common.PLATFORM == 'Darwin':
from plexpy import macos
from jellypy import macos
# Register signals, such as CTRL + C
signal.signal(signal.SIGINT, plexpy.sig_handler)
signal.signal(signal.SIGTERM, plexpy.sig_handler)
signal.signal(signal.SIGINT, jellypy.sig_handler)
signal.signal(signal.SIGTERM, jellypy.sig_handler)
def main():
@@ -56,28 +51,28 @@ def main():
# Fixed paths to Tautulli
if hasattr(sys, 'frozen') and hasattr(sys, '_MEIPASS'):
plexpy.FROZEN = True
plexpy.FULL_PATH = os.path.abspath(sys.executable)
plexpy.PROG_DIR = sys._MEIPASS
jellypy.FROZEN = True
jellypy.FULL_PATH = os.path.abspath(sys.executable)
jellypy.PROG_DIR = sys._MEIPASS
else:
plexpy.FULL_PATH = os.path.abspath(__file__)
plexpy.PROG_DIR = os.path.dirname(plexpy.FULL_PATH)
jellypy.FULL_PATH = os.path.abspath(__file__)
jellypy.PROG_DIR = os.path.dirname(jellypy.FULL_PATH)
plexpy.ARGS = sys.argv[1:]
jellypy.ARGS = sys.argv[1:]
# From sickbeard
plexpy.SYS_PLATFORM = sys.platform
plexpy.SYS_ENCODING = None
jellypy.SYS_PLATFORM = sys.platform
jellypy.SYS_ENCODING = None
try:
locale.setlocale(locale.LC_ALL, "")
plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
jellypy.SYS_LANGUAGE, jellypy.SYS_ENCODING = locale.getdefaultlocale()
except (locale.Error, IOError):
pass
# for OSes that are poorly configured I'll just force UTF-8
if not plexpy.SYS_ENCODING or plexpy.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
plexpy.SYS_ENCODING = 'UTF-8'
if not jellypy.SYS_ENCODING or jellypy.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
jellypy.SYS_ENCODING = 'UTF-8'
# Set up and gather command line arguments
parser = argparse.ArgumentParser(
@@ -107,50 +102,50 @@ def main():
args = parser.parse_args()
if args.verbose:
plexpy.VERBOSE = True
jellypy.VERBOSE = True
if args.quiet:
plexpy.QUIET = True
jellypy.QUIET = True
# Do an initial setup of the logger.
# Require verbose for pre-initialization to see critical errors
logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)
logger.initLogger(console=not jellypy.QUIET, log_dir=False, verbose=True)
try:
plexpy.SYS_TIMEZONE = tzlocal.get_localzone()
jellypy.SYS_TIMEZONE = tzlocal.get_localzone()
except (pytz.UnknownTimeZoneError, LookupError, ValueError) as e:
logger.error("Could not determine system timezone: %s" % e)
plexpy.SYS_TIMEZONE = pytz.UTC
jellypy.SYS_TIMEZONE = pytz.UTC
plexpy.SYS_UTC_OFFSET = datetime.datetime.now(plexpy.SYS_TIMEZONE).strftime('%z')
jellypy.SYS_UTC_OFFSET = datetime.datetime.now(jellypy.SYS_TIMEZONE).strftime('%z')
if helpers.bool_true(os.getenv('TAUTULLI_DOCKER', False)):
plexpy.DOCKER = True
jellypy.DOCKER = True
if helpers.bool_true(os.getenv('TAUTULLI_SNAP', False)):
plexpy.SNAP = True
jellypy.SNAP = True
if args.dev:
plexpy.DEV = True
jellypy.DEV = True
logger.debug("Tautulli is running in the dev environment.")
if args.daemon:
if sys.platform == 'win32':
logger.warn("Daemonizing not supported under Windows, starting normally")
else:
plexpy.DAEMON = True
plexpy.QUIET = True
jellypy.DAEMON = True
jellypy.QUIET = True
if args.nofork:
plexpy.NOFORK = True
jellypy.NOFORK = True
logger.info("Tautulli is running as a service, it will not fork when restarted.")
if args.pidfile:
plexpy.PIDFILE = str(args.pidfile)
jellypy.PIDFILE = str(args.pidfile)
# If the pidfile already exists, plexpy may still be running, so
# If the pidfile already exists, jellypy may still be running, so
# exit
if os.path.exists(plexpy.PIDFILE):
if os.path.exists(jellypy.PIDFILE):
try:
with open(plexpy.PIDFILE, 'r') as fp:
with open(jellypy.PIDFILE, 'r') as fp:
pid = int(fp.read())
except IOError as e:
raise SystemExit("Unable to read PID file: %s", e)
@@ -160,20 +155,20 @@ def main():
except OSError:
logger.warn("PID file '%s' already exists, but PID %d is "
"not running. Ignoring PID file." %
(plexpy.PIDFILE, pid))
(jellypy.PIDFILE, pid))
else:
# The pidfile exists and points to a live PID. plexpy may
# The pidfile exists and points to a live PID. jellypy may
# still be running, so exit.
raise SystemExit("PID file '%s' already exists. Exiting." %
plexpy.PIDFILE)
jellypy.PIDFILE)
# The pidfile is only useful in daemon mode, make sure we can write the
# file properly
if plexpy.DAEMON:
plexpy.CREATEPID = True
if jellypy.DAEMON:
jellypy.CREATEPID = True
try:
with open(plexpy.PIDFILE, 'w') as fp:
with open(jellypy.PIDFILE, 'w') as fp:
fp.write("pid\n")
except IOError as e:
raise SystemExit("Unable to write PID file: %s", e)
@@ -183,107 +178,107 @@ def main():
# Determine which data directory and config file to use
if args.datadir:
plexpy.DATA_DIR = args.datadir
elif plexpy.FROZEN:
plexpy.DATA_DIR = appdirs.user_data_dir("Tautulli", False)
jellypy.DATA_DIR = args.datadir
elif jellypy.FROZEN:
jellypy.DATA_DIR = appdirs.user_data_dir("Tautulli", False)
else:
plexpy.DATA_DIR = plexpy.PROG_DIR
jellypy.DATA_DIR = jellypy.PROG_DIR
# Migrate Snap data dir
if plexpy.SNAP:
if jellypy.SNAP:
snap_common = os.environ['SNAP_COMMON']
old_data_dir = os.path.join(snap_common, 'Tautulli')
if os.path.exists(old_data_dir) and os.listdir(old_data_dir):
plexpy.SNAP_MIGRATE = True
jellypy.SNAP_MIGRATE = True
logger.info("Migrating Snap user data.")
shutil.move(old_data_dir, plexpy.DATA_DIR)
shutil.move(old_data_dir, jellypy.DATA_DIR)
if args.config:
config_file = args.config
else:
config_file = os.path.join(plexpy.DATA_DIR, config.FILENAME)
config_file = os.path.join(jellypy.DATA_DIR, config.FILENAME)
# Try to create the DATA_DIR if it doesn't exist
if not os.path.exists(plexpy.DATA_DIR):
if not os.path.exists(jellypy.DATA_DIR):
try:
os.makedirs(plexpy.DATA_DIR)
os.makedirs(jellypy.DATA_DIR)
except OSError:
raise SystemExit(
'Could not create data directory: ' + plexpy.DATA_DIR + '. Exiting....')
'Could not create data directory: ' + jellypy.DATA_DIR + '. Exiting....')
# Make sure the DATA_DIR is writeable
if not os.access(plexpy.DATA_DIR, os.W_OK):
if not os.access(jellypy.DATA_DIR, os.W_OK):
raise SystemExit(
'Cannot write to the data directory: ' + plexpy.DATA_DIR + '. Exiting...')
'Cannot write to the data directory: ' + jellypy.DATA_DIR + '. Exiting...')
# Put the database in the DATA_DIR
plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, database.FILENAME)
jellypy.DB_FILE = os.path.join(jellypy.DATA_DIR, database.FILENAME)
# Move 'plexpy.db' to 'tautulli.db'
if os.path.isfile(os.path.join(plexpy.DATA_DIR, 'plexpy.db')) and \
not os.path.isfile(os.path.join(plexpy.DATA_DIR, plexpy.DB_FILE)):
# Move legacy 'plexpy.db' to 'tautulli.db'
if os.path.isfile(os.path.join(jellypy.DATA_DIR, 'plexpy.db')) and \
not os.path.isfile(os.path.join(jellypy.DATA_DIR, jellypy.DB_FILE)):
try:
os.rename(os.path.join(plexpy.DATA_DIR, 'plexpy.db'), plexpy.DB_FILE)
os.rename(os.path.join(jellypy.DATA_DIR, 'plexpy.db'), jellypy.DB_FILE)
except OSError as e:
raise SystemExit("Unable to rename plexpy.db to tautulli.db: %s", e)
raise SystemExit("Unable to rename jellypy.db to tautulli.db: %s", e)
if plexpy.DAEMON:
plexpy.daemonize()
if jellypy.DAEMON:
jellypy.daemonize()
# Read config and start logging
plexpy.initialize(config_file)
jellypy.initialize(config_file)
# Start the background threads
plexpy.start()
jellypy.start()
# Force the HTTP port if necessary
if args.port:
plexpy.HTTP_PORT = args.port
logger.info('Using forced web server port: %i', plexpy.HTTP_PORT)
jellypy.HTTP_PORT = args.port
logger.info('Using forced web server port: %i', jellypy.HTTP_PORT)
else:
plexpy.HTTP_PORT = int(plexpy.CONFIG.HTTP_PORT)
jellypy.HTTP_PORT = int(jellypy.CONFIG.HTTP_PORT)
# Check if pyOpenSSL is installed. It is required for certificate generation
# and for CherryPy.
if plexpy.CONFIG.ENABLE_HTTPS:
if jellypy.CONFIG.ENABLE_HTTPS:
try:
import OpenSSL
except ImportError:
logger.warn("The pyOpenSSL module is missing. Install this "
"module to enable HTTPS. HTTPS will be disabled.")
plexpy.CONFIG.ENABLE_HTTPS = False
jellypy.CONFIG.ENABLE_HTTPS = False
# Try to start the server. Will exit here if the address is already in use.
webstart.start()
if common.PLATFORM == 'Windows':
if plexpy.CONFIG.SYS_TRAY_ICON:
plexpy.WIN_SYS_TRAY_ICON = windows.WindowsSystemTray()
plexpy.WIN_SYS_TRAY_ICON.start()
if jellypy.CONFIG.SYS_TRAY_ICON:
jellypy.WIN_SYS_TRAY_ICON = windows.WindowsSystemTray()
jellypy.WIN_SYS_TRAY_ICON.start()
windows.set_startup()
elif common.PLATFORM == 'Darwin':
macos.set_startup()
# Open webbrowser
if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not plexpy.DEV:
plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT,
plexpy.HTTP_ROOT)
if jellypy.CONFIG.LAUNCH_BROWSER and not args.nolaunch and not jellypy.DEV:
jellypy.launch_browser(jellypy.CONFIG.HTTP_HOST, jellypy.HTTP_PORT,
jellypy.HTTP_ROOT)
if common.PLATFORM == 'Darwin' and plexpy.CONFIG.SYS_TRAY_ICON:
if common.PLATFORM == 'Darwin' and jellypy.CONFIG.SYS_TRAY_ICON:
if not macos.HAS_PYOBJC:
logger.warn("The pyobjc module is missing. Install this "
"module to enable the MacOS menu bar icon.")
plexpy.CONFIG.SYS_TRAY_ICON = False
jellypy.CONFIG.SYS_TRAY_ICON = False
if plexpy.CONFIG.SYS_TRAY_ICON:
if jellypy.CONFIG.SYS_TRAY_ICON:
# MacOS menu bar icon must be run on the main thread and is blocking
# Start the rest of Tautulli on a new thread
thread = threading.Thread(target=wait)
thread.daemon = True
thread.start()
plexpy.MAC_SYS_TRAY_ICON = macos.MacOSSystemTray()
plexpy.MAC_SYS_TRAY_ICON.start()
jellypy.MAC_SYS_TRAY_ICON = macos.MacOSSystemTray()
jellypy.MAC_SYS_TRAY_ICON.start()
else:
wait()
else:
@@ -295,29 +290,29 @@ def wait():
# Wait endlessly for a signal to happen
while True:
if not plexpy.SIGNAL:
if not jellypy.SIGNAL:
try:
time.sleep(1)
except KeyboardInterrupt:
plexpy.SIGNAL = 'shutdown'
jellypy.SIGNAL = 'shutdown'
else:
logger.info('Received signal: %s', plexpy.SIGNAL)
logger.info('Received signal: %s', jellypy.SIGNAL)
if plexpy.SIGNAL == 'shutdown':
plexpy.shutdown()
elif plexpy.SIGNAL == 'restart':
plexpy.shutdown(restart=True)
elif plexpy.SIGNAL == 'checkout':
plexpy.shutdown(restart=True, checkout=True)
elif plexpy.SIGNAL == 'reset':
plexpy.shutdown(restart=True, reset=True)
elif plexpy.SIGNAL == 'update':
plexpy.shutdown(restart=True, update=True)
if jellypy.SIGNAL == 'shutdown':
jellypy.shutdown()
elif jellypy.SIGNAL == 'restart':
jellypy.shutdown(restart=True)
elif jellypy.SIGNAL == 'checkout':
jellypy.shutdown(restart=True, checkout=True)
elif jellypy.SIGNAL == 'reset':
jellypy.shutdown(restart=True, reset=True)
elif jellypy.SIGNAL == 'update':
jellypy.shutdown(restart=True, update=True)
else:
logger.error('Unknown signal. Shutting down...')
plexpy.shutdown()
jellypy.shutdown()
plexpy.SIGNAL = None
jellypy.SIGNAL = None
if __name__ == "__main__":


@@ -1,24 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from Tautulli import main
# Call main() from Tautulli.py
if __name__ == "__main__":
main()


@@ -1,8 +1,8 @@
# Tautulli
# JellyPy
A python based web application for monitoring, analytics and notifications for [Plex Media Server](https://plex.tv).
A Python-based web application for monitoring, analytics, and notifications for [Jellyfin](https://jellyfin.org/).
This project is based on code from [Headphones](https://github.com/rembo10/headphones) and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
This project is based on [Tautulli](https://github.com/Tautulli/Tautulli).
## Features
@@ -63,4 +63,6 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
This is free software under the GPL v3 open source license. Feel free to do with it what you wish, but any modification must be open sourced. A copy of the license is included.
This software includes Highsoft software libraries which you may freely distribute for non-commercial use. Commercial users must license this software; for more information visit https://shop.highsoft.com/faq/non-commercial#non-commercial-redistribution.
[Tautulli]: https://github.com/Tautulli/Tautulli


@@ -2024,7 +2024,7 @@ Rating: {rating}/10 --> Rating: /10
cache: false,
async: true,
complete: function(xhr, status) {
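// Replace the table container's contents with the server-rendered HTML fragment.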
$("#plexpy-configuration-table").html(xhr.responseText);
$("#jellypy-configuration-table").html(xhr.responseText);
}
});
}
@@ -2035,7 +2035,7 @@ Rating: {rating}/10 --> Rating: /10
cache: false,
async: true,
complete: function(xhr, status) {
$("#plexpy-scheduler-table").html(xhr.responseText);
$("#jellypy-scheduler-table").html(xhr.responseText);
}
});
}
@@ -2046,7 +2046,7 @@ Rating: {rating}/10 --> Rating: /10
cache: false,
async: true,
complete: function(xhr, status) {
$("#plexpy-notifiers-table").html(xhr.responseText);
$("#jellypy-notifiers-table").html(xhr.responseText);
}
});
}
@@ -2071,7 +2071,7 @@ Rating: {rating}/10 --> Rating: /10
cache: false,
async: true,
complete: function(xhr, status) {
$("#plexpy-newsletters-table").html(xhr.responseText);
$("#jellypy-newsletters-table").html(xhr.responseText);
}
});
}
@@ -2096,7 +2096,7 @@ Rating: {rating}/10 --> Rating: /10
cache: false,
async: true,
complete: function(xhr, status) {
$("#plexpy-mobile-devices-table").html(xhr.responseText);
$("#jellypy-mobile-devices-table").html(xhr.responseText);
}
});
}


@@ -13,73 +13,48 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import range
import datetime
import os
import future.moves.queue as queue
import sqlite3
import sys
import subprocess
import sys
import threading
import uuid
import future.moves.queue as queue
from future.builtins import range
# Some cut down versions of Python may not include this module and it's not critical for us
try:
import webbrowser
no_browser = False
except ImportError:
no_browser = True
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
from UniversalAnalytics import Tracker
import pytz
PYTHON2 = sys.version_info[0] == 2
if PYTHON2:
import activity_handler
import activity_pinger
import common
import database
import datafactory
import exporter
import libraries
import logger
import mobile_app
import newsletters
import newsletter_handler
import notification_handler
import notifiers
import plextv
import users
import versioncheck
import web_socket
import webstart
import config
else:
from plexpy import activity_handler
from plexpy import activity_pinger
from plexpy import common
from plexpy import database
from plexpy import datafactory
from plexpy import exporter
from plexpy import libraries
from plexpy import logger
from plexpy import mobile_app
from plexpy import newsletters
from plexpy import newsletter_handler
from plexpy import notification_handler
from plexpy import notifiers
from plexpy import plextv
from plexpy import users
from plexpy import versioncheck
from plexpy import web_socket
from plexpy import webstart
from plexpy import config
from jellypy import activity_handler
from jellypy import activity_pinger
from jellypy import common
from jellypy import database
from jellypy import datafactory
from jellypy import exporter
from jellypy import libraries
from jellypy import logger
from jellypy import mobile_app
from jellypy import newsletters
from jellypy import newsletter_handler
from jellypy import notification_handler
from jellypy import notifiers
from jellypy import plextv
from jellypy import users
from jellypy import versioncheck
from jellypy import web_socket
from jellypy import webstart
from jellypy import config
PROG_DIR = None
FULL_PATH = None
@@ -502,7 +477,7 @@ def initialize_scheduler():
def schedule_job(func, name, hours=0, minutes=0, seconds=0, args=None):
"""
Start scheduled job if starting or restarting plexpy.
Start scheduled job if starting or restarting jellypy.
Reschedule job if Interval Settings have changed.
Remove job if Interval Settings changed to 0
@@ -2388,47 +2363,6 @@ def generate_uuid():
return uuid.uuid4().hex
def initialize_tracker():
data = {
'dataSource': 'server',
'appName': common.PRODUCT,
'appVersion': common.RELEASE,
'appId': INSTALL_TYPE,
'appInstallerId': CONFIG.GIT_BRANCH,
'dimension1': '{} {}'.format(common.PLATFORM, common.PLATFORM_RELEASE), # App Platform
'dimension2': common.PLATFORM_LINUX_DISTRO, # Linux Distro
'dimension3': common.PYTHON_VERSION,
'userLanguage': SYS_LANGUAGE,
'documentEncoding': SYS_ENCODING,
'noninteractive': True
}
tracker = Tracker.create('UA-111522699-2', client_id=CONFIG.PMS_UUID, hash_client_id=True,
user_agent=common.USER_AGENT)
tracker.set(data)
return tracker
def analytics_event(category, action, label=None, value=None, **kwargs):
data = {'category': category, 'action': action}
if label is not None:
data['label'] = label
if value is not None:
data['value'] = value
if kwargs:
data.update(kwargs)
if TRACKER:
try:
TRACKER.send('event', data)
except Exception as e:
logger.warn("Failed to send analytics event for category '%s', action '%s': %s" % (category, action, e))
def check_folder_writable(folder, fallback, name):
if not folder:
folder = fallback
@@ -2461,7 +2395,7 @@ def get_tautulli_info():
'tautulli_version': common.RELEASE,
'tautulli_branch': CONFIG.GIT_BRANCH,
'tautulli_commit': CURRENT_VERSION,
'tautulli_platform':common.PLATFORM,
'tautulli_platform': common.PLATFORM,
'tautulli_platform_release': common.PLATFORM_RELEASE,
'tautulli_platform_version': common.PLATFORM_VERSION,
'tautulli_platform_linux_distro': common.PLATFORM_LINUX_DISTRO,


@@ -24,8 +24,8 @@ import time
from apscheduler.triggers.date import DateTrigger
import pytz
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import activity_processor
import datafactory
import helpers
@@ -33,12 +33,12 @@ if plexpy.PYTHON2:
import notification_handler
import pmsconnect
else:
from plexpy import activity_processor
from plexpy import datafactory
from plexpy import helpers
from plexpy import logger
from plexpy import notification_handler
from plexpy import pmsconnect
from jellypy import activity_processor
from jellypy import datafactory
from jellypy import helpers
from jellypy import logger
from jellypy import notification_handler
from jellypy import pmsconnect
ACTIVITY_SCHED = None
@@ -134,7 +134,7 @@ class ActivityHandler(object):
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
# Send notification after updating db
#plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
#jellypy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
# Write the new session to our temp session table
self.update_db_session(session=session, notify=True)
@@ -162,7 +162,7 @@ class ActivityHandler(object):
# Retrieve the session data from our temp table
db_session = ap.get_session_by_key(session_key=self.get_session_key())
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'})
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'})
# Write it to the history table
monitor_proc = activity_processor.ActivityProcessor()
@@ -198,7 +198,7 @@ class ActivityHandler(object):
db_session = ap.get_session_by_key(session_key=self.get_session_key())
if not still_paused:
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'})
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'})
def on_resume(self):
if self.is_valid_session():
@@ -214,7 +214,7 @@ class ActivityHandler(object):
# Retrieve the session data from our temp table
db_session = ap.get_session_by_key(session_key=self.get_session_key())
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_resume'})
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_resume'})
def on_change(self):
if self.is_valid_session():
@@ -227,7 +227,7 @@ class ActivityHandler(object):
ap = activity_processor.ActivityProcessor()
db_session = ap.get_session_by_key(session_key=self.get_session_key())
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_change'})
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_change'})
def on_buffer(self):
if self.is_valid_session():
@@ -255,14 +255,14 @@ class ActivityHandler(object):
(self.get_session_key(), buffer_last_triggered))
time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered)
if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
if current_buffer_count >= jellypy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
time_since_last_trigger >= jellypy.CONFIG.BUFFER_WAIT:
ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
# Retrieve the session data from our temp table
db_session = ap.get_session_by_key(session_key=self.get_session_key())
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'})
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'})
def on_error(self):
if self.is_valid_session():
@@ -275,7 +275,7 @@ class ActivityHandler(object):
ap = activity_processor.ActivityProcessor()
db_session = ap.get_session_by_key(session_key=self.get_session_key())
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_error'})
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_error'})
# This function receives events from our websocket connection
def process(self):
@@ -358,10 +358,10 @@ class ActivityHandler(object):
# The only purpose of this is for notifications
if not db_session['watched'] and this_state != 'buffering':
progress_percent = helpers.get_percent(self.timeline['viewOffset'], db_session['duration'])
watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT,
'clip': jellypy.CONFIG.TV_WATCHED_PERCENT
}
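# Media types not listed here default to 101, so they can never reach the watched threshold.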
if progress_percent >= watched_percent.get(db_session['media_type'], 101):
@@ -373,7 +373,7 @@ class ActivityHandler(object):
session=db_session, notify_action='on_watched', notified=False)
for d in watched_notifiers:
plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(),
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(),
'notifier_id': d['notifier_id'],
'notify_action': 'on_watched'})
@@ -440,7 +440,7 @@ class TimelineHandler(object):
# Add a new media item to the recently added queue
if media_type and section_id > 0 and \
((state_type == 0 and metadata_state == 'created')): # or \
#(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_UPGRADE and state_type in (1, 5) and \
#(jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_UPGRADE and state_type in (1, 5) and \
#media_state == 'analyzing' and queue_size is None)):
if media_type in ('episode', 'track'):
@@ -467,7 +467,7 @@ class TimelineHandler(object):
schedule_callback('rating_key-{}'.format(grandparent_rating_key),
func=clear_recently_added_queue,
args=[grandparent_rating_key, grandparent_title],
seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
elif media_type in ('season', 'album'):
metadata = self.get_metadata()
@@ -486,7 +486,7 @@ class TimelineHandler(object):
schedule_callback('rating_key-{}'.format(parent_rating_key),
func=clear_recently_added_queue,
args=[parent_rating_key, parent_title],
seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
else:
queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set())
@@ -499,7 +499,7 @@ class TimelineHandler(object):
schedule_callback('rating_key-{}'.format(rating_key),
func=clear_recently_added_queue,
args=[rating_key, title],
seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
# A movie, show, or artist is done processing
elif media_type in ('movie', 'show', 'artist') and section_id > 0 and \
@@ -536,10 +536,10 @@ class ReachabilityHandler(object):
return helpers.bool_true(pref)
def on_down(self, server_response):
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
def on_up(self, server_response):
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
def process(self):
# Check if remote access is enabled
@@ -547,7 +547,7 @@ class ReachabilityHandler(object):
return
# Do nothing if remote access is still up and hasn't changed
if self.is_reachable() and plexpy.PLEX_REMOTE_ACCESS_UP:
if self.is_reachable() and jellypy.PLEX_REMOTE_ACCESS_UP:
return
pms_connect = pmsconnect.PmsConnect()
@@ -558,22 +558,22 @@ class ReachabilityHandler(object):
if server_response['mapping_state'] == 'waiting':
logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.")
elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
elif jellypy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason'])
logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.")
plexpy.PLEX_REMOTE_ACCESS_UP = False
jellypy.PLEX_REMOTE_ACCESS_UP = False
if not ACTIVITY_SCHED.get_job('on_extdown'):
logger.debug("Tautulli ReachabilityHandler :: Schedule remote access down callback in %d seconds.",
plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
jellypy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
schedule_callback('on_extdown', func=self.on_down, args=[server_response],
seconds=plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
seconds=jellypy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
elif jellypy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.")
plexpy.PLEX_REMOTE_ACCESS_UP = True
jellypy.PLEX_REMOTE_ACCESS_UP = True
if ACTIVITY_SCHED.get_job('on_extdown'):
logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.")
@@ -581,8 +581,8 @@ class ReachabilityHandler(object):
else:
self.on_up(server_response)
elif plexpy.PLEX_REMOTE_ACCESS_UP is None:
plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
elif jellypy.PLEX_REMOTE_ACCESS_UP is None:
jellypy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
def del_keys(key):
@@ -626,7 +626,7 @@ def force_stop_stream(session_key, title, user):
else:
session['write_attempts'] += 1
if session['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
if session['write_attempts'] < jellypy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
logger.warn("Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
"Will try again in 30 seconds. Write attempt %s."
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
@@ -649,14 +649,14 @@ def force_stop_stream(session_key, title, user):
def clear_recently_added_queue(rating_key, title):
child_keys = RECENTLY_ADDED_QUEUE[rating_key]
if plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT and len(child_keys) > 1:
if jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT and len(child_keys) > 1:
on_created(rating_key, child_keys=child_keys)
elif child_keys:
for child_key in child_keys:
grandchild_keys = RECENTLY_ADDED_QUEUE.get(child_key, [])
if plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT and len(grandchild_keys) > 1:
if jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT and len(grandchild_keys) > 1:
on_created(child_key, child_keys=grandchild_keys)
elif grandchild_keys:
@@ -697,7 +697,7 @@ def on_created(rating_key, **kwargs):
if notify:
data = {'timeline_data': metadata, 'notify_action': 'on_created'}
data.update(kwargs)
plexpy.NOTIFY_QUEUE.put(data)
jellypy.NOTIFY_QUEUE.put(data)
all_keys = [rating_key]
if 'child_keys' in kwargs:
@@ -714,7 +714,7 @@ def on_created(rating_key, **kwargs):
def delete_metadata_cache(session_key):
try:
os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, 'session_metadata/metadata-sessionKey-%s.json' % session_key))
os.remove(os.path.join(jellypy.CONFIG.CACHE_DIR, 'session_metadata/metadata-sessionKey-%s.json' % session_key))
except OSError as e:
logger.error("Tautulli ActivityHandler :: Failed to remove metadata cache file (sessionKey %s): %s"
% (session_key, e))


@@ -18,8 +18,8 @@ from future.builtins import str
import threading
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import activity_handler
import activity_processor
import database
@@ -31,16 +31,16 @@ if plexpy.PYTHON2:
import pmsconnect
import web_socket
else:
from plexpy import activity_handler
from plexpy import activity_processor
from plexpy import database
from plexpy import helpers
from plexpy import libraries
from plexpy import logger
from plexpy import notification_handler
from plexpy import plextv
from plexpy import pmsconnect
from plexpy import web_socket
from jellypy import activity_handler
from jellypy import activity_processor
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import notification_handler
from jellypy import plextv
from jellypy import pmsconnect
from jellypy import web_socket
monitor_lock = threading.Lock()
@@ -82,28 +82,28 @@ def check_active_sessions(ws_request=False):
if session['state'] == 'paused':
logger.debug("Tautulli Monitor :: Session %s paused." % stream['session_key'])
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_pause'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_pause'})
if session['state'] == 'playing' and stream['state'] == 'paused':
logger.debug("Tautulli Monitor :: Session %s resumed." % stream['session_key'])
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_resume'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_resume'})
if session['state'] == 'error':
logger.debug("Tautulli Monitor :: Session %s encountered an error." % stream['session_key'])
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_error'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_error'})
if stream['state'] == 'paused' and not ws_request:
# The stream is still paused so we need to increment the paused_counter
# Using the set config parameter as the interval, probably not the most accurate but
# it will have to do for now. If it's a websocket request don't use this method.
paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
paused_counter = int(stream['paused_counter']) + jellypy.CONFIG.MONITORING_INTERVAL
monitor_db.action('UPDATE sessions SET paused_counter = ? '
'WHERE session_key = ? AND rating_key = ?',
[paused_counter, stream['session_key'], stream['rating_key']])
if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
if session['state'] == 'buffering' and jellypy.CONFIG.BUFFER_THRESHOLD > 0:
# The stream is buffering so we need to increment the buffer_count
# We're just going to increment on every monitor ping,
# would be difficult to keep track otherwise
@@ -117,11 +117,11 @@ def check_active_sessions(ws_request=False):
'WHERE session_key = ? AND rating_key = ?',
[stream['session_key'], stream['rating_key']])
if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
if buffer_values[0]['buffer_count'] >= jellypy.CONFIG.BUFFER_THRESHOLD:
# Push any notifications -
# Push it on its own thread so we don't hold up our db actions
# Our first buffer notification
if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
if buffer_values[0]['buffer_count'] == jellypy.CONFIG.BUFFER_THRESHOLD:
logger.info("Tautulli Monitor :: User '%s' has triggered a buffer warning."
% stream['user'])
# Set the buffer trigger time
@@ -130,12 +130,12 @@ def check_active_sessions(ws_request=False):
'WHERE session_key = ? AND rating_key = ?',
[stream['session_key'], stream['rating_key']])
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
else:
# Subsequent buffer notifications after wait time
if helpers.timestamp() > buffer_values[0]['buffer_last_triggered'] + \
plexpy.CONFIG.BUFFER_WAIT:
jellypy.CONFIG.BUFFER_WAIT:
logger.info("Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
% stream['user'])
# Set the buffer trigger time
@@ -144,7 +144,7 @@ def check_active_sessions(ws_request=False):
'WHERE session_key = ? AND rating_key = ?',
[stream['session_key'], stream['rating_key']])
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
logger.debug("Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
% (stream['session_key'],
@@ -157,11 +157,11 @@ def check_active_sessions(ws_request=False):
if session['state'] != 'buffering':
progress_percent = helpers.get_percent(session['view_offset'], session['duration'])
notify_states = notification_handler.get_notify_state(session=session)
if (session['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
session['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
session['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT) \
if (session['media_type'] == 'movie' and progress_percent >= jellypy.CONFIG.MOVIE_WATCHED_PERCENT or
session['media_type'] == 'episode' and progress_percent >= jellypy.CONFIG.TV_WATCHED_PERCENT or
session['media_type'] == 'track' and progress_percent >= jellypy.CONFIG.MUSIC_WATCHED_PERCENT) \
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
else:
# The user has stopped playing a stream
@@ -177,13 +177,13 @@ def check_active_sessions(ws_request=False):
progress_percent = helpers.get_percent(stream['view_offset'], stream['duration'])
notify_states = notification_handler.get_notify_state(session=stream)
if (stream['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
stream['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
stream['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT) \
if (stream['media_type'] == 'movie' and progress_percent >= jellypy.CONFIG.MOVIE_WATCHED_PERCENT or
stream['media_type'] == 'episode' and progress_percent >= jellypy.CONFIG.TV_WATCHED_PERCENT or
stream['media_type'] == 'track' and progress_percent >= jellypy.CONFIG.MUSIC_WATCHED_PERCENT) \
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_stop'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_stop'})
# Write the item history on playback stop
row_id = monitor_process.write_session_history(session=stream)
@@ -196,7 +196,7 @@ def check_active_sessions(ws_request=False):
else:
stream['write_attempts'] += 1
if stream['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
if stream['write_attempts'] < jellypy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
"Will try again on the next pass. Write attempt %s."
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
@@ -223,7 +223,7 @@ def check_active_sessions(ws_request=False):
def connect_server(log=True, startup=False):
if plexpy.CONFIG.PMS_IS_CLOUD:
if jellypy.CONFIG.PMS_IS_CLOUD:
if log:
logger.info("Tautulli Monitor :: Checking for Plex Cloud server status...")
@@ -264,12 +264,12 @@ def check_server_updates():
download_info = plex_tv.get_plex_downloads()
if download_info:
logger.info("Tautulli Monitor :: Current PMS version: %s", plexpy.CONFIG.PMS_VERSION)
logger.info("Tautulli Monitor :: Current PMS version: %s", jellypy.CONFIG.PMS_VERSION)
if download_info['update_available']:
logger.info("Tautulli Monitor :: PMS update available version: %s", download_info['version'])
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_pmsupdate', 'pms_download_info': download_info})
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_pmsupdate', 'pms_download_info': download_info})
else:
logger.info("Tautulli Monitor :: No PMS update available.")

View File

@@ -20,8 +20,8 @@ from future.builtins import object
from collections import defaultdict
import json
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import libraries
@@ -29,12 +29,12 @@ if plexpy.PYTHON2:
import pmsconnect
import users
else:
from plexpy import database
from plexpy import helpers
from plexpy import libraries
from plexpy import logger
from plexpy import pmsconnect
from plexpy import users
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import pmsconnect
from jellypy import users
class ActivityProcessor(object):
@@ -165,7 +165,7 @@ class ActivityProcessor(object):
# Check if any notification agents have notifications enabled
if notify:
session.update(timestamp)
plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
jellypy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
# Add Live TV library if it hasn't been added
if values['live']:
@@ -231,14 +231,14 @@ class ActivityProcessor(object):
real_play_time = stopped - helpers.cast_to_int(session['started']) - helpers.cast_to_int(session['paused_counter'])
if not is_import and plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
if not is_import and jellypy.CONFIG.LOGGING_IGNORE_INTERVAL:
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
(real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
(real_play_time < int(jellypy.CONFIG.LOGGING_IGNORE_INTERVAL)):
logging_enabled = False
logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs "
"which is less than %s seconds, so we're not logging it." %
(session['session_key'], session['rating_key'], str(real_play_time),
plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
jellypy.CONFIG.LOGGING_IGNORE_INTERVAL))
if not is_import and session['media_type'] == 'track':
if real_play_time < 15 and helpers.cast_to_int(session['duration']) >= 30:
logging_enabled = False
@@ -360,9 +360,9 @@ class ActivityProcessor(object):
'view_offset': result[1]['view_offset'],
'reference_id': result[1]['reference_id']}
watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT
}
prev_progress_percent = helpers.get_percent(prev_session['view_offset'], session['duration'])
media_watched_percent = watched_percent.get(session['media_type'], 0)
@@ -661,4 +661,4 @@ class ActivityProcessor(object):
'WHERE user_id = ? AND machine_id = ? AND media_type = ? '
'ORDER BY stopped DESC',
[user_id, machine_id, media_type])
return int(started - last_session.get('stopped', 0) >= plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD)
return int(started - last_session.get('stopped', 0) >= jellypy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD)
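The closing hunk decides whether a play continues the previous session: it returns 1 only when the gap since the last `stopped` timestamp reaches `NOTIFY_CONTINUED_SESSION_THRESHOLD`. A sketch with a hypothetical threshold value:

```python
import time

# Hypothetical stand-in for jellypy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD (seconds).
CONTINUED_SESSION_THRESHOLD = 15

def is_initial_session(started, last_stopped):
    # Returns 1 when enough time has passed since the previous session
    # stopped, i.e. this play is a new session rather than a continuation.
    return int(started - (last_stopped or 0) >= CONTINUED_SESSION_THRESHOLD)

now = int(time.time())
print(is_initial_session(now, now - 5))    # 0: continued within the threshold
print(is_initial_session(now, now - 600))  # 1: treated as a new session
```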

View File

@@ -35,8 +35,8 @@ import traceback
import cherrypy
import xmltodict
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import config
import database
@@ -51,19 +51,19 @@ if plexpy.PYTHON2:
import plextv
import users
else:
from plexpy import common
from plexpy import config
from plexpy import database
from plexpy import helpers
from plexpy import libraries
from plexpy import logger
from plexpy import mobile_app
from plexpy import notification_handler
from plexpy import notifiers
from plexpy import newsletter_handler
from plexpy import newsletters
from plexpy import plextv
from plexpy import users
from jellypy import common
from jellypy import config
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import mobile_app
from jellypy import notification_handler
from jellypy import notifiers
from jellypy import newsletter_handler
from jellypy import newsletters
from jellypy import plextv
from jellypy import users
class API2(object):
@@ -107,15 +107,15 @@ class API2(object):
def _api_validate(self, *args, **kwargs):
""" Sets class vars and remove unneeded parameters. """
if not plexpy.CONFIG.API_ENABLED:
if not jellypy.CONFIG.API_ENABLED:
self._api_msg = 'API not enabled'
self._api_response_code = 404
elif not plexpy.CONFIG.API_KEY:
elif not jellypy.CONFIG.API_KEY:
self._api_msg = 'API key not generated'
self._api_response_code = 401
elif len(plexpy.CONFIG.API_KEY) != 32:
elif len(jellypy.CONFIG.API_KEY) != 32:
self._api_msg = 'API key not generated correctly'
self._api_response_code = 401
@@ -142,8 +142,8 @@ class API2(object):
if 'app' in kwargs and helpers.bool_true(kwargs.pop('app')):
self._api_app = True
if plexpy.CONFIG.API_ENABLED and not self._api_msg or self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
if not self._api_app and self._api_apikey == plexpy.CONFIG.API_KEY:
if jellypy.CONFIG.API_ENABLED and not self._api_msg or self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
if not self._api_app and self._api_apikey == jellypy.CONFIG.API_KEY:
self._api_authenticated = True
elif self._api_app and self._api_apikey == mobile_app.get_temp_device_token() and \
@@ -203,7 +203,7 @@ class API2(object):
]
```
"""
logfile = os.path.join(plexpy.CONFIG.LOG_DIR, logger.FILENAME)
logfile = os.path.join(jellypy.CONFIG.LOG_DIR, logger.FILENAME)
templog = []
start = int(start)
end = int(end)
@@ -290,11 +290,11 @@ class API2(object):
```
"""
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
interface_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/')
interface_list = [name for name in os.listdir(interface_dir) if
os.path.isdir(os.path.join(interface_dir, name))]
conf = plexpy.CONFIG._config
conf = jellypy.CONFIG._config
config = {}
# Truthify the dict
@@ -332,7 +332,7 @@ class API2(object):
None
```
"""
if not plexpy.CONFIG.API_SQL:
if not jellypy.CONFIG.API_SQL:
self._api_msg = 'SQL not enabled for the API.'
return
@@ -342,12 +342,12 @@ class API2(object):
# allow the user to shoot themselves
# in the foot but not in the head..
if not len(os.listdir(plexpy.CONFIG.BACKUP_DIR)):
if not len(os.listdir(jellypy.CONFIG.BACKUP_DIR)):
self.backup_db()
else:
# If the backup is less than 24 h old, let's make a backup
if not any(os.path.getctime(os.path.join(plexpy.CONFIG.BACKUP_DIR, file_)) > (time.time() - 86400)
and file_.endswith('.db') for file_ in os.listdir(plexpy.CONFIG.BACKUP_DIR)):
if not any(os.path.getctime(os.path.join(jellypy.CONFIG.BACKUP_DIR, file_)) > (time.time() - 86400)
and file_.endswith('.db') for file_ in os.listdir(jellypy.CONFIG.BACKUP_DIR)):
self.backup_db()
db = database.MonitorDatabase()
@@ -363,7 +363,7 @@ class API2(object):
return data
def backup_db(self):
""" Create a manual backup of the `plexpy.db` file."""
""" Create a manual backup of the `jellypy.db` file."""
data = database.make_backup()
self._api_result_type = 'success' if data else 'error'
@@ -373,14 +373,14 @@ class API2(object):
def restart(self, **kwargs):
""" Restart Tautulli."""
plexpy.SIGNAL = 'restart'
jellypy.SIGNAL = 'restart'
self._api_msg = 'Restarting Tautulli'
self._api_result_type = 'success'
def update(self, **kwargs):
""" Update Tautulli."""
plexpy.SIGNAL = 'update'
jellypy.SIGNAL = 'update'
self._api_msg = 'Updating Tautulli'
self._api_result_type = 'success'
@@ -472,9 +472,9 @@ class API2(object):
mobile_app.set_temp_device_token(True)
plex_server = plextv.get_server_resources(return_info=True)
tautulli = plexpy.get_tautulli_info()
tautulli = jellypy.get_tautulli_info()
data = {"server_id": plexpy.CONFIG.PMS_UUID}
data = {"server_id": jellypy.CONFIG.PMS_UUID}
data.update(plex_server)
data.update(tautulli)
@@ -646,32 +646,32 @@ General optional parameters:
"""
data = None
apikey = hashlib.sha224(str(random.getrandbits(256)).encode('utf-8')).hexdigest()[0:32]
if plexpy.CONFIG.HTTP_USERNAME and plexpy.CONFIG.HTTP_PASSWORD:
if jellypy.CONFIG.HTTP_USERNAME and jellypy.CONFIG.HTTP_PASSWORD:
authenticated = False
if plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
username == plexpy.CONFIG.HTTP_USERNAME and check_hash(password, plexpy.CONFIG.HTTP_PASSWORD):
if jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and check_hash(password, jellypy.CONFIG.HTTP_PASSWORD):
authenticated = True
elif not plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
elif not jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and password == jellypy.CONFIG.HTTP_PASSWORD:
authenticated = True
if authenticated:
if plexpy.CONFIG.API_KEY:
data = plexpy.CONFIG.API_KEY
if jellypy.CONFIG.API_KEY:
data = jellypy.CONFIG.API_KEY
else:
data = apikey
plexpy.CONFIG.API_KEY = apikey
plexpy.CONFIG.write()
jellypy.CONFIG.API_KEY = apikey
jellypy.CONFIG.write()
else:
self._api_msg = 'Authentication is enabled, please add the correct username and password to the parameters'
else:
if plexpy.CONFIG.API_KEY:
data = plexpy.CONFIG.API_KEY
if jellypy.CONFIG.API_KEY:
data = jellypy.CONFIG.API_KEY
else:
# Make an apikey if one doesn't exist
data = apikey
plexpy.CONFIG.API_KEY = apikey
plexpy.CONFIG.write()
jellypy.CONFIG.API_KEY = apikey
jellypy.CONFIG.write()
return data
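The `apikey` expression above is the whole key-generation scheme: 256 random bits, SHA-224, first 32 hex characters (which is what the `len(API_KEY) != 32` check in `_api_validate` expects). The same construction in isolation:

```python
import hashlib
import random

def generate_apikey():
    # 256 random bits -> decimal string -> SHA-224 hex digest,
    # truncated to the first 32 characters, as in get_apikey above.
    return hashlib.sha224(str(random.getrandbits(256)).encode('utf-8')).hexdigest()[0:32]

key = generate_apikey()
assert len(key) == 32  # matches the length validation in _api_validate
print(key)
```

Note that `random.getrandbits` is not a cryptographic source; a fresh implementation would more likely reach for the `secrets` module.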

View File

@@ -24,11 +24,11 @@ from __future__ import unicode_literals
from future.moves.urllib.request import FancyURLopener
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
from common import USER_AGENT
else:
from plexpy.common import USER_AGENT
from jellypy.common import USER_AGENT
class PlexPyURLopener(FancyURLopener):

View File

@@ -21,11 +21,11 @@ import distro
import platform
from collections import OrderedDict
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import version
else:
from plexpy import version
from jellypy import version
# Identify Our Application

View File

@@ -25,13 +25,13 @@ import threading
from configobj import ConfigObj, ParseError
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from plexpy import helpers
from plexpy import logger
from jellypy import helpers
from jellypy import logger
def bool_int(value):
@@ -257,20 +257,20 @@ def import_tautulli_config(config=None, backup=False):
# Remove keys that should not be imported
for key in _DO_NOT_IMPORT_KEYS:
delattr(imported_config, key)
if plexpy.DOCKER or plexpy.SNAP:
if jellypy.DOCKER or jellypy.SNAP:
for key in _DO_NOT_IMPORT_KEYS_DOCKER:
delattr(imported_config, key)
# Merge the imported config file into the current config file
plexpy.CONFIG._config.merge(imported_config._config)
plexpy.CONFIG.write()
jellypy.CONFIG._config.merge(imported_config._config)
jellypy.CONFIG.write()
logger.info("Tautulli Config :: Tautulli config import complete.")
set_import_thread(None)
set_is_importing(False)
# Restart to apply changes
plexpy.SIGNAL = 'restart'
jellypy.SIGNAL = 'restart'
def make_backup(cleanup=False, scheduler=False):
@@ -280,15 +280,15 @@ def make_backup(cleanup=False, scheduler=False):
backup_file = 'config.backup-{}.sched.ini'.format(helpers.now())
else:
backup_file = 'config.backup-{}.ini'.format(helpers.now())
backup_folder = plexpy.CONFIG.BACKUP_DIR
backup_folder = jellypy.CONFIG.BACKUP_DIR
backup_file_fp = os.path.join(backup_folder, backup_file)
# In case the user has deleted it manually
if not os.path.exists(backup_folder):
os.makedirs(backup_folder)
plexpy.CONFIG.write()
shutil.copyfile(plexpy.CONFIG_FILE, backup_file_fp)
jellypy.CONFIG.write()
shutil.copyfile(jellypy.CONFIG_FILE, backup_file_fp)
if cleanup:
now = time.time()
@@ -296,17 +296,17 @@ def make_backup(cleanup=False, scheduler=False):
for root, dirs, files in os.walk(backup_folder):
ini_files = [os.path.join(root, f) for f in files if f.endswith('.sched.ini')]
for file_ in ini_files:
if os.stat(file_).st_mtime < now - plexpy.CONFIG.BACKUP_DAYS * 86400:
if os.stat(file_).st_mtime < now - jellypy.CONFIG.BACKUP_DAYS * 86400:
try:
os.remove(file_)
except OSError as e:
logger.error("Tautulli Config :: Failed to delete %s from the backup folder: %s" % (file_, e))
if backup_file in os.listdir(backup_folder):
logger.debug("Tautulli Config :: Successfully backed up %s to %s" % (plexpy.CONFIG_FILE, backup_file))
logger.debug("Tautulli Config :: Successfully backed up %s to %s" % (jellypy.CONFIG_FILE, backup_file))
return True
else:
logger.error("Tautulli Config :: Failed to backup %s to %s" % (plexpy.CONFIG_FILE, backup_file))
logger.error("Tautulli Config :: Failed to backup %s to %s" % (jellypy.CONFIG_FILE, backup_file))
return False
@@ -530,7 +530,7 @@ class Config(object):
self.CONFIG_VERSION = 14
if self.CONFIG_VERSION == 14:
if plexpy.DOCKER:
if jellypy.DOCKER:
self.PLEXPY_AUTO_UPDATE = 0
self.CONFIG_VERSION = 15
@@ -542,7 +542,7 @@ class Config(object):
self.CONFIG_VERSION = 16
if self.CONFIG_VERSION == 16:
if plexpy.SNAP:
if jellypy.SNAP:
self.PLEXPY_AUTO_UPDATE = 0
self.CONFIG_VERSION = 17
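These `CONFIG_VERSION` hunks show the migration idiom: each `if` block performs one version step and bumps the counter, so an old config file walks through every step in order. A condensed, self-contained sketch with hypothetical step bodies:

```python
class Config(object):
    # Hypothetical, condensed version of the upgrade ladder above.
    def __init__(self, config_version, docker=False, snap=False):
        self.CONFIG_VERSION = config_version
        self.PLEXPY_AUTO_UPDATE = 1
        self.DOCKER = docker
        self.SNAP = snap

    def upgrade(self):
        # Each block handles one version bump; falling through the chain
        # lets an old config apply every migration in sequence.
        if self.CONFIG_VERSION == 14:
            if self.DOCKER:
                self.PLEXPY_AUTO_UPDATE = 0
            self.CONFIG_VERSION = 15
        if self.CONFIG_VERSION == 15:
            # ... version 15 -> 16 migration elided ...
            self.CONFIG_VERSION = 16
        if self.CONFIG_VERSION == 16:
            if self.SNAP:
                self.PLEXPY_AUTO_UPDATE = 0
            self.CONFIG_VERSION = 17

c = Config(14, docker=True)
c.upgrade()
print(c.CONFIG_VERSION, c.PLEXPY_AUTO_UPDATE)  # 17 0
```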

View File

@@ -23,13 +23,13 @@ import shutil
import threading
import time
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from plexpy import helpers
from plexpy import logger
from jellypy import helpers
from jellypy import logger
FILENAME = "tautulli.db"
@@ -308,7 +308,7 @@ def optimize_db():
def db_filename(filename=FILENAME):
""" Returns the filepath to the db """
return os.path.join(plexpy.DATA_DIR, filename)
return os.path.join(jellypy.DATA_DIR, filename)
def make_backup(cleanup=False, scheduler=False):
@@ -320,13 +320,13 @@ def make_backup(cleanup=False, scheduler=False):
corrupt = ''
if not integrity:
corrupt = '.corrupt'
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})
if scheduler:
backup_file = 'tautulli.backup-{}{}.sched.db'.format(helpers.now(), corrupt)
else:
backup_file = 'tautulli.backup-{}{}.db'.format(helpers.now(), corrupt)
backup_folder = plexpy.CONFIG.BACKUP_DIR
backup_folder = jellypy.CONFIG.BACKUP_DIR
backup_file_fp = os.path.join(backup_folder, backup_file)
# In case the user has deleted it manually
@@ -345,7 +345,7 @@ def make_backup(cleanup=False, scheduler=False):
for root, dirs, files in os.walk(backup_folder):
db_files = [os.path.join(root, f) for f in files if f.endswith('.sched.db')]
for file_ in db_files:
if os.stat(file_).st_mtime < now - plexpy.CONFIG.BACKUP_DAYS * 86400:
if os.stat(file_).st_mtime < now - jellypy.CONFIG.BACKUP_DAYS * 86400:
try:
os.remove(file_)
except OSError as e:
@@ -361,10 +361,10 @@ def make_backup(cleanup=False, scheduler=False):
def get_cache_size():
# This will protect against typecasting problems produced by empty string and None settings
if not plexpy.CONFIG.CACHE_SIZEMB:
if not jellypy.CONFIG.CACHE_SIZEMB:
# sqlite will work with this (very slowly)
return 0
return int(plexpy.CONFIG.CACHE_SIZEMB)
return int(jellypy.CONFIG.CACHE_SIZEMB)
def dict_factory(cursor, row):
@@ -381,9 +381,9 @@ class MonitorDatabase(object):
self.filename = filename
self.connection = sqlite3.connect(db_filename(filename), timeout=20)
# Set database synchronous mode (default NORMAL)
self.connection.execute("PRAGMA synchronous = %s" % plexpy.CONFIG.SYNCHRONOUS_MODE)
self.connection.execute("PRAGMA synchronous = %s" % jellypy.CONFIG.SYNCHRONOUS_MODE)
# Set database journal mode (default WAL)
self.connection.execute("PRAGMA journal_mode = %s" % plexpy.CONFIG.JOURNAL_MODE)
self.connection.execute("PRAGMA journal_mode = %s" % jellypy.CONFIG.JOURNAL_MODE)
# Set database cache size (default 32MB)
self.connection.execute("PRAGMA cache_size = -%s" % (get_cache_size() * 1024))
self.connection.row_factory = dict_factory
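`MonitorDatabase.__init__` configures each connection with three PRAGMAs and a dict row factory. A self-contained sketch; the defaults follow the comments above (NORMAL synchronous, WAL journal, 32 MB cache), and the `dict_factory` body is one common implementation since the diff truncates it:

```python
import sqlite3

def dict_factory(cursor, row):
    # Map each row to {column_name: value}.
    return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}

def open_monitor_db(path, synchronous='NORMAL', journal_mode='WAL', cache_size_mb=32):
    conn = sqlite3.connect(path, timeout=20)
    conn.execute('PRAGMA synchronous = %s' % synchronous)
    conn.execute('PRAGMA journal_mode = %s' % journal_mode)
    # A negative cache_size means "size in KiB" to SQLite.
    conn.execute('PRAGMA cache_size = -%s' % (cache_size_mb * 1024))
    conn.row_factory = dict_factory
    return conn

conn = open_monitor_db(':memory:')
print(conn.execute('SELECT 1 AS one').fetchone())  # {'one': 1}
```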

View File

@@ -24,8 +24,8 @@ from future.builtins import object
import json
from itertools import groupby
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import database
import datatables
@@ -34,13 +34,13 @@ if plexpy.PYTHON2:
import pmsconnect
import session
else:
from plexpy import common
from plexpy import database
from plexpy import datatables
from plexpy import helpers
from plexpy import logger
from plexpy import pmsconnect
from plexpy import session
from jellypy import common
from jellypy import database
from jellypy import datatables
from jellypy import helpers
from jellypy import logger
from jellypy import pmsconnect
from jellypy import session
class DataFactory(object):
@@ -58,10 +58,10 @@ class DataFactory(object):
custom_where = []
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if include_activity is None:
include_activity = plexpy.CONFIG.HISTORY_TABLE_ACTIVITY
include_activity = jellypy.CONFIG.HISTORY_TABLE_ACTIVITY
if session.get_session_user_id():
session_user_id = str(session.get_session_user_id())
@@ -218,11 +218,11 @@ class DataFactory(object):
filter_duration = 0
total_duration = self.get_total_duration(custom_where=custom_where)
watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT,
'photo': 0,
'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
'clip': jellypy.CONFIG.TV_WATCHED_PERCENT
}
rows = []
@@ -309,13 +309,13 @@ class DataFactory(object):
if stat_id:
stats_cards = [stat_id]
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if stats_cards is None:
stats_cards = plexpy.CONFIG.HOME_STATS_CARDS
stats_cards = jellypy.CONFIG.HOME_STATS_CARDS
movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT
tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT
music_watched_percent = plexpy.CONFIG.MUSIC_WATCHED_PERCENT
movie_watched_percent = jellypy.CONFIG.MOVIE_WATCHED_PERCENT
tv_watched_percent = jellypy.CONFIG.TV_WATCHED_PERCENT
music_watched_percent = jellypy.CONFIG.MUSIC_WATCHED_PERCENT
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
sort_type = 'total_duration' if stats_type == 'duration' else 'total_plays'

View File

@@ -18,15 +18,15 @@ from future.builtins import object
import re
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import logger
else:
from plexpy import database
from plexpy import helpers
from plexpy import logger
from jellypy import database
from jellypy import helpers
from jellypy import logger
class DataTables(object):

View File

@@ -29,8 +29,8 @@ from functools import partial, reduce
from io import open
from multiprocessing.dummy import Pool as ThreadPool
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import database
import datatables
import helpers
@@ -38,12 +38,12 @@ if plexpy.PYTHON2:
import users
from plex import Plex
else:
from plexpy import database
from plexpy import datatables
from plexpy import helpers
from plexpy import logger
from plexpy import users
from plexpy.plex import Plex
from jellypy import database
from jellypy import datatables
from jellypy import helpers
from jellypy import logger
from jellypy import users
from jellypy.plex import Plex
class Export(object):
@@ -1534,9 +1534,9 @@ class Export(object):
user_tokens = user_data.get_tokens(user_id=self.user_id)
plex_token = user_tokens['server_token']
else:
plex_token = plexpy.CONFIG.PMS_TOKEN
plex_token = jellypy.CONFIG.PMS_TOKEN
plex = Plex(plexpy.CONFIG.PMS_URL, plex_token)
plex = Plex(jellypy.CONFIG.PMS_URL, plex_token)
if self.rating_key:
logger.debug(
@@ -1691,7 +1691,7 @@ class Export(object):
self.total_items = len(items)
logger.info("Tautulli Exporter :: Exporting %d item(s).", self.total_items)
pool = ThreadPool(processes=plexpy.CONFIG.EXPORT_THREADS)
pool = ThreadPool(processes=jellypy.CONFIG.EXPORT_THREADS)
items = [ExportObject(self, item) for item in items]
try:
@@ -2107,7 +2107,7 @@ def delete_export(export_id):
def delete_all_exports():
logger.info("Tautulli Exporter :: Deleting all exports from the export directory.")
export_dir = plexpy.CONFIG.EXPORT_DIR
export_dir = jellypy.CONFIG.EXPORT_DIR
try:
shutil.rmtree(export_dir, ignore_errors=True)
except OSError as e:
@@ -2233,7 +2233,7 @@ def format_export_filename(title, file_format):
def get_export_dirpath(title, timestamp=None, images_directory=None):
if timestamp:
title = format_export_directory(title, timestamp)
dirpath = os.path.join(plexpy.CONFIG.EXPORT_DIR, title)
dirpath = os.path.join(jellypy.CONFIG.EXPORT_DIR, title)
if images_directory:
dirpath = os.path.join(dirpath, '{}.images'.format(images_directory))
return dirpath

View File

@@ -22,19 +22,19 @@ from future.builtins import object
import datetime
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import database
import logger
import libraries
import session
else:
from plexpy import common
from plexpy import database
from plexpy import logger
from plexpy import libraries
from plexpy import session
from jellypy import common
from jellypy import database
from jellypy import logger
from jellypy import libraries
from jellypy import session
class Graphs(object):
@@ -55,7 +55,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -169,7 +169,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -227,7 +227,7 @@ class Graphs(object):
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_dayofweek: %s." % e)
return None
if plexpy.CONFIG.WEEK_START_MONDAY:
if jellypy.CONFIG.WEEK_START_MONDAY:
days_list = ['Monday', 'Tuesday', 'Wednesday',
'Thursday', 'Friday', 'Saturday', 'Sunday']
else:
@@ -300,7 +300,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -413,7 +413,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -534,7 +534,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -630,7 +630,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -739,7 +739,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -845,7 +845,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -931,7 +931,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -1041,7 +1041,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
@@ -1131,7 +1131,7 @@ class Graphs(object):
user_cond = 'AND session_history.user_id = %s ' % user_id
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
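One detail worth a note in this file: `get_total_plays_per_dayofweek` orders its weekday labels from the `WEEK_START_MONDAY` setting; the hunk truncates the `else` branch, which presumably lists Sunday first. A small sketch of that rotation:

```python
def dayofweek_labels(week_start_monday):
    # Monday-first when the setting is on; otherwise rotate Sunday to
    # the front (assumed for the truncated else branch above).
    days = ['Monday', 'Tuesday', 'Wednesday',
            'Thursday', 'Friday', 'Saturday', 'Sunday']
    if week_start_monday:
        return days
    return days[-1:] + days[:-1]

print(dayofweek_labels(False)[0])  # Sunday
```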

View File

@@ -52,17 +52,17 @@ from future.moves.urllib.parse import urlencode
from xml.dom import minidom
import xmltodict
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import logger
import request
from api2 import API2
else:
from plexpy import common
from plexpy import logger
from plexpy import request
from plexpy.api2 import API2
from jellypy import common
from jellypy import logger
from jellypy import request
from jellypy.api2 import API2
def addtoapi(*dargs, **dkwargs):
@@ -367,7 +367,7 @@ def replace_all(text, dic, normalize=False):
else:
j = unicodedata.normalize('NFC', j)
except TypeError:
j = unicodedata.normalize('NFC', j.decode(plexpy.SYS_ENCODING, 'replace'))
j = unicodedata.normalize('NFC', j.decode(jellypy.SYS_ENCODING, 'replace'))
text = text.replace(i, j)
return text
@@ -479,8 +479,8 @@ def create_https_certificates(ssl_cert, ssl_key):
serial = timestamp()
not_before = 0
not_after = 60 * 60 * 24 * 365 * 10 # ten years
domains = ['DNS:' + d.strip() for d in plexpy.CONFIG.HTTPS_DOMAIN.split(',') if d]
ips = ['IP:' + d.strip() for d in plexpy.CONFIG.HTTPS_IP.split(',') if d]
domains = ['DNS:' + d.strip() for d in jellypy.CONFIG.HTTPS_DOMAIN.split(',') if d]
ips = ['IP:' + d.strip() for d in jellypy.CONFIG.HTTPS_IP.split(',') if d]
alt_names = ','.join(domains + ips).encode('utf-8')
# Create the self-signed Tautulli certificate
@@ -739,15 +739,15 @@ def anon_url(*url):
"""
Return a URL string consisting of the Anonymous redirect URL and an arbitrary number of values appended.
"""
return '' if None in url else '%s%s' % (plexpy.CONFIG.ANON_REDIRECT, ''.join(str(s) for s in url))
return '' if None in url else '%s%s' % (jellypy.CONFIG.ANON_REDIRECT, ''.join(str(s) for s in url))
def get_img_service(include_self=False):
if plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 1:
if jellypy.CONFIG.NOTIFY_UPLOAD_POSTERS == 1:
return 'imgur'
elif plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 2 and include_self:
elif jellypy.CONFIG.NOTIFY_UPLOAD_POSTERS == 2 and include_self:
return 'self-hosted'
elif plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 3:
elif jellypy.CONFIG.NOTIFY_UPLOAD_POSTERS == 3:
return 'cloudinary'
else:
return None
@@ -757,11 +757,11 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
""" Uploads an image to Imgur """
img_url = delete_hash = ''
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
if not jellypy.CONFIG.IMGUR_CLIENT_ID:
logger.error("Tautulli Helpers :: Cannot upload image to Imgur. No Imgur client id specified in the settings.")
return img_url, delete_hash
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
headers = {'Authorization': 'Client-ID %s' % jellypy.CONFIG.IMGUR_CLIENT_ID}
data = {'image': base64.b64encode(img_data),
'title': img_title.encode('utf-8'),
'name': str(rating_key) + '.png',
@@ -789,11 +789,11 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
def delete_from_imgur(delete_hash, img_title='', fallback=''):
""" Deletes an image from Imgur """
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
if not jellypy.CONFIG.IMGUR_CLIENT_ID:
logger.error("Tautulli Helpers :: Cannot delete image from Imgur. No Imgur client id specified in the settings.")
return False
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
headers = {'Authorization': 'Client-ID %s' % jellypy.CONFIG.IMGUR_CLIENT_ID}
response, err_msg, req_msg = request.request_response2('https://api.imgur.com/3/image/%s' % delete_hash, 'DELETE',
headers=headers)
@@ -813,18 +813,18 @@ def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
""" Uploads an image to Cloudinary """
img_url = ''
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
if not jellypy.CONFIG.CLOUDINARY_CLOUD_NAME or not jellypy.CONFIG.CLOUDINARY_API_KEY or not jellypy.CONFIG.CLOUDINARY_API_SECRET:
logger.error("Tautulli Helpers :: Cannot upload image to Cloudinary. Cloudinary settings not specified in the settings.")
return img_url
cloudinary.config(
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
cloud_name=jellypy.CONFIG.CLOUDINARY_CLOUD_NAME,
api_key=jellypy.CONFIG.CLOUDINARY_API_KEY,
api_secret=jellypy.CONFIG.CLOUDINARY_API_SECRET
)
# Cloudinary library has very poor support for non-ASCII characters on Python 2
if plexpy.PYTHON2:
if jellypy.PYTHON2:
_img_title = latinToAscii(img_title, replace=True)
else:
_img_title = img_title
@@ -844,14 +844,14 @@ def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
def delete_from_cloudinary(rating_key=None, delete_all=False):
""" Deletes an image from Cloudinary """
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
if not jellypy.CONFIG.CLOUDINARY_CLOUD_NAME or not jellypy.CONFIG.CLOUDINARY_API_KEY or not jellypy.CONFIG.CLOUDINARY_API_SECRET:
logger.error("Tautulli Helpers :: Cannot delete image from Cloudinary. Cloudinary settings not specified in the settings.")
return False
cloudinary.config(
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
cloud_name=jellypy.CONFIG.CLOUDINARY_CLOUD_NAME,
api_key=jellypy.CONFIG.CLOUDINARY_API_KEY,
api_secret=jellypy.CONFIG.CLOUDINARY_API_SECRET
)
if delete_all:
@@ -870,14 +870,14 @@ def cloudinary_transform(rating_key=None, width=1000, height=1500, opacity=100,
img_format='png', img_title='', fallback=None):
url = ''
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
if not jellypy.CONFIG.CLOUDINARY_CLOUD_NAME or not jellypy.CONFIG.CLOUDINARY_API_KEY or not jellypy.CONFIG.CLOUDINARY_API_SECRET:
logger.error("Tautulli Helpers :: Cannot transform image on Cloudinary. Cloudinary settings not specified in the settings.")
return url
cloudinary.config(
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
cloud_name=jellypy.CONFIG.CLOUDINARY_CLOUD_NAME,
api_key=jellypy.CONFIG.CLOUDINARY_API_KEY,
api_secret=jellypy.CONFIG.CLOUDINARY_API_SECRET
)
img_options = {'format': img_format,
@@ -914,7 +914,7 @@ def cache_image(url, image=None):
If no image is provided, tries to return the image from the cache directory.
"""
# Create image directory if it doesn't exist
imgdir = os.path.join(plexpy.CONFIG.CACHE_DIR, 'images/')
imgdir = os.path.join(jellypy.CONFIG.CACHE_DIR, 'images/')
if not os.path.exists(imgdir):
logger.debug("Tautulli Helpers :: Creating image cache directory at %s" % imgdir)
os.makedirs(imgdir)
@@ -1122,12 +1122,12 @@ def eval_logic_groups_to_bool(logic_groups, eval_conds):
def get_plexpy_url(hostname=None):
if plexpy.CONFIG.ENABLE_HTTPS:
if jellypy.CONFIG.ENABLE_HTTPS:
scheme = 'https'
else:
scheme = 'http'
if hostname is None and plexpy.CONFIG.HTTP_HOST == '0.0.0.0':
if hostname is None and jellypy.CONFIG.HTTP_HOST == '0.0.0.0':
import socket
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -1142,18 +1142,18 @@ def get_plexpy_url(hostname=None):
if not hostname:
hostname = 'localhost'
elif hostname == 'localhost' and plexpy.CONFIG.HTTP_HOST != '0.0.0.0':
hostname = plexpy.CONFIG.HTTP_HOST
elif hostname == 'localhost' and jellypy.CONFIG.HTTP_HOST != '0.0.0.0':
hostname = jellypy.CONFIG.HTTP_HOST
else:
hostname = hostname or plexpy.CONFIG.HTTP_HOST
hostname = hostname or jellypy.CONFIG.HTTP_HOST
if plexpy.HTTP_PORT not in (80, 443):
port = ':' + str(plexpy.HTTP_PORT)
if jellypy.HTTP_PORT not in (80, 443):
port = ':' + str(jellypy.HTTP_PORT)
else:
port = ''
if plexpy.HTTP_ROOT is not None and plexpy.HTTP_ROOT.strip('/'):
root = '/' + plexpy.HTTP_ROOT.strip('/')
if jellypy.HTTP_ROOT is not None and jellypy.HTTP_ROOT.strip('/'):
root = '/' + jellypy.HTTP_ROOT.strip('/')
else:
root = ''
@@ -1202,10 +1202,10 @@ def split_args(args=None):
if isinstance(args, list):
return args
elif isinstance(args, str):
if plexpy.PYTHON2:
if jellypy.PYTHON2:
args = args.encode('utf-8')
args = shlex.split(args)
if plexpy.PYTHON2:
if jellypy.PYTHON2:
args = [a.decode('utf-8') for a in args]
return args
return []
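The `get_plexpy_url` hunks assemble the base URL from scheme, host, port, and HTTP root. A simplified sketch of that assembly, skipping the socket probe used when the server binds 0.0.0.0:

```python
def build_base_url(enable_https, host, port, http_root, hostname=None):
    # Simplified assembly mirroring get_plexpy_url, minus the
    # socket probe used to discover a reachable address for 0.0.0.0.
    scheme = 'https' if enable_https else 'http'
    if hostname is None:
        hostname = host if host != '0.0.0.0' else 'localhost'
    port_part = ':%d' % port if port not in (80, 443) else ''
    root = '/' + http_root.strip('/') if http_root and http_root.strip('/') else ''
    return '%s://%s%s%s' % (scheme, hostname, port_part, root)

print(build_base_url(False, '0.0.0.0', 8181, '/'))        # http://localhost:8181
print(build_base_url(True, 'tautulli.local', 443, 'tt'))  # https://tautulli.local/tt
```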

View File

@@ -26,13 +26,13 @@ import certifi
import requests
import urllib3
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from plexpy import helpers
from plexpy import logger
from jellypy import helpers
from jellypy import logger
class HTTPHandler(object):
@@ -53,14 +53,14 @@ class HTTPHandler(object):
self.headers = headers
else:
self.headers = {
'X-Plex-Product': plexpy.common.PRODUCT,
'X-Plex-Version': plexpy.common.RELEASE,
'X-Plex-Client-Identifier': plexpy.CONFIG.PMS_UUID,
'X-Plex-Platform': plexpy.common.PLATFORM,
'X-Plex-Platform-Version': plexpy.common.PLATFORM_RELEASE,
'X-Plex-Device': '{} {}'.format(plexpy.common.PLATFORM,
plexpy.common.PLATFORM_RELEASE),
'X-Plex-Device-Name': plexpy.common.PLATFORM_DEVICE_NAME
'X-Plex-Product': jellypy.common.PRODUCT,
'X-Plex-Version': jellypy.common.RELEASE,
'X-Plex-Client-Identifier': jellypy.CONFIG.PMS_UUID,
'X-Plex-Platform': jellypy.common.PLATFORM,
'X-Plex-Platform-Version': jellypy.common.PLATFORM_RELEASE,
'X-Plex-Device': '{} {}'.format(jellypy.common.PLATFORM,
jellypy.common.PLATFORM_RELEASE),
'X-Plex-Device-Name': jellypy.common.PLATFORM_DEVICE_NAME
}
self.token = token
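The default header block identifies the client to the Plex API with a fixed set of `X-Plex-*` keys. A sketch building the same dict from plain values; the literals passed in below are placeholders for the `jellypy.common` constants and `CONFIG.PMS_UUID`:

```python
import platform

def plex_headers(product, version, client_id):
    # Placeholder arguments stand in for jellypy.common.* and CONFIG.PMS_UUID.
    system, release = platform.system(), platform.release()
    return {
        'X-Plex-Product': product,
        'X-Plex-Version': version,
        'X-Plex-Client-Identifier': client_id,
        'X-Plex-Platform': system,
        'X-Plex-Platform-Version': release,
        'X-Plex-Device': '{} {}'.format(system, release),
        'X-Plex-Device-Name': platform.node(),
    }

print(plex_headers('Tautulli', '0.0.0', '00000000-0000-0000-0000-000000000000'))
```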

View File

@@ -23,8 +23,8 @@ from future.builtins import object
import json
import os
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import database
import datatables
@@ -36,22 +36,22 @@ if plexpy.PYTHON2:
import users
from plex import Plex
else:
from plexpy import common
from plexpy import database
from plexpy import datatables
from plexpy import helpers
from plexpy import logger
from plexpy import plextv
from plexpy import pmsconnect
from plexpy import session
from plexpy import users
from plexpy.plex import Plex
from jellypy import common
from jellypy import database
from jellypy import datatables
from jellypy import helpers
from jellypy import logger
from jellypy import plextv
from jellypy import pmsconnect
from jellypy import session
from jellypy import users
from jellypy.plex import Plex
def refresh_libraries():
logger.info("Tautulli Libraries :: Requesting libraries list refresh...")
server_id = plexpy.CONFIG.PMS_IDENTIFIER
server_id = jellypy.CONFIG.PMS_IDENTIFIER
if not server_id:
logger.error("Tautulli Libraries :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
return
@@ -96,15 +96,15 @@ def refresh_libraries():
query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR ' \
'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids)))
monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids)
monitor_db.action(query=query, args=[jellypy.CONFIG.PMS_IDENTIFIER] + section_ids)
if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
plexpy.CONFIG.write()
if jellypy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
jellypy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
jellypy.CONFIG.write()
else:
new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys)
plexpy.CONFIG.write()
new_keys = jellypy.CONFIG.HOME_LIBRARY_CARDS + new_keys
jellypy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys)
jellypy.CONFIG.write()
logger.info("Tautulli Libraries :: Libraries list refreshed.")
return True
@@ -117,7 +117,7 @@ def add_live_tv_library(refresh=False):
monitor_db = database.MonitorDatabase()
result = monitor_db.select_single('SELECT * FROM library_sections '
'WHERE section_id = ? and server_id = ?',
[common.LIVE_TV_SECTION_ID, plexpy.CONFIG.PMS_IDENTIFIER])
[common.LIVE_TV_SECTION_ID, jellypy.CONFIG.PMS_IDENTIFIER])
if result and not refresh or not result and refresh:
return
@@ -125,9 +125,9 @@ def add_live_tv_library(refresh=False):
if not refresh:
logger.info("Tautulli Libraries :: Adding Live TV library to the database.")
section_keys = {'server_id': plexpy.CONFIG.PMS_IDENTIFIER,
section_keys = {'server_id': jellypy.CONFIG.PMS_IDENTIFIER,
'section_id': common.LIVE_TV_SECTION_ID}
section_values = {'server_id': plexpy.CONFIG.PMS_IDENTIFIER,
section_values = {'server_id': jellypy.CONFIG.PMS_IDENTIFIER,
'section_id': common.LIVE_TV_SECTION_ID,
'section_name': common.LIVE_TV_SECTION_NAME,
'section_type': 'live',
@@ -148,7 +148,7 @@ def has_library_type(section_type):
def get_collections(section_id=None):
plex = Plex(plexpy.CONFIG.PMS_URL, session.get_session_user_token())
plex = Plex(jellypy.CONFIG.PMS_URL, session.get_session_user_token())
library = plex.get_library(section_id)
if library.type not in ('movie', 'show', 'artist'):
@@ -246,7 +246,7 @@ def get_playlists(section_id=None, user_id=None):
if not plex_token:
return []
plex = Plex(plexpy.CONFIG.PMS_URL, plex_token)
plex = Plex(jellypy.CONFIG.PMS_URL, plex_token)
if user_id:
playlists = plex.plex.playlists()
@@ -321,7 +321,7 @@ class Libraries(object):
custom_where = [['library_sections.deleted_section', 0]]
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if session.get_session_shared_libraries():
custom_where.append(['library_sections.section_id', session.get_session_shared_libraries()])
@@ -484,7 +484,7 @@ class Libraries(object):
# Get play counts from the database
monitor_db = database.MonitorDatabase()
if plexpy.CONFIG.GROUP_HISTORY_TABLES:
if jellypy.CONFIG.GROUP_HISTORY_TABLES:
count_by = 'reference_id'
else:
count_by = 'id'
@@ -517,7 +517,7 @@ class Libraries(object):
# Import media info cache from json file
if rating_key:
try:
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
library_count = len(rows)
@@ -527,7 +527,7 @@ class Libraries(object):
pass
elif section_id:
try:
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
library_count = len(rows)
@@ -594,14 +594,14 @@ class Libraries(object):
# Cache the media info to a json file
if rating_key:
try:
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug("Tautulli Libraries :: Unable to create cache file for rating_key %s." % rating_key)
elif section_id:
try:
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
@@ -694,7 +694,7 @@ class Libraries(object):
if rating_key:
#logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
try:
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
except IOError as e:
@@ -704,7 +704,7 @@ class Libraries(object):
elif section_id:
logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." % section_id)
try:
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
except IOError as e:
@@ -738,14 +738,14 @@ class Libraries(object):
# Cache the media info to a json file
if rating_key:
try:
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
elif section_id:
try:
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
@@ -796,7 +796,7 @@ class Libraries(object):
return default_return
if server_id is None:
server_id = plexpy.CONFIG.PMS_IDENTIFIER
server_id = jellypy.CONFIG.PMS_IDENTIFIER
def get_library_details(section_id=section_id, server_id=server_id):
monitor_db = database.MonitorDatabase()
@@ -877,7 +877,7 @@ class Libraries(object):
return []
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if query_days and query_days is not None:
query_days = map(helpers.cast_to_int, query_days.split(','))
@@ -941,7 +941,7 @@ class Libraries(object):
return []
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
monitor_db = database.MonitorDatabase()
@@ -1088,13 +1088,13 @@ class Libraries(object):
return all(success)
elif str(section_id).isdigit():
server_id = server_id or plexpy.CONFIG.PMS_IDENTIFIER
if server_id == plexpy.CONFIG.PMS_IDENTIFIER:
server_id = server_id or jellypy.CONFIG.PMS_IDENTIFIER
if server_id == jellypy.CONFIG.PMS_IDENTIFIER:
delete_success = database.delete_library_history(section_id=section_id)
else:
logger.warn("Tautulli Libraries :: Library history not deleted for library section_id %s "
"because library server_id %s does not match Plex server identifier %s."
% (section_id, server_id, plexpy.CONFIG.PMS_IDENTIFIER))
% (section_id, server_id, jellypy.CONFIG.PMS_IDENTIFIER))
delete_success = True
if purge_only:
@@ -1151,7 +1151,7 @@ class Libraries(object):
try:
if section_id.isdigit():
[os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, f)) for f in os.listdir(plexpy.CONFIG.CACHE_DIR)
[os.remove(os.path.join(jellypy.CONFIG.CACHE_DIR, f)) for f in os.listdir(jellypy.CONFIG.CACHE_DIR)
if f.startswith('media_info_%s' % section_id) and f.endswith('.json')]
logger.debug("Tautulli Libraries :: Deleted media info table cache for section_id %s." % section_id)
@@ -1167,7 +1167,7 @@ class Libraries(object):
# Refresh the PMS_URL to make sure the server_id is updated
plextv.get_server_resources()
server_id = plexpy.CONFIG.PMS_IDENTIFIER
server_id = jellypy.CONFIG.PMS_IDENTIFIER
try:
logger.debug("Tautulli Libraries :: Deleting libraries where server_id does not match %s." % server_id)

View File

@@ -22,11 +22,11 @@ import future.moves.queue as queue
import time
import threading
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import logger
else:
from plexpy import logger
from jellypy import logger
class TimedLock(object):

View File

@@ -20,23 +20,23 @@ from io import open
import os
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from plexpy import helpers
from plexpy import logger
from jellypy import helpers
from jellypy import logger
def get_log_tail(window=20, parsed=True, log_type="server"):
if plexpy.CONFIG.PMS_LOGS_FOLDER:
if jellypy.CONFIG.PMS_LOGS_FOLDER:
log_file = ""
if log_type == "server":
log_file = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Server.log')
log_file = os.path.join(jellypy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Server.log')
elif log_type == "scanner":
log_file = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Scanner.log')
log_file = os.path.join(jellypy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Scanner.log')
else:
return []

View File

@@ -32,13 +32,13 @@ import sys
import threading
import traceback
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import helpers
from config import _BLACKLIST_KEYS, _WHITELIST_KEYS
else:
from plexpy import helpers
from plexpy.config import _BLACKLIST_KEYS, _WHITELIST_KEYS
from jellypy import helpers
from jellypy.config import _BLACKLIST_KEYS, _WHITELIST_KEYS
# These settings are for file logging only
@@ -96,7 +96,7 @@ class BlacklistFilter(logging.Filter):
super(BlacklistFilter, self).__init__()
def filter(self, record):
if not plexpy.CONFIG.LOG_BLACKLIST:
if not jellypy.CONFIG.LOG_BLACKLIST:
return True
for item in _BLACKLIST_WORDS:
@@ -131,7 +131,7 @@ class RegexFilter(logging.Filter):
self.regex = re.compile(r'')
def filter(self, record):
if not plexpy.CONFIG.LOG_BLACKLIST:
if not jellypy.CONFIG.LOG_BLACKLIST:
return True
try:
@@ -349,7 +349,7 @@ def initLogger(console=False, log_dir=False, verbose=False):
# Add filters to log handlers
# Only add filters after the config file has been initialized
# Nothing prior to initialization should contain sensitive information
if not plexpy.DEV and plexpy.CONFIG:
if not jellypy.DEV and jellypy.CONFIG:
log_handlers = logger.handlers + \
logger_api.handlers + \
logger_plex_websocket.handlers + \
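Both filters short-circuit when `LOG_BLACKLIST` is disabled and otherwise scrub sensitive values from log records. A minimal `logging.Filter` in the same spirit; the word list and redaction style below are assumptions, not Tautulli's actual rules:

```python
import logging

BLACKLIST_WORDS = ['hunter2']  # assumed; Tautulli builds its list from config

class DemoBlacklistFilter(logging.Filter):
    def __init__(self, enabled=True):
        super(DemoBlacklistFilter, self).__init__()
        self.enabled = enabled

    def filter(self, record):
        if not self.enabled:  # mirrors the LOG_BLACKLIST early return
            return True
        for word in BLACKLIST_WORDS:
            if word in record.msg:
                record.msg = record.msg.replace(word, '*' * 8)
        return True  # never drop the record, only redact it

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('demo')
log.addFilter(DemoBlacklistFilter())
log.info('password is hunter2')  # logs: password is ********
```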

View File

@@ -30,23 +30,23 @@ except ImportError:
if HAS_PYOBJC:
import rumps
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import logger
import versioncheck
else:
from plexpy import common
from plexpy import logger
from plexpy import versioncheck
from jellypy import common
from jellypy import logger
from jellypy import versioncheck
class MacOSSystemTray(object):
def __init__(self):
self.image_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/', plexpy.CONFIG.INTERFACE, 'images')
self.image_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/', jellypy.CONFIG.INTERFACE, 'images')
self.icon = os.path.join(self.image_dir, 'logo-flat-white.ico')
if plexpy.UPDATE_AVAILABLE:
if jellypy.UPDATE_AVAILABLE:
self.update_title = 'Check for Updates - Update Available!'
else:
self.update_title = 'Check for Updates'
@@ -61,10 +61,10 @@ class MacOSSystemTray(object):
rumps.MenuItem('Restart', callback=self.tray_restart),
rumps.MenuItem('Quit', callback=self.tray_quit)
]
if not plexpy.FROZEN:
if not jellypy.FROZEN:
self.menu.insert(6, rumps.MenuItem('Update', callback=self.tray_update))
self.menu[2].state = plexpy.CONFIG.LAUNCH_STARTUP
self.menu[3].state = plexpy.CONFIG.LAUNCH_BROWSER
self.menu[2].state = jellypy.CONFIG.LAUNCH_STARTUP
self.menu[3].state = jellypy.CONFIG.LAUNCH_BROWSER
self.tray_icon = rumps.App(common.PRODUCT, icon=self.icon, template=True,
menu=self.menu, quit_button=None)
@@ -84,50 +84,50 @@ class MacOSSystemTray(object):
self.tray_icon.icon = kwargs['icon']
def tray_open(self, tray_icon):
plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT, plexpy.HTTP_ROOT)
jellypy.launch_browser(jellypy.CONFIG.HTTP_HOST, jellypy.HTTP_PORT, jellypy.HTTP_ROOT)
def tray_startup(self, tray_icon):
plexpy.CONFIG.LAUNCH_STARTUP = not plexpy.CONFIG.LAUNCH_STARTUP
jellypy.CONFIG.LAUNCH_STARTUP = not jellypy.CONFIG.LAUNCH_STARTUP
set_startup()
def tray_browser(self, tray_icon):
plexpy.CONFIG.LAUNCH_BROWSER = not plexpy.CONFIG.LAUNCH_BROWSER
jellypy.CONFIG.LAUNCH_BROWSER = not jellypy.CONFIG.LAUNCH_BROWSER
set_startup()
def tray_check_update(self, tray_icon):
versioncheck.check_update()
def tray_update(self, tray_icon):
if plexpy.UPDATE_AVAILABLE:
plexpy.SIGNAL = 'update'
if jellypy.UPDATE_AVAILABLE:
jellypy.SIGNAL = 'update'
else:
self.update_title = 'Check for Updates - No Update Available'
self.menu[5].title = self.update_title
def tray_restart(self, tray_icon):
plexpy.SIGNAL = 'restart'
jellypy.SIGNAL = 'restart'
def tray_quit(self, tray_icon):
plexpy.SIGNAL = 'shutdown'
jellypy.SIGNAL = 'shutdown'
def change_tray_update_icon(self):
if plexpy.UPDATE_AVAILABLE:
if jellypy.UPDATE_AVAILABLE:
self.update_title = 'Check for Updates - Update Available!'
else:
self.update_title = 'Check for Updates'
self.menu[5].title = self.update_title
def change_tray_icons(self):
self.tray_icon.menu['Start Tautulli at Login'].state = plexpy.CONFIG.LAUNCH_STARTUP
self.tray_icon.menu['Open Browser when Tautulli Starts'].state = plexpy.CONFIG.LAUNCH_BROWSER
self.tray_icon.menu['Start Tautulli at Login'].state = jellypy.CONFIG.LAUNCH_STARTUP
self.tray_icon.menu['Open Browser when Tautulli Starts'].state = jellypy.CONFIG.LAUNCH_BROWSER
def set_startup():
if plexpy.MAC_SYS_TRAY_ICON:
plexpy.MAC_SYS_TRAY_ICON.change_tray_icons()
if jellypy.MAC_SYS_TRAY_ICON:
jellypy.MAC_SYS_TRAY_ICON.change_tray_icons()
if plexpy.INSTALL_TYPE == 'macos':
if plexpy.CONFIG.LAUNCH_STARTUP:
if jellypy.INSTALL_TYPE == 'macos':
if jellypy.CONFIG.LAUNCH_STARTUP:
try:
subprocess.Popen(['osascript',
'-e', 'tell application "System Events"',
@@ -162,11 +162,11 @@ def set_startup():
plist_file_path = os.path.join(launch_agents, plist_file)
exe = sys.executable
run_args = [arg for arg in plexpy.ARGS if arg != '--nolaunch']
if plexpy.FROZEN:
run_args = [arg for arg in jellypy.ARGS if arg != '--nolaunch']
if jellypy.FROZEN:
args = [exe] + run_args
else:
args = [exe, plexpy.FULL_PATH] + run_args
args = [exe, jellypy.FULL_PATH] + run_args
plist_dict = {
'Label': common.PRODUCT,
@@ -174,7 +174,7 @@ def set_startup():
'RunAtLoad': True
}
if plexpy.CONFIG.LAUNCH_STARTUP:
if jellypy.CONFIG.LAUNCH_STARTUP:
if not os.path.exists(launch_agents):
try:
os.makedirs(launch_agents)

View File

@@ -21,15 +21,15 @@ from future.builtins import str
import requests
import threading
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import logger
else:
from plexpy import database
from plexpy import helpers
from plexpy import logger
from jellypy import database
from jellypy import helpers
from jellypy import logger
TEMP_DEVICE_TOKEN = None

View File

@@ -23,17 +23,17 @@ import os
from apscheduler.triggers.cron import CronTrigger
import email.utils
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import logger
import newsletters
else:
from plexpy import database
from plexpy import helpers
from plexpy import logger
from plexpy import newsletters
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import newsletters
NEWSLETTER_SCHED = None
@@ -48,7 +48,7 @@ def add_newsletter_each(newsletter_id=None, notify_action=None, **kwargs):
'newsletter_id': newsletter_id,
'notify_action': notify_action}
data.update(kwargs)
plexpy.NOTIFY_QUEUE.put(data)
jellypy.NOTIFY_QUEUE.put(data)
def schedule_newsletters(newsletter_id=None):
@@ -211,7 +211,7 @@ def get_newsletter(newsletter_uuid=None, newsletter_id_name=None):
end_date.replace('-', ''),
newsletter_uuid)
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR or os.path.join(plexpy.DATA_DIR, 'newsletters')
newsletter_folder = jellypy.CONFIG.NEWSLETTER_DIR or os.path.join(jellypy.DATA_DIR, 'newsletters')
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
if newsletter_file in os.listdir(newsletter_folder):

View File

@@ -29,8 +29,8 @@ from mako import exceptions
import os
import re
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import database
import helpers
@@ -40,14 +40,14 @@ if plexpy.PYTHON2:
import pmsconnect
from notifiers import send_notification, EMAIL
else:
from plexpy import common
from plexpy import database
from plexpy import helpers
from plexpy import libraries
from plexpy import logger
from plexpy import newsletter_handler
from plexpy import pmsconnect
from plexpy.notifiers import send_notification, EMAIL
from jellypy import common
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import newsletter_handler
from jellypy import pmsconnect
from jellypy.notifiers import send_notification, EMAIL
AGENT_IDS = {
@@ -319,14 +319,14 @@ def blacklist_logger():
def serve_template(templatename, **kwargs):
if plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR:
if jellypy.CONFIG.NEWSLETTER_CUSTOM_DIR:
logger.info("Tautulli Newsletters :: Using custom newsletter template directory.")
template_dir = plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR
template_dir = jellypy.CONFIG.NEWSLETTER_CUSTOM_DIR
else:
interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.NEWSLETTER_TEMPLATES)
interface_dir = os.path.join(str(jellypy.PROG_DIR), 'data/interfaces/')
template_dir = os.path.join(str(interface_dir), jellypy.CONFIG.NEWSLETTER_TEMPLATES)
if not plexpy.CONFIG.NEWSLETTER_INLINE_STYLES:
if not jellypy.CONFIG.NEWSLETTER_INLINE_STYLES:
templatename = templatename.replace('.html', '.internal.html')
_hplookup = TemplateLookup(directories=[template_dir], default_filters=['unicode', 'h'])
@@ -344,7 +344,7 @@ def generate_newsletter_uuid():
db = database.MonitorDatabase()
while not uuid or uuid_exists:
uuid = plexpy.generate_uuid()[:8]
uuid = jellypy.generate_uuid()[:8]
result = db.select_single(
'SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?) as uuid_exists', [uuid])
uuid_exists = result['uuid_exists']
@@ -528,7 +528,7 @@ class Newsletter(object):
def _save(self):
newsletter_file = self.filename_formatted
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR or os.path.join(plexpy.DATA_DIR, 'newsletters')
newsletter_folder = jellypy.CONFIG.NEWSLETTER_DIR or os.path.join(jellypy.DATA_DIR, 'newsletters')
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
# In case the user has deleted it manually
@@ -552,7 +552,7 @@ class Newsletter(object):
newsletter_stripped = ''.join(l.strip() for l in self.newsletter.splitlines())
plaintext = 'HTML email support is required to view the newsletter.\n'
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and jellypy.CONFIG.HTTP_BASE_URL:
plaintext += self._DEFAULT_BODY.format(**self.parameters)
email_reply_msg_id = self.email_reply_msg_id if self.config['threaded'] else None
@@ -589,15 +589,15 @@ class Newsletter(object):
return parameters
def _build_params(self):
date_format = helpers.momentjs_to_arrow(plexpy.CONFIG.DATE_FORMAT)
date_format = helpers.momentjs_to_arrow(jellypy.CONFIG.DATE_FORMAT)
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and jellypy.CONFIG.HTTP_BASE_URL:
base_url = jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'newsletter/'
else:
base_url = helpers.get_plexpy_url() + '/newsletter/'
parameters = {
'server_name': plexpy.CONFIG.PMS_NAME,
'server_name': jellypy.CONFIG.PMS_NAME,
'start_date': self.start_date.format(date_format),
'end_date': self.end_date.format(date_format),
'current_year': self.start_date.year,
@@ -616,13 +616,13 @@ class Newsletter(object):
'newsletter_uuid': self.uuid,
'newsletter_id': self.newsletter_id,
'newsletter_id_name': self.newsletter_id_name,
'newsletter_password': plexpy.CONFIG.NEWSLETTER_PASSWORD
'newsletter_password': jellypy.CONFIG.NEWSLETTER_PASSWORD
}
return parameters
def build_text(self):
from plexpy.notification_handler import CustomFormatter
from jellypy.notification_handler import CustomFormatter
custom_formatter = CustomFormatter()
try:
@@ -655,7 +655,7 @@ class Newsletter(object):
return subject, body, message
def build_filename(self):
from plexpy.notification_handler import CustomFormatter
from jellypy.notification_handler import CustomFormatter
custom_formatter = CustomFormatter()
try:
@@ -702,7 +702,7 @@ class RecentlyAdded(Newsletter):
_TEMPLATE = 'recently_added.html'
def _get_recently_added(self, media_type=None):
from plexpy.notification_handler import format_group_index
from jellypy.notification_handler import format_group_index
pms_connect = pmsconnect.PmsConnect()
@@ -818,7 +818,7 @@ class RecentlyAdded(Newsletter):
return recently_added
def retrieve_data(self):
from plexpy.notification_handler import get_img_info, set_hash_image_info
from jellypy.notification_handler import get_img_info, set_hash_image_info
if not self.config['incl_libraries']:
logger.warn("Tautulli Newsletters :: Failed to retrieve %s newsletter data: no libraries selected." % self.NAME)
@@ -948,8 +948,8 @@ class RecentlyAdded(Newsletter):
newsletter_libraries.append(s['section_name'])
parameters['newsletter_libraries'] = ', '.join(sorted(newsletter_libraries))
parameters['pms_identifier'] = plexpy.CONFIG.PMS_IDENTIFIER
parameters['pms_web_url'] = plexpy.CONFIG.PMS_WEB_URL
parameters['pms_identifier'] = jellypy.CONFIG.PMS_IDENTIFIER
parameters['pms_web_url'] = jellypy.CONFIG.PMS_WEB_URL
return parameters
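
A note on the generate_newsletter_uuid hunk above: it keeps drawing 8-character ids until the newsletter_log table reports no collision. A self-contained sketch of that pattern, substituting an in-memory sqlite3 database and uuid4 for Tautulli's own helpers (both substitutions are assumptions for the demo):

import sqlite3
import uuid

def generate_short_uuid(db):
    # Keep drawing 8-character ids until the table reports no collision.
    short_id, exists = None, True
    while not short_id or exists:
        short_id = uuid.uuid4().hex[:8]
        row = db.execute(
            'SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?)',
            [short_id]).fetchone()
        exists = row[0]
    return short_id

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE newsletter_log (uuid TEXT)')
print(generate_short_uuid(db))  # e.g. '1f3a9c2b'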

View File

@@ -39,8 +39,8 @@ import time
import musicbrainzngs
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import activity_processor
import common
import database
@@ -52,20 +52,20 @@ if plexpy.PYTHON2:
import request
from newsletter_handler import notify as notify_newsletter
else:
from plexpy import activity_processor
from plexpy import common
from plexpy import database
from plexpy import datafactory
from plexpy import logger
from plexpy import helpers
from plexpy import notifiers
from plexpy import pmsconnect
from plexpy import request
from plexpy.newsletter_handler import notify as notify_newsletter
from jellypy import activity_processor
from jellypy import common
from jellypy import database
from jellypy import datafactory
from jellypy import logger
from jellypy import helpers
from jellypy import notifiers
from jellypy import pmsconnect
from jellypy import request
from jellypy.newsletter_handler import notify as notify_newsletter
def process_queue():
queue = plexpy.NOTIFY_QUEUE
queue = jellypy.NOTIFY_QUEUE
while True:
params = queue.get()
@@ -148,14 +148,14 @@ def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, ti
'timeline_data': timeline_data,
'parameters': parameters}
data.update(kwargs)
plexpy.NOTIFY_QUEUE.put(data)
jellypy.NOTIFY_QUEUE.put(data)
else:
logger.debug("Tautulli NotificationHandler :: Custom notification conditions not satisfied, skipping notifier_id %s." % notifier['id'])
# Add on_concurrent and on_newdevice to queue if action is on_play
if notify_action == 'on_play':
plexpy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_concurrent'})
plexpy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_newdevice'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_concurrent'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream_data.copy(), 'notify_action': 'on_newdevice'})
def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):
@@ -186,27 +186,27 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):
if result:
user_sessions = [s for s in result['sessions'] if s['user_id'] == stream_data['user_id']]
if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP:
evaluated = len(Counter(s['ip_address'] for s in user_sessions)) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
if jellypy.CONFIG.NOTIFY_CONCURRENT_BY_IP:
evaluated = len(Counter(s['ip_address'] for s in user_sessions)) >= jellypy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
else:
evaluated = len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
evaluated = len(user_sessions) >= jellypy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
elif notify_action == 'on_newdevice':
data_factory = datafactory.DataFactory()
user_devices = data_factory.get_user_devices(user_id=stream_data['user_id'],
history_only=not plexpy.CONFIG.NOTIFY_NEW_DEVICE_INITIAL_ONLY)
history_only=not jellypy.CONFIG.NOTIFY_NEW_DEVICE_INITIAL_ONLY)
evaluated = stream_data['machine_id'] not in user_devices
elif stream_data['media_type'] in ('movie', 'episode', 'clip'):
progress_percent = helpers.get_percent(stream_data['view_offset'], stream_data['duration'])
if notify_action == 'on_stop':
evaluated = (plexpy.CONFIG.NOTIFY_CONSECUTIVE or
(stream_data['media_type'] == 'movie' and progress_percent < plexpy.CONFIG.MOVIE_WATCHED_PERCENT) or
(stream_data['media_type'] == 'episode' and progress_percent < plexpy.CONFIG.TV_WATCHED_PERCENT))
evaluated = (jellypy.CONFIG.NOTIFY_CONSECUTIVE or
(stream_data['media_type'] == 'movie' and progress_percent < jellypy.CONFIG.MOVIE_WATCHED_PERCENT) or
(stream_data['media_type'] == 'episode' and progress_percent < jellypy.CONFIG.TV_WATCHED_PERCENT))
elif notify_action == 'on_resume':
evaluated = plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99
evaluated = jellypy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99
# All other activity notify actions
else:
@@ -497,9 +497,9 @@ def set_notify_success(notification_id):
def build_media_notify_params(notify_action=None, session=None, timeline=None, manual_trigger=False, **kwargs):
# Get time formats
date_format = helpers.momentjs_to_arrow(plexpy.CONFIG.DATE_FORMAT)
time_format = helpers.momentjs_to_arrow(plexpy.CONFIG.TIME_FORMAT)
duration_format = helpers.momentjs_to_arrow(plexpy.CONFIG.TIME_FORMAT, duration=True)
date_format = helpers.momentjs_to_arrow(jellypy.CONFIG.DATE_FORMAT)
time_format = helpers.momentjs_to_arrow(jellypy.CONFIG.TIME_FORMAT)
duration_format = helpers.momentjs_to_arrow(jellypy.CONFIG.TIME_FORMAT, duration=True)
# Get metadata for the item
if session:
@@ -598,8 +598,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
plex_web_rating_key = notify_params['rating_key']
notify_params['plex_url'] = '{web_url}#!/server/{pms_identifier}/details?key=%2Flibrary%2Fmetadata%2F{rating_key}'.format(
web_url=plexpy.CONFIG.PMS_WEB_URL,
pms_identifier=plexpy.CONFIG.PMS_IDENTIFIER,
web_url=jellypy.CONFIG.PMS_WEB_URL,
pms_identifier=jellypy.CONFIG.PMS_IDENTIFIER,
rating_key=plex_web_rating_key)
# Check external guids
@@ -646,7 +646,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
notify_params['lastfm_url'] = 'https://www.last.fm/music/' + notify_params['lastfm_id']
# Get TheMovieDB info (for movies and tv only)
if plexpy.CONFIG.THEMOVIEDB_LOOKUP and notify_params['media_type'] in ('movie', 'show', 'season', 'episode'):
if jellypy.CONFIG.THEMOVIEDB_LOOKUP and notify_params['media_type'] in ('movie', 'show', 'season', 'episode'):
if notify_params.get('themoviedb_id'):
themoveidb_json = get_themoviedb_info(rating_key=rating_key,
media_type=notify_params['media_type'],
@@ -689,7 +689,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
notify_params['themoviedb_id'], 'show' if lookup_media_type == 'tv' else 'movie')
# Get TVmaze info (for tv shows only)
if plexpy.CONFIG.TVMAZE_LOOKUP and notify_params['media_type'] in ('show', 'season', 'episode'):
if jellypy.CONFIG.TVMAZE_LOOKUP and notify_params['media_type'] in ('show', 'season', 'episode'):
if notify_params.get('thetvdb_id') or notify_params.get('imdb_id') or notify_params.get('plex_id'):
if notify_params['media_type'] == 'episode':
lookup_key = notify_params['grandparent_rating_key']
@@ -716,7 +716,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
notify_params['trakt_url'] = 'https://trakt.tv/search/imdb/' + notify_params['imdb_id']
# Get MusicBrainz info (for music only)
if plexpy.CONFIG.MUSICBRAINZ_LOOKUP and notify_params['media_type'] in ('artist', 'album', 'track'):
if jellypy.CONFIG.MUSICBRAINZ_LOOKUP and notify_params['media_type'] in ('artist', 'album', 'track'):
artist = release = recording = tracks = tnum = None
if notify_params['media_type'] == 'artist':
musicbrainz_type = 'artist'
@@ -772,13 +772,13 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
img_info = get_img_info(img=poster_thumb, rating_key=poster_key, title=poster_title, fallback=fallback)
poster_info = {'poster_title': img_info['img_title'], 'poster_url': img_info['img_url']}
notify_params.update(poster_info)
elif img_service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
elif img_service == 'self-hosted' and jellypy.CONFIG.HTTP_BASE_URL:
img_hash = set_hash_image_info(img=poster_thumb, fallback=fallback)
poster_info = {'poster_title': poster_title,
'poster_url': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'image/' + img_hash}
'poster_url': jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'image/' + img_hash}
notify_params.update(poster_info)
if ((manual_trigger or plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT)
if ((manual_trigger or jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT)
and notify_params['media_type'] in ('show', 'artist')):
show_name = notify_params['title']
episode_name = ''
@@ -797,7 +797,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
child_count = len(child_num)
grandchild_count = ''
elif ((manual_trigger or plexpy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT)
elif ((manual_trigger or jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT)
and notify_params['media_type'] in ('season', 'album')):
show_name = notify_params['parent_title']
episode_name = ''
@@ -850,16 +850,16 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
available_params = {
# Global parameters
'tautulli_version': common.RELEASE,
'tautulli_remote': plexpy.CONFIG.GIT_REMOTE,
'tautulli_branch': plexpy.CONFIG.GIT_BRANCH,
'tautulli_commit': plexpy.CURRENT_VERSION,
'server_name': plexpy.CONFIG.PMS_NAME,
'server_ip': plexpy.CONFIG.PMS_IP,
'server_port': plexpy.CONFIG.PMS_PORT,
'server_url': plexpy.CONFIG.PMS_URL,
'server_machine_id': plexpy.CONFIG.PMS_IDENTIFIER,
'server_platform': plexpy.CONFIG.PMS_PLATFORM,
'server_version': plexpy.CONFIG.PMS_VERSION,
'tautulli_remote': jellypy.CONFIG.GIT_REMOTE,
'tautulli_branch': jellypy.CONFIG.GIT_BRANCH,
'tautulli_commit': jellypy.CURRENT_VERSION,
'server_name': jellypy.CONFIG.PMS_NAME,
'server_ip': jellypy.CONFIG.PMS_IP,
'server_port': jellypy.CONFIG.PMS_PORT,
'server_url': jellypy.CONFIG.PMS_URL,
'server_machine_id': jellypy.CONFIG.PMS_IDENTIFIER,
'server_platform': jellypy.CONFIG.PMS_PLATFORM,
'server_version': jellypy.CONFIG.PMS_VERSION,
'action': notify_action.split('on_')[-1],
'current_year': now.year,
'current_month': now.month,
@@ -1096,8 +1096,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
def build_server_notify_params(notify_action=None, **kwargs):
# Get time formats
date_format = plexpy.CONFIG.DATE_FORMAT.replace('Do','')
time_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','')
date_format = jellypy.CONFIG.DATE_FORMAT.replace('Do', '')
time_format = jellypy.CONFIG.TIME_FORMAT.replace('Do', '')
update_channel = pmsconnect.PmsConnect().get_server_update_channel()
@@ -1111,16 +1111,16 @@ def build_server_notify_params(notify_action=None, **kwargs):
available_params = {
# Global parameters
'tautulli_version': common.RELEASE,
'tautulli_remote': plexpy.CONFIG.GIT_REMOTE,
'tautulli_branch': plexpy.CONFIG.GIT_BRANCH,
'tautulli_commit': plexpy.CURRENT_VERSION,
'server_name': plexpy.CONFIG.PMS_NAME,
'server_ip': plexpy.CONFIG.PMS_IP,
'server_port': plexpy.CONFIG.PMS_PORT,
'server_url': plexpy.CONFIG.PMS_URL,
'server_platform': plexpy.CONFIG.PMS_PLATFORM,
'server_version': plexpy.CONFIG.PMS_VERSION,
'server_machine_id': plexpy.CONFIG.PMS_IDENTIFIER,
'tautulli_remote': jellypy.CONFIG.GIT_REMOTE,
'tautulli_branch': jellypy.CONFIG.GIT_BRANCH,
'tautulli_commit': jellypy.CURRENT_VERSION,
'server_name': jellypy.CONFIG.PMS_NAME,
'server_ip': jellypy.CONFIG.PMS_IP,
'server_port': jellypy.CONFIG.PMS_PORT,
'server_url': jellypy.CONFIG.PMS_URL,
'server_platform': jellypy.CONFIG.PMS_PLATFORM,
'server_version': jellypy.CONFIG.PMS_VERSION,
'server_machine_id': jellypy.CONFIG.PMS_IDENTIFIER,
'action': notify_action.split('on_')[-1],
'current_year': now.year,
'current_month': now.month,
@@ -1464,7 +1464,7 @@ def set_hash_image_info(img=None, rating_key=None, width=750, height=1000,
rating_key = img_rating_key
img_string = '{}.{}.{}.{}.{}.{}.{}.{}'.format(
plexpy.CONFIG.PMS_UUID, img, rating_key, width, height, opacity, background, blur, fallback)
jellypy.CONFIG.PMS_UUID, img, rating_key, width, height, opacity, background, blur, fallback)
img_hash = hashlib.sha256(img_string.encode('utf-8')).hexdigest()
if add_to_db:
@@ -1572,7 +1572,7 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, titl
else:
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for '{} ({})'.".format(title, year))
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY}
params = {'api_key': jellypy.CONFIG.THEMOVIEDB_APIKEY}
if thetvdb_id or imdb_id:
params['external_source'] = 'tvdb_id' if thetvdb_id else 'imdb_id'
@@ -1650,7 +1650,7 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None):
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for themoviedb_id '{}'.".format(themoviedb_id))
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY}
params = {'api_key': jellypy.CONFIG.THEMOVIEDB_APIKEY}
response, err_msg, req_msg = request.request_response2('https://api.themoviedb.org/3/{}/{}'.format(media_type, themoviedb_id), params=params)
if response and not err_msg:
@@ -1870,7 +1870,7 @@ class CustomFormatter(Formatter):
obj = self.convert_field(obj, conversion)
# expand the format spec, if needed
if plexpy.PYTHON2:
if jellypy.PYTHON2:
format_spec = self._vformat(format_spec, args, kwargs,
used_args, recursion_depth - 1)
else:
@@ -1889,7 +1889,7 @@ class CustomFormatter(Formatter):
result.append(suffix)
# result.append(self.format_field(obj, format_spec))
if plexpy.PYTHON2:
if jellypy.PYTHON2:
return ''.join(result)
else:
return ''.join(result), auto_arg_index
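
On set_hash_image_info above: it derives a stable key for self-hosted images by joining the request parameters into one string and hashing it with SHA-256, so identical requests resolve to the same image/<hash> URL. A minimal sketch, with parameter names mirroring the hunk and a placeholder server UUID:

import hashlib

def hash_image_info(server_uuid, img, rating_key, width=750, height=1000,
                    opacity=100, background='000000', blur=0, fallback=None):
    # Same inputs always produce the same digest, so the URL is cacheable.
    img_string = '{}.{}.{}.{}.{}.{}.{}.{}.{}'.format(
        server_uuid, img, rating_key, width, height,
        opacity, background, blur, fallback)
    return hashlib.sha256(img_string.encode('utf-8')).hexdigest()

print(hash_image_info('pms-uuid', '/library/metadata/1/thumb', 1)[:16])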

View File

@@ -57,8 +57,8 @@ import gntp.notifier
import facebook
import twitter
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import database
import helpers
@@ -68,14 +68,14 @@ if plexpy.PYTHON2:
import request
import users
else:
from plexpy import common
from plexpy import database
from plexpy import helpers
from plexpy import logger
from plexpy import mobile_app
from plexpy import pmsconnect
from plexpy import request
from plexpy import users
from jellypy import common
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import mobile_app
from jellypy import pmsconnect
from jellypy import request
from jellypy import users
BROWSER_NOTIFIERS = {}
@@ -943,7 +943,7 @@ class ANDROIDAPP(Notifier):
'cipher_text': base64.b64encode(encrypted_data),
'nonce': base64.b64encode(nonce),
'salt': base64.b64encode(salt),
'server_id': plexpy.CONFIG.PMS_UUID}
'server_id': jellypy.CONFIG.PMS_UUID}
}
else:
logger.warn("Tautulli Notifiers :: PyCryptodome library is missing. "
@@ -955,7 +955,7 @@ class ANDROIDAPP(Notifier):
'contents': {'en': 'Tautulli Notification'},
'data': {'encrypted': False,
'plain_text': plaintext_data,
'server_id': plexpy.CONFIG.PMS_UUID}
'server_id': jellypy.CONFIG.PMS_UUID}
}
#logger.debug("OneSignal payload: {}".format(payload))
@@ -996,7 +996,7 @@ class ANDROIDAPP(Notifier):
'Instructions can be found in the '
'<a href="' + helpers.anon_url(
'https://github.com/%s/%s-Wiki/wiki/Frequently-Asked-Questions#notifications-pycryptodome'
% (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO)) + '" target="_blank">FAQ</a>.' ,
% (jellypy.CONFIG.GIT_USER, jellypy.CONFIG.GIT_REPO)) + '" target="_blank">FAQ</a>.' ,
'input_type': 'help'
})
else:
@@ -1532,10 +1532,10 @@ class FACEBOOK(Notifier):
def _get_authorization(self, app_id='', app_secret='', redirect_uri=''):
# Temporarily store settings in the config so we can retrieve them in Facebook step 2.
# Assume the user won't be requesting authorization for multiple Facebook notifiers at the same time.
plexpy.CONFIG.FACEBOOK_APP_ID = app_id
plexpy.CONFIG.FACEBOOK_APP_SECRET = app_secret
plexpy.CONFIG.FACEBOOK_REDIRECT_URI = redirect_uri
plexpy.CONFIG.FACEBOOK_TOKEN = 'temp'
jellypy.CONFIG.FACEBOOK_APP_ID = app_id
jellypy.CONFIG.FACEBOOK_APP_SECRET = app_secret
jellypy.CONFIG.FACEBOOK_REDIRECT_URI = redirect_uri
jellypy.CONFIG.FACEBOOK_TOKEN = 'temp'
return facebook.auth_url(app_id=app_id,
canvas_url=redirect_uri,
@@ -1544,9 +1544,9 @@ class FACEBOOK(Notifier):
def _get_credentials(self, code=''):
logger.info("Tautulli Notifiers :: Requesting access token from {name}.".format(name=self.NAME))
app_id = plexpy.CONFIG.FACEBOOK_APP_ID
app_secret = plexpy.CONFIG.FACEBOOK_APP_SECRET
redirect_uri = plexpy.CONFIG.FACEBOOK_REDIRECT_URI
app_id = jellypy.CONFIG.FACEBOOK_APP_ID
app_secret = jellypy.CONFIG.FACEBOOK_APP_SECRET
redirect_uri = jellypy.CONFIG.FACEBOOK_REDIRECT_URI
try:
# Request user access token
@@ -1562,17 +1562,17 @@ class FACEBOOK(Notifier):
response = api.extend_access_token(app_id=app_id,
app_secret=app_secret)
plexpy.CONFIG.FACEBOOK_TOKEN = response['access_token']
jellypy.CONFIG.FACEBOOK_TOKEN = response['access_token']
except Exception as e:
logger.error("Tautulli Notifiers :: Error requesting {name} access token: {e}".format(name=self.NAME, e=e))
plexpy.CONFIG.FACEBOOK_TOKEN = ''
jellypy.CONFIG.FACEBOOK_TOKEN = ''
# Clear out temporary config values
plexpy.CONFIG.FACEBOOK_APP_ID = ''
plexpy.CONFIG.FACEBOOK_APP_SECRET = ''
plexpy.CONFIG.FACEBOOK_REDIRECT_URI = ''
jellypy.CONFIG.FACEBOOK_APP_ID = ''
jellypy.CONFIG.FACEBOOK_APP_SECRET = ''
jellypy.CONFIG.FACEBOOK_REDIRECT_URI = ''
return plexpy.CONFIG.FACEBOOK_TOKEN
return jellypy.CONFIG.FACEBOOK_TOKEN
def _post_facebook(self, **data):
if self.config['group_id']:
@@ -1820,7 +1820,7 @@ class GROWL(Notifier):
return False
# Send it, including an image
image_file = os.path.join(str(plexpy.PROG_DIR),
image_file = os.path.join(str(jellypy.PROG_DIR),
"data/interfaces/default/images/logo-circle.png")
with open(image_file, 'rb') as f:
@@ -2325,7 +2325,7 @@ class PLEX(Notifier):
if self.config['image']:
image = self.config['image']
else:
image = os.path.join(plexpy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
image = os.path.join(jellypy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
for host in hosts:
logger.info("Tautulli Notifiers :: Sending notification command to {name} @ {host}".format(name=self.NAME, host=host))
@@ -2430,8 +2430,8 @@ class PLEXMOBILEAPP(Notifier):
'to': self.config['user_ids'],
'data': {
'provider': {
'identifier': plexpy.CONFIG.PMS_IDENTIFIER,
'title': plexpy.CONFIG.PMS_NAME
'identifier': jellypy.CONFIG.PMS_IDENTIFIER,
'title': jellypy.CONFIG.PMS_NAME
}
}
}
@@ -2536,11 +2536,11 @@ class PLEXMOBILEAPP(Notifier):
data['metadata'] = metadata
data['uri'] = 'server://{}/com.plexapp.plugins.library/library/metadata/{}'.format(
plexpy.CONFIG.PMS_IDENTIFIER, uri_rating_key or pretty_metadata.parameters['rating_key']
jellypy.CONFIG.PMS_IDENTIFIER, uri_rating_key or pretty_metadata.parameters['rating_key']
)
data['play'] = self.config['tap_action'] == 'play'
headers = {'X-Plex-Token': plexpy.CONFIG.PMS_TOKEN}
headers = {'X-Plex-Token': jellypy.CONFIG.PMS_TOKEN}
return self.make_request(self.NOTIFICATION_URL, headers=headers, json=data)
@@ -2977,7 +2977,7 @@ class SCRIPTS(Notifier):
'.php': 'php',
'.pl': 'perl',
'.ps1': 'powershell -executionPolicy bypass -file',
'.py': 'python' if plexpy.FROZEN else sys.executable,
'.py': 'python' if jellypy.FROZEN else sys.executable,
'.pyw': 'pythonw',
'.rb': 'ruby',
'.sh': ''
@@ -3013,13 +3013,13 @@ class SCRIPTS(Notifier):
def run_script(self, script, user_id):
# Common environment variables
custom_env = {
'PLEX_URL': plexpy.CONFIG.PMS_URL,
'PLEX_TOKEN': plexpy.CONFIG.PMS_TOKEN,
'PLEX_URL': jellypy.CONFIG.PMS_URL,
'PLEX_TOKEN': jellypy.CONFIG.PMS_TOKEN,
'PLEX_USER_TOKEN': '',
'TAUTULLI_URL': helpers.get_plexpy_url(hostname='localhost'),
'TAUTULLI_PUBLIC_URL': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT,
'TAUTULLI_APIKEY': plexpy.CONFIG.API_KEY,
'TAUTULLI_ENCODING': plexpy.SYS_ENCODING,
'TAUTULLI_PUBLIC_URL': jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT,
'TAUTULLI_APIKEY': jellypy.CONFIG.API_KEY,
'TAUTULLI_ENCODING': jellypy.SYS_ENCODING,
'TAUTULLI_PYTHON_VERSION': common.PYTHON_VERSION
}
@@ -3027,10 +3027,10 @@ class SCRIPTS(Notifier):
user_tokens = users.Users().get_tokens(user_id=user_id)
custom_env['PLEX_USER_TOKEN'] = str(user_tokens['server_token'])
if self.pythonpath and plexpy.INSTALL_TYPE not in ('windows', 'macos'):
if self.pythonpath and jellypy.INSTALL_TYPE not in ('windows', 'macos'):
custom_env['PYTHONPATH'] = os.pathsep.join([p for p in sys.path if p])
if plexpy.PYTHON2:
if jellypy.PYTHON2:
custom_env = {k.encode('utf-8'): v.encode('utf-8') for k, v in custom_env.items()}
env = os.environ.copy()
@@ -3137,8 +3137,8 @@ class SCRIPTS(Notifier):
script.extend(script_args)
if plexpy.PYTHON2:
script = [s.encode(plexpy.SYS_ENCODING, 'ignore') for s in script]
if jellypy.PYTHON2:
script = [s.encode(jellypy.SYS_ENCODING, 'ignore') for s in script]
logger.debug("Tautulli Notifiers :: Full script is: %s" % script)
logger.debug("Tautulli Notifiers :: Executing script in a new thread.")
@@ -3688,7 +3688,7 @@ class XBMC(Notifier):
if self.config['image']:
image = self.config['image']
else:
image = os.path.join(plexpy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
image = os.path.join(jellypy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
for host in hosts:
logger.info("Tautulli Notifiers :: Sending notification command to XBMC @ " + host)
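
On the SCRIPTS notifier above: it passes connection details to the child script through environment variables, copying os.environ and overlaying the custom ones before spawning. A runnable sketch of that merge; the values are placeholders, and subprocess.call stands in for the threaded execution the real notifier uses:

import os
import subprocess
import sys

def run_script(script_path, custom_env):
    env = os.environ.copy()   # inherit the parent environment...
    env.update(custom_env)    # ...then overlay the notifier variables
    return subprocess.call([sys.executable, script_path], env=env)

custom_env = {
    'TAUTULLI_URL': 'http://localhost:8181',  # placeholder
    'TAUTULLI_APIKEY': 'abcdef0123456789',    # placeholder
}
# run_script('notify_script.py', custom_env)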

View File

@@ -21,11 +21,11 @@ from future.builtins import str
from plexapi.server import PlexServer
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import logger
else:
from plexpy import logger
from jellypy import logger
class Plex(object):
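
The bulk of this commit is a mechanical identifier rename from plexpy to jellypy across every module. A hypothetical sketch of how such a rename can be scripted; the word-boundary regex and the .py filter are assumptions about the approach, not the actual tooling used:

import os
import re

def rename_package(root, old='plexpy', new='jellypy'):
    # \b keeps substrings like 'plexpy2' or 'myplexpy' untouched.
    pattern = re.compile(r'\b%s\b' % re.escape(old))
    for dirpath, _dirnames, filenames in os.walk(root):
        for name in filenames:
            if not name.endswith('.py'):
                continue
            path = os.path.join(dirpath, name)
            with open(path, 'r', encoding='utf-8') as f:
                source = f.read()
            updated = pattern.sub(new, source)
            if updated != source:
                with open(path, 'w', encoding='utf-8') as f:
                    f.write(updated)

# rename_package('.')  # run from the repository root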

View File

@@ -22,19 +22,19 @@ import arrow
import sqlite3
from xml.dom import minidom
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import activity_processor
import database
import helpers
import logger
import users
else:
from plexpy import activity_processor
from plexpy import database
from plexpy import helpers
from plexpy import logger
from plexpy import users
from jellypy import activity_processor
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import users
def extract_plexivity_xml(xml=None):

View File

@@ -23,8 +23,8 @@ from future.builtins import object
import base64
import json
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import helpers
import http_handler
@@ -33,31 +33,31 @@ if plexpy.PYTHON2:
import pmsconnect
import session
else:
from plexpy import common
from plexpy import helpers
from plexpy import http_handler
from plexpy import logger
from plexpy import users
from plexpy import pmsconnect
from plexpy import session
from jellypy import common
from jellypy import helpers
from jellypy import http_handler
from jellypy import logger
from jellypy import users
from jellypy import pmsconnect
from jellypy import session
def get_server_resources(return_presence=False, return_server=False, return_info=False, **kwargs):
if not return_presence and not return_info:
logger.info("Tautulli PlexTV :: Requesting resources for server...")
server = {'pms_name': plexpy.CONFIG.PMS_NAME,
'pms_version': plexpy.CONFIG.PMS_VERSION,
'pms_platform': plexpy.CONFIG.PMS_PLATFORM,
'pms_ip': plexpy.CONFIG.PMS_IP,
'pms_port': plexpy.CONFIG.PMS_PORT,
'pms_ssl': plexpy.CONFIG.PMS_SSL,
'pms_is_remote': plexpy.CONFIG.PMS_IS_REMOTE,
'pms_is_cloud': plexpy.CONFIG.PMS_IS_CLOUD,
'pms_url': plexpy.CONFIG.PMS_URL,
'pms_url_manual': plexpy.CONFIG.PMS_URL_MANUAL,
'pms_identifier': plexpy.CONFIG.PMS_IDENTIFIER,
'pms_plexpass': plexpy.CONFIG.PMS_PLEXPASS
server = {'pms_name': jellypy.CONFIG.PMS_NAME,
'pms_version': jellypy.CONFIG.PMS_VERSION,
'pms_platform': jellypy.CONFIG.PMS_PLATFORM,
'pms_ip': jellypy.CONFIG.PMS_IP,
'pms_port': jellypy.CONFIG.PMS_PORT,
'pms_ssl': jellypy.CONFIG.PMS_SSL,
'pms_is_remote': jellypy.CONFIG.PMS_IS_REMOTE,
'pms_is_cloud': jellypy.CONFIG.PMS_IS_CLOUD,
'pms_url': jellypy.CONFIG.PMS_URL,
'pms_url_manual': jellypy.CONFIG.PMS_URL_MANUAL,
'pms_identifier': jellypy.CONFIG.PMS_IDENTIFIER,
'pms_plexpass': jellypy.CONFIG.PMS_PLEXPASS
}
if return_info:
@@ -132,8 +132,8 @@ def get_server_resources(return_presence=False, return_server=False, return_info
logger.info("Tautulli PlexTV :: Selected server: %s (%s) (%s - Version %s)",
server['pms_name'], server['pms_url'], server['pms_platform'], server['pms_version'])
plexpy.CONFIG.process_kwargs(server)
plexpy.CONFIG.write()
jellypy.CONFIG.process_kwargs(server)
jellypy.CONFIG.write()
class PlexTV(object):
@@ -147,8 +147,8 @@ class PlexTV(object):
self.token = token
self.urls = 'https://plex.tv'
self.timeout = plexpy.CONFIG.PMS_TIMEOUT
self.ssl_verify = plexpy.CONFIG.VERIFY_SSL_CERT
self.timeout = jellypy.CONFIG.PMS_TIMEOUT
self.ssl_verify = jellypy.CONFIG.VERIFY_SSL_CERT
if self.username is None and self.password is None:
if not self.token:
@@ -158,7 +158,7 @@ class PlexTV(object):
user_tokens = user_data.get_tokens(user_id=session.get_session_user_id())
self.token = user_tokens['server_token']
else:
self.token = plexpy.CONFIG.PMS_TOKEN
self.token = jellypy.CONFIG.PMS_TOKEN
if not self.token:
logger.error("Tautulli PlexTV :: PlexTV called, but no token provided.")
@@ -212,7 +212,7 @@ class PlexTV(object):
if force:
logger.debug("Tautulli PlexTV :: Forcing refresh of Plex.tv token.")
devices_list = self.get_devices_list()
device_id = next((d for d in devices_list if d['device_identifier'] == plexpy.CONFIG.PMS_UUID), {}).get('device_id', None)
device_id = next((d for d in devices_list if d['device_identifier'] == jellypy.CONFIG.PMS_UUID), {}).get('device_id', None)
if device_id:
logger.debug("Tautulli PlexTV :: Removing Tautulli from Plex.tv devices.")
@@ -228,8 +228,8 @@ class PlexTV(object):
user = self.get_token()
if user:
token = user['auth_token']
plexpy.CONFIG.__setattr__('PMS_TOKEN', token)
plexpy.CONFIG.write()
jellypy.CONFIG.__setattr__('PMS_TOKEN', token)
jellypy.CONFIG.write()
logger.info("Tautulli PlexTV :: Updated Plex.tv token for Tautulli.")
return token
@@ -245,7 +245,7 @@ class PlexTV(object):
return None
for a in xml_head:
if helpers.get_xml_attr(a, 'clientIdentifier') == plexpy.CONFIG.PMS_IDENTIFIER \
if helpers.get_xml_attr(a, 'clientIdentifier') == jellypy.CONFIG.PMS_IDENTIFIER \
and 'server' in helpers.get_xml_attr(a, 'provides'):
server_token = helpers.get_xml_attr(a, 'accessToken')
break
@@ -412,7 +412,7 @@ class PlexTV(object):
def get_full_users_list(self):
own_account = self.get_plextv_user_details(output_format='xml')
friends_list = self.get_plextv_friends(output_format='xml')
shared_servers = self.get_plextv_shared_servers(machine_id=plexpy.CONFIG.PMS_IDENTIFIER,
shared_servers = self.get_plextv_shared_servers(machine_id=jellypy.CONFIG.PMS_IDENTIFIER,
output_format='xml')
users_list = []
@@ -498,7 +498,7 @@ class PlexTV(object):
rating_key_filter=None, sync_id_filter=None):
if not machine_id:
machine_id = plexpy.CONFIG.PMS_IDENTIFIER
machine_id = jellypy.CONFIG.PMS_IDENTIFIER
if isinstance(rating_key_filter, list):
rating_key_filter = [str(k) for k in rating_key_filter]
@@ -709,7 +709,7 @@ class PlexTV(object):
return {}
for a in xml_head:
if helpers.get_xml_attr(a, 'machineIdentifier') == plexpy.CONFIG.PMS_IDENTIFIER:
if helpers.get_xml_attr(a, 'machineIdentifier') == jellypy.CONFIG.PMS_IDENTIFIER:
server_times = {"created_at": helpers.get_xml_attr(a, 'createdAt'),
"updated_at": helpers.get_xml_attr(a, 'updatedAt'),
"version": helpers.get_xml_attr(a, 'version')
@@ -824,7 +824,7 @@ class PlexTV(object):
return {}
# Get the updates for the platform
pms_platform = common.PMS_PLATFORM_NAME_OVERRIDES.get(plexpy.CONFIG.PMS_PLATFORM, plexpy.CONFIG.PMS_PLATFORM)
pms_platform = common.PMS_PLATFORM_NAME_OVERRIDES.get(jellypy.CONFIG.PMS_PLATFORM, jellypy.CONFIG.PMS_PLATFORM)
platform_downloads = available_downloads.get('computer').get(pms_platform) or \
available_downloads.get('nas').get(pms_platform)
@@ -833,12 +833,12 @@ class PlexTV(object):
% pms_platform)
return {}
v_old = helpers.cast_to_int("".join(v.zfill(4) for v in plexpy.CONFIG.PMS_VERSION.split('-')[0].split('.')[:4]))
v_old = helpers.cast_to_int("".join(v.zfill(4) for v in jellypy.CONFIG.PMS_VERSION.split('-')[0].split('.')[:4]))
v_new = helpers.cast_to_int("".join(v.zfill(4) for v in platform_downloads.get('version', '').split('-')[0].split('.')[:4]))
if not v_old:
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
% plexpy.CONFIG.PMS_VERSION)
% jellypy.CONFIG.PMS_VERSION)
return {}
if not v_new:
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
@@ -847,8 +847,8 @@ class PlexTV(object):
# Get proper download
releases = platform_downloads.get('releases', [{}])
release = next((r for r in releases if r['distro'] == plexpy.CONFIG.PMS_UPDATE_DISTRO and
r['build'] == plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])
release = next((r for r in releases if r['distro'] == jellypy.CONFIG.PMS_UPDATE_DISTRO and
r['build'] == jellypy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])
download_info = {'update_available': v_new > v_old,
'platform': platform_downloads.get('name'),
@@ -876,13 +876,13 @@ class PlexTV(object):
return False
if subscription and helpers.get_xml_attr(subscription[0], 'active') == '1':
plexpy.CONFIG.__setattr__('PMS_PLEXPASS', 1)
plexpy.CONFIG.write()
jellypy.CONFIG.__setattr__('PMS_PLEXPASS', 1)
jellypy.CONFIG.write()
return True
else:
logger.debug("Tautulli PlexTV :: Plex Pass subscription not found.")
plexpy.CONFIG.__setattr__('PMS_PLEXPASS', 0)
plexpy.CONFIG.write()
jellypy.CONFIG.__setattr__('PMS_PLEXPASS', 0)
jellypy.CONFIG.write()
return False
def get_devices_list(self):
@@ -925,7 +925,7 @@ class PlexTV(object):
for info in status_info:
servers = info.getElementsByTagName('server')
for s in servers:
if helpers.get_xml_attr(s, 'address') == plexpy.CONFIG.PMS_IP:
if helpers.get_xml_attr(s, 'address') == jellypy.CONFIG.PMS_IP:
if helpers.get_xml_attr(info, 'running') == '1':
return True
else:
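
On the update check in the hunks above: it compares dotted version strings numerically by zero-padding each of the first four segments to four digits and casting the concatenation to an integer, so '1.21.0.4014' sorts above '1.9.9.9999'. A worked sketch of that key; note both sides need the same number of segments for the comparison to hold:

def version_key(version):
    # '1.2.13-abc123' -> '000100020013' -> 100020013
    segments = version.split('-')[0].split('.')[:4]
    return int(''.join(v.zfill(4) for v in segments) or 0)

assert version_key('1.21.0.4014') > version_key('1.9.9.9999')
print(version_key('1.2.13'))  # 100020013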

View File

@@ -21,19 +21,19 @@ from future.builtins import str
import sqlite3
from xml.dom import minidom
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import activity_processor
import database
import helpers
import logger
import users
else:
from plexpy import activity_processor
from plexpy import database
from plexpy import helpers
from plexpy import logger
from plexpy import users
from jellypy import activity_processor
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import users
def extract_plexwatch_xml(xml=None):
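
Every module in this commit carries the same import gate seen above: Python 2 picks up sibling modules through implicit relative imports ('import logger'), while Python 3 needs the explicit 'from jellypy import logger'. A runnable analogue of a version-gated import, using a stdlib module that moved between the two versions:

import sys

PYTHON2 = sys.version_info[0] == 2  # the flag the package exposes as jellypy.PYTHON2

if PYTHON2:
    from urllib import urlencode        # Python 2 location
else:
    from urllib.parse import urlencode  # Python 3 location

print(urlencode({'app': 'jellypy'}))    # app=jellypy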

View File

@@ -26,8 +26,8 @@ import time
from future.moves.urllib.parse import quote, quote_plus, urlencode
from xml.dom.minidom import Node
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import activity_processor
import common
import helpers
@@ -38,15 +38,15 @@ if plexpy.PYTHON2:
import session
import users
else:
from plexpy import activity_processor
from plexpy import common
from plexpy import helpers
from plexpy import http_handler
from plexpy import libraries
from plexpy import logger
from plexpy import plextv
from plexpy import session
from plexpy import users
from jellypy import activity_processor
from jellypy import common
from jellypy import helpers
from jellypy import http_handler
from jellypy import libraries
from jellypy import logger
from jellypy import plextv
from jellypy import session
from jellypy import users
def get_server_friendly_name():
@@ -57,13 +57,13 @@ def get_server_friendly_name():
if not server_name:
servers_info = PmsConnect().get_servers_info()
for server in servers_info:
if server['machine_identifier'] == plexpy.CONFIG.PMS_IDENTIFIER:
if server['machine_identifier'] == jellypy.CONFIG.PMS_IDENTIFIER:
server_name = server['name']
break
if server_name and server_name != plexpy.CONFIG.PMS_NAME:
plexpy.CONFIG.__setattr__('PMS_NAME', server_name)
plexpy.CONFIG.write()
if server_name and server_name != jellypy.CONFIG.PMS_NAME:
jellypy.CONFIG.__setattr__('PMS_NAME', server_name)
jellypy.CONFIG.write()
logger.info("Tautulli Pmsconnect :: Server name retrieved.")
return server_name
@@ -78,12 +78,12 @@ class PmsConnect(object):
self.url = url
self.token = token
if not self.url and plexpy.CONFIG.PMS_URL:
self.url = plexpy.CONFIG.PMS_URL
if not self.url and jellypy.CONFIG.PMS_URL:
self.url = jellypy.CONFIG.PMS_URL
elif not self.url:
self.url = 'http://{hostname}:{port}'.format(hostname=plexpy.CONFIG.PMS_IP,
port=plexpy.CONFIG.PMS_PORT)
self.timeout = plexpy.CONFIG.PMS_TIMEOUT
self.url = 'http://{hostname}:{port}'.format(hostname=jellypy.CONFIG.PMS_IP,
port=jellypy.CONFIG.PMS_PORT)
self.timeout = jellypy.CONFIG.PMS_TIMEOUT
if not self.token:
# Check if we should use the admin token, or the guest server token
@@ -92,7 +92,7 @@ class PmsConnect(object):
user_tokens = user_data.get_tokens(user_id=session.get_session_user_id())
self.token = user_tokens['server_token']
else:
self.token = plexpy.CONFIG.PMS_TOKEN
self.token = jellypy.CONFIG.PMS_TOKEN
self.request_handler = http_handler.HTTPHandler(urls=self.url,
token=self.token,
@@ -625,7 +625,7 @@ class PmsConnect(object):
metadata = {}
if not skip_cache and cache_key:
in_file_folder = os.path.join(plexpy.CONFIG.CACHE_DIR, 'session_metadata')
in_file_folder = os.path.join(jellypy.CONFIG.CACHE_DIR, 'session_metadata')
in_file_path = os.path.join(in_file_folder, 'metadata-sessionKey-%s.json' % cache_key)
if not os.path.exists(in_file_folder):
@@ -640,7 +640,7 @@ class PmsConnect(object):
if metadata:
_cache_time = metadata.pop('_cache_time', 0)
# Return cached metadata if less than cache_seconds ago
if return_cache or helpers.timestamp() - _cache_time <= plexpy.CONFIG.METADATA_CACHE_SECONDS:
if return_cache or helpers.timestamp() - _cache_time <= jellypy.CONFIG.METADATA_CACHE_SECONDS:
return metadata
if rating_key:
@@ -649,7 +649,7 @@ class PmsConnect(object):
metadata_xml = self.get_sync_item(str(sync_id), output_format='xml')
elif plex_guid.startswith(('plex://movie', 'plex://episode')):
rating_key = plex_guid.rsplit('/', 1)[-1]
plextv_metadata = PmsConnect(url='https://metadata.provider.plex.tv', token=plexpy.CONFIG.PMS_TOKEN)
plextv_metadata = PmsConnect(url='https://metadata.provider.plex.tv', token=jellypy.CONFIG.PMS_TOKEN)
metadata_xml = plextv_metadata.get_metadata(rating_key, output_format='xml')
else:
return metadata
@@ -1474,7 +1474,7 @@ class PmsConnect(object):
if cache_key:
metadata['_cache_time'] = helpers.timestamp()
out_file_folder = os.path.join(plexpy.CONFIG.CACHE_DIR, 'session_metadata')
out_file_folder = os.path.join(jellypy.CONFIG.CACHE_DIR, 'session_metadata')
out_file_path = os.path.join(out_file_folder, 'metadata-sessionKey-%s.json' % cache_key)
if not os.path.exists(out_file_folder):
@@ -1782,7 +1782,7 @@ class PmsConnect(object):
and not session.getElementsByTagName('Session') \
and not session.getElementsByTagName('TranscodeSession') \
and helpers.get_xml_attr(session, 'ratingKey').isdigit() \
and plexpy.CONFIG.PMS_PLEXPASS:
and jellypy.CONFIG.PMS_PLEXPASS:
plex_tv = plextv.PlexTV()
parent_rating_key = helpers.get_xml_attr(session, 'parentRatingKey')
grandparent_rating_key = helpers.get_xml_attr(session, 'grandparentRatingKey')
@@ -3101,9 +3101,9 @@ class PmsConnect(object):
logger.info("Tautulli is unable to check for Plex updates. Disabling check for Plex updates.")
# Disable check for Plex updates
plexpy.CONFIG.MONITOR_PMS_UPDATES = 0
plexpy.initialize_scheduler()
plexpy.CONFIG.write()
jellypy.CONFIG.MONITOR_PMS_UPDATES = 0
jellypy.initialize_scheduler()
jellypy.CONFIG.write()
return {}
@@ -3123,13 +3123,13 @@ class PmsConnect(object):
def set_server_version(self):
identity = self.get_server_identity()
version = identity.get('version', plexpy.CONFIG.PMS_VERSION)
version = identity.get('version', jellypy.CONFIG.PMS_VERSION)
plexpy.CONFIG.__setattr__('PMS_VERSION', version)
plexpy.CONFIG.write()
jellypy.CONFIG.__setattr__('PMS_VERSION', version)
jellypy.CONFIG.write()
def get_server_update_channel(self):
if plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plex':
if jellypy.CONFIG.PMS_UPDATE_CHANNEL == 'plex':
update_channel_value = self.get_server_pref('ButlerUpdateChannel')
if update_channel_value == '8':
@@ -3137,4 +3137,4 @@ class PmsConnect(object):
else:
return 'public'
return plexpy.CONFIG.PMS_UPDATE_CHANNEL
return jellypy.CONFIG.PMS_UPDATE_CHANNEL
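
On get_metadata in the hunks above: it caches session metadata as JSON files stamped with _cache_time and trusts them only within METADATA_CACHE_SECONDS. A self-contained sketch of that read-through cache; the directory and TTL are placeholders:

import json
import os
import time

CACHE_DIR = '/tmp/session_metadata'  # placeholder for CONFIG.CACHE_DIR
CACHE_SECONDS = 1800                 # placeholder for METADATA_CACHE_SECONDS

def cache_path(session_key):
    return os.path.join(CACHE_DIR, 'metadata-sessionKey-%s.json' % session_key)

def save_cached(session_key, metadata):
    os.makedirs(CACHE_DIR, exist_ok=True)
    metadata['_cache_time'] = int(time.time())
    with open(cache_path(session_key), 'w') as f:
        json.dump(metadata, f)

def load_cached(session_key):
    try:
        with open(cache_path(session_key)) as f:
            metadata = json.load(f)
    except (OSError, ValueError):
        return None
    # Serve the cached copy only while it is fresher than the TTL.
    if time.time() - metadata.pop('_cache_time', 0) <= CACHE_SECONDS:
        return metadata
    return None

save_cached('42', {'title': 'Example'})
print(load_cached('42'))  # {'title': 'Example'}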

View File

@@ -25,13 +25,13 @@ import collections
import requests
from requests.packages import urllib3
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import lock
import logger
else:
from plexpy import lock
from plexpy import logger
from jellypy import lock
from jellypy import logger
# Dictionary with last request times, for rate limiting.
@@ -59,7 +59,7 @@ def request_response(url, method="get", auto_raise=True,
# Disable verification of SSL certificates if requested. Note: this could
# pose a security issue!
kwargs["verify"] = bool(plexpy.CONFIG.VERIFY_SSL_CERT)
kwargs["verify"] = bool(jellypy.CONFIG.VERIFY_SSL_CERT)
if not kwargs['verify']:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -123,7 +123,7 @@ def request_response(url, method="get", auto_raise=True,
e.response.status_code, cause)
# Debug response
if plexpy.VERBOSE:
if jellypy.VERBOSE:
server_message(e.response)
else:
logger.error("Request raised HTTP error.")
@@ -151,7 +151,7 @@ def request_response2(url, method="get", auto_raise=True,
# Disable verification of SSL certificates if requested. Note: this could
# pose a security issue!
kwargs['verify'] = bool(plexpy.CONFIG.VERIFY_SSL_CERT)
kwargs['verify'] = bool(jellypy.CONFIG.VERIFY_SSL_CERT)
if not kwargs['verify']:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -203,7 +203,7 @@ def request_response2(url, method="get", auto_raise=True,
err_msg = "Request raised a HTTP error: {}".format(http_err)
if plexpy.VERBOSE:
if jellypy.VERBOSE:
req_msg = server_message(e.response, return_msg=True)
else:
@@ -264,7 +264,7 @@ def request_json(url, **kwargs):
logger.error("Response returned invalid JSON data")
# Debug response
if plexpy.VERBOSE:
if jellypy.VERBOSE:
server_message(response)
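
Both request helpers above funnel the VERIFY_SSL_CERT setting into requests' verify flag and silence urllib3's warning when verification is deliberately off, with the same security caveat the inline comment gives. A minimal sketch:

import requests
from requests.packages import urllib3

def request_response(url, verify_ssl=True, **kwargs):
    kwargs['verify'] = bool(verify_ssl)
    if not kwargs['verify']:
        # Disabling verification can expose you to MITM attacks; suppress
        # the per-request warning since the setting was chosen deliberately.
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    return requests.get(url, **kwargs)

# response = request_response('https://self-signed.example', verify_ssl=False)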

View File

@@ -20,13 +20,13 @@ from future.builtins import str
import cherrypy
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import users
else:
from plexpy import common
from plexpy import users
from jellypy import common
from jellypy import users
def get_session_info():
@@ -68,7 +68,7 @@ def get_session_user_token():
session_user_tokens = users.Users().get_tokens(_session['user_id'])
user_token = session_user_tokens['server_token']
else:
user_token = plexpy.CONFIG.PMS_TOKEN
user_token = jellypy.CONFIG.PMS_TOKEN
return user_token

View File

@@ -23,8 +23,8 @@ from future.moves.urllib.parse import parse_qsl
import httpagentparser
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import database
import datatables
@@ -34,21 +34,21 @@ if plexpy.PYTHON2:
import plextv
import session
else:
from plexpy import common
from plexpy import database
from plexpy import datatables
from plexpy import helpers
from plexpy import libraries
from plexpy import logger
from plexpy import plextv
from plexpy import session
from jellypy import common
from jellypy import database
from jellypy import datatables
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import plextv
from jellypy import session
def refresh_users():
logger.info("Tautulli Users :: Requesting users list refresh...")
result = plextv.PlexTV().get_full_users_list()
server_id = plexpy.CONFIG.PMS_IDENTIFIER
server_id = jellypy.CONFIG.PMS_IDENTIFIER
if not server_id:
logger.error("Tautulli Users :: No PMS identifier, cannot refresh users. Verify server in settings.")
return
@@ -111,7 +111,7 @@ class Users(object):
custom_where = [['users.deleted_user', 0]]
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if session.get_session_user_id():
custom_where.append(['users.user_id', session.get_session_user_id()])
@@ -486,7 +486,7 @@ class Users(object):
return []
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if query_days and query_days is not None:
query_days = map(helpers.cast_to_int, query_days.split(','))
@@ -548,7 +548,7 @@ class Users(object):
return []
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
monitor_db = database.MonitorDatabase()

View File

@@ -27,23 +27,23 @@ import re
import subprocess
import tarfile
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import helpers
import logger
import request
else:
from plexpy import common
from plexpy import helpers
from plexpy import logger
from plexpy import request
from jellypy import common
from jellypy import helpers
from jellypy import logger
from jellypy import request
def runGit(args):
if plexpy.CONFIG.GIT_PATH:
git_locations = ['"' + plexpy.CONFIG.GIT_PATH + '"']
if jellypy.CONFIG.GIT_PATH:
git_locations = ['"' + jellypy.CONFIG.GIT_PATH + '"']
else:
git_locations = ['git']
@@ -56,8 +56,8 @@ def runGit(args):
cmd = cur_git + ' ' + args
try:
logger.debug('Trying to execute: "' + cmd + '" with shell in ' + plexpy.PROG_DIR)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=plexpy.PROG_DIR)
logger.debug('Trying to execute: "' + cmd + '" with shell in ' + jellypy.PROG_DIR)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=jellypy.PROG_DIR)
output, err = p.communicate()
output = output.strip().decode()
@@ -80,18 +80,18 @@ def runGit(args):
def get_version():
if plexpy.FROZEN and common.PLATFORM == 'Windows':
plexpy.INSTALL_TYPE = 'windows'
if jellypy.FROZEN and common.PLATFORM == 'Windows':
jellypy.INSTALL_TYPE = 'windows'
current_version, current_branch = get_version_from_file()
return current_version, 'origin', current_branch
elif plexpy.FROZEN and common.PLATFORM == 'Darwin':
plexpy.INSTALL_TYPE = 'macos'
elif jellypy.FROZEN and common.PLATFORM == 'Darwin':
jellypy.INSTALL_TYPE = 'macos'
current_version, current_branch = get_version_from_file()
return current_version, 'origin', current_branch
elif os.path.isdir(os.path.join(plexpy.PROG_DIR, '.git')):
plexpy.INSTALL_TYPE = 'git'
elif os.path.isdir(os.path.join(jellypy.PROG_DIR, '.git')):
jellypy.INSTALL_TYPE = 'git'
output, err = runGit('rev-parse HEAD')
if not output:
@@ -104,9 +104,9 @@ def get_version():
logger.error('Output does not look like a hash, not using it.')
cur_commit_hash = None
if plexpy.CONFIG.DO_NOT_OVERRIDE_GIT_BRANCH and plexpy.CONFIG.GIT_BRANCH:
if jellypy.CONFIG.DO_NOT_OVERRIDE_GIT_BRANCH and jellypy.CONFIG.GIT_BRANCH:
remote_name = None
branch_name = plexpy.CONFIG.GIT_BRANCH
branch_name = jellypy.CONFIG.GIT_BRANCH
else:
remote_branch, err = runGit('rev-parse --abbrev-ref --symbolic-full-name @{u}')
@@ -116,16 +116,16 @@ def get_version():
else:
remote_name = branch_name = None
if not remote_name and plexpy.CONFIG.GIT_REMOTE:
logger.error('Could not retrieve remote name from git. Falling back to %s.' % plexpy.CONFIG.GIT_REMOTE)
remote_name = plexpy.CONFIG.GIT_REMOTE
if not remote_name and jellypy.CONFIG.GIT_REMOTE:
logger.error('Could not retrieve remote name from git. Falling back to %s.' % jellypy.CONFIG.GIT_REMOTE)
remote_name = jellypy.CONFIG.GIT_REMOTE
if not remote_name:
logger.error('Could not retrieve remote name from git. Defaulting to origin.')
branch_name = 'origin'
if not branch_name and plexpy.CONFIG.GIT_BRANCH:
logger.error('Could not retrieve branch name from git. Falling back to %s.' % plexpy.CONFIG.GIT_BRANCH)
branch_name = plexpy.CONFIG.GIT_BRANCH
if not branch_name and jellypy.CONFIG.GIT_BRANCH:
logger.error('Could not retrieve branch name from git. Falling back to %s.' % jellypy.CONFIG.GIT_BRANCH)
branch_name = jellypy.CONFIG.GIT_BRANCH
if not branch_name:
logger.error('Could not retrieve branch name from git. Defaulting to master.')
branch_name = 'master'
@@ -133,20 +133,20 @@ def get_version():
return cur_commit_hash, remote_name, branch_name
else:
if plexpy.DOCKER:
plexpy.INSTALL_TYPE = 'docker'
elif plexpy.SNAP:
plexpy.INSTALL_TYPE = 'snap'
if jellypy.DOCKER:
jellypy.INSTALL_TYPE = 'docker'
elif jellypy.SNAP:
jellypy.INSTALL_TYPE = 'snap'
else:
plexpy.INSTALL_TYPE = 'source'
jellypy.INSTALL_TYPE = 'source'
current_version, current_branch = get_version_from_file()
return current_version, 'origin', current_branch
def get_version_from_file():
version_file = os.path.join(plexpy.PROG_DIR, 'version.txt')
branch_file = os.path.join(plexpy.PROG_DIR, 'branch.txt')
version_file = os.path.join(jellypy.PROG_DIR, 'version.txt')
branch_file = os.path.join(jellypy.PROG_DIR, 'branch.txt')
if os.path.isfile(version_file):
with open(version_file, 'r') as f:
@@ -166,30 +166,30 @@ def get_version_from_file():
def check_update(scheduler=False, notify=False, use_cache=False):
check_github(scheduler=scheduler, notify=notify, use_cache=use_cache)
if not plexpy.CURRENT_VERSION:
plexpy.UPDATE_AVAILABLE = None
elif plexpy.COMMITS_BEHIND > 0 and \
(plexpy.common.BRANCH in ('master', 'beta') or plexpy.SNAP or plexpy.FROZEN) and \
plexpy.common.RELEASE != plexpy.LATEST_RELEASE:
plexpy.UPDATE_AVAILABLE = 'release'
elif plexpy.COMMITS_BEHIND > 0 and \
not plexpy.SNAP and not plexpy.FROZEN and \
plexpy.CURRENT_VERSION != plexpy.LATEST_VERSION:
plexpy.UPDATE_AVAILABLE = 'commit'
if not jellypy.CURRENT_VERSION:
jellypy.UPDATE_AVAILABLE = None
elif jellypy.COMMITS_BEHIND > 0 and \
(jellypy.common.BRANCH in ('master', 'beta') or jellypy.SNAP or jellypy.FROZEN) and \
jellypy.common.RELEASE != jellypy.LATEST_RELEASE:
jellypy.UPDATE_AVAILABLE = 'release'
elif jellypy.COMMITS_BEHIND > 0 and \
not jellypy.SNAP and not jellypy.FROZEN and \
jellypy.CURRENT_VERSION != jellypy.LATEST_VERSION:
jellypy.UPDATE_AVAILABLE = 'commit'
else:
plexpy.UPDATE_AVAILABLE = False
jellypy.UPDATE_AVAILABLE = False
if plexpy.WIN_SYS_TRAY_ICON:
plexpy.WIN_SYS_TRAY_ICON.change_tray_update_icon()
elif plexpy.MAC_SYS_TRAY_ICON:
plexpy.MAC_SYS_TRAY_ICON.change_tray_update_icon()
if jellypy.WIN_SYS_TRAY_ICON:
jellypy.WIN_SYS_TRAY_ICON.change_tray_update_icon()
elif jellypy.MAC_SYS_TRAY_ICON:
jellypy.MAC_SYS_TRAY_ICON.change_tray_update_icon()
def check_github(scheduler=False, notify=False, use_cache=False):
plexpy.COMMITS_BEHIND = 0
jellypy.COMMITS_BEHIND = 0
if plexpy.CONFIG.GIT_TOKEN:
headers = {'Authorization': 'token {}'.format(plexpy.CONFIG.GIT_TOKEN)}
if jellypy.CONFIG.GIT_TOKEN:
headers = {'Authorization': 'token {}'.format(jellypy.CONFIG.GIT_TOKEN)}
else:
headers = {}
@@ -197,118 +197,118 @@ def check_github(scheduler=False, notify=False, use_cache=False):
if not version:
# Get the latest version available from github
logger.info('Retrieving latest version information from GitHub')
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO,
plexpy.CONFIG.GIT_BRANCH)
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO,
jellypy.CONFIG.GIT_BRANCH)
version = request.request_json(url, headers=headers, timeout=20,
validator=lambda x: type(x) == dict)
github_cache('version', github_data=version)
if version is None:
logger.warn('Could not get the latest version from GitHub. Are you running a local development version?')
return plexpy.CURRENT_VERSION
return jellypy.CURRENT_VERSION
plexpy.LATEST_VERSION = version['sha']
logger.debug("Latest version is %s", plexpy.LATEST_VERSION)
jellypy.LATEST_VERSION = version['sha']
logger.debug("Latest version is %s", jellypy.LATEST_VERSION)
# See how many commits behind we are
if not plexpy.CURRENT_VERSION:
if not jellypy.CURRENT_VERSION:
logger.info('You are running an unknown version of Tautulli. Run the updater to identify your version')
return plexpy.LATEST_VERSION
return jellypy.LATEST_VERSION
if plexpy.LATEST_VERSION == plexpy.CURRENT_VERSION:
if jellypy.LATEST_VERSION == jellypy.CURRENT_VERSION:
logger.info('Tautulli is up to date')
return plexpy.LATEST_VERSION
return jellypy.LATEST_VERSION
commits = github_cache('commits', use_cache=use_cache)
if not commits:
logger.info('Comparing currently installed version with latest GitHub version')
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO,
plexpy.LATEST_VERSION,
plexpy.CURRENT_VERSION)
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO,
jellypy.LATEST_VERSION,
jellypy.CURRENT_VERSION)
commits = request.request_json(url, headers=headers, timeout=20, whitelist_status_code=404,
validator=lambda x: type(x) == dict)
github_cache('commits', github_data=commits)
if commits is None:
logger.warn('Could not get commits behind from GitHub.')
return plexpy.LATEST_VERSION
return jellypy.LATEST_VERSION
try:
plexpy.COMMITS_BEHIND = int(commits['behind_by'])
logger.debug("In total, %d commits behind", plexpy.COMMITS_BEHIND)
jellypy.COMMITS_BEHIND = int(commits['behind_by'])
logger.debug("In total, %d commits behind", jellypy.COMMITS_BEHIND)
except KeyError:
logger.info('Cannot compare versions. Are you running a local development version?')
plexpy.COMMITS_BEHIND = 0
jellypy.COMMITS_BEHIND = 0
if plexpy.COMMITS_BEHIND > 0:
logger.info('New version is available. You are %s commits behind' % plexpy.COMMITS_BEHIND)
if jellypy.COMMITS_BEHIND > 0:
logger.info('New version is available. You are %s commits behind' % jellypy.COMMITS_BEHIND)
releases = github_cache('releases', use_cache=use_cache)
if not releases:
url = 'https://api.github.com/repos/%s/%s/releases' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO)
url = 'https://api.github.com/repos/%s/%s/releases' % (jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO)
releases = request.request_json(url, timeout=20, whitelist_status_code=404,
validator=lambda x: type(x) == list)
github_cache('releases', github_data=releases)
if releases is None:
logger.warn('Could not get releases from GitHub.')
return plexpy.LATEST_VERSION
return jellypy.LATEST_VERSION
if plexpy.CONFIG.GIT_BRANCH == 'master':
if jellypy.CONFIG.GIT_BRANCH == 'master':
release = next((r for r in releases if not r['prerelease']), releases[0])
elif plexpy.CONFIG.GIT_BRANCH == 'beta':
elif jellypy.CONFIG.GIT_BRANCH == 'beta':
release = next((r for r in releases if not r['tag_name'].endswith('-nightly')), releases[0])
elif plexpy.CONFIG.GIT_BRANCH == 'nightly':
elif jellypy.CONFIG.GIT_BRANCH == 'nightly':
release = next((r for r in releases), releases[0])
else:
release = releases[0]
plexpy.LATEST_RELEASE = release['tag_name']
jellypy.LATEST_RELEASE = release['tag_name']
if notify:
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate',
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate',
'plexpy_download_info': release,
'plexpy_update_commit': plexpy.LATEST_VERSION,
'plexpy_update_behind': plexpy.COMMITS_BEHIND})
'plexpy_update_commit': jellypy.LATEST_VERSION,
'plexpy_update_behind': jellypy.COMMITS_BEHIND})
if plexpy.PYTHON2:
if jellypy.PYTHON2:
logger.warn('Tautulli is running using Python 2. Unable to run automatic update.')
elif scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and \
not plexpy.DOCKER and not plexpy.SNAP and \
not (plexpy.FROZEN and common.PLATFORM == 'Darwin'):
elif scheduler and jellypy.CONFIG.PLEXPY_AUTO_UPDATE and \
not jellypy.DOCKER and not jellypy.SNAP and \
not (jellypy.FROZEN and common.PLATFORM == 'Darwin'):
logger.info('Running automatic update.')
plexpy.shutdown(restart=True, update=True)
jellypy.shutdown(restart=True, update=True)
elif plexpy.COMMITS_BEHIND == 0:
elif jellypy.COMMITS_BEHIND == 0:
logger.info('Tautulli is up to date')
return plexpy.LATEST_VERSION
return jellypy.LATEST_VERSION
def update():
if plexpy.PYTHON2:
if jellypy.PYTHON2:
logger.warn('Tautulli is running using Python 2. Unable to update.')
return
if not plexpy.UPDATE_AVAILABLE:
if not jellypy.UPDATE_AVAILABLE:
return
if plexpy.INSTALL_TYPE in ('docker', 'snap', 'macos'):
if jellypy.INSTALL_TYPE in ('docker', 'snap', 'macos'):
return
elif plexpy.INSTALL_TYPE == 'windows':
elif jellypy.INSTALL_TYPE == 'windows':
logger.info('Calling Windows scheduled task to update Tautulli')
CREATE_NO_WINDOW = 0x08000000
subprocess.Popen(['SCHTASKS', '/Run', '/TN', 'TautulliUpdateTask'],
creationflags=CREATE_NO_WINDOW)
elif plexpy.INSTALL_TYPE == 'git':
output, err = runGit('pull --ff-only {} {}'.format(plexpy.CONFIG.GIT_REMOTE,
plexpy.CONFIG.GIT_BRANCH))
elif jellypy.INSTALL_TYPE == 'git':
output, err = runGit('pull --ff-only {} {}'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
if not output:
logger.error('Unable to download latest version')
@@ -320,12 +320,12 @@ def update():
elif line.endswith(('Aborting', 'Aborting.')):
logger.error('Unable to update from git: ' + line)
elif plexpy.INSTALL_TYPE == 'source':
tar_download_url = 'https://github.com/{}/{}/tarball/{}'.format(plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO,
plexpy.CONFIG.GIT_BRANCH)
update_dir = os.path.join(plexpy.DATA_DIR, 'update')
version_path = os.path.join(plexpy.PROG_DIR, 'version.txt')
elif jellypy.INSTALL_TYPE == 'source':
tar_download_url = 'https://github.com/{}/{}/tarball/{}'.format(jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO,
jellypy.CONFIG.GIT_BRANCH)
update_dir = os.path.join(jellypy.DATA_DIR, 'update')
version_path = os.path.join(jellypy.PROG_DIR, 'version.txt')
logger.info('Downloading update from: ' + tar_download_url)
data = request.request_content(tar_download_url)
@@ -334,8 +334,8 @@ def update():
logger.error("Unable to retrieve new version from '%s', can't update", tar_download_url)
return
download_name = plexpy.CONFIG.GIT_BRANCH + '-github'
tar_download_path = os.path.join(plexpy.DATA_DIR, download_name)
download_name = jellypy.CONFIG.GIT_BRANCH + '-github'
tar_download_path = os.path.join(jellypy.DATA_DIR, download_name)
# Save tar to disk
with open(tar_download_path, 'wb') as f:
@@ -363,7 +363,7 @@ def update():
dirname = dirname[len(content_dir) + 1:]
for curfile in filenames:
old_path = os.path.join(content_dir, dirname, curfile)
new_path = os.path.join(plexpy.PROG_DIR, dirname, curfile)
new_path = os.path.join(jellypy.PROG_DIR, dirname, curfile)
if os.path.isfile(new_path):
os.remove(new_path)
@@ -372,7 +372,7 @@ def update():
# Update version.txt
try:
with open(version_path, 'w') as f:
f.write(str(plexpy.LATEST_VERSION))
f.write(str(jellypy.LATEST_VERSION))
except IOError as e:
logger.error(
"Unable to write current version to version.txt, update not complete: %s",
@@ -382,18 +382,18 @@ def update():
def reset_git_install():
if plexpy.INSTALL_TYPE == 'git':
logger.info('Attempting to reset git install to "{}/{}/{}"'.format(plexpy.CONFIG.GIT_REMOTE,
plexpy.CONFIG.GIT_BRANCH,
if jellypy.INSTALL_TYPE == 'git':
logger.info('Attempting to reset git install to "{}/{}/{}"'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH,
common.RELEASE))
output, err = runGit('remote set-url {} https://github.com/{}/{}.git'.format(plexpy.CONFIG.GIT_REMOTE,
plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO))
output, err = runGit('fetch {}'.format(plexpy.CONFIG.GIT_REMOTE))
output, err = runGit('checkout {}'.format(plexpy.CONFIG.GIT_BRANCH))
output, err = runGit('branch -u {}/{}'.format(plexpy.CONFIG.GIT_REMOTE,
plexpy.CONFIG.GIT_BRANCH))
output, err = runGit('remote set-url {} https://github.com/{}/{}.git'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO))
output, err = runGit('fetch {}'.format(jellypy.CONFIG.GIT_REMOTE))
output, err = runGit('checkout {}'.format(jellypy.CONFIG.GIT_BRANCH))
output, err = runGit('branch -u {}/{}'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
output, err = runGit('reset --hard {}'.format(common.RELEASE))
if not output:
@@ -410,12 +410,12 @@ def reset_git_install():
def checkout_git_branch():
if plexpy.INSTALL_TYPE == 'git':
logger.info('Attempting to checkout git branch "{}/{}"'.format(plexpy.CONFIG.GIT_REMOTE,
plexpy.CONFIG.GIT_BRANCH))
if jellypy.INSTALL_TYPE == 'git':
logger.info('Attempting to checkout git branch "{}/{}"'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
output, err = runGit('fetch {}'.format(plexpy.CONFIG.GIT_REMOTE))
output, err = runGit('checkout {}'.format(plexpy.CONFIG.GIT_BRANCH))
output, err = runGit('fetch {}'.format(jellypy.CONFIG.GIT_REMOTE))
output, err = runGit('checkout {}'.format(jellypy.CONFIG.GIT_BRANCH))
if not output:
logger.error('Unable to change git branch.')
@@ -426,13 +426,13 @@ def checkout_git_branch():
logger.error('Unable to checkout from git: ' + line)
return
output, err = runGit('pull {} {}'.format(plexpy.CONFIG.GIT_REMOTE,
plexpy.CONFIG.GIT_BRANCH))
output, err = runGit('pull {} {}'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
def github_cache(cache, github_data=None, use_cache=True):
timestamp = helpers.timestamp()
cache_filepath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'github_{}.json'.format(cache))
cache_filepath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'github_{}.json'.format(cache))
if github_data:
cache_data = {'github_data': github_data, '_cache_time': timestamp}
@@ -447,7 +447,7 @@ def github_cache(cache, github_data=None, use_cache=True):
try:
with open(cache_filepath, 'r', encoding='utf-8') as cache_file:
cache_data = json.load(cache_file)
if timestamp - cache_data['_cache_time'] < plexpy.CONFIG.CHECK_GITHUB_CACHE_SECONDS:
if timestamp - cache_data['_cache_time'] < jellypy.CONFIG.CHECK_GITHUB_CACHE_SECONDS:
logger.debug('Using cached GitHub %s data', cache)
return cache_data['github_data']
except:
@@ -455,7 +455,7 @@ def github_cache(cache, github_data=None, use_cache=True):
def read_changelog(latest_only=False, since_prev_release=False):
changelog_file = os.path.join(plexpy.PROG_DIR, 'CHANGELOG.md')
changelog_file = os.path.join(jellypy.PROG_DIR, 'CHANGELOG.md')
if not os.path.isfile(changelog_file):
return '<h4>Missing changelog file</h4>'
@@ -470,7 +470,7 @@ def read_changelog(latest_only=False, since_prev_release=False):
list_pattern = re.compile(r'(^[ \t]*\*\s)(.+)')
beta_release = False
prev_release = str(plexpy.PREV_RELEASE)
prev_release = str(jellypy.PREV_RELEASE)
with open(changelog_file, "r") as logfile:
for line in logfile:
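Aside from the rename, github_cache above is a self-contained pattern worth noting: a JSON payload stored next to a `_cache_time` timestamp and reused only while younger than CHECK_GITHUB_CACHE_SECONDS. A minimal standalone sketch of the same pattern (function and parameter names here are hypothetical):

import json
import time

def cached_json(path, fetch, max_age=3600):
    # Reuse the stored payload while it is younger than max_age seconds,
    # the same timestamp gate github_cache() applies above.
    try:
        with open(path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        if time.time() - data['_cache_time'] < max_age:
            return data['payload']
    except (OSError, ValueError, KeyError):
        pass  # missing or corrupt cache -> fall through to a fresh fetch
    payload = fetch()
    with open(path, 'w', encoding='utf-8') as f:
        json.dump({'payload': payload, '_cache_time': int(time.time())}, f)
    return payload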

View File

@@ -28,19 +28,19 @@ import time
import certifi
import websocket
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import activity_handler
import activity_pinger
import activity_processor
import database
import logger
else:
from plexpy import activity_handler
from plexpy import activity_pinger
from plexpy import activity_processor
from plexpy import database
from plexpy import logger
from jellypy import activity_handler
from jellypy import activity_pinger
from jellypy import activity_processor
from jellypy import database
from jellypy import logger
name = 'websocket'
@@ -66,30 +66,30 @@ def start_thread():
def on_connect():
if plexpy.PLEX_SERVER_UP is None:
plexpy.PLEX_SERVER_UP = True
if jellypy.PLEX_SERVER_UP is None:
jellypy.PLEX_SERVER_UP = True
if not plexpy.PLEX_SERVER_UP:
if not jellypy.PLEX_SERVER_UP:
logger.info("Tautulli WebSocket :: The Plex Media Server is back up.")
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_intup'})
plexpy.PLEX_SERVER_UP = True
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_intup'})
jellypy.PLEX_SERVER_UP = True
plexpy.initialize_scheduler()
if plexpy.CONFIG.WEBSOCKET_MONITOR_PING_PONG:
jellypy.initialize_scheduler()
if jellypy.CONFIG.WEBSOCKET_MONITOR_PING_PONG:
send_ping()
def on_disconnect():
if plexpy.PLEX_SERVER_UP is None:
plexpy.PLEX_SERVER_UP = False
if jellypy.PLEX_SERVER_UP is None:
jellypy.PLEX_SERVER_UP = False
if plexpy.PLEX_SERVER_UP:
if jellypy.PLEX_SERVER_UP:
logger.info("Tautulli WebSocket :: Unable to get a response from the server, Plex server is down.")
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_intdown'})
plexpy.PLEX_SERVER_UP = False
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_intdown'})
jellypy.PLEX_SERVER_UP = False
activity_processor.ActivityProcessor().set_temp_stopped()
plexpy.initialize_scheduler()
jellypy.initialize_scheduler()
def reconnect():
@@ -106,14 +106,14 @@ def shutdown():
def close():
logger.info("Tautulli WebSocket :: Disconnecting websocket...")
plexpy.WEBSOCKET.close()
plexpy.WS_CONNECTED = False
jellypy.WEBSOCKET.close()
jellypy.WS_CONNECTED = False
def send_ping():
if plexpy.WS_CONNECTED:
if jellypy.WS_CONNECTED:
# logger.debug("Tautulli WebSocket :: Sending ping.")
plexpy.WEBSOCKET.ping("Hi?")
jellypy.WEBSOCKET.ping("Hi?")
global pong_timer
pong_timer = threading.Timer(5.0, wait_pong)
@@ -127,7 +127,7 @@ def wait_pong():
logger.warn("Tautulli WebSocket :: Failed to receive pong from websocket, ping attempt %s." % str(pong_count))
if pong_count >= plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
if pong_count >= jellypy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
pong_count = 0
close()
@@ -144,24 +144,24 @@ def receive_pong():
def run():
from websocket import create_connection
if plexpy.CONFIG.PMS_SSL and plexpy.CONFIG.PMS_URL[:5] == 'https':
uri = plexpy.CONFIG.PMS_URL.replace('https://', 'wss://') + '/:/websockets/notifications'
if jellypy.CONFIG.PMS_SSL and jellypy.CONFIG.PMS_URL[:5] == 'https':
uri = jellypy.CONFIG.PMS_URL.replace('https://', 'wss://') + '/:/websockets/notifications'
secure = 'secure '
if plexpy.CONFIG.VERIFY_SSL_CERT:
if jellypy.CONFIG.VERIFY_SSL_CERT:
sslopt = {'ca_certs': certifi.where()}
else:
sslopt = {'cert_reqs': ssl.CERT_NONE}
else:
uri = 'ws://%s:%s/:/websockets/notifications' % (
plexpy.CONFIG.PMS_IP,
plexpy.CONFIG.PMS_PORT
jellypy.CONFIG.PMS_IP,
jellypy.CONFIG.PMS_PORT
)
secure = ''
sslopt = None
# Set authentication token (if one is available)
if plexpy.CONFIG.PMS_TOKEN:
header = ["X-Plex-Token: %s" % plexpy.CONFIG.PMS_TOKEN]
if jellypy.CONFIG.PMS_TOKEN:
header = ["X-Plex-Token: %s" % jellypy.CONFIG.PMS_TOKEN]
else:
header = []
@@ -172,18 +172,18 @@ def run():
# Try to open the websocket connection
logger.info("Tautulli WebSocket :: Opening %swebsocket." % secure)
try:
plexpy.WEBSOCKET = create_connection(uri, header=header, sslopt=sslopt)
jellypy.WEBSOCKET = create_connection(uri, header=header, sslopt=sslopt)
logger.info("Tautulli WebSocket :: Ready")
plexpy.WS_CONNECTED = True
jellypy.WS_CONNECTED = True
except (websocket.WebSocketException, IOError, Exception) as e:
logger.error("Tautulli WebSocket :: %s.", e)
if plexpy.WS_CONNECTED:
if jellypy.WS_CONNECTED:
on_connect()
while plexpy.WS_CONNECTED:
while jellypy.WS_CONNECTED:
try:
process(*receive(plexpy.WEBSOCKET))
process(*receive(jellypy.WEBSOCKET))
# successfully received data, reset reconnects counter
reconnects = 0
@@ -195,19 +195,19 @@ def run():
if reconnects == 0:
logger.warn("Tautulli WebSocket :: Connection has closed.")
if not plexpy.CONFIG.PMS_IS_CLOUD and reconnects < plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
if not jellypy.CONFIG.PMS_IS_CLOUD and reconnects < jellypy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
reconnects += 1
# Sleep between connection attempts
if reconnects > 1:
time.sleep(plexpy.CONFIG.WEBSOCKET_CONNECTION_TIMEOUT)
time.sleep(jellypy.CONFIG.WEBSOCKET_CONNECTION_TIMEOUT)
logger.warn("Tautulli WebSocket :: Reconnection attempt %s." % str(reconnects))
try:
plexpy.WEBSOCKET = create_connection(uri, header=header)
jellypy.WEBSOCKET = create_connection(uri, header=header)
logger.info("Tautulli WebSocket :: Ready")
plexpy.WS_CONNECTED = True
jellypy.WS_CONNECTED = True
except (websocket.WebSocketException, IOError, Exception) as e:
logger.error("Tautulli WebSocket :: %s.", e)
@@ -223,7 +223,7 @@ def run():
close()
break
if not plexpy.WS_CONNECTED and not ws_shutdown:
if not jellypy.WS_CONNECTED and not ws_shutdown:
on_disconnect()
logger.debug("Tautulli WebSocket :: Leaving thread.")

View File

@@ -29,19 +29,19 @@ import cherrypy
from hashing_passwords import check_hash
import jwt
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import logger
from database import MonitorDatabase
from helpers import timestamp
from users import Users, refresh_users
from plextv import PlexTV
else:
from plexpy import logger
from plexpy.database import MonitorDatabase
from plexpy.helpers import timestamp
from plexpy.users import Users, refresh_users
from plexpy.plextv import PlexTV
from jellypy import logger
from jellypy.database import MonitorDatabase
from jellypy.helpers import timestamp
from jellypy.users import Users, refresh_users
from jellypy.plextv import PlexTV
# Monkey patch SameSite support into cookies.
# https://stackoverflow.com/a/50813092
@@ -83,7 +83,7 @@ def plex_user_login(username=None, password=None, token=None, headers=None):
if user_id != str(user_details['user_id']):
# The user is not in the database.
return None
elif plexpy.CONFIG.HTTP_PLEX_ADMIN and user_details['is_admin']:
elif jellypy.CONFIG.HTTP_PLEX_ADMIN and user_details['is_admin']:
# Plex admin login
return user_details, 'admin'
elif not user_details['allow_guest'] or user_details['deleted_user']:
@@ -91,7 +91,7 @@ def plex_user_login(username=None, password=None, token=None, headers=None):
return None
# Stop here if guest access is not enabled
if not plexpy.CONFIG.ALLOW_GUEST_ACCESS:
if not jellypy.CONFIG.ALLOW_GUEST_ACCESS:
return None
# The user is in the database, and guest access is enabled, so try to retrieve a server token.
@@ -139,17 +139,17 @@ def check_credentials(username=None, password=None, token=None, admin_login='0',
Returns True and the user group on success or False and no user group"""
if username and password:
if plexpy.CONFIG.HTTP_PASSWORD:
if jellypy.CONFIG.HTTP_PASSWORD:
user_details = {'user_id': None, 'username': username}
if plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
username == plexpy.CONFIG.HTTP_USERNAME and check_hash(password, plexpy.CONFIG.HTTP_PASSWORD):
if jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and check_hash(password, jellypy.CONFIG.HTTP_PASSWORD):
return True, user_details, 'admin'
elif not plexpy.CONFIG.HTTP_HASHED_PASSWORD and \
username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
elif not jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and password == jellypy.CONFIG.HTTP_PASSWORD:
return True, user_details, 'admin'
if plexpy.CONFIG.HTTP_PLEX_ADMIN or (not admin_login == '1' and plexpy.CONFIG.ALLOW_GUEST_ACCESS):
if jellypy.CONFIG.HTTP_PLEX_ADMIN or (not admin_login == '1' and jellypy.CONFIG.ALLOW_GUEST_ACCESS):
plex_login = plex_user_login(token=token, headers=headers)
if plex_login is not None:
return True, plex_login[0], plex_login[1]
@@ -158,13 +158,13 @@ def check_credentials(username=None, password=None, token=None, admin_login='0',
def check_jwt_token():
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
jwt_token = cherrypy.request.cookie.get(jwt_cookie)
if jwt_token:
try:
payload = jwt.decode(
jwt_token.value, plexpy.CONFIG.JWT_SECRET, leeway=timedelta(seconds=10), algorithms=[JWT_ALGORITHM]
jwt_token.value, jellypy.CONFIG.JWT_SECRET, leeway=timedelta(seconds=10), algorithms=[JWT_ALGORITHM]
)
except (jwt.DecodeError, jwt.ExpiredSignatureError):
return None
@@ -186,14 +186,14 @@ def check_auth(*args, **kwargs):
for condition in conditions:
# A condition is just a callable that returns true or false
if not condition():
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT)
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT)
else:
redirect_uri = cherrypy.request.wsgi_environ['REQUEST_URI']
if redirect_uri:
redirect_uri = '?redirect_uri=' + quote(redirect_uri)
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/logout" + redirect_uri)
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/logout" + redirect_uri)
def requireAuth(*conditions):
@@ -268,11 +268,11 @@ def check_rate_limit(ip_address):
except ValueError:
last_success = 0
max_timestamp = max(last_success, last_timestamp - plexpy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL)
max_timestamp = max(last_success, last_timestamp - jellypy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL)
attempts = [login for login in result if login['timestamp'] >= max_timestamp and not login['success']]
if len(attempts) >= plexpy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS:
return max(last_timestamp - (timestamp() - plexpy.CONFIG.HTTP_RATE_LIMIT_LOCKOUT_TIME), 0)
if len(attempts) >= jellypy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS:
return max(last_timestamp - (timestamp() - jellypy.CONFIG.HTTP_RATE_LIMIT_LOCKOUT_TIME), 0)
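check_rate_limit above implements a sliding-window lockout: failed logins are counted from whichever is later, the last successful login or the start of the attempts interval, and once the count reaches HTTP_RATE_LIMIT_ATTEMPTS the caller is told how much of the lockout window remains. A hedged reconstruction of that arithmetic (the hunk is partial, so names and defaults here are illustrative):

import time

def lockout_remaining(failure_times, last_success,
                      attempts=10, interval=300, lockout=600):
    # failure_times: ascending timestamps of failed logins for one IP address.
    if not failure_times:
        return 0
    window_start = max(last_success, failure_times[-1] - interval)
    recent = [t for t in failure_times if t >= window_start]
    if len(recent) >= attempts:
        return max(failure_times[-1] + lockout - time.time(), 0)
    return 0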
# Controller to provide login and logout actions
@@ -280,9 +280,9 @@ def check_rate_limit(ip_address):
class AuthController(object):
def check_auth_enabled(self):
if not plexpy.CONFIG.HTTP_BASIC_AUTH and plexpy.CONFIG.HTTP_PASSWORD:
if not jellypy.CONFIG.HTTP_BASIC_AUTH and jellypy.CONFIG.HTTP_PASSWORD:
return
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT)
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT)
def on_login(self, username=None, user_id=None, user_group=None, success=False, oauth=False):
"""Called on successful login"""
@@ -310,12 +310,12 @@ class AuthController(object):
logger.debug("Tautulli WebAuth :: %s user '%s' logged out of Tautulli." % (user_group.capitalize(), username))
def get_loginform(self, redirect_uri=''):
from plexpy.webserve import serve_template
from jellypy.webserve import serve_template
return serve_template(templatename="login.html", title="Login", redirect_uri=unquote(redirect_uri))
@cherrypy.expose
def index(self, *args, **kwargs):
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/login")
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/login")
@cherrypy.expose
def login(self, redirect_uri='', *args, **kwargs):
@@ -331,12 +331,12 @@ class AuthController(object):
if payload:
self.on_logout(payload['user'], payload['user_group'])
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
cherrypy.response.cookie[jwt_cookie] = ''
cherrypy.response.cookie[jwt_cookie]['expires'] = 0
cherrypy.response.cookie[jwt_cookie]['path'] = plexpy.HTTP_ROOT.rstrip('/') or '/'
cherrypy.response.cookie[jwt_cookie]['path'] = jellypy.HTTP_ROOT.rstrip('/') or '/'
if plexpy.HTTP_ROOT != '/':
if jellypy.HTTP_ROOT != '/':
# Also expire the JWT on the root path
cherrypy.response.headers['Set-Cookie'] = jwt_cookie + '=""; expires=Thu, 01 Jan 1970 12:00:00 GMT; path=/'
@@ -345,7 +345,7 @@ class AuthController(object):
if redirect_uri:
redirect_uri = '?redirect_uri=' + redirect_uri
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/login" + redirect_uri)
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/login" + redirect_uri)
@cherrypy.expose
@cherrypy.tools.json_out()
@@ -383,7 +383,7 @@ class AuthController(object):
'exp': expiry
}
jwt_token = jwt.encode(payload, plexpy.CONFIG.JWT_SECRET, algorithm=JWT_ALGORITHM).decode('utf-8')
jwt_token = jwt.encode(payload, jellypy.CONFIG.JWT_SECRET, algorithm=JWT_ALGORITHM).decode('utf-8')
self.on_login(username=user_details['username'],
user_id=user_details['user_id'],
@@ -391,16 +391,16 @@ class AuthController(object):
success=True,
oauth=bool(token))
jwt_cookie = str(JWT_COOKIE_NAME + plexpy.CONFIG.PMS_UUID)
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
cherrypy.response.cookie[jwt_cookie] = jwt_token
cherrypy.response.cookie[jwt_cookie]['expires'] = int(time_delta.total_seconds())
cherrypy.response.cookie[jwt_cookie]['path'] = plexpy.HTTP_ROOT.rstrip('/') or '/'
cherrypy.response.cookie[jwt_cookie]['path'] = jellypy.HTTP_ROOT.rstrip('/') or '/'
cherrypy.response.cookie[jwt_cookie]['httponly'] = True
cherrypy.response.cookie[jwt_cookie]['samesite'] = 'lax'
cherrypy.request.login = payload
cherrypy.response.status = 200
return {'status': 'success', 'token': jwt_token, 'uuid': plexpy.CONFIG.PMS_UUID}
return {'status': 'success', 'token': jwt_token, 'uuid': jellypy.CONFIG.PMS_UUID}
elif admin_login == '1' and username:
self.on_login(username=username)
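The JWT handling in this file reduces to two operations: encode a payload with an expiry under CONFIG.JWT_SECRET, and decode it back with a small leeway, treating DecodeError/ExpiredSignatureError as a failed login. A minimal PyJWT sketch of that round trip (HS256 is an assumption here, since JWT_ALGORITHM is defined outside the hunks shown; PyJWT 1.x returns bytes from encode, hence the .decode('utf-8') above):

from datetime import datetime, timedelta
import jwt  # PyJWT

SECRET = 'change-me'   # stands in for jellypy.CONFIG.JWT_SECRET
ALGORITHM = 'HS256'    # assumed value of webauth's JWT_ALGORITHM

def issue_token(user, days=30):
    payload = {'user': user, 'exp': datetime.utcnow() + timedelta(days=days)}
    return jwt.encode(payload, SECRET, algorithm=ALGORITHM)

def verify_token(token):
    try:
        return jwt.decode(token, SECRET, leeway=timedelta(seconds=10),
                          algorithms=[ALGORITHM])
    except (jwt.DecodeError, jwt.ExpiredSignatureError):
        return None  # treated as not logged in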

File diff suppressed because it is too large

View File

@@ -20,34 +20,34 @@ import sys
import cherrypy
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import logger
import webauth
from helpers import create_https_certificates
from webserve import WebInterface, BaseRedirect
else:
from plexpy import logger
from plexpy import webauth
from plexpy.helpers import create_https_certificates
from plexpy.webserve import WebInterface, BaseRedirect
from jellypy import logger
from jellypy import webauth
from jellypy.helpers import create_https_certificates
from jellypy.webserve import WebInterface, BaseRedirect
def start():
logger.info("Tautulli WebStart :: Initializing Tautulli web server...")
web_config = {
'http_port': plexpy.HTTP_PORT,
'http_host': plexpy.CONFIG.HTTP_HOST,
'http_root': plexpy.CONFIG.HTTP_ROOT,
'http_environment': plexpy.CONFIG.HTTP_ENVIRONMENT,
'http_proxy': plexpy.CONFIG.HTTP_PROXY,
'enable_https': plexpy.CONFIG.ENABLE_HTTPS,
'https_cert': plexpy.CONFIG.HTTPS_CERT,
'https_cert_chain': plexpy.CONFIG.HTTPS_CERT_CHAIN,
'https_key': plexpy.CONFIG.HTTPS_KEY,
'http_username': plexpy.CONFIG.HTTP_USERNAME,
'http_password': plexpy.CONFIG.HTTP_PASSWORD,
'http_basic_auth': plexpy.CONFIG.HTTP_BASIC_AUTH
'http_port': jellypy.HTTP_PORT,
'http_host': jellypy.CONFIG.HTTP_HOST,
'http_root': jellypy.CONFIG.HTTP_ROOT,
'http_environment': jellypy.CONFIG.HTTP_ENVIRONMENT,
'http_proxy': jellypy.CONFIG.HTTP_PROXY,
'enable_https': jellypy.CONFIG.ENABLE_HTTPS,
'https_cert': jellypy.CONFIG.HTTPS_CERT,
'https_cert_chain': jellypy.CONFIG.HTTPS_CERT_CHAIN,
'https_key': jellypy.CONFIG.HTTPS_KEY,
'http_username': jellypy.CONFIG.HTTP_USERNAME,
'http_password': jellypy.CONFIG.HTTP_PASSWORD,
'http_basic_auth': jellypy.CONFIG.HTTP_BASIC_AUTH
}
initialize(web_config)
@@ -73,7 +73,7 @@ def initialize(options):
if enable_https:
# If either the HTTPS certificate or key do not exist, try to make self-signed ones.
if plexpy.CONFIG.HTTPS_CREATE_CERT and \
if jellypy.CONFIG.HTTPS_CREATE_CERT and \
(not (https_cert and os.path.exists(https_cert)) or
not (https_key and os.path.exists(https_key))):
if not create_https_certificates(https_cert, https_key):
@@ -96,7 +96,7 @@ def initialize(options):
'tools.decode.on': True
}
if plexpy.DEV:
if jellypy.DEV:
options_dict['environment'] = "test_suite"
options_dict['engine.autoreload.on'] = True
@@ -114,39 +114,39 @@ def initialize(options):
if options['http_password']:
login_allowed = ["Tautulli admin (username is '%s')" % options['http_username']]
if plexpy.CONFIG.HTTP_PLEX_ADMIN:
if jellypy.CONFIG.HTTP_PLEX_ADMIN:
login_allowed.append("Plex admin")
logger.info("Tautulli WebStart :: Web server authentication is enabled: %s.", ' and '.join(login_allowed))
if options['http_basic_auth']:
plexpy.AUTH_ENABLED = False
jellypy.AUTH_ENABLED = False
basic_auth_enabled = True
else:
plexpy.AUTH_ENABLED = True
jellypy.AUTH_ENABLED = True
basic_auth_enabled = False
cherrypy.tools.auth = cherrypy.Tool('before_handler', webauth.check_auth, priority=2)
else:
plexpy.AUTH_ENABLED = False
jellypy.AUTH_ENABLED = False
basic_auth_enabled = False
if options['http_root'].strip('/'):
plexpy.HTTP_ROOT = options['http_root'] = '/' + str(options['http_root'].strip('/')) + '/'
jellypy.HTTP_ROOT = options['http_root'] = '/' + str(options['http_root'].strip('/')) + '/'
else:
plexpy.HTTP_ROOT = options['http_root'] = '/'
jellypy.HTTP_ROOT = options['http_root'] = '/'
cherrypy.config.update(options_dict)
conf = {
'/': {
'engine.timeout_monitor.on': False,
'tools.staticdir.root': os.path.join(plexpy.PROG_DIR, 'data'),
'tools.staticdir.root': os.path.join(jellypy.PROG_DIR, 'data'),
'tools.proxy.on': bool(options['http_proxy']),
'tools.gzip.on': True,
'tools.gzip.mime_types': ['text/html', 'text/plain', 'text/css',
'text/javascript', 'application/json',
'application/javascript'],
'tools.auth.on': plexpy.AUTH_ENABLED,
'tools.auth.on': jellypy.AUTH_ENABLED,
'tools.auth_basic.on': basic_auth_enabled,
'tools.auth_basic.realm': 'Tautulli web server',
'tools.auth_basic.checkpassword': cherrypy.lib.auth_basic.checkpassword_dict({
@@ -216,7 +216,7 @@ def initialize(options):
},
'/cache': {
'tools.staticdir.on': True,
'tools.staticdir.dir': plexpy.CONFIG.CACHE_DIR,
'tools.staticdir.dir': jellypy.CONFIG.CACHE_DIR,
'tools.caching.on': True,
'tools.caching.force': True,
'tools.caching.delay': 0,
@@ -227,7 +227,7 @@ def initialize(options):
},
#'/pms_image_proxy': {
# 'tools.staticdir.on': True,
# 'tools.staticdir.dir': os.path.join(plexpy.CONFIG.CACHE_DIR, 'images'),
# 'tools.staticdir.dir': os.path.join(jellypy.CONFIG.CACHE_DIR, 'images'),
# 'tools.caching.on': True,
# 'tools.caching.force': True,
# 'tools.caching.delay': 0,
@@ -238,7 +238,7 @@ def initialize(options):
#},
'/favicon.ico': {
'tools.staticfile.on': True,
'tools.staticfile.filename': os.path.abspath(os.path.join(plexpy.PROG_DIR, 'data/interfaces/default/images/favicon/favicon.ico')),
'tools.staticfile.filename': os.path.abspath(os.path.join(jellypy.PROG_DIR, 'data/interfaces/default/images/favicon/favicon.ico')),
'tools.caching.on': True,
'tools.caching.force': True,
'tools.caching.delay': 0,
@@ -250,14 +250,14 @@ def initialize(options):
}
cherrypy.tree.mount(WebInterface(), options['http_root'], config=conf)
if plexpy.HTTP_ROOT != '/':
if jellypy.HTTP_ROOT != '/':
cherrypy.tree.mount(BaseRedirect(), '/')
try:
logger.info("Tautulli WebStart :: Starting Tautulli web server on %s://%s:%d%s", protocol,
options['http_host'], options['http_port'], options['http_root'])
#cherrypy.process.servers.check_port(str(options['http_host']), options['http_port'])
if not plexpy.DEV:
if not jellypy.DEV:
cherrypy.server.start()
else:
cherrypy.engine.signals.subscribe()
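initialize() above follows CherryPy's standard two-step configuration: global server options via cherrypy.config.update(), then per-path tool settings passed to cherrypy.tree.mount(). Stripped of the Tautulli specifics, the flow looks roughly like this (host, port, and the handler are illustrative):

import cherrypy

class Root(object):
    @cherrypy.expose
    def index(self):
        return 'hello'

cherrypy.config.update({'server.socket_host': '0.0.0.0',
                        'server.socket_port': 8181})
cherrypy.tree.mount(Root(), '/', config={
    '/': {'tools.gzip.on': True}  # per-path tools, as in conf above
})
cherrypy.engine.start()
cherrypy.engine.block()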

View File

@@ -29,34 +29,34 @@ try:
except ImportError:
import _winreg as winreg
import plexpy
if plexpy.PYTHON2:
import jellypy
if jellypy.PYTHON2:
import common
import logger
import versioncheck
else:
from plexpy import common
from plexpy import logger
from plexpy import versioncheck
from jellypy import common
from jellypy import logger
from jellypy import versioncheck
class WindowsSystemTray(object):
def __init__(self):
self.image_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/', plexpy.CONFIG.INTERFACE, 'images')
self.image_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/', jellypy.CONFIG.INTERFACE, 'images')
self.icon = os.path.join(self.image_dir, 'logo-circle.ico')
if plexpy.UPDATE_AVAILABLE:
if jellypy.UPDATE_AVAILABLE:
self.hover_text = common.PRODUCT + ' - Update Available!'
self.update_title = 'Check for Updates - Update Available!'
else:
self.hover_text = common.PRODUCT
self.update_title = 'Check for Updates'
if plexpy.CONFIG.LAUNCH_STARTUP:
if jellypy.CONFIG.LAUNCH_STARTUP:
launch_start_icon = os.path.join(self.image_dir, 'check-solid.ico')
else:
launch_start_icon = None
if plexpy.CONFIG.LAUNCH_BROWSER:
if jellypy.CONFIG.LAUNCH_BROWSER:
launch_browser_icon = os.path.join(self.image_dir, 'check-solid.ico')
else:
launch_browser_icon = None
@@ -70,7 +70,7 @@ class WindowsSystemTray(object):
[self.update_title, None, self.tray_check_update, None],
['Restart', None, self.tray_restart, None]
]
if not plexpy.FROZEN:
if not jellypy.FROZEN:
self.menu.insert(6, ['Update', None, self.tray_update, None])
self.tray_icon = SysTrayIcon(self.icon, self.hover_text, self.menu, on_quit=self.tray_quit)
@@ -89,22 +89,22 @@ class WindowsSystemTray(object):
self.tray_icon.update(**kwargs)
def tray_open(self, tray_icon):
plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, plexpy.HTTP_PORT, plexpy.HTTP_ROOT)
jellypy.launch_browser(jellypy.CONFIG.HTTP_HOST, jellypy.HTTP_PORT, jellypy.HTTP_ROOT)
def tray_startup(self, tray_icon):
plexpy.CONFIG.LAUNCH_STARTUP = not plexpy.CONFIG.LAUNCH_STARTUP
jellypy.CONFIG.LAUNCH_STARTUP = not jellypy.CONFIG.LAUNCH_STARTUP
set_startup()
def tray_browser(self, tray_icon):
plexpy.CONFIG.LAUNCH_BROWSER = not plexpy.CONFIG.LAUNCH_BROWSER
jellypy.CONFIG.LAUNCH_BROWSER = not jellypy.CONFIG.LAUNCH_BROWSER
set_startup()
def tray_check_update(self, tray_icon):
versioncheck.check_update()
def tray_update(self, tray_icon):
if plexpy.UPDATE_AVAILABLE:
plexpy.SIGNAL = 'update'
if jellypy.UPDATE_AVAILABLE:
jellypy.SIGNAL = 'update'
else:
self.hover_text = common.PRODUCT + ' - No Update Available'
self.update_title = 'Check for Updates - No Update Available'
@@ -112,13 +112,13 @@ class WindowsSystemTray(object):
self.update(hover_text=self.hover_text, menu_options=self.menu)
def tray_restart(self, tray_icon):
plexpy.SIGNAL = 'restart'
jellypy.SIGNAL = 'restart'
def tray_quit(self, tray_icon):
plexpy.SIGNAL = 'shutdown'
jellypy.SIGNAL = 'shutdown'
def change_tray_update_icon(self):
if plexpy.UPDATE_AVAILABLE:
if jellypy.UPDATE_AVAILABLE:
self.hover_text = common.PRODUCT + ' - Update Available!'
self.update_title = 'Check for Updates - Update Available!'
else:
@@ -128,11 +128,11 @@ class WindowsSystemTray(object):
self.update(hover_text=self.hover_text, menu_options=self.menu)
def change_tray_icons(self):
if plexpy.CONFIG.LAUNCH_STARTUP:
if jellypy.CONFIG.LAUNCH_STARTUP:
launch_start_icon = os.path.join(self.image_dir, 'check-solid.ico')
else:
launch_start_icon = None
if plexpy.CONFIG.LAUNCH_BROWSER:
if jellypy.CONFIG.LAUNCH_BROWSER:
launch_browser_icon = os.path.join(self.image_dir, 'check-solid.ico')
else:
launch_browser_icon = None
@@ -142,23 +142,23 @@ class WindowsSystemTray(object):
def set_startup():
if plexpy.WIN_SYS_TRAY_ICON:
plexpy.WIN_SYS_TRAY_ICON.change_tray_icons()
if jellypy.WIN_SYS_TRAY_ICON:
jellypy.WIN_SYS_TRAY_ICON.change_tray_icons()
startup_reg_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Run"
exe = sys.executable
run_args = [arg for arg in plexpy.ARGS if arg != '--nolaunch']
if plexpy.FROZEN:
run_args = [arg for arg in jellypy.ARGS if arg != '--nolaunch']
if jellypy.FROZEN:
args = [exe] + run_args
else:
args = [exe, plexpy.FULL_PATH] + run_args
args = [exe, jellypy.FULL_PATH] + run_args
registry_key_name = '{}_{}'.format(common.PRODUCT, plexpy.CONFIG.PMS_UUID)
registry_key_name = '{}_{}'.format(common.PRODUCT, jellypy.CONFIG.PMS_UUID)
cmd = ' '.join(cmd_quote(arg) for arg in args).replace('python.exe', 'pythonw.exe').replace("'", '"')
if plexpy.CONFIG.LAUNCH_STARTUP:
if jellypy.CONFIG.LAUNCH_STARTUP:
# Rename old Tautulli registry key
try:
registry_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, startup_reg_path, 0, winreg.KEY_ALL_ACCESS)
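set_startup above toggles a per-user autostart entry under the HKCU Run key. The registry side of that, reduced to a hedged Windows-only sketch (the entry name and command are illustrative):

import winreg

RUN_KEY = "Software\\Microsoft\\Windows\\CurrentVersion\\Run"

def set_run_at_startup(name, command, enable=True):
    # Add or remove a per-user autostart entry under the same Run key
    # that set_startup() manipulates above. Windows-only.
    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, RUN_KEY, 0,
                         winreg.KEY_ALL_ACCESS)
    try:
        if enable:
            winreg.SetValueEx(key, name, 0, winreg.REG_SZ, command)
        else:
            try:
                winreg.DeleteValue(key, name)
            except FileNotFoundError:
                pass  # entry was never registered
    finally:
        winreg.CloseKey(key)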

lib/IPy.py (1652 lines)

File diff suppressed because it is too large

View File

@@ -1,121 +0,0 @@
#!/usr/bin/python
###############################################################################
# Formatting filter for urllib2's HTTPHandler(debuglevel=1) output
# Copyright (c) 2013, Analytics Pros
#
# This project is free software, distributed under the BSD license.
# Analytics Pros offers consulting and integration services if your firm needs
# assistance in strategy, implementation, or auditing existing work.
###############################################################################
import sys, re, os
from io import StringIO
class BufferTranslator(object):
""" Provides a buffer-compatible interface for filtering buffer content.
"""
parsers = []
def __init__(self, output):
self.output = output
self.encoding = getattr(output, 'encoding', None)
def write(self, content):
content = self.translate(content)
self.output.write(content)
@staticmethod
def stripslashes(content):
return content.decode('string_escape')
@staticmethod
def addslashes(content):
return content.encode('string_escape')
def translate(self, line):
for pattern, method in self.parsers:
match = pattern.match(line)
if match:
return method(match)
return line
class LineBufferTranslator(BufferTranslator):
""" Line buffer implementation supports translation of line-format input
even when input is not already line-buffered. Caches input until newlines
occur, and then dispatches translated input to output buffer.
"""
def __init__(self, *a, **kw):
self._linepending = []
super(LineBufferTranslator, self).__init__(*a, **kw)
def write(self, _input):
lines = _input.splitlines(True)
for i in range(0, len(lines)):
last = i
if lines[i].endswith('\n'):
prefix = len(self._linepending) and ''.join(self._linepending) or ''
self.output.write(self.translate(prefix + lines[i]))
del self._linepending[0:]
last = -1
if last >= 0:
self._linepending.append(lines[ last ])
def __del__(self):
if len(self._linepending):
self.output.write(self.translate(''.join(self._linepending)))
class HTTPTranslator(LineBufferTranslator):
""" Translates output from |urllib2| HTTPHandler(debuglevel = 1) into
HTTP-compatible, readable text structures for human analysis.
"""
RE_LINE_PARSER = re.compile(r'^(?:([a-z]+):)\s*(\'?)([^\r\n]*)\2(?:[\r\n]*)$')
RE_LINE_BREAK = re.compile(r'(\r?\n|(?:\\r)?\\n)')
RE_HTTP_METHOD = re.compile(r'^(POST|GET|HEAD|DELETE|PUT|TRACE|OPTIONS)')
RE_PARAMETER_SPACER = re.compile(r'&([a-z0-9]+)=')
@classmethod
def spacer(cls, line):
return cls.RE_PARAMETER_SPACER.sub(r' &\1= ', line)
def translate(self, line):
parsed = self.RE_LINE_PARSER.match(line)
if parsed:
value = parsed.group(3)
stage = parsed.group(1)
if stage == 'send': # query string is rendered here
return '\n# HTTP Request:\n' + self.stripslashes(value)
elif stage == 'reply':
return '\n\n# HTTP Response:\n' + self.stripslashes(value)
elif stage == 'header':
return value + '\n'
else:
return value
return line
def consume(outbuffer = None): # Capture standard output
sys.stdout = HTTPTranslator(outbuffer or sys.stdout)
return sys.stdout
if __name__ == '__main__':
consume(sys.stdout).write(sys.stdin.read())
print('\n')
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4
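For context, this filter was wired up by pointing urllib's debug output through consume() and enabling debuglevel=1 on the handler, as the Tracker module below does in HTTPRequest.debug(). The module predates Python 3 (string_escape is a Python-2-only codec), so the wiring it was written for looks like this (the import name is hypothetical):

import sys
import urllib2
import httpdebug  # hypothetical import name for the filter module above

httpdebug.consume(sys.stdout)  # stdout now filters through HTTPTranslator
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPSHandler(debuglevel=1)))
urllib2.urlopen('https://www.example.com')  # dump is reformatted as HTTP text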

View File

@@ -1,424 +0,0 @@
from future.moves.urllib.request import urlopen, build_opener, install_opener
from future.moves.urllib.request import Request, HTTPSHandler
from future.moves.urllib.error import URLError, HTTPError
from future.moves.urllib.parse import urlencode
import random
import datetime
import time
import uuid
import hashlib
import socket
def generate_uuid(basedata=None):
""" Provides a _random_ UUID with no input, or a UUID4-format MD5 checksum of any input data provided """
if basedata is None:
return str(uuid.uuid4())
elif isinstance(basedata, str):
checksum = hashlib.md5(str(basedata).encode('utf-8')).hexdigest()
return '%8s-%4s-%4s-%4s-%12s' % (
checksum[0:8], checksum[8:12], checksum[12:16], checksum[16:20], checksum[20:32])
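A quick illustration of generate_uuid's two modes (outputs shown by shape only):

generate_uuid()                # fresh random UUID4 string
generate_uuid('user-42')       # MD5 of the input, regrouped 8-4-4-4-12
assert generate_uuid('user-42') == generate_uuid('user-42')  # deterministic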
class Time(datetime.datetime):
""" Wrappers and convenience methods for processing various time representations """
@classmethod
def from_unix(cls, seconds, milliseconds=0):
""" Produce a full |datetime.datetime| object from a Unix timestamp """
base = list(time.gmtime(seconds))[0:6]
base.append(milliseconds * 1000) # microseconds
return cls(*base)
@classmethod
def to_unix(cls, timestamp):
""" Wrapper over time module to produce Unix epoch time as a float """
if not isinstance(timestamp, datetime.datetime):
raise TypeError('Time.to_unix expects a datetime object')
base = time.mktime(timestamp.timetuple())
return base
@classmethod
def milliseconds_offset(cls, timestamp, now=None):
""" Offset time (in milliseconds) from a |datetime.datetime| object to now """
if isinstance(timestamp, (int, float)):
base = timestamp
else:
base = cls.to_unix(timestamp)
base = base + (timestamp.microsecond / 1000000)
if now is None:
now = time.time()
return (now - base) * 1000
class HTTPRequest(object):
""" URL Construction and request handling abstraction.
This is not intended to be used outside this module.
Automates mapping of persistent state (i.e. query parameters)
onto transient datasets for each query.
"""
endpoint = 'https://www.google-analytics.com/collect'
@staticmethod
def debug():
""" Activate debugging on urllib2 """
handler = HTTPSHandler(debuglevel=1)
opener = build_opener(handler)
install_opener(opener)
# Store properties for all requests
def __init__(self, user_agent=None, *args, **opts):
self.user_agent = user_agent or 'Analytics Pros - Universal Analytics (Python)'
@classmethod
def fixUTF8(cls, data): # Ensure proper encoding for UA's servers...
""" Convert all strings to UTF-8 """
for key in data:
if isinstance(data[key], str):
data[key] = data[key].encode('utf-8')
return data
# Apply stored properties to the given dataset & POST to the configured endpoint
def send(self, data):
request = Request(
self.endpoint + '?' + urlencode(self.fixUTF8(data)).encode('utf-8'),
headers={
'User-Agent': self.user_agent
}
)
self.open(request)
def open(self, request):
try:
return urlopen(request)
except HTTPError as e:
return False
except URLError as e:
self.cache_request(request)
return False
def cache_request(self, request):
# TODO: implement a proper caching mechanism here for re-transmitting hits
# record = (Time.now(), request.get_full_url(), request.get_data(), request.headers)
pass
class HTTPPost(HTTPRequest):
# Apply stored properties to the given dataset & POST to the configured endpoint
def send(self, data):
request = Request(
self.endpoint,
data=urlencode(self.fixUTF8(data)).encode('utf-8'),
headers={
'User-Agent': self.user_agent
}
)
self.open(request)
class Tracker(object):
""" Primary tracking interface for Universal Analytics """
params = None
parameter_alias = {}
valid_hittypes = ('pageview', 'event', 'social', 'screenview', 'transaction', 'item', 'exception', 'timing')
@classmethod
def alias(cls, typemap, base, *names):
""" Declare an alternate (humane) name for a measurement protocol parameter """
cls.parameter_alias[base] = (typemap, base)
for i in names:
cls.parameter_alias[i] = (typemap, base)
@classmethod
def coerceParameter(cls, name, value=None):
if isinstance(name, str) and name[0] == '&':
return name[1:], str(value)
elif name in cls.parameter_alias:
typecast, param_name = cls.parameter_alias.get(name)
return param_name, typecast(value)
else:
raise KeyError('Parameter "{0}" is not recognized'.format(name))
def payload(self, data):
for key, value in data.items():
try:
yield self.coerceParameter(key, value)
except KeyError:
continue
option_sequence = {
'pageview': [(str, 'dp')],
'event': [(str, 'ec'), (str, 'ea'), (str, 'el'), (int, 'ev')],
'social': [(str, 'sn'), (str, 'sa'), (str, 'st')],
'timing': [(str, 'utc'), (str, 'utv'), (str, 'utt'), (str, 'utl')]
}
@classmethod
def consume_options(cls, data, hittype, args):
""" Interpret sequential arguments related to known hittypes based on declared structures """
opt_position = 0
data['t'] = hittype # integrate hit type parameter
if hittype in cls.option_sequence:
for expected_type, optname in cls.option_sequence[hittype]:
if opt_position < len(args) and isinstance(args[opt_position], expected_type):
data[optname] = args[opt_position]
opt_position += 1
@classmethod
def hittime(cls, timestamp=None, age=None, milliseconds=None):
""" Returns an integer represeting the milliseconds offset for a given hit (relative to now) """
if isinstance(timestamp, (int, float)):
return int(Time.milliseconds_offset(Time.from_unix(timestamp, milliseconds=milliseconds)))
if isinstance(timestamp, datetime.datetime):
return int(Time.milliseconds_offset(timestamp))
if isinstance(age, (int, float)):
return int(age * 1000) + (milliseconds or 0)
@property
def account(self):
return self.params.get('tid', None)
def __init__(self, account, name=None, client_id=None, hash_client_id=False, user_id=None, user_agent=None,
use_post=True):
if use_post is False:
self.http = HTTPRequest(user_agent=user_agent)
else:
self.http = HTTPPost(user_agent=user_agent)
self.params = {'v': 1, 'tid': account}
if client_id is None:
client_id = generate_uuid()
self.params['cid'] = client_id
self.hash_client_id = hash_client_id
if user_id is not None:
self.params['uid'] = user_id
def set_timestamp(self, data):
""" Interpret time-related options, apply queue-time parameter as needed """
if 'hittime' in data: # an absolute timestamp
data['qt'] = self.hittime(timestamp=data.pop('hittime', None))
if 'hitage' in data: # a relative age (in seconds)
data['qt'] = self.hittime(age=data.pop('hitage', None))
def send(self, hittype, *args, **data):
""" Transmit HTTP requests to Google Analytics using the measurement protocol """
if hittype not in self.valid_hittypes:
raise KeyError('Unsupported Universal Analytics Hit Type: {0}'.format(repr(hittype)))
self.set_timestamp(data)
self.consume_options(data, hittype, args)
for item in args:  # process dictionary-object arguments of transient data
if isinstance(item, dict):
for key, val in self.payload(item):
data[key] = val
for k, v in self.params.items(): # update only absent parameters
if k not in data:
data[k] = v
data = dict(self.payload(data))
if self.hash_client_id:
data['cid'] = generate_uuid(data['cid'])
# Transmit the hit to Google...
self.http.send(data)
# Setting persistent attributes of the session/hit/etc (inc. custom dimensions/metrics)
def set(self, name, value=None):
if isinstance(name, dict):
for key, value in name.items():
try:
param, value = self.coerceParameter(key, value)
self.params[param] = value
except KeyError:
pass
elif isinstance(name, str):
try:
param, value = self.coerceParameter(name, value)
self.params[param] = value
except KeyError:
pass
def __getitem__(self, name):
param, value = self.coerceParameter(name, None)
return self.params.get(param, None)
def __setitem__(self, name, value):
param, value = self.coerceParameter(name, value)
self.params[param] = value
def __delitem__(self, name):
param, value = self.coerceParameter(name, None)
if param in self.params:
del self.params[param]
def safe_unicode(obj):
""" Safe convertion to the Unicode string version of the object """
try:
return str(obj)
except UnicodeDecodeError:
return obj.decode('utf-8')
# Declaring name mappings for Measurement Protocol parameters
MAX_CUSTOM_DEFINITIONS = 200
MAX_EC_LISTS = 11 # 1-based index
MAX_EC_PRODUCTS = 11 # 1-based index
MAX_EC_PROMOTIONS = 11 # 1-based index
Tracker.alias(int, 'v', 'protocol-version')
Tracker.alias(safe_unicode, 'cid', 'client-id', 'clientId', 'clientid')
Tracker.alias(safe_unicode, 'tid', 'trackingId', 'account')
Tracker.alias(safe_unicode, 'uid', 'user-id', 'userId', 'userid')
Tracker.alias(safe_unicode, 'uip', 'user-ip', 'userIp', 'ipaddr')
Tracker.alias(safe_unicode, 'ua', 'userAgent', 'userAgentOverride', 'user-agent')
Tracker.alias(safe_unicode, 'dp', 'page', 'path')
Tracker.alias(safe_unicode, 'dt', 'title', 'pagetitle', 'pageTitle', 'page-title')
Tracker.alias(safe_unicode, 'dl', 'location')
Tracker.alias(safe_unicode, 'dh', 'hostname')
Tracker.alias(safe_unicode, 'sc', 'sessioncontrol', 'session-control', 'sessionControl')
Tracker.alias(safe_unicode, 'dr', 'referrer', 'referer')
Tracker.alias(int, 'qt', 'queueTime', 'queue-time')
Tracker.alias(safe_unicode, 't', 'hitType', 'hittype')
Tracker.alias(int, 'aip', 'anonymizeIp', 'anonIp', 'anonymize-ip')
Tracker.alias(safe_unicode, 'ds', 'dataSource', 'data-source')
# Campaign attribution
Tracker.alias(safe_unicode, 'cn', 'campaign', 'campaignName', 'campaign-name')
Tracker.alias(safe_unicode, 'cs', 'source', 'campaignSource', 'campaign-source')
Tracker.alias(safe_unicode, 'cm', 'medium', 'campaignMedium', 'campaign-medium')
Tracker.alias(safe_unicode, 'ck', 'keyword', 'campaignKeyword', 'campaign-keyword')
Tracker.alias(safe_unicode, 'cc', 'content', 'campaignContent', 'campaign-content')
Tracker.alias(safe_unicode, 'ci', 'campaignId', 'campaignID', 'campaign-id')
# Technical specs
Tracker.alias(safe_unicode, 'sr', 'screenResolution', 'screen-resolution', 'resolution')
Tracker.alias(safe_unicode, 'vp', 'viewport', 'viewportSize', 'viewport-size')
Tracker.alias(safe_unicode, 'de', 'encoding', 'documentEncoding', 'document-encoding')
Tracker.alias(int, 'sd', 'colors', 'screenColors', 'screen-colors')
Tracker.alias(safe_unicode, 'ul', 'language', 'user-language', 'userLanguage')
# Mobile app
Tracker.alias(safe_unicode, 'an', 'appName', 'app-name', 'app')
Tracker.alias(safe_unicode, 'cd', 'contentDescription', 'screenName', 'screen-name', 'content-description')
Tracker.alias(safe_unicode, 'av', 'appVersion', 'app-version', 'version')
Tracker.alias(safe_unicode, 'aid', 'appID', 'appId', 'application-id', 'app-id', 'applicationId')
Tracker.alias(safe_unicode, 'aiid', 'appInstallerId', 'app-installer-id')
# Ecommerce
Tracker.alias(safe_unicode, 'ta', 'affiliation', 'transactionAffiliation', 'transaction-affiliation')
Tracker.alias(safe_unicode, 'ti', 'transaction', 'transactionId', 'transaction-id')
Tracker.alias(float, 'tr', 'revenue', 'transactionRevenue', 'transaction-revenue')
Tracker.alias(float, 'ts', 'shipping', 'transactionShipping', 'transaction-shipping')
Tracker.alias(float, 'tt', 'tax', 'transactionTax', 'transaction-tax')
Tracker.alias(safe_unicode, 'cu', 'currency', 'transactionCurrency',
'transaction-currency') # Currency code, e.g. USD, EUR
Tracker.alias(safe_unicode, 'in', 'item-name', 'itemName')
Tracker.alias(float, 'ip', 'item-price', 'itemPrice')
Tracker.alias(float, 'iq', 'item-quantity', 'itemQuantity')
Tracker.alias(safe_unicode, 'ic', 'item-code', 'sku', 'itemCode')
Tracker.alias(safe_unicode, 'iv', 'item-variation', 'item-category', 'itemCategory', 'itemVariation')
# Events
Tracker.alias(safe_unicode, 'ec', 'event-category', 'eventCategory', 'category')
Tracker.alias(safe_unicode, 'ea', 'event-action', 'eventAction', 'action')
Tracker.alias(safe_unicode, 'el', 'event-label', 'eventLabel', 'label')
Tracker.alias(int, 'ev', 'event-value', 'eventValue', 'value')
Tracker.alias(int, 'ni', 'noninteractive', 'nonInteractive', 'noninteraction', 'nonInteraction')
# Social
Tracker.alias(safe_unicode, 'sa', 'social-action', 'socialAction')
Tracker.alias(safe_unicode, 'sn', 'social-network', 'socialNetwork')
Tracker.alias(safe_unicode, 'st', 'social-target', 'socialTarget')
# Exceptions
Tracker.alias(safe_unicode, 'exd', 'exception-description', 'exceptionDescription', 'exDescription')
Tracker.alias(int, 'exf', 'exception-fatal', 'exceptionFatal', 'exFatal')
# User Timing
Tracker.alias(safe_unicode, 'utc', 'timingCategory', 'timing-category')
Tracker.alias(safe_unicode, 'utv', 'timingVariable', 'timing-variable')
Tracker.alias(float, 'utt', 'time', 'timingTime', 'timing-time')
Tracker.alias(safe_unicode, 'utl', 'timingLabel', 'timing-label')
Tracker.alias(float, 'dns', 'timingDNS', 'timing-dns')
Tracker.alias(float, 'pdt', 'timingPageLoad', 'timing-page-load')
Tracker.alias(float, 'rrt', 'timingRedirect', 'timing-redirect')
Tracker.alias(safe_unicode, 'tcp', 'timingTCPConnect', 'timing-tcp-connect')
Tracker.alias(safe_unicode, 'srt', 'timingServerResponse', 'timing-server-response')
# Custom dimensions and metrics
for i in range(0, 200):
Tracker.alias(safe_unicode, 'cd{0}'.format(i), 'dimension{0}'.format(i))
Tracker.alias(int, 'cm{0}'.format(i), 'metric{0}'.format(i))
# Content groups
for i in range(0, 5):
Tracker.alias(safe_unicode, 'cg{0}'.format(i), 'contentGroup{0}'.format(i))
# Enhanced Ecommerce
Tracker.alias(str, 'pa') # Product action
Tracker.alias(str, 'tcc') # Coupon code
Tracker.alias(str, 'pal') # Product action list
Tracker.alias(int, 'cos') # Checkout step
Tracker.alias(str, 'col') # Checkout step option
Tracker.alias(str, 'promoa') # Promotion action
for product_index in range(1, MAX_EC_PRODUCTS):
Tracker.alias(str, 'pr{0}id'.format(product_index)) # Product SKU
Tracker.alias(str, 'pr{0}nm'.format(product_index)) # Product name
Tracker.alias(str, 'pr{0}br'.format(product_index)) # Product brand
Tracker.alias(str, 'pr{0}ca'.format(product_index)) # Product category
Tracker.alias(str, 'pr{0}va'.format(product_index)) # Product variant
Tracker.alias(str, 'pr{0}pr'.format(product_index)) # Product price
Tracker.alias(int, 'pr{0}qt'.format(product_index)) # Product quantity
Tracker.alias(str, 'pr{0}cc'.format(product_index)) # Product coupon code
Tracker.alias(int, 'pr{0}ps'.format(product_index)) # Product position
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
Tracker.alias(str, 'pr{0}cd{1}'.format(product_index, custom_index)) # Product custom dimension
Tracker.alias(int, 'pr{0}cm{1}'.format(product_index, custom_index)) # Product custom metric
for list_index in range(1, MAX_EC_LISTS):
Tracker.alias(str, 'il{0}pi{1}id'.format(list_index, product_index)) # Product impression SKU
Tracker.alias(str, 'il{0}pi{1}nm'.format(list_index, product_index)) # Product impression name
Tracker.alias(str, 'il{0}pi{1}br'.format(list_index, product_index)) # Product impression brand
Tracker.alias(str, 'il{0}pi{1}ca'.format(list_index, product_index)) # Product impression category
Tracker.alias(str, 'il{0}pi{1}va'.format(list_index, product_index)) # Product impression variant
Tracker.alias(int, 'il{0}pi{1}ps'.format(list_index, product_index)) # Product impression position
Tracker.alias(int, 'il{0}pi{1}pr'.format(list_index, product_index)) # Product impression price
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
Tracker.alias(str, 'il{0}pi{1}cd{2}'.format(list_index, product_index,
custom_index)) # Product impression custom dimension
Tracker.alias(int, 'il{0}pi{1}cm{2}'.format(list_index, product_index,
custom_index)) # Product impression custom metric
for list_index in range(1, MAX_EC_LISTS):
Tracker.alias(str, 'il{0}nm'.format(list_index)) # Product impression list name
for promotion_index in range(1, MAX_EC_PROMOTIONS):
Tracker.alias(str, 'promo{0}id'.format(promotion_index)) # Promotion ID
Tracker.alias(str, 'promo{0}nm'.format(promotion_index)) # Promotion name
Tracker.alias(str, 'promo{0}cr'.format(promotion_index)) # Promotion creative
Tracker.alias(str, 'promo{0}ps'.format(promotion_index)) # Promotion position
# Shortcut for creating trackers
def create(account, *args, **kwargs):
return Tracker(account, *args, **kwargs)
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4
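End-to-end, the tracker above is used by creating an instance per property and sending typed hits; positional arguments are mapped through option_sequence and keyword arguments through the aliases. A hedged usage sketch (the import path assumes the package layout implied by the __init__.py below, the property ID is a placeholder, and send() performs a live POST to Google's collect endpoint):

from UniversalAnalytics import Tracker  # assumed package name

tracker = Tracker.create('UA-XXXXXXX-Y', client_id='example-client')
tracker.set('dimension1', 'beta')                    # persistent custom dimension (cd1)
tracker.send('pageview', '/home')                    # positional -> dp
tracker.send('event', 'app', 'launch', 'label', 42)  # ec, ea, el, ev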

View File

@@ -1 +0,0 @@
from . import Tracker

View File

@@ -1,608 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor
"""Utilities for determining application-specific dirs.
See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))
import sys
import os
PY3 = sys.version_info[0] == 3
if PY3:
unicode = str
if sys.platform.startswith('java'):
import platform
os_name = platform.java_ver()[3][0]
if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
system = 'win32'
elif os_name.startswith('Mac'): # "Mac OS X", etc.
system = 'darwin'
else: # "Linux", "SunOS", "FreeBSD", etc.
# Setting this to "linux2" is not ideal, but only Windows or Mac
# are actually checked for and the rest of the module expects
# *sys.platform* style strings.
system = 'linux2'
else:
system = sys.platform
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user data directories are:
Mac OS X: ~/Library/Application Support/<AppName>
Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
That means, by default "~/.local/share/<AppName>".
"""
if system == "win32":
if appauthor is None:
appauthor = appname
const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
path = os.path.normpath(_get_win_folder(const))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('~/Library/Application Support/')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
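user_data_dir is typically called with just the app name and author; version and roaming refine the Windows paths. Illustrative calls and their expected results per the table above ('SuperApp'/'Acme' are placeholder names):

print(user_data_dir('SuperApp', 'Acme'))
# Linux:   /home/<user>/.local/share/SuperApp
# macOS:   /Users/<user>/Library/Application Support/SuperApp
# Windows: C:\Users\<user>\AppData\Local\Acme\SuperApp
print(user_data_dir('SuperApp', 'Acme', version='1.0', roaming=True))
# Windows: C:\Users\<user>\AppData\Roaming\Acme\SuperApp\1.0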
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of data dirs should be
returned. By default, the first item from XDG_DATA_DIRS is
returned, or '/usr/local/share/<AppName>',
if XDG_DATA_DIRS is not set
Typical site data directories are:
Mac OS X: /Library/Application Support/<AppName>
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
For Unix, this is using the $XDG_DATA_DIRS[0] default.
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('/Library/Application Support')
if appname:
path = os.path.join(path, appname)
else:
# XDG default for $XDG_DATA_DIRS
# only first, if multipath is False
path = os.getenv('XDG_DATA_DIRS',
os.pathsep.join(['/usr/local/share', '/usr/share']))
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
if appname and version:
path = os.path.join(path, version)
return path
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific config dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user config directories are:
Mac OS X: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of config dirs should be
returned. By default, the first item from XDG_CONFIG_DIRS is
returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
Typical site config directories are:
Mac OS X: same as site_data_dir
Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
$XDG_CONFIG_DIRS
Win *: same as site_data_dir
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system in ["win32", "darwin"]:
path = site_data_dir(appname, appauthor)
if appname and version:
path = os.path.join(path, version)
else:
# XDG default for $XDG_CONFIG_DIRS
# only first, if multipath is False
path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific cache dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Cache" to the base app data dir for Windows. See
discussion below.
Typical user cache directories are:
Mac OS X: ~/Library/Caches/<AppName>
Unix: ~/.cache/<AppName> (XDG default)
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
On Windows the only suggestion in the MSDN docs is that local settings go in
the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
app data dir (the default returned by `user_data_dir` above). Apps typically
put cache data somewhere *under* the given dir here. Some examples:
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
...\Acme\SuperApp\Cache\1.0
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
This can be disabled with the `opinion=False` option.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
if opinion:
path = os.path.join(path, "Cache")
elif system == 'darwin':
path = os.path.expanduser('~/Library/Caches')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
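# Usage sketch (hypothetical names; assumes the module is importable as
# `appdirs`): per-user cache locations, and turning off the opinionated
# "Cache" suffix that is appended on Windows.
from appdirs import user_cache_dir

print(user_cache_dir("MyApp", "MyCompany"))                 # e.g. ~/.cache/MyApp on Linux
print(user_cache_dir("MyApp", "MyCompany", opinion=False))  # drops the trailing \Cache on Windows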
def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific state dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user state directories are:
Mac OS X: same as user_data_dir
Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
to extend the XDG spec and support $XDG_STATE_HOME.
That means, by default "~/.local/state/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific log dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Logs" to the base app data dir for Windows, and "log" to the
base cache dir for Unix. See discussion below.
Typical user log directories are:
Mac OS X: ~/Library/Logs/<AppName>
Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some Windows apps use for a logs dir.)
OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
value for Windows and appends "log" to the user cache dir for Unix.
This can be disabled with the `opinion=False` option.
"""
if system == "darwin":
path = os.path.join(
os.path.expanduser('~/Library/Logs'),
appname)
elif system == "win32":
path = user_data_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "Logs")
else:
path = user_cache_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "log")
if appname and version:
path = os.path.join(path, version)
return path
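# Usage sketch (hypothetical names; assumes the module is importable as
# `appdirs`): note that the version component lands after the opinionated
# "Logs"/"log" suffix, e.g. ~/.cache/MyApp/log/1.0 on Linux.
from appdirs import user_log_dir

print(user_log_dir("MyApp", "MyCompany", version="1.0"))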
class AppDirs(object):
"""Convenience wrapper for getting application dirs."""
def __init__(self, appname=None, appauthor=None, version=None,
roaming=False, multipath=False):
self.appname = appname
self.appauthor = appauthor
self.version = version
self.roaming = roaming
self.multipath = multipath
@property
def user_data_dir(self):
return user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_data_dir(self):
return site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_config_dir(self):
return user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_config_dir(self):
return site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_cache_dir(self):
return user_cache_dir(self.appname, self.appauthor,
version=self.version)
@property
def user_state_dir(self):
return user_state_dir(self.appname, self.appauthor,
version=self.version)
@property
def user_log_dir(self):
return user_log_dir(self.appname, self.appauthor,
version=self.version)
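# Usage sketch of the wrapper (hypothetical names; a trimmed-down version of
# the self-test at the bottom of this file).
from appdirs import AppDirs

dirs = AppDirs("MyApp", "MyCompany", version="1.0")
print(dirs.user_data_dir)    # per-user data
print(dirs.user_config_dir)  # per-user config
print(dirs.user_cache_dir)   # per-user cache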
#---- internal support stuff
def _get_win_folder_from_registry(csidl_name):
"""This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
if PY3:
import winreg as _winreg
else:
import _winreg
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
"CSIDL_COMMON_APPDATA": "Common AppData",
"CSIDL_LOCAL_APPDATA": "Local AppData",
}[csidl_name]
key = _winreg.OpenKey(
_winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
)
dir, type = _winreg.QueryValueEx(key, shell_folder_name)
return dir
def _get_win_folder_with_pywin32(csidl_name):
from win32com.shell import shellcon, shell
dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
# Try to make this a unicode path because SHGetFolderPath does
# not return unicode strings when there is unicode data in the
# path.
try:
dir = unicode(dir)
        # Downgrade to short path name if it has high-bit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
try:
import win32api
dir = win32api.GetShortPathName(dir)
except ImportError:
pass
except UnicodeError:
pass
return dir
def _get_win_folder_with_ctypes(csidl_name):
import ctypes
csidl_const = {
"CSIDL_APPDATA": 26,
"CSIDL_COMMON_APPDATA": 35,
"CSIDL_LOCAL_APPDATA": 28,
}[csidl_name]
buf = ctypes.create_unicode_buffer(1024)
ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
    # Downgrade to short path name if it has high-bit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in buf:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf2 = ctypes.create_unicode_buffer(1024)
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
buf = buf2
return buf.value
def _get_win_folder_with_jna(csidl_name):
import array
from com.sun import jna
from com.sun.jna.platform import win32
buf_size = win32.WinDef.MAX_PATH * 2
buf = array.zeros('c', buf_size)
shell = win32.Shell32.INSTANCE
shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
    # Downgrade to short path name if it has high-bit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf = array.zeros('c', buf_size)
kernel = win32.Kernel32.INSTANCE
if kernel.GetShortPathName(dir, buf, buf_size):
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
return dir
if system == "win32":
try:
import win32com.shell
_get_win_folder = _get_win_folder_with_pywin32
except ImportError:
try:
from ctypes import windll
_get_win_folder = _get_win_folder_with_ctypes
except ImportError:
try:
import com.sun.jna
_get_win_folder = _get_win_folder_with_jna
except ImportError:
_get_win_folder = _get_win_folder_from_registry
#---- self test code
if __name__ == "__main__":
appname = "MyApp"
appauthor = "MyCompany"
props = ("user_data_dir",
"user_config_dir",
"user_cache_dir",
"user_state_dir",
"user_log_dir",
"site_data_dir",
"site_config_dir")
print("-- app dirs %s --" % __version__)
print("-- app dirs (with optional 'version')")
dirs = AppDirs(appname, appauthor, version="1.0")
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'version')")
dirs = AppDirs(appname, appauthor)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'appauthor')")
dirs = AppDirs(appname)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (with disabled 'appauthor')")
dirs = AppDirs(appname, appauthor=False)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))

View File

@@ -1,10 +0,0 @@
from pkg_resources import get_distribution, DistributionNotFound
try:
release = get_distribution('APScheduler').version.split('-')[0]
except DistributionNotFound:
release = '3.5.0'
version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.'))
version = __version__ = '.'.join(str(x) for x in version_info[:3])
del get_distribution, DistributionNotFound
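# Worked example (the release string "3.6.3.post1" is made up): the generator
# above converts numeric components to ints, keeps the rest as strings, and
# the public version string keeps only the first three components.
parts = tuple(int(x) if x.isdigit() else x for x in "3.6.3.post1".split("."))
assert parts == (3, 6, 3, "post1")
assert ".".join(str(x) for x in parts[:3]) == "3.6.3"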

View File

@@ -1,94 +0,0 @@
__all__ = ('EVENT_SCHEDULER_STARTED', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_SCHEDULER_PAUSED',
'EVENT_SCHEDULER_RESUMED', 'EVENT_EXECUTOR_ADDED', 'EVENT_EXECUTOR_REMOVED',
'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', 'EVENT_ALL_JOBS_REMOVED',
'EVENT_JOB_ADDED', 'EVENT_JOB_REMOVED', 'EVENT_JOB_MODIFIED', 'EVENT_JOB_EXECUTED',
'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', 'EVENT_JOB_SUBMITTED', 'EVENT_JOB_MAX_INSTANCES',
'SchedulerEvent', 'JobEvent', 'JobExecutionEvent', 'JobSubmissionEvent')
EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0
EVENT_SCHEDULER_SHUTDOWN = 2 ** 1
EVENT_SCHEDULER_PAUSED = 2 ** 2
EVENT_SCHEDULER_RESUMED = 2 ** 3
EVENT_EXECUTOR_ADDED = 2 ** 4
EVENT_EXECUTOR_REMOVED = 2 ** 5
EVENT_JOBSTORE_ADDED = 2 ** 6
EVENT_JOBSTORE_REMOVED = 2 ** 7
EVENT_ALL_JOBS_REMOVED = 2 ** 8
EVENT_JOB_ADDED = 2 ** 9
EVENT_JOB_REMOVED = 2 ** 10
EVENT_JOB_MODIFIED = 2 ** 11
EVENT_JOB_EXECUTED = 2 ** 12
EVENT_JOB_ERROR = 2 ** 13
EVENT_JOB_MISSED = 2 ** 14
EVENT_JOB_SUBMITTED = 2 ** 15
EVENT_JOB_MAX_INSTANCES = 2 ** 16
EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED |
EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED |
EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED |
EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED |
EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES)
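# Usage sketch (not part of this file; BackgroundScheduler is assumed to be
# importable from apscheduler.schedulers.background): because every code is a
# distinct power of two, callers OR them into a mask when adding a listener.
from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
from apscheduler.schedulers.background import BackgroundScheduler

def on_job_done(event):
    # event is a JobExecutionEvent; .exception is only set on EVENT_JOB_ERROR
    print(event.job_id, "failed" if event.exception else "succeeded")

scheduler = BackgroundScheduler()
scheduler.add_listener(on_job_done, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)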
class SchedulerEvent(object):
"""
An event that concerns the scheduler itself.
:ivar code: the type code of this event
:ivar alias: alias of the job store or executor that was added or removed (if applicable)
"""
def __init__(self, code, alias=None):
super(SchedulerEvent, self).__init__()
self.code = code
self.alias = alias
def __repr__(self):
return '<%s (code=%d)>' % (self.__class__.__name__, self.code)
class JobEvent(SchedulerEvent):
"""
An event that concerns a job.
:ivar code: the type code of this event
:ivar job_id: identifier of the job in question
:ivar jobstore: alias of the job store containing the job in question
"""
def __init__(self, code, job_id, jobstore):
super(JobEvent, self).__init__(code)
self.code = code
self.job_id = job_id
self.jobstore = jobstore
class JobSubmissionEvent(JobEvent):
"""
An event that concerns the submission of a job to its executor.
:ivar scheduled_run_times: a list of datetimes when the job was intended to run
"""
def __init__(self, code, job_id, jobstore, scheduled_run_times):
super(JobSubmissionEvent, self).__init__(code, job_id, jobstore)
self.scheduled_run_times = scheduled_run_times
class JobExecutionEvent(JobEvent):
"""
An event that concerns the running of a job within its executor.
:ivar scheduled_run_time: the time when the job was scheduled to be run
:ivar retval: the return value of the successfully executed job
:ivar exception: the exception raised by the job
:ivar traceback: a formatted traceback for the exception
"""
def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None,
traceback=None):
super(JobExecutionEvent, self).__init__(code, job_id, jobstore)
self.scheduled_run_time = scheduled_run_time
self.retval = retval
self.exception = exception
self.traceback = traceback

View File

@@ -1,59 +0,0 @@
from __future__ import absolute_import
import sys
from apscheduler.executors.base import BaseExecutor, run_job
from apscheduler.util import iscoroutinefunction_partial
try:
from apscheduler.executors.base_py3 import run_coroutine_job
except ImportError:
run_coroutine_job = None
class AsyncIOExecutor(BaseExecutor):
"""
Runs jobs in the default executor of the event loop.
If the job function is a native coroutine function, it is scheduled to be run directly in the
event loop as soon as possible. All other functions are run in the event loop's default
executor which is usually a thread pool.
Plugin alias: ``asyncio``
"""
def start(self, scheduler, alias):
super(AsyncIOExecutor, self).start(scheduler, alias)
self._eventloop = scheduler._eventloop
self._pending_futures = set()
def shutdown(self, wait=True):
# There is no way to honor wait=True without converting this method into a coroutine method
for f in self._pending_futures:
if not f.done():
f.cancel()
self._pending_futures.clear()
def _do_submit_job(self, job, run_times):
def callback(f):
self._pending_futures.discard(f)
try:
events = f.result()
except BaseException:
self._run_job_error(job.id, *sys.exc_info()[1:])
else:
self._run_job_success(job.id, events)
if iscoroutinefunction_partial(job.func):
if run_coroutine_job is not None:
coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
f = self._eventloop.create_task(coro)
else:
raise Exception('Executing coroutine based jobs is not supported with Trollius')
else:
f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times,
self._logger.name)
f.add_done_callback(callback)
self._pending_futures.add(f)
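# Usage sketch (assumes the standard AsyncIOScheduler, which creates this
# executor under the 'default' alias): coroutine jobs run on the loop itself,
# while plain callables go to the loop's default thread-pool executor.
import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler

async def tick():
    print("tick")

scheduler = AsyncIOScheduler()
scheduler.add_job(tick, 'interval', seconds=5)
scheduler.start()
asyncio.get_event_loop().run_forever()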

View File

@@ -1,146 +0,0 @@
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from datetime import datetime, timedelta
from traceback import format_tb
import logging
import sys
from pytz import utc
import six
from apscheduler.events import (
JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)
class MaxInstancesReachedError(Exception):
def __init__(self, job):
super(MaxInstancesReachedError, self).__init__(
'Job "%s" has already reached its maximum number of instances (%d)' %
(job.id, job.max_instances))
class BaseExecutor(six.with_metaclass(ABCMeta, object)):
"""Abstract base class that defines the interface that every executor must implement."""
_scheduler = None
_lock = None
_logger = logging.getLogger('apscheduler.executors')
def __init__(self):
super(BaseExecutor, self).__init__()
self._instances = defaultdict(lambda: 0)
def start(self, scheduler, alias):
"""
Called by the scheduler when the scheduler is being started or when the executor is being
added to an already running scheduler.
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting
this executor
:param str|unicode alias: alias of this executor as it was assigned to the scheduler
"""
self._scheduler = scheduler
self._lock = scheduler._create_lock()
self._logger = logging.getLogger('apscheduler.executors.%s' % alias)
def shutdown(self, wait=True):
"""
Shuts down this executor.
:param bool wait: ``True`` to wait until all submitted jobs
have been executed
"""
def submit_job(self, job, run_times):
"""
Submits job for execution.
:param Job job: job to execute
:param list[datetime] run_times: list of datetimes specifying
when the job should have been run
:raises MaxInstancesReachedError: if the maximum number of
allowed instances for this job has been reached
"""
assert self._lock is not None, 'This executor has not been started yet'
with self._lock:
if self._instances[job.id] >= job.max_instances:
raise MaxInstancesReachedError(job)
self._do_submit_job(job, run_times)
self._instances[job.id] += 1
@abstractmethod
def _do_submit_job(self, job, run_times):
"""Performs the actual task of scheduling `run_job` to be called."""
def _run_job_success(self, job_id, events):
"""
Called by the executor with the list of generated events when :func:`run_job` has been
successfully called.
"""
with self._lock:
self._instances[job_id] -= 1
if self._instances[job_id] == 0:
del self._instances[job_id]
for event in events:
self._scheduler._dispatch_event(event)
def _run_job_error(self, job_id, exc, traceback=None):
"""Called by the executor with the exception if there is an error calling `run_job`."""
with self._lock:
self._instances[job_id] -= 1
if self._instances[job_id] == 0:
del self._instances[job_id]
exc_info = (exc.__class__, exc, traceback)
self._logger.error('Error running job %s', job_id, exc_info=exc_info)
def run_job(job, jobstore_alias, run_times, logger_name):
"""
Called by executors to run the job. Returns a list of scheduler events to be dispatched by the
scheduler.
"""
events = []
logger = logging.getLogger(logger_name)
for run_time in run_times:
# See if the job missed its run time window, and handle
# possible misfires accordingly
if job.misfire_grace_time is not None:
difference = datetime.now(utc) - run_time
grace_time = timedelta(seconds=job.misfire_grace_time)
if difference > grace_time:
events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
run_time))
logger.warning('Run time of job "%s" was missed by %s', job, difference)
continue
logger.info('Running job "%s" (scheduled at %s)', job, run_time)
try:
retval = job.func(*job.args, **job.kwargs)
except BaseException:
exc, tb = sys.exc_info()[1:]
formatted_tb = ''.join(format_tb(tb))
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
exception=exc, traceback=formatted_tb))
logger.exception('Job "%s" raised an exception', job)
# This is to prevent cyclic references that would lead to memory leaks
if six.PY2:
sys.exc_clear()
del tb
else:
import traceback
traceback.clear_frames(tb)
del tb
else:
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
retval=retval))
logger.info('Job "%s" executed successfully', job)
return events
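# Worked example of the misfire check above (times made up): a run scheduled
# for 12:00:00 that is only picked up at 12:00:45 with misfire_grace_time=30
# is skipped, because the 45 s difference exceeds the 30 s grace timedelta.
from datetime import datetime, timedelta

run_time = datetime(2021, 2, 5, 12, 0, 0)
now = datetime(2021, 2, 5, 12, 0, 45)
assert (now - run_time) > timedelta(seconds=30)  # -> EVENT_JOB_MISSED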

View File

@@ -1,41 +0,0 @@
import logging
import sys
from datetime import datetime, timedelta
from traceback import format_tb
from pytz import utc
from apscheduler.events import (
JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)
async def run_coroutine_job(job, jobstore_alias, run_times, logger_name):
"""Coroutine version of run_job()."""
events = []
logger = logging.getLogger(logger_name)
for run_time in run_times:
# See if the job missed its run time window, and handle possible misfires accordingly
if job.misfire_grace_time is not None:
difference = datetime.now(utc) - run_time
grace_time = timedelta(seconds=job.misfire_grace_time)
if difference > grace_time:
events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
run_time))
logger.warning('Run time of job "%s" was missed by %s', job, difference)
continue
logger.info('Running job "%s" (scheduled at %s)', job, run_time)
try:
retval = await job.func(*job.args, **job.kwargs)
except BaseException:
exc, tb = sys.exc_info()[1:]
formatted_tb = ''.join(format_tb(tb))
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
exception=exc, traceback=formatted_tb))
logger.exception('Job "%s" raised an exception', job)
else:
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
retval=retval))
logger.info('Job "%s" executed successfully', job)
return events

View File

@@ -1,20 +0,0 @@
import sys
from apscheduler.executors.base import BaseExecutor, run_job
class DebugExecutor(BaseExecutor):
"""
A special executor that executes the target callable directly instead of deferring it to a
thread or process.
Plugin alias: ``debug``
"""
def _do_submit_job(self, job, run_times):
try:
events = run_job(job, job._jobstore_alias, run_times, self._logger.name)
except BaseException:
self._run_job_error(job.id, *sys.exc_info()[1:])
else:
self._run_job_success(job.id, events)

View File

@@ -1,30 +0,0 @@
from __future__ import absolute_import
import sys
from apscheduler.executors.base import BaseExecutor, run_job
try:
import gevent
except ImportError: # pragma: nocover
raise ImportError('GeventExecutor requires gevent installed')
class GeventExecutor(BaseExecutor):
"""
Runs jobs as greenlets.
Plugin alias: ``gevent``
"""
def _do_submit_job(self, job, run_times):
def callback(greenlet):
try:
events = greenlet.get()
except BaseException:
self._run_job_error(job.id, *sys.exc_info()[1:])
else:
self._run_job_success(job.id, events)
gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\
link(callback)

View File

@@ -1,54 +0,0 @@
from abc import abstractmethod
import concurrent.futures
from apscheduler.executors.base import BaseExecutor, run_job
class BasePoolExecutor(BaseExecutor):
@abstractmethod
def __init__(self, pool):
super(BasePoolExecutor, self).__init__()
self._pool = pool
def _do_submit_job(self, job, run_times):
def callback(f):
exc, tb = (f.exception_info() if hasattr(f, 'exception_info') else
(f.exception(), getattr(f.exception(), '__traceback__', None)))
if exc:
self._run_job_error(job.id, exc, tb)
else:
self._run_job_success(job.id, f.result())
f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name)
f.add_done_callback(callback)
def shutdown(self, wait=True):
self._pool.shutdown(wait)
class ThreadPoolExecutor(BasePoolExecutor):
"""
An executor that runs jobs in a concurrent.futures thread pool.
Plugin alias: ``threadpool``
:param max_workers: the maximum number of spawned threads.
"""
def __init__(self, max_workers=10):
pool = concurrent.futures.ThreadPoolExecutor(int(max_workers))
super(ThreadPoolExecutor, self).__init__(pool)
class ProcessPoolExecutor(BasePoolExecutor):
"""
An executor that runs jobs in a concurrent.futures process pool.
Plugin alias: ``processpool``
:param max_workers: the maximum number of spawned processes.
"""
def __init__(self, max_workers=10):
pool = concurrent.futures.ProcessPoolExecutor(int(max_workers))
super(ProcessPoolExecutor, self).__init__(pool)
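# Configuration sketch (worker counts are arbitrary): handing executor
# instances to a scheduler under their aliases, assuming BackgroundScheduler
# is available.
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler(executors={
    'default': ThreadPoolExecutor(max_workers=20),
    'processpool': ProcessPoolExecutor(max_workers=4),
})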

View File

@@ -1,54 +0,0 @@
from __future__ import absolute_import
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado.gen import convert_yielded
from apscheduler.executors.base import BaseExecutor, run_job
try:
from apscheduler.executors.base_py3 import run_coroutine_job
from apscheduler.util import iscoroutinefunction_partial
except ImportError:
def iscoroutinefunction_partial(func):
return False
class TornadoExecutor(BaseExecutor):
"""
Runs jobs either in a thread pool or directly on the I/O loop.
If the job function is a native coroutine function, it is scheduled to be run directly in the
I/O loop as soon as possible. All other functions are run in a thread pool.
Plugin alias: ``tornado``
:param int max_workers: maximum number of worker threads in the thread pool
"""
def __init__(self, max_workers=10):
super(TornadoExecutor, self).__init__()
self.executor = ThreadPoolExecutor(max_workers)
def start(self, scheduler, alias):
super(TornadoExecutor, self).start(scheduler, alias)
self._ioloop = scheduler._ioloop
def _do_submit_job(self, job, run_times):
def callback(f):
try:
events = f.result()
except BaseException:
self._run_job_error(job.id, *sys.exc_info()[1:])
else:
self._run_job_success(job.id, events)
if iscoroutinefunction_partial(job.func):
f = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
else:
f = self.executor.submit(run_job, job, job._jobstore_alias, run_times,
self._logger.name)
f = convert_yielded(f)
f.add_done_callback(callback)

View File

@@ -1,25 +0,0 @@
from __future__ import absolute_import
from apscheduler.executors.base import BaseExecutor, run_job
class TwistedExecutor(BaseExecutor):
"""
Runs jobs in the reactor's thread pool.
Plugin alias: ``twisted``
"""
def start(self, scheduler, alias):
super(TwistedExecutor, self).start(scheduler, alias)
self._reactor = scheduler._reactor
def _do_submit_job(self, job, run_times):
def callback(success, result):
if success:
self._run_job_success(job.id, result)
else:
self._run_job_error(job.id, result.value, result.tb)
self._reactor.getThreadPool().callInThreadWithCallback(
callback, run_job, job, job._jobstore_alias, run_times, self._logger.name)

View File

@@ -1,301 +0,0 @@
from inspect import ismethod, isclass
from uuid import uuid4
import six
from apscheduler.triggers.base import BaseTrigger
from apscheduler.util import (
ref_to_obj, obj_to_ref, datetime_repr, repr_escape, get_callable_name, check_callable_args,
convert_to_datetime)
try:
from collections.abc import Iterable, Mapping
except ImportError:
from collections import Iterable, Mapping
class Job(object):
"""
    Contains the options given when scheduling callables, as well as the job's current
    schedule and other state.
This class should never be instantiated by the user.
:var str id: the unique identifier of this job
:var str name: the description of this job
:var func: the callable to execute
:var tuple|list args: positional arguments to the callable
:var dict kwargs: keyword arguments to the callable
:var bool coalesce: whether to only run the job once when several run times are due
:var trigger: the trigger object that controls the schedule of this job
:var str executor: the name of the executor that will run this job
    :var int misfire_grace_time: the amount of time (in seconds) that this job's execution is
        allowed to be late
:var int max_instances: the maximum number of concurrently executing instances allowed for this
job
:var datetime.datetime next_run_time: the next scheduled run time of this job
.. note::
The ``misfire_grace_time`` has some non-obvious effects on job execution. See the
:ref:`missed-job-executions` section in the documentation for an in-depth explanation.
"""
__slots__ = ('_scheduler', '_jobstore_alias', 'id', 'trigger', 'executor', 'func', 'func_ref',
'args', 'kwargs', 'name', 'misfire_grace_time', 'coalesce', 'max_instances',
'next_run_time')
def __init__(self, scheduler, id=None, **kwargs):
super(Job, self).__init__()
self._scheduler = scheduler
self._jobstore_alias = None
self._modify(id=id or uuid4().hex, **kwargs)
def modify(self, **changes):
"""
Makes the given changes to this job and saves it in the associated job store.
Accepted keyword arguments are the same as the variables on this class.
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.modify_job`
:return Job: this job instance
"""
self._scheduler.modify_job(self.id, self._jobstore_alias, **changes)
return self
def reschedule(self, trigger, **trigger_args):
"""
Shortcut for switching the trigger on this job.
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.reschedule_job`
:return Job: this job instance
"""
self._scheduler.reschedule_job(self.id, self._jobstore_alias, trigger, **trigger_args)
return self
def pause(self):
"""
Temporarily suspend the execution of this job.
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.pause_job`
:return Job: this job instance
"""
self._scheduler.pause_job(self.id, self._jobstore_alias)
return self
def resume(self):
"""
Resume the schedule of this job if previously paused.
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.resume_job`
:return Job: this job instance
"""
self._scheduler.resume_job(self.id, self._jobstore_alias)
return self
def remove(self):
"""
Unschedules this job and removes it from its associated job store.
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.remove_job`
"""
self._scheduler.remove_job(self.id, self._jobstore_alias)
@property
def pending(self):
"""
Returns ``True`` if the referenced job is still waiting to be added to its designated job
store.
"""
return self._jobstore_alias is None
#
# Private API
#
def _get_run_times(self, now):
"""
Computes the scheduled run times between ``next_run_time`` and ``now`` (inclusive).
:type now: datetime.datetime
:rtype: list[datetime.datetime]
"""
run_times = []
next_run_time = self.next_run_time
while next_run_time and next_run_time <= now:
run_times.append(next_run_time)
next_run_time = self.trigger.get_next_fire_time(next_run_time, now)
return run_times
def _modify(self, **changes):
"""
Validates the changes to the Job and makes the modifications if and only if all of them
validate.
"""
approved = {}
if 'id' in changes:
value = changes.pop('id')
if not isinstance(value, six.string_types):
raise TypeError("id must be a nonempty string")
if hasattr(self, 'id'):
raise ValueError('The job ID may not be changed')
approved['id'] = value
if 'func' in changes or 'args' in changes or 'kwargs' in changes:
func = changes.pop('func') if 'func' in changes else self.func
args = changes.pop('args') if 'args' in changes else self.args
kwargs = changes.pop('kwargs') if 'kwargs' in changes else self.kwargs
if isinstance(func, six.string_types):
func_ref = func
func = ref_to_obj(func)
elif callable(func):
try:
func_ref = obj_to_ref(func)
except ValueError:
# If this happens, this Job won't be serializable
func_ref = None
else:
raise TypeError('func must be a callable or a textual reference to one')
if not hasattr(self, 'name') and changes.get('name', None) is None:
changes['name'] = get_callable_name(func)
if isinstance(args, six.string_types) or not isinstance(args, Iterable):
raise TypeError('args must be a non-string iterable')
if isinstance(kwargs, six.string_types) or not isinstance(kwargs, Mapping):
raise TypeError('kwargs must be a dict-like object')
check_callable_args(func, args, kwargs)
approved['func'] = func
approved['func_ref'] = func_ref
approved['args'] = args
approved['kwargs'] = kwargs
if 'name' in changes:
value = changes.pop('name')
if not value or not isinstance(value, six.string_types):
raise TypeError("name must be a nonempty string")
approved['name'] = value
if 'misfire_grace_time' in changes:
value = changes.pop('misfire_grace_time')
if value is not None and (not isinstance(value, six.integer_types) or value <= 0):
raise TypeError('misfire_grace_time must be either None or a positive integer')
approved['misfire_grace_time'] = value
if 'coalesce' in changes:
value = bool(changes.pop('coalesce'))
approved['coalesce'] = value
if 'max_instances' in changes:
value = changes.pop('max_instances')
if not isinstance(value, six.integer_types) or value <= 0:
raise TypeError('max_instances must be a positive integer')
approved['max_instances'] = value
if 'trigger' in changes:
trigger = changes.pop('trigger')
if not isinstance(trigger, BaseTrigger):
raise TypeError('Expected a trigger instance, got %s instead' %
trigger.__class__.__name__)
approved['trigger'] = trigger
if 'executor' in changes:
value = changes.pop('executor')
if not isinstance(value, six.string_types):
raise TypeError('executor must be a string')
approved['executor'] = value
if 'next_run_time' in changes:
value = changes.pop('next_run_time')
approved['next_run_time'] = convert_to_datetime(value, self._scheduler.timezone,
'next_run_time')
if changes:
raise AttributeError('The following are not modifiable attributes of Job: %s' %
', '.join(changes))
for key, value in six.iteritems(approved):
setattr(self, key, value)
def __getstate__(self):
# Don't allow this Job to be serialized if the function reference could not be determined
if not self.func_ref:
raise ValueError(
'This Job cannot be serialized since the reference to its callable (%r) could not '
'be determined. Consider giving a textual reference (module:function name) '
'instead.' % (self.func,))
# Instance methods cannot survive serialization as-is, so store the "self" argument
# explicitly
if ismethod(self.func) and not isclass(self.func.__self__):
args = (self.func.__self__,) + tuple(self.args)
else:
args = self.args
return {
'version': 1,
'id': self.id,
'func': self.func_ref,
'trigger': self.trigger,
'executor': self.executor,
'args': args,
'kwargs': self.kwargs,
'name': self.name,
'misfire_grace_time': self.misfire_grace_time,
'coalesce': self.coalesce,
'max_instances': self.max_instances,
'next_run_time': self.next_run_time
}
def __setstate__(self, state):
if state.get('version', 1) > 1:
raise ValueError('Job has version %s, but only version 1 can be handled' %
state['version'])
self.id = state['id']
self.func_ref = state['func']
self.func = ref_to_obj(self.func_ref)
self.trigger = state['trigger']
self.executor = state['executor']
self.args = state['args']
self.kwargs = state['kwargs']
self.name = state['name']
self.misfire_grace_time = state['misfire_grace_time']
self.coalesce = state['coalesce']
self.max_instances = state['max_instances']
self.next_run_time = state['next_run_time']
def __eq__(self, other):
if isinstance(other, Job):
return self.id == other.id
return NotImplemented
def __repr__(self):
return '<Job (id=%s name=%s)>' % (repr_escape(self.id), repr_escape(self.name))
def __str__(self):
return repr_escape(self.__unicode__())
def __unicode__(self):
if hasattr(self, 'next_run_time'):
status = ('next run at: ' + datetime_repr(self.next_run_time) if
self.next_run_time else 'paused')
else:
status = 'pending'
return u'%s (trigger: %s, %s)' % (self.name, self.trigger, status)
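# Usage sketch (hypothetical job function): Job instances are returned by
# add_job and expose the chainable helpers defined above.
from apscheduler.schedulers.background import BackgroundScheduler

def ping():
    print("ping")

scheduler = BackgroundScheduler()
scheduler.start()
job = scheduler.add_job(ping, 'interval', minutes=10, id='ping')
job.modify(max_instances=2)            # validated, then saved to the job store
job.reschedule('interval', minutes=5)  # swap the trigger in place
job.pause()
job.resume()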

View File

@@ -1,143 +0,0 @@
from abc import ABCMeta, abstractmethod
import logging
import six
class JobLookupError(KeyError):
"""Raised when the job store cannot find a job for update or removal."""
def __init__(self, job_id):
super(JobLookupError, self).__init__(u'No job by the id of %s was found' % job_id)
class ConflictingIdError(KeyError):
"""Raised when the uniqueness of job IDs is being violated."""
def __init__(self, job_id):
super(ConflictingIdError, self).__init__(
u'Job identifier (%s) conflicts with an existing job' % job_id)
class TransientJobError(ValueError):
"""
    Raised when an attempt to add a transient job (one with no func_ref) to a persistent
    job store is detected.
"""
def __init__(self, job_id):
super(TransientJobError, self).__init__(
u'Job (%s) cannot be added to this job store because a reference to the callable '
u'could not be determined.' % job_id)
class BaseJobStore(six.with_metaclass(ABCMeta)):
"""Abstract base class that defines the interface that every job store must implement."""
_scheduler = None
_alias = None
_logger = logging.getLogger('apscheduler.jobstores')
def start(self, scheduler, alias):
"""
Called by the scheduler when the scheduler is being started or when the job store is being
added to an already running scheduler.
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting
this job store
:param str|unicode alias: alias of this job store as it was assigned to the scheduler
"""
self._scheduler = scheduler
self._alias = alias
self._logger = logging.getLogger('apscheduler.jobstores.%s' % alias)
def shutdown(self):
"""Frees any resources still bound to this job store."""
def _fix_paused_jobs_sorting(self, jobs):
for i, job in enumerate(jobs):
if job.next_run_time is not None:
if i > 0:
paused_jobs = jobs[:i]
del jobs[:i]
jobs.extend(paused_jobs)
break
@abstractmethod
def lookup_job(self, job_id):
"""
        Returns a specific job, or ``None`` if it isn't found.
The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of
the returned job to point to the scheduler and itself, respectively.
:param str|unicode job_id: identifier of the job
:rtype: Job
"""
@abstractmethod
def get_due_jobs(self, now):
"""
Returns the list of jobs that have ``next_run_time`` earlier or equal to ``now``.
The returned jobs must be sorted by next run time (ascending).
:param datetime.datetime now: the current (timezone aware) datetime
:rtype: list[Job]
"""
@abstractmethod
def get_next_run_time(self):
"""
Returns the earliest run time of all the jobs stored in this job store, or ``None`` if
there are no active jobs.
:rtype: datetime.datetime
"""
@abstractmethod
def get_all_jobs(self):
"""
Returns a list of all jobs in this job store.
The returned jobs should be sorted by next run time (ascending).
Paused jobs (next_run_time == None) should be sorted last.
The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of
the returned jobs to point to the scheduler and itself, respectively.
:rtype: list[Job]
"""
@abstractmethod
def add_job(self, job):
"""
Adds the given job to this store.
:param Job job: the job to add
:raises ConflictingIdError: if there is another job in this store with the same ID
"""
@abstractmethod
def update_job(self, job):
"""
Replaces the job in the store with the given newer version.
:param Job job: the job to update
:raises JobLookupError: if the job does not exist
"""
@abstractmethod
def remove_job(self, job_id):
"""
Removes the given job from this store.
:param str|unicode job_id: identifier of the job
:raises JobLookupError: if the job does not exist
"""
@abstractmethod
def remove_all_jobs(self):
"""Removes all jobs from this store."""
def __repr__(self):
return '<%s>' % self.__class__.__name__

View File

@@ -1,108 +0,0 @@
from __future__ import absolute_import
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import datetime_to_utc_timestamp
class MemoryJobStore(BaseJobStore):
"""
Stores jobs in an array in RAM. Provides no persistence support.
Plugin alias: ``memory``
"""
def __init__(self):
super(MemoryJobStore, self).__init__()
# list of (job, timestamp), sorted by next_run_time and job id (ascending)
self._jobs = []
self._jobs_index = {} # id -> (job, timestamp) lookup table
def lookup_job(self, job_id):
return self._jobs_index.get(job_id, (None, None))[0]
def get_due_jobs(self, now):
now_timestamp = datetime_to_utc_timestamp(now)
pending = []
for job, timestamp in self._jobs:
if timestamp is None or timestamp > now_timestamp:
break
pending.append(job)
return pending
def get_next_run_time(self):
return self._jobs[0][0].next_run_time if self._jobs else None
def get_all_jobs(self):
return [j[0] for j in self._jobs]
def add_job(self, job):
if job.id in self._jobs_index:
raise ConflictingIdError(job.id)
timestamp = datetime_to_utc_timestamp(job.next_run_time)
index = self._get_job_index(timestamp, job.id)
self._jobs.insert(index, (job, timestamp))
self._jobs_index[job.id] = (job, timestamp)
def update_job(self, job):
old_job, old_timestamp = self._jobs_index.get(job.id, (None, None))
if old_job is None:
raise JobLookupError(job.id)
# If the next run time has not changed, simply replace the job in its present index.
        # Otherwise, reinsert the job into the list to preserve the ordering.
old_index = self._get_job_index(old_timestamp, old_job.id)
new_timestamp = datetime_to_utc_timestamp(job.next_run_time)
if old_timestamp == new_timestamp:
self._jobs[old_index] = (job, new_timestamp)
else:
del self._jobs[old_index]
new_index = self._get_job_index(new_timestamp, job.id)
self._jobs.insert(new_index, (job, new_timestamp))
self._jobs_index[old_job.id] = (job, new_timestamp)
def remove_job(self, job_id):
job, timestamp = self._jobs_index.get(job_id, (None, None))
if job is None:
raise JobLookupError(job_id)
index = self._get_job_index(timestamp, job_id)
del self._jobs[index]
del self._jobs_index[job.id]
def remove_all_jobs(self):
self._jobs = []
self._jobs_index = {}
def shutdown(self):
self.remove_all_jobs()
def _get_job_index(self, timestamp, job_id):
"""
Returns the index of the given job, or if it's not found, the index where the job should be
inserted based on the given timestamp.
:type timestamp: int
:type job_id: str
"""
lo, hi = 0, len(self._jobs)
timestamp = float('inf') if timestamp is None else timestamp
while lo < hi:
mid = (lo + hi) // 2
mid_job, mid_timestamp = self._jobs[mid]
mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp
if mid_timestamp > timestamp:
hi = mid
elif mid_timestamp < timestamp:
lo = mid + 1
elif mid_job.id > job_id:
hi = mid
elif mid_job.id < job_id:
lo = mid + 1
else:
return mid
return lo
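# Behavioral sketch (toy data): the store keeps (job, timestamp) pairs sorted
# by run time with the job id as tie-breaker, and a None timestamp (a paused
# job) is treated as +infinity so paused jobs sink to the end. The same key
# logic in miniature:
entries = [(1200.0, "a"), (None, "paused"), (900.0, "b")]
key = lambda item: (float("inf") if item[0] is None else item[0], item[1])
print(sorted(entries, key=key))  # [(900.0, 'b'), (1200.0, 'a'), (None, 'paused')]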

View File

@@ -1,141 +0,0 @@
from __future__ import absolute_import
import warnings
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
import cPickle as pickle
except ImportError: # pragma: nocover
import pickle
try:
from bson.binary import Binary
from pymongo.errors import DuplicateKeyError
from pymongo import MongoClient, ASCENDING
except ImportError: # pragma: nocover
raise ImportError('MongoDBJobStore requires PyMongo installed')
class MongoDBJobStore(BaseJobStore):
"""
Stores jobs in a MongoDB database. Any leftover keyword arguments are directly passed to
pymongo's `MongoClient
<http://api.mongodb.org/python/current/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_.
Plugin alias: ``mongodb``
:param str database: database to store jobs in
:param str collection: collection to store jobs in
:param client: a :class:`~pymongo.mongo_client.MongoClient` instance to use instead of
providing connection arguments
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
highest available
"""
def __init__(self, database='apscheduler', collection='jobs', client=None,
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
super(MongoDBJobStore, self).__init__()
self.pickle_protocol = pickle_protocol
if not database:
raise ValueError('The "database" parameter must not be empty')
if not collection:
raise ValueError('The "collection" parameter must not be empty')
if client:
self.client = maybe_ref(client)
else:
connect_args.setdefault('w', 1)
self.client = MongoClient(**connect_args)
self.collection = self.client[database][collection]
def start(self, scheduler, alias):
super(MongoDBJobStore, self).start(scheduler, alias)
self.collection.ensure_index('next_run_time', sparse=True)
@property
def connection(self):
warnings.warn('The "connection" member is deprecated -- use "client" instead',
DeprecationWarning)
return self.client
def lookup_job(self, job_id):
document = self.collection.find_one(job_id, ['job_state'])
return self._reconstitute_job(document['job_state']) if document else None
def get_due_jobs(self, now):
timestamp = datetime_to_utc_timestamp(now)
return self._get_jobs({'next_run_time': {'$lte': timestamp}})
def get_next_run_time(self):
document = self.collection.find_one({'next_run_time': {'$ne': None}},
projection=['next_run_time'],
sort=[('next_run_time', ASCENDING)])
return utc_timestamp_to_datetime(document['next_run_time']) if document else None
def get_all_jobs(self):
jobs = self._get_jobs({})
self._fix_paused_jobs_sorting(jobs)
return jobs
def add_job(self, job):
try:
self.collection.insert({
'_id': job.id,
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
})
except DuplicateKeyError:
raise ConflictingIdError(job.id)
def update_job(self, job):
changes = {
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
}
result = self.collection.update({'_id': job.id}, {'$set': changes})
if result and result['n'] == 0:
raise JobLookupError(job.id)
def remove_job(self, job_id):
result = self.collection.remove(job_id)
if result and result['n'] == 0:
raise JobLookupError(job_id)
def remove_all_jobs(self):
self.collection.remove()
def shutdown(self):
self.client.close()
def _reconstitute_job(self, job_state):
job_state = pickle.loads(job_state)
job = Job.__new__(Job)
job.__setstate__(job_state)
job._scheduler = self._scheduler
job._jobstore_alias = self._alias
return job
def _get_jobs(self, conditions):
jobs = []
failed_job_ids = []
for document in self.collection.find(conditions, ['_id', 'job_state'],
sort=[('next_run_time', ASCENDING)]):
try:
jobs.append(self._reconstitute_job(document['job_state']))
except BaseException:
self._logger.exception('Unable to restore job "%s" -- removing it',
document['_id'])
failed_job_ids.append(document['_id'])
# Remove all the jobs we failed to restore
if failed_job_ids:
self.collection.remove({'_id': {'$in': failed_job_ids}})
return jobs
def __repr__(self):
return '<%s (client=%s)>' % (self.__class__.__name__, self.client)
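# Configuration sketch (host/port are placeholders): leftover keyword
# arguments such as host and port are forwarded to MongoClient.
from apscheduler.jobstores.mongodb import MongoDBJobStore
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler(jobstores={
    'default': MongoDBJobStore(database='apscheduler', collection='jobs',
                               host='localhost', port=27017),
})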

View File

@@ -1,150 +0,0 @@
from __future__ import absolute_import
from datetime import datetime
from pytz import utc
import six
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
import cPickle as pickle
except ImportError: # pragma: nocover
import pickle
try:
from redis import Redis
except ImportError: # pragma: nocover
raise ImportError('RedisJobStore requires redis installed')
class RedisJobStore(BaseJobStore):
"""
Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to redis's
:class:`~redis.StrictRedis`.
Plugin alias: ``redis``
:param int db: the database number to store jobs in
:param str jobs_key: key to store jobs in
:param str run_times_key: key to store the jobs' run times in
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
highest available
"""
def __init__(self, db=0, jobs_key='apscheduler.jobs', run_times_key='apscheduler.run_times',
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
super(RedisJobStore, self).__init__()
if db is None:
raise ValueError('The "db" parameter must not be empty')
if not jobs_key:
raise ValueError('The "jobs_key" parameter must not be empty')
if not run_times_key:
raise ValueError('The "run_times_key" parameter must not be empty')
self.pickle_protocol = pickle_protocol
self.jobs_key = jobs_key
self.run_times_key = run_times_key
self.redis = Redis(db=int(db), **connect_args)
def lookup_job(self, job_id):
job_state = self.redis.hget(self.jobs_key, job_id)
return self._reconstitute_job(job_state) if job_state else None
def get_due_jobs(self, now):
timestamp = datetime_to_utc_timestamp(now)
job_ids = self.redis.zrangebyscore(self.run_times_key, 0, timestamp)
if job_ids:
job_states = self.redis.hmget(self.jobs_key, *job_ids)
return self._reconstitute_jobs(six.moves.zip(job_ids, job_states))
return []
def get_next_run_time(self):
next_run_time = self.redis.zrange(self.run_times_key, 0, 0, withscores=True)
if next_run_time:
return utc_timestamp_to_datetime(next_run_time[0][1])
def get_all_jobs(self):
job_states = self.redis.hgetall(self.jobs_key)
jobs = self._reconstitute_jobs(six.iteritems(job_states))
paused_sort_key = datetime(9999, 12, 31, tzinfo=utc)
return sorted(jobs, key=lambda job: job.next_run_time or paused_sort_key)
def add_job(self, job):
if self.redis.hexists(self.jobs_key, job.id):
raise ConflictingIdError(job.id)
with self.redis.pipeline() as pipe:
pipe.multi()
pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(),
self.pickle_protocol))
if job.next_run_time:
pipe.zadd(self.run_times_key,
{job.id: datetime_to_utc_timestamp(job.next_run_time)})
pipe.execute()
def update_job(self, job):
if not self.redis.hexists(self.jobs_key, job.id):
raise JobLookupError(job.id)
with self.redis.pipeline() as pipe:
pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(),
self.pickle_protocol))
if job.next_run_time:
pipe.zadd(self.run_times_key,
{job.id: datetime_to_utc_timestamp(job.next_run_time)})
else:
pipe.zrem(self.run_times_key, job.id)
pipe.execute()
def remove_job(self, job_id):
if not self.redis.hexists(self.jobs_key, job_id):
raise JobLookupError(job_id)
with self.redis.pipeline() as pipe:
pipe.hdel(self.jobs_key, job_id)
pipe.zrem(self.run_times_key, job_id)
pipe.execute()
def remove_all_jobs(self):
with self.redis.pipeline() as pipe:
pipe.delete(self.jobs_key)
pipe.delete(self.run_times_key)
pipe.execute()
def shutdown(self):
self.redis.connection_pool.disconnect()
def _reconstitute_job(self, job_state):
job_state = pickle.loads(job_state)
job = Job.__new__(Job)
job.__setstate__(job_state)
job._scheduler = self._scheduler
job._jobstore_alias = self._alias
return job
def _reconstitute_jobs(self, job_states):
jobs = []
failed_job_ids = []
for job_id, job_state in job_states:
try:
jobs.append(self._reconstitute_job(job_state))
except BaseException:
self._logger.exception('Unable to restore job "%s" -- removing it', job_id)
failed_job_ids.append(job_id)
# Remove all the jobs we failed to restore
if failed_job_ids:
with self.redis.pipeline() as pipe:
pipe.hdel(self.jobs_key, *failed_job_ids)
pipe.zrem(self.run_times_key, *failed_job_ids)
pipe.execute()
return jobs
def __repr__(self):
return '<%s>' % self.__class__.__name__
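# Configuration sketch (connection details are placeholders): leftover keyword
# arguments such as host and port are forwarded to redis.Redis.
from apscheduler.jobstores.redis import RedisJobStore
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler(jobstores={
    'default': RedisJobStore(db=1, host='localhost', port=6379),
})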

View File

@@ -1,155 +0,0 @@
from __future__ import absolute_import
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
import cPickle as pickle
except ImportError: # pragma: nocover
import pickle
try:
from rethinkdb import RethinkDB
except ImportError: # pragma: nocover
raise ImportError('RethinkDBJobStore requires rethinkdb installed')
class RethinkDBJobStore(BaseJobStore):
"""
Stores jobs in a RethinkDB database. Any leftover keyword arguments are directly passed to
rethinkdb's `RethinkdbClient <http://www.rethinkdb.com/api/#connect>`_.
Plugin alias: ``rethinkdb``
:param str database: database to store jobs in
:param str collection: collection to store jobs in
:param client: a :class:`rethinkdb.net.Connection` instance to use instead of providing
connection arguments
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
highest available
"""
def __init__(self, database='apscheduler', table='jobs', client=None,
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
super(RethinkDBJobStore, self).__init__()
if not database:
raise ValueError('The "database" parameter must not be empty')
if not table:
raise ValueError('The "table" parameter must not be empty')
self.database = database
self.table_name = table
self.table = None
self.client = client
self.pickle_protocol = pickle_protocol
self.connect_args = connect_args
self.r = RethinkDB()
self.conn = None
def start(self, scheduler, alias):
super(RethinkDBJobStore, self).start(scheduler, alias)
if self.client:
self.conn = maybe_ref(self.client)
else:
self.conn = self.r.connect(db=self.database, **self.connect_args)
if self.database not in self.r.db_list().run(self.conn):
self.r.db_create(self.database).run(self.conn)
if self.table_name not in self.r.table_list().run(self.conn):
self.r.table_create(self.table_name).run(self.conn)
if 'next_run_time' not in self.r.table(self.table_name).index_list().run(self.conn):
self.r.table(self.table_name).index_create('next_run_time').run(self.conn)
self.table = self.r.db(self.database).table(self.table_name)
def lookup_job(self, job_id):
results = list(self.table.get_all(job_id).pluck('job_state').run(self.conn))
return self._reconstitute_job(results[0]['job_state']) if results else None
def get_due_jobs(self, now):
return self._get_jobs(self.r.row['next_run_time'] <= datetime_to_utc_timestamp(now))
def get_next_run_time(self):
results = list(
self.table
.filter(self.r.row['next_run_time'] != None) # noqa
.order_by(self.r.asc('next_run_time'))
.map(lambda x: x['next_run_time'])
.limit(1)
.run(self.conn)
)
return utc_timestamp_to_datetime(results[0]) if results else None
def get_all_jobs(self):
jobs = self._get_jobs()
self._fix_paused_jobs_sorting(jobs)
return jobs
def add_job(self, job):
job_dict = {
'id': job.id,
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': self.r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
}
results = self.table.insert(job_dict).run(self.conn)
if results['errors'] > 0:
raise ConflictingIdError(job.id)
def update_job(self, job):
changes = {
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': self.r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
}
results = self.table.get_all(job.id).update(changes).run(self.conn)
skipped = False in map(lambda x: results[x] == 0, results.keys())
if results['skipped'] > 0 or results['errors'] > 0 or not skipped:
raise JobLookupError(job.id)
def remove_job(self, job_id):
results = self.table.get_all(job_id).delete().run(self.conn)
if results['deleted'] + results['skipped'] != 1:
raise JobLookupError(job_id)
def remove_all_jobs(self):
self.table.delete().run(self.conn)
def shutdown(self):
self.conn.close()
def _reconstitute_job(self, job_state):
job_state = pickle.loads(job_state)
job = Job.__new__(Job)
job.__setstate__(job_state)
job._scheduler = self._scheduler
job._jobstore_alias = self._alias
return job
def _get_jobs(self, predicate=None):
jobs = []
failed_job_ids = []
query = (self.table.filter(self.r.row['next_run_time'] != None).filter(predicate) # noqa
if predicate else self.table)
query = query.order_by('next_run_time', 'id').pluck('id', 'job_state')
for document in query.run(self.conn):
try:
jobs.append(self._reconstitute_job(document['job_state']))
except Exception:
self._logger.exception('Unable to restore job "%s" -- removing it', document['id'])
failed_job_ids.append(document['id'])
# Remove all the jobs we failed to restore
if failed_job_ids:
self.r.expr(failed_job_ids).for_each(
lambda job_id: self.table.get_all(job_id).delete()).run(self.conn)
return jobs
def __repr__(self):
connection = self.conn
return '<%s (connection=%s)>' % (self.__class__.__name__, connection)

View File

@@ -1,154 +0,0 @@
from __future__ import absolute_import
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
import cPickle as pickle
except ImportError: # pragma: nocover
import pickle
try:
from sqlalchemy import (
create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select)
from sqlalchemy.exc import IntegrityError
from sqlalchemy.sql.expression import null
except ImportError: # pragma: nocover
raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed')
class SQLAlchemyJobStore(BaseJobStore):
"""
Stores jobs in a database table using SQLAlchemy.
The table will be created if it doesn't exist in the database.
Plugin alias: ``sqlalchemy``
:param str url: connection string (see
:ref:`SQLAlchemy documentation <sqlalchemy:database_urls>` on this)
:param engine: an SQLAlchemy :class:`~sqlalchemy.engine.Engine` to use instead of creating a
new one based on ``url``
:param str tablename: name of the table to store jobs in
:param metadata: a :class:`~sqlalchemy.schema.MetaData` instance to use instead of creating a
new one
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
highest available
:param str tableschema: name of the (existing) schema in the target database where the table
should be
:param dict engine_options: keyword arguments to :func:`~sqlalchemy.create_engine`
(ignored if ``engine`` is given)
"""
def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', metadata=None,
pickle_protocol=pickle.HIGHEST_PROTOCOL, tableschema=None, engine_options=None):
super(SQLAlchemyJobStore, self).__init__()
self.pickle_protocol = pickle_protocol
metadata = maybe_ref(metadata) or MetaData()
if engine:
self.engine = maybe_ref(engine)
elif url:
self.engine = create_engine(url, **(engine_options or {}))
else:
raise ValueError('Need either "engine" or "url" defined')
# 191 = max key length in MySQL for InnoDB/utf8mb4 tables,
# 25 = precision that translates to an 8-byte float
self.jobs_t = Table(
tablename, metadata,
Column('id', Unicode(191, _warn_on_bytestring=False), primary_key=True),
Column('next_run_time', Float(25), index=True),
Column('job_state', LargeBinary, nullable=False),
schema=tableschema
)
def start(self, scheduler, alias):
super(SQLAlchemyJobStore, self).start(scheduler, alias)
self.jobs_t.create(self.engine, True)
def lookup_job(self, job_id):
selectable = select([self.jobs_t.c.job_state]).where(self.jobs_t.c.id == job_id)
job_state = self.engine.execute(selectable).scalar()
return self._reconstitute_job(job_state) if job_state else None
def get_due_jobs(self, now):
timestamp = datetime_to_utc_timestamp(now)
return self._get_jobs(self.jobs_t.c.next_run_time <= timestamp)
def get_next_run_time(self):
selectable = select([self.jobs_t.c.next_run_time]).\
where(self.jobs_t.c.next_run_time != null()).\
order_by(self.jobs_t.c.next_run_time).limit(1)
next_run_time = self.engine.execute(selectable).scalar()
return utc_timestamp_to_datetime(next_run_time)
def get_all_jobs(self):
jobs = self._get_jobs()
self._fix_paused_jobs_sorting(jobs)
return jobs
def add_job(self, job):
insert = self.jobs_t.insert().values(**{
'id': job.id,
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': pickle.dumps(job.__getstate__(), self.pickle_protocol)
})
try:
self.engine.execute(insert)
except IntegrityError:
raise ConflictingIdError(job.id)
def update_job(self, job):
update = self.jobs_t.update().values(**{
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': pickle.dumps(job.__getstate__(), self.pickle_protocol)
}).where(self.jobs_t.c.id == job.id)
result = self.engine.execute(update)
if result.rowcount == 0:
raise JobLookupError(job.id)
def remove_job(self, job_id):
delete = self.jobs_t.delete().where(self.jobs_t.c.id == job_id)
result = self.engine.execute(delete)
if result.rowcount == 0:
raise JobLookupError(job_id)
def remove_all_jobs(self):
delete = self.jobs_t.delete()
self.engine.execute(delete)
def shutdown(self):
self.engine.dispose()
def _reconstitute_job(self, job_state):
job_state = pickle.loads(job_state)
job_state['jobstore'] = self
job = Job.__new__(Job)
job.__setstate__(job_state)
job._scheduler = self._scheduler
job._jobstore_alias = self._alias
return job
def _get_jobs(self, *conditions):
jobs = []
selectable = select([self.jobs_t.c.id, self.jobs_t.c.job_state]).\
order_by(self.jobs_t.c.next_run_time)
selectable = selectable.where(*conditions) if conditions else selectable
failed_job_ids = set()
for row in self.engine.execute(selectable):
try:
jobs.append(self._reconstitute_job(row.job_state))
except BaseException:
self._logger.exception('Unable to restore job "%s" -- removing it', row.id)
failed_job_ids.add(row.id)
# Remove all the jobs we failed to restore
if failed_job_ids:
delete = self.jobs_t.delete().where(self.jobs_t.c.id.in_(failed_job_ids))
self.engine.execute(delete)
return jobs
def __repr__(self):
return '<%s (url=%s)>' % (self.__class__.__name__, self.engine.url)
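# --- Usage sketch (editor's addition, not part of the original file) ---
# Jobs persisted through this store survive scheduler restarts; the SQLite
# URL below is a made-up example path.
if __name__ == '__main__':
    from apscheduler.schedulers.background import BackgroundScheduler

    scheduler = BackgroundScheduler()
    scheduler.add_jobstore('sqlalchemy', url='sqlite:///example_jobs.sqlite')
    scheduler.start()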

View File

@@ -1,179 +0,0 @@
from __future__ import absolute_import
import os
from datetime import datetime
from pytz import utc
from kazoo.exceptions import NoNodeError, NodeExistsError
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
import cPickle as pickle
except ImportError: # pragma: nocover
import pickle
try:
from kazoo.client import KazooClient
except ImportError: # pragma: nocover
raise ImportError('ZooKeeperJobStore requires Kazoo installed')
class ZooKeeperJobStore(BaseJobStore):
"""
Stores jobs in a ZooKeeper tree. Any leftover keyword arguments are directly passed to
kazoo's `KazooClient
<http://kazoo.readthedocs.io/en/latest/api/client.html>`_.
Plugin alias: ``zookeeper``
:param str path: path to store jobs in
:param client: a :class:`~kazoo.client.KazooClient` instance to use instead of
providing connection arguments
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
highest available
"""
def __init__(self, path='/apscheduler', client=None, close_connection_on_exit=False,
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
super(ZooKeeperJobStore, self).__init__()
self.pickle_protocol = pickle_protocol
self.close_connection_on_exit = close_connection_on_exit
if not path:
raise ValueError('The "path" parameter must not be empty')
self.path = path
if client:
self.client = maybe_ref(client)
else:
self.client = KazooClient(**connect_args)
self._ensured_path = False
def _ensure_paths(self):
if not self._ensured_path:
self.client.ensure_path(self.path)
self._ensured_path = True
def start(self, scheduler, alias):
super(ZooKeeperJobStore, self).start(scheduler, alias)
if not self.client.connected:
self.client.start()
def lookup_job(self, job_id):
self._ensure_paths()
node_path = os.path.join(self.path, job_id)
try:
content, _ = self.client.get(node_path)
doc = pickle.loads(content)
job = self._reconstitute_job(doc['job_state'])
return job
except BaseException:
return None
def get_due_jobs(self, now):
timestamp = datetime_to_utc_timestamp(now)
jobs = [job_def['job'] for job_def in self._get_jobs()
if job_def['next_run_time'] is not None and job_def['next_run_time'] <= timestamp]
return jobs
def get_next_run_time(self):
next_runs = [job_def['next_run_time'] for job_def in self._get_jobs()
if job_def['next_run_time'] is not None]
return utc_timestamp_to_datetime(min(next_runs)) if len(next_runs) > 0 else None
def get_all_jobs(self):
jobs = [job_def['job'] for job_def in self._get_jobs()]
self._fix_paused_jobs_sorting(jobs)
return jobs
def add_job(self, job):
self._ensure_paths()
node_path = os.path.join(self.path, str(job.id))
value = {
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': job.__getstate__()
}
data = pickle.dumps(value, self.pickle_protocol)
try:
self.client.create(node_path, value=data)
except NodeExistsError:
raise ConflictingIdError(job.id)
def update_job(self, job):
self._ensure_paths()
node_path = os.path.join(self.path, str(job.id))
changes = {
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': job.__getstate__()
}
data = pickle.dumps(changes, self.pickle_protocol)
try:
self.client.set(node_path, value=data)
except NoNodeError:
raise JobLookupError(job.id)
def remove_job(self, job_id):
self._ensure_paths()
node_path = os.path.join(self.path, str(job_id))
try:
self.client.delete(node_path)
except NoNodeError:
raise JobLookupError(job_id)
def remove_all_jobs(self):
try:
self.client.delete(self.path, recursive=True)
except NoNodeError:
pass
self._ensured_path = False
def shutdown(self):
if self.close_connection_on_exit:
self.client.stop()
self.client.close()
def _reconstitute_job(self, job_state):
job = Job.__new__(Job)
job.__setstate__(job_state)
job._scheduler = self._scheduler
job._jobstore_alias = self._alias
return job
def _get_jobs(self):
self._ensure_paths()
jobs = []
failed_job_ids = []
all_ids = self.client.get_children(self.path)
for node_name in all_ids:
try:
node_path = os.path.join(self.path, node_name)
content, stat = self.client.get(node_path)
doc = pickle.loads(content)
job_def = {
'job_id': node_name,
'next_run_time': doc['next_run_time'] if doc['next_run_time'] else None,
'job_state': doc['job_state'],
'job': self._reconstitute_job(doc['job_state']),
'creation_time': stat.ctime
}
jobs.append(job_def)
except BaseException:
self._logger.exception('Unable to restore job "%s" -- removing it', node_name)
failed_job_ids.append(node_name)
# Remove all the jobs we failed to restore
if failed_job_ids:
for failed_id in failed_job_ids:
self.remove_job(failed_id)
paused_sort_key = datetime(9999, 12, 31, tzinfo=utc)
return sorted(jobs, key=lambda job_def: (job_def['job'].next_run_time or paused_sort_key,
job_def['creation_time']))
def __repr__(self):
return '<%s (client=%s)>' % (self.__class__.__name__, self.client)
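# --- Usage sketch (editor's addition, not part of the original file) ---
# Leftover keyword arguments (here the example 'hosts' value) are passed
# straight through to kazoo's KazooClient, as the class docstring notes.
if __name__ == '__main__':
    from apscheduler.schedulers.background import BackgroundScheduler

    scheduler = BackgroundScheduler()
    scheduler.add_jobstore('zookeeper', path='/apscheduler_demo', hosts='localhost:2181')
    scheduler.start()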

View File

@@ -1,12 +0,0 @@
class SchedulerAlreadyRunningError(Exception):
"""Raised when attempting to start or configure the scheduler when it's already running."""
def __str__(self):
return 'Scheduler is already running'
class SchedulerNotRunningError(Exception):
"""Raised when attempting to shutdown the scheduler when it's not running."""
def __str__(self):
return 'Scheduler is not running'
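# --- Usage sketch (editor's addition, not part of the original file) ---
# BaseScheduler.start() raises SchedulerAlreadyRunningError on a second
# call, so callers can guard against double starts like this:
if __name__ == '__main__':
    from apscheduler.schedulers.background import BackgroundScheduler

    scheduler = BackgroundScheduler()
    scheduler.start()
    try:
        scheduler.start()
    except SchedulerAlreadyRunningError:
        pass  # already running; nothing to do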

View File

@@ -1,68 +0,0 @@
from __future__ import absolute_import
from functools import wraps, partial
from apscheduler.schedulers.base import BaseScheduler
from apscheduler.util import maybe_ref
try:
import asyncio
except ImportError: # pragma: nocover
try:
import trollius as asyncio
except ImportError:
raise ImportError(
'AsyncIOScheduler requires either Python 3.4 or the asyncio package installed')
def run_in_event_loop(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
wrapped = partial(func, self, *args, **kwargs)
self._eventloop.call_soon_threadsafe(wrapped)
return wrapper
class AsyncIOScheduler(BaseScheduler):
"""
A scheduler that runs on an asyncio (:pep:`3156`) event loop.
The default executor can run jobs based on native coroutines (``async def``).
Extra options:
============== =============================================================
``event_loop`` AsyncIO event loop to use (defaults to the global event loop)
============== =============================================================
"""
_eventloop = None
_timeout = None
@run_in_event_loop
def shutdown(self, wait=True):
super(AsyncIOScheduler, self).shutdown(wait)
self._stop_timer()
def _configure(self, config):
self._eventloop = maybe_ref(config.pop('event_loop', None)) or asyncio.get_event_loop()
super(AsyncIOScheduler, self)._configure(config)
def _start_timer(self, wait_seconds):
self._stop_timer()
if wait_seconds is not None:
self._timeout = self._eventloop.call_later(wait_seconds, self.wakeup)
def _stop_timer(self):
if self._timeout:
self._timeout.cancel()
del self._timeout
@run_in_event_loop
def wakeup(self):
self._stop_timer()
wait_seconds = self._process_jobs()
self._start_timer(wait_seconds)
def _create_default_executor(self):
from apscheduler.executors.asyncio import AsyncIOExecutor
return AsyncIOExecutor()
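# --- Usage sketch (editor's addition, not part of the original file) ---
# Jobs only fire while the event loop runs; tick() is a made-up example job.
if __name__ == '__main__':
    def tick():
        print('Tick!')

    scheduler = AsyncIOScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    scheduler.start()
    asyncio.get_event_loop().run_forever()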

View File

@@ -1,41 +0,0 @@
from __future__ import absolute_import
from threading import Thread, Event
from apscheduler.schedulers.base import BaseScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.util import asbool
class BackgroundScheduler(BlockingScheduler):
"""
A scheduler that runs in the background using a separate thread
(:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will return immediately).
Extra options:
========== =============================================================================
``daemon`` Set the ``daemon`` option in the background thread (defaults to ``True``, see
`the documentation
<https://docs.python.org/3.4/library/threading.html#thread-objects>`_
for further details)
========== =============================================================================
"""
_thread = None
def _configure(self, config):
self._daemon = asbool(config.pop('daemon', True))
super(BackgroundScheduler, self)._configure(config)
def start(self, *args, **kwargs):
self._event = Event()
BaseScheduler.start(self, *args, **kwargs)
self._thread = Thread(target=self._main_loop, name='APScheduler')
self._thread.daemon = self._daemon
self._thread.start()
def shutdown(self, *args, **kwargs):
super(BackgroundScheduler, self).shutdown(*args, **kwargs)
self._thread.join()
del self._thread
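# --- Usage sketch (editor's addition, not part of the original file) ---
# start() returns immediately, so the main thread has to stay alive for
# jobs to keep firing; tick() is a made-up example job.
if __name__ == '__main__':
    import time

    def tick():
        print('Tick!')

    scheduler = BackgroundScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    scheduler.start()
    try:
        time.sleep(30)
    finally:
        scheduler.shutdown()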

File diff suppressed because it is too large

View File

@@ -1,33 +0,0 @@
from __future__ import absolute_import
from threading import Event
from apscheduler.schedulers.base import BaseScheduler, STATE_STOPPED
from apscheduler.util import TIMEOUT_MAX
class BlockingScheduler(BaseScheduler):
"""
A scheduler that runs in the foreground
(:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will block).
"""
_event = None
def start(self, *args, **kwargs):
self._event = Event()
super(BlockingScheduler, self).start(*args, **kwargs)
self._main_loop()
def shutdown(self, wait=True):
super(BlockingScheduler, self).shutdown(wait)
self._event.set()
def _main_loop(self):
wait_seconds = TIMEOUT_MAX
while self.state != STATE_STOPPED:
self._event.wait(wait_seconds)
self._event.clear()
wait_seconds = self._process_jobs()
def wakeup(self):
self._event.set()
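# --- Usage sketch (editor's addition, not part of the original file) ---
# start() blocks the calling thread until shutdown() is invoked from a job
# or another thread; tick() is a made-up example job.
if __name__ == '__main__':
    def tick():
        print('Tick!')

    scheduler = BlockingScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    scheduler.start()  # blocks here until shutdown() is called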

View File

@@ -1,35 +0,0 @@
from __future__ import absolute_import
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.base import BaseScheduler
try:
from gevent.event import Event
from gevent.lock import RLock
import gevent
except ImportError: # pragma: nocover
raise ImportError('GeventScheduler requires gevent installed')
class GeventScheduler(BlockingScheduler):
"""A scheduler that runs as a Gevent greenlet."""
_greenlet = None
def start(self, *args, **kwargs):
self._event = Event()
BaseScheduler.start(self, *args, **kwargs)
self._greenlet = gevent.spawn(self._main_loop)
return self._greenlet
def shutdown(self, *args, **kwargs):
super(GeventScheduler, self).shutdown(*args, **kwargs)
self._greenlet.join()
del self._greenlet
def _create_lock(self):
return RLock()
def _create_default_executor(self):
from apscheduler.executors.gevent import GeventExecutor
return GeventExecutor()
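# --- Usage sketch (editor's addition, not part of the original file) ---
# start() returns the spawned greenlet, which the caller joins to keep the
# process alive; tick() is a made-up example job.
if __name__ == '__main__':
    def tick():
        print('Tick!')

    scheduler = GeventScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    greenlet = scheduler.start()
    greenlet.join()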

View File

@@ -1,43 +0,0 @@
from __future__ import absolute_import
from apscheduler.schedulers.base import BaseScheduler
try:
from PyQt5.QtCore import QObject, QTimer
except (ImportError, RuntimeError): # pragma: nocover
try:
from PyQt4.QtCore import QObject, QTimer
except ImportError:
try:
from PySide.QtCore import QObject, QTimer # noqa
except ImportError:
raise ImportError('QtScheduler requires either PyQt5, PyQt4 or PySide installed')
class QtScheduler(BaseScheduler):
"""A scheduler that runs in a Qt event loop."""
_timer = None
def shutdown(self, *args, **kwargs):
super(QtScheduler, self).shutdown(*args, **kwargs)
self._stop_timer()
def _start_timer(self, wait_seconds):
self._stop_timer()
if wait_seconds is not None:
wait_time = min(wait_seconds * 1000, 2147483647)
self._timer = QTimer.singleShot(wait_time, self._process_jobs)
def _stop_timer(self):
if self._timer:
if self._timer.isActive():
self._timer.stop()
del self._timer
def wakeup(self):
self._start_timer(0)
def _process_jobs(self):
wait_seconds = super(QtScheduler, self)._process_jobs()
self._start_timer(wait_seconds)
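# --- Usage sketch (editor's addition, not part of the original file) ---
# The Qt application's own event loop drives the scheduler; tick() is a
# made-up example job and PyQt5 is assumed here.
if __name__ == '__main__':
    import sys
    from PyQt5.QtWidgets import QApplication

    def tick():
        print('Tick!')

    app = QApplication(sys.argv)
    scheduler = QtScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    scheduler.start()
    sys.exit(app.exec_())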

View File

@@ -1,63 +0,0 @@
from __future__ import absolute_import
from datetime import timedelta
from functools import wraps
from apscheduler.schedulers.base import BaseScheduler
from apscheduler.util import maybe_ref
try:
from tornado.ioloop import IOLoop
except ImportError: # pragma: nocover
raise ImportError('TornadoScheduler requires tornado installed')
def run_in_ioloop(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self._ioloop.add_callback(func, self, *args, **kwargs)
return wrapper
class TornadoScheduler(BaseScheduler):
"""
A scheduler that runs on a Tornado IOLoop.
The default executor can run jobs based on native coroutines (``async def``).
Extra options:
=========== ===============================================================
``io_loop`` Tornado IOLoop instance to use (defaults to the global IO loop)
=========== ===============================================================
"""
_ioloop = None
_timeout = None
@run_in_ioloop
def shutdown(self, wait=True):
super(TornadoScheduler, self).shutdown(wait)
self._stop_timer()
def _configure(self, config):
self._ioloop = maybe_ref(config.pop('io_loop', None)) or IOLoop.current()
super(TornadoScheduler, self)._configure(config)
def _start_timer(self, wait_seconds):
self._stop_timer()
if wait_seconds is not None:
self._timeout = self._ioloop.add_timeout(timedelta(seconds=wait_seconds), self.wakeup)
def _stop_timer(self):
if self._timeout:
self._ioloop.remove_timeout(self._timeout)
del self._timeout
def _create_default_executor(self):
from apscheduler.executors.tornado import TornadoExecutor
return TornadoExecutor()
@run_in_ioloop
def wakeup(self):
self._stop_timer()
wait_seconds = self._process_jobs()
self._start_timer(wait_seconds)
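# --- Usage sketch (editor's addition, not part of the original file) ---
# The IOLoop must be started separately for jobs to fire; tick() is a
# made-up example job.
if __name__ == '__main__':
    def tick():
        print('Tick!')

    scheduler = TornadoScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    scheduler.start()
    IOLoop.current().start()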

View File

@@ -1,62 +0,0 @@
from __future__ import absolute_import
from functools import wraps
from apscheduler.schedulers.base import BaseScheduler
from apscheduler.util import maybe_ref
try:
from twisted.internet import reactor as default_reactor
except ImportError: # pragma: nocover
raise ImportError('TwistedScheduler requires Twisted installed')
def run_in_reactor(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self._reactor.callFromThread(func, self, *args, **kwargs)
return wrapper
class TwistedScheduler(BaseScheduler):
"""
A scheduler that runs on a Twisted reactor.
Extra options:
=========== ========================================================
``reactor`` Reactor instance to use (defaults to the global reactor)
=========== ========================================================
"""
_reactor = None
_delayedcall = None
def _configure(self, config):
self._reactor = maybe_ref(config.pop('reactor', default_reactor))
super(TwistedScheduler, self)._configure(config)
@run_in_reactor
def shutdown(self, wait=True):
super(TwistedScheduler, self).shutdown(wait)
self._stop_timer()
def _start_timer(self, wait_seconds):
self._stop_timer()
if wait_seconds is not None:
self._delayedcall = self._reactor.callLater(wait_seconds, self.wakeup)
def _stop_timer(self):
if self._delayedcall and self._delayedcall.active():
self._delayedcall.cancel()
del self._delayedcall
@run_in_reactor
def wakeup(self):
self._stop_timer()
wait_seconds = self._process_jobs()
self._start_timer(wait_seconds)
def _create_default_executor(self):
from apscheduler.executors.twisted import TwistedExecutor
return TwistedExecutor()
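# --- Usage sketch (editor's addition, not part of the original file) ---
# The reactor drives the scheduler; tick() is a made-up example job.
if __name__ == '__main__':
    def tick():
        print('Tick!')

    scheduler = TwistedScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    scheduler.start()
    default_reactor.run()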

View File

@@ -1,48 +0,0 @@
from abc import ABCMeta, abstractmethod
from datetime import timedelta
import random
import six
class BaseTrigger(six.with_metaclass(ABCMeta)):
"""Abstract base class that defines the interface that every trigger must implement."""
__slots__ = ()
@abstractmethod
def get_next_fire_time(self, previous_fire_time, now):
"""
Returns the next datetime to fire on. If no such datetime can be calculated, returns
``None``.
:param datetime.datetime previous_fire_time: the previous time the trigger was fired
:param datetime.datetime now: current datetime
"""
def _apply_jitter(self, next_fire_time, jitter, now):
"""
Randomize ``next_fire_time`` by adding or subtracting a random value (the jitter). If the
resulting datetime is in the past, returns the initial ``next_fire_time`` without jitter.
``next_fire_time - jitter <= result <= next_fire_time + jitter``
:param datetime.datetime|None next_fire_time: next fire time without jitter applied. If
``None``, returns ``None``.
:param int|None jitter: maximum number of seconds to add or subtract to
``next_fire_time``. If ``None`` or ``0``, returns ``next_fire_time``
:param datetime.datetime now: current datetime
:return datetime.datetime|None: next fire time with a jitter.
"""
if next_fire_time is None or not jitter:
return next_fire_time
next_fire_time_with_jitter = next_fire_time + timedelta(
seconds=random.uniform(-jitter, jitter))
if next_fire_time_with_jitter < now:
# Next fire time with jitter is in the past.
# Ignore jitter to avoid false misfire.
return next_fire_time
return next_fire_time_with_jitter
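# --- Illustrative sketch (editor's addition, not part of the original file) ---
# A minimal custom trigger showing the get_next_fire_time() contract: fire
# once, 'delay' seconds after the job is added. The class name and its
# 'delay' parameter are made up for this example.
class OneShotDelayTrigger(BaseTrigger):
    __slots__ = ('delay',)

    def __init__(self, delay):
        self.delay = delay

    def get_next_fire_time(self, previous_fire_time, now):
        # Fire once, then signal that the schedule is finished.
        if previous_fire_time is None:
            return now + timedelta(seconds=self.delay)
        return None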

View File

@@ -1,95 +0,0 @@
from apscheduler.triggers.base import BaseTrigger
from apscheduler.util import obj_to_ref, ref_to_obj
class BaseCombiningTrigger(BaseTrigger):
__slots__ = ('triggers', 'jitter')
def __init__(self, triggers, jitter=None):
self.triggers = triggers
self.jitter = jitter
def __getstate__(self):
return {
'version': 1,
'triggers': [(obj_to_ref(trigger.__class__), trigger.__getstate__())
for trigger in self.triggers],
'jitter': self.jitter
}
def __setstate__(self, state):
if state.get('version', 1) > 1:
raise ValueError(
'Got serialized data for version %s of %s, but only versions up to 1 can be '
'handled' % (state['version'], self.__class__.__name__))
self.jitter = state['jitter']
self.triggers = []
for clsref, state in state['triggers']:
cls = ref_to_obj(clsref)
trigger = cls.__new__(cls)
trigger.__setstate__(state)
self.triggers.append(trigger)
def __repr__(self):
return '<{}({}{})>'.format(self.__class__.__name__, self.triggers,
', jitter={}'.format(self.jitter) if self.jitter else '')
class AndTrigger(BaseCombiningTrigger):
"""
Always returns the earliest next fire time that all the given triggers can agree on.
The trigger is considered to be finished when any of the given triggers has finished its
schedule.
Trigger alias: ``and``
:param list triggers: triggers to combine
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
"""
__slots__ = ()
def get_next_fire_time(self, previous_fire_time, now):
while True:
fire_times = [trigger.get_next_fire_time(previous_fire_time, now)
for trigger in self.triggers]
if None in fire_times:
return None
elif min(fire_times) == max(fire_times):
return self._apply_jitter(fire_times[0], self.jitter, now)
else:
now = max(fire_times)
def __str__(self):
return 'and[{}]'.format(', '.join(str(trigger) for trigger in self.triggers))
class OrTrigger(BaseCombiningTrigger):
"""
Always returns the earliest next fire time produced by any of the given triggers.
The trigger is considered finished when all the given triggers have finished their schedules.
Trigger alias: ``or``
:param list triggers: triggers to combine
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
.. note:: Triggers that depend on the previous fire time, such as the interval trigger, may
seem to behave strangely since they are always passed the previous fire time produced by
any of the given triggers.
"""
__slots__ = ()
def get_next_fire_time(self, previous_fire_time, now):
fire_times = [trigger.get_next_fire_time(previous_fire_time, now)
for trigger in self.triggers]
fire_times = [fire_time for fire_time in fire_times if fire_time is not None]
if fire_times:
return self._apply_jitter(min(fire_times), self.jitter, now)
else:
return None
def __str__(self):
return 'or[{}]'.format(', '.join(str(trigger) for trigger in self.triggers))
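# --- Usage sketch (editor's addition, not part of the original file) ---
# Combining an interval with a cron constraint: fire every two hours, but
# only on weekends.
if __name__ == '__main__':
    from apscheduler.triggers.cron import CronTrigger
    from apscheduler.triggers.interval import IntervalTrigger

    trigger = AndTrigger([IntervalTrigger(hours=2), CronTrigger(day_of_week='sat,sun')])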

View File

@@ -1,238 +0,0 @@
from datetime import datetime, timedelta
from tzlocal import get_localzone
import six
from apscheduler.triggers.base import BaseTrigger
from apscheduler.triggers.cron.fields import (
BaseField, MonthField, WeekField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES)
from apscheduler.util import datetime_ceil, convert_to_datetime, datetime_repr, astimezone
class CronTrigger(BaseTrigger):
"""
Triggers when current time matches all specified time constraints,
similarly to how the UNIX cron scheduler works.
:param int|str year: 4-digit year
:param int|str month: month (1-12)
:param int|str day: day of the month (1-31)
:param int|str week: ISO week (1-53)
:param int|str day_of_week: number or name of weekday (0-6 or mon,tue,wed,thu,fri,sat,sun)
:param int|str hour: hour (0-23)
:param int|str minute: minute (0-59)
:param int|str second: second (0-59)
:param datetime|str start_date: earliest possible date/time to trigger on (inclusive)
:param datetime|str end_date: latest possible date/time to trigger on (inclusive)
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (defaults
to scheduler timezone)
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
.. note:: The first weekday is always **monday**.
"""
FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second')
FIELDS_MAP = {
'year': BaseField,
'month': MonthField,
'week': WeekField,
'day': DayOfMonthField,
'day_of_week': DayOfWeekField,
'hour': BaseField,
'minute': BaseField,
'second': BaseField
}
__slots__ = 'timezone', 'start_date', 'end_date', 'fields', 'jitter'
def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None,
minute=None, second=None, start_date=None, end_date=None, timezone=None,
jitter=None):
if timezone:
self.timezone = astimezone(timezone)
elif isinstance(start_date, datetime) and start_date.tzinfo:
self.timezone = start_date.tzinfo
elif isinstance(end_date, datetime) and end_date.tzinfo:
self.timezone = end_date.tzinfo
else:
self.timezone = get_localzone()
self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')
self.jitter = jitter
values = dict((key, value) for (key, value) in six.iteritems(locals())
if key in self.FIELD_NAMES and value is not None)
self.fields = []
assign_defaults = False
for field_name in self.FIELD_NAMES:
if field_name in values:
exprs = values.pop(field_name)
is_default = False
assign_defaults = not values
elif assign_defaults:
exprs = DEFAULT_VALUES[field_name]
is_default = True
else:
exprs = '*'
is_default = True
field_class = self.FIELDS_MAP[field_name]
field = field_class(field_name, exprs, is_default)
self.fields.append(field)
@classmethod
def from_crontab(cls, expr, timezone=None):
"""
Create a :class:`~CronTrigger` from a standard crontab expression.
See https://en.wikipedia.org/wiki/Cron for more information on the format accepted here.
:param expr: minute, hour, day of month, month, day of week
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (
defaults to scheduler timezone)
:return: a :class:`~CronTrigger` instance
"""
values = expr.split()
if len(values) != 5:
raise ValueError('Wrong number of fields; got {}, expected 5'.format(len(values)))
return cls(minute=values[0], hour=values[1], day=values[2], month=values[3],
day_of_week=values[4], timezone=timezone)
def _increment_field_value(self, dateval, fieldnum):
"""
Increments the designated field and resets all less significant fields to their minimum
values.
:type dateval: datetime
:type fieldnum: int
:return: a tuple containing the new date, and the number of the field that was actually
incremented
:rtype: tuple
"""
values = {}
i = 0
while i < len(self.fields):
field = self.fields[i]
if not field.REAL:
if i == fieldnum:
fieldnum -= 1
i -= 1
else:
i += 1
continue
if i < fieldnum:
values[field.name] = field.get_value(dateval)
i += 1
elif i > fieldnum:
values[field.name] = field.get_min(dateval)
i += 1
else:
value = field.get_value(dateval)
maxval = field.get_max(dateval)
if value == maxval:
fieldnum -= 1
i -= 1
else:
values[field.name] = value + 1
i += 1
difference = datetime(**values) - dateval.replace(tzinfo=None)
return self.timezone.normalize(dateval + difference), fieldnum
def _set_field_value(self, dateval, fieldnum, new_value):
values = {}
for i, field in enumerate(self.fields):
if field.REAL:
if i < fieldnum:
values[field.name] = field.get_value(dateval)
elif i > fieldnum:
values[field.name] = field.get_min(dateval)
else:
values[field.name] = new_value
return self.timezone.localize(datetime(**values))
def get_next_fire_time(self, previous_fire_time, now):
if previous_fire_time:
start_date = min(now, previous_fire_time + timedelta(microseconds=1))
if start_date == previous_fire_time:
start_date += timedelta(microseconds=1)
else:
start_date = max(now, self.start_date) if self.start_date else now
fieldnum = 0
next_date = datetime_ceil(start_date).astimezone(self.timezone)
while 0 <= fieldnum < len(self.fields):
field = self.fields[fieldnum]
curr_value = field.get_value(next_date)
next_value = field.get_next_value(next_date)
if next_value is None:
# No valid value was found
next_date, fieldnum = self._increment_field_value(next_date, fieldnum - 1)
elif next_value > curr_value:
# A valid, but higher than the starting value, was found
if field.REAL:
next_date = self._set_field_value(next_date, fieldnum, next_value)
fieldnum += 1
else:
next_date, fieldnum = self._increment_field_value(next_date, fieldnum)
else:
# A valid value was found, no changes necessary
fieldnum += 1
# Return if the date has rolled past the end date
if self.end_date and next_date > self.end_date:
return None
if fieldnum >= 0:
next_date = self._apply_jitter(next_date, self.jitter, now)
return min(next_date, self.end_date) if self.end_date else next_date
def __getstate__(self):
return {
'version': 2,
'timezone': self.timezone,
'start_date': self.start_date,
'end_date': self.end_date,
'fields': self.fields,
'jitter': self.jitter,
}
def __setstate__(self, state):
# This is for compatibility with APScheduler 3.0.x
if isinstance(state, tuple):
state = state[1]
if state.get('version', 1) > 2:
raise ValueError(
'Got serialized data for version %s of %s, but only versions up to 2 can be '
'handled' % (state['version'], self.__class__.__name__))
self.timezone = state['timezone']
self.start_date = state['start_date']
self.end_date = state['end_date']
self.fields = state['fields']
self.jitter = state.get('jitter')
def __str__(self):
options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
return 'cron[%s]' % (', '.join(options))
def __repr__(self):
options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
if self.start_date:
options.append("start_date=%r" % datetime_repr(self.start_date))
if self.end_date:
options.append("end_date=%r" % datetime_repr(self.end_date))
if self.jitter:
options.append('jitter=%s' % self.jitter)
return "<%s (%s, timezone='%s')>" % (
self.__class__.__name__, ', '.join(options), self.timezone)
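# --- Usage sketch (editor's addition, not part of the original file) ---
# Equivalent triggers built from keyword fields and from a standard crontab
# string: weekdays at 05:30.
if __name__ == '__main__':
    t1 = CronTrigger(day_of_week='mon-fri', hour=5, minute=30)
    t2 = CronTrigger.from_crontab('30 5 * * mon-fri')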

View File

@@ -1,251 +0,0 @@
"""This module contains the expressions applicable for CronTrigger's fields."""
from calendar import monthrange
import re
from apscheduler.util import asint
__all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression',
'WeekdayPositionExpression', 'LastDayOfMonthExpression')
WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
class AllExpression(object):
value_re = re.compile(r'\*(?:/(?P<step>\d+))?$')
def __init__(self, step=None):
self.step = asint(step)
if self.step == 0:
raise ValueError('Increment must be higher than 0')
def validate_range(self, field_name):
from apscheduler.triggers.cron.fields import MIN_VALUES, MAX_VALUES
value_range = MAX_VALUES[field_name] - MIN_VALUES[field_name]
if self.step and self.step > value_range:
raise ValueError('the step value ({}) is higher than the total range of the '
'expression ({})'.format(self.step, value_range))
def get_next_value(self, date, field):
start = field.get_value(date)
minval = field.get_min(date)
maxval = field.get_max(date)
start = max(start, minval)
if not self.step:
next = start
else:
distance_to_next = (self.step - (start - minval)) % self.step
next = start + distance_to_next
if next <= maxval:
return next
def __eq__(self, other):
return isinstance(other, self.__class__) and self.step == other.step
def __str__(self):
if self.step:
return '*/%d' % self.step
return '*'
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.step)
class RangeExpression(AllExpression):
value_re = re.compile(
r'(?P<first>\d+)(?:-(?P<last>\d+))?(?:/(?P<step>\d+))?$')
def __init__(self, first, last=None, step=None):
super(RangeExpression, self).__init__(step)
first = asint(first)
last = asint(last)
if last is None and step is None:
last = first
if last is not None and first > last:
raise ValueError('The minimum value in a range must not be higher than the maximum')
self.first = first
self.last = last
def validate_range(self, field_name):
from apscheduler.triggers.cron.fields import MIN_VALUES, MAX_VALUES
super(RangeExpression, self).validate_range(field_name)
if self.first < MIN_VALUES[field_name]:
raise ValueError('the first value ({}) is lower than the minimum value ({})'
.format(self.first, MIN_VALUES[field_name]))
if self.last is not None and self.last > MAX_VALUES[field_name]:
raise ValueError('the last value ({}) is higher than the maximum value ({})'
.format(self.last, MAX_VALUES[field_name]))
value_range = (self.last or MAX_VALUES[field_name]) - self.first
if self.step and self.step > value_range:
raise ValueError('the step value ({}) is higher than the total range of the '
'expression ({})'.format(self.step, value_range))
def get_next_value(self, date, field):
startval = field.get_value(date)
minval = field.get_min(date)
maxval = field.get_max(date)
# Apply range limits
minval = max(minval, self.first)
maxval = min(maxval, self.last) if self.last is not None else maxval
nextval = max(minval, startval)
# Apply the step if defined
if self.step:
distance_to_next = (self.step - (nextval - minval)) % self.step
nextval += distance_to_next
return nextval if nextval <= maxval else None
def __eq__(self, other):
return (super(RangeExpression, self).__eq__(other) and self.first == other.first and
self.last == other.last)
def __str__(self):
if self.last != self.first and self.last is not None:
range = '%d-%d' % (self.first, self.last)
else:
range = str(self.first)
if self.step:
return '%s/%d' % (range, self.step)
return range
def __repr__(self):
args = [str(self.first)]
if self.last != self.first and self.last is not None or self.step:
args.append(str(self.last))
if self.step:
args.append(str(self.step))
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
class MonthRangeExpression(RangeExpression):
value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?', re.IGNORECASE)
def __init__(self, first, last=None):
try:
first_num = MONTHS.index(first.lower()) + 1
except ValueError:
raise ValueError('Invalid month name "%s"' % first)
if last:
try:
last_num = MONTHS.index(last.lower()) + 1
except ValueError:
raise ValueError('Invalid month name "%s"' % last)
else:
last_num = None
super(MonthRangeExpression, self).__init__(first_num, last_num)
def __str__(self):
if self.last != self.first and self.last is not None:
return '%s-%s' % (MONTHS[self.first - 1], MONTHS[self.last - 1])
return MONTHS[self.first - 1]
def __repr__(self):
args = ["'%s'" % MONTHS[self.first]]
if self.last != self.first and self.last is not None:
args.append("'%s'" % MONTHS[self.last - 1])
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
class WeekdayRangeExpression(RangeExpression):
value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?', re.IGNORECASE)
def __init__(self, first, last=None):
try:
first_num = WEEKDAYS.index(first.lower())
except ValueError:
raise ValueError('Invalid weekday name "%s"' % first)
if last:
try:
last_num = WEEKDAYS.index(last.lower())
except ValueError:
raise ValueError('Invalid weekday name "%s"' % last)
else:
last_num = None
super(WeekdayRangeExpression, self).__init__(first_num, last_num)
def __str__(self):
if self.last != self.first and self.last is not None:
return '%s-%s' % (WEEKDAYS[self.first], WEEKDAYS[self.last])
return WEEKDAYS[self.first]
def __repr__(self):
args = ["'%s'" % WEEKDAYS[self.first]]
if self.last != self.first and self.last is not None:
args.append("'%s'" % WEEKDAYS[self.last])
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
class WeekdayPositionExpression(AllExpression):
options = ['1st', '2nd', '3rd', '4th', '5th', 'last']
value_re = re.compile(r'(?P<option_name>%s) +(?P<weekday_name>(?:\d+|\w+))' %
'|'.join(options), re.IGNORECASE)
def __init__(self, option_name, weekday_name):
super(WeekdayPositionExpression, self).__init__(None)
try:
self.option_num = self.options.index(option_name.lower())
except ValueError:
raise ValueError('Invalid weekday position "%s"' % option_name)
try:
self.weekday = WEEKDAYS.index(weekday_name.lower())
except ValueError:
raise ValueError('Invalid weekday name "%s"' % weekday_name)
def get_next_value(self, date, field):
# Figure out the weekday of the month's first day and the number of days in that month
first_day_wday, last_day = monthrange(date.year, date.month)
# Calculate which day of the month is the first of the target weekdays
first_hit_day = self.weekday - first_day_wday + 1
if first_hit_day <= 0:
first_hit_day += 7
# Calculate what day of the month the target weekday would be
if self.option_num < 5:
target_day = first_hit_day + self.option_num * 7
else:
target_day = first_hit_day + ((last_day - first_hit_day) // 7) * 7
if target_day <= last_day and target_day >= date.day:
return target_day
def __eq__(self, other):
return (super(WeekdayPositionExpression, self).__eq__(other) and
self.option_num == other.option_num and self.weekday == other.weekday)
def __str__(self):
return '%s %s' % (self.options[self.option_num], WEEKDAYS[self.weekday])
def __repr__(self):
return "%s('%s', '%s')" % (self.__class__.__name__, self.options[self.option_num],
WEEKDAYS[self.weekday])
class LastDayOfMonthExpression(AllExpression):
value_re = re.compile(r'last', re.IGNORECASE)
def __init__(self):
super(LastDayOfMonthExpression, self).__init__(None)
def get_next_value(self, date, field):
return monthrange(date.year, date.month)[1]
def __str__(self):
return 'last'
def __repr__(self):
return "%s()" % self.__class__.__name__

View File

@@ -1,111 +0,0 @@
"""Fields represent CronTrigger options which map to :class:`~datetime.datetime` fields."""
from calendar import monthrange
import re
import six
from apscheduler.triggers.cron.expressions import (
AllExpression, RangeExpression, WeekdayPositionExpression, LastDayOfMonthExpression,
WeekdayRangeExpression, MonthRangeExpression)
__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField', 'WeekField',
'DayOfMonthField', 'DayOfWeekField')
MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1, 'day_of_week': 0, 'hour': 0,
'minute': 0, 'second': 0}
MAX_VALUES = {'year': 9999, 'month': 12, 'day': 31, 'week': 53, 'day_of_week': 6, 'hour': 23,
'minute': 59, 'second': 59}
DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', 'day_of_week': '*', 'hour': 0,
'minute': 0, 'second': 0}
SEPARATOR = re.compile(' *, *')
class BaseField(object):
REAL = True
COMPILERS = [AllExpression, RangeExpression]
def __init__(self, name, exprs, is_default=False):
self.name = name
self.is_default = is_default
self.compile_expressions(exprs)
def get_min(self, dateval):
return MIN_VALUES[self.name]
def get_max(self, dateval):
return MAX_VALUES[self.name]
def get_value(self, dateval):
return getattr(dateval, self.name)
def get_next_value(self, dateval):
smallest = None
for expr in self.expressions:
value = expr.get_next_value(dateval, self)
if smallest is None or (value is not None and value < smallest):
smallest = value
return smallest
def compile_expressions(self, exprs):
self.expressions = []
# Split a comma-separated expression list, if any
for expr in SEPARATOR.split(str(exprs).strip()):
self.compile_expression(expr)
def compile_expression(self, expr):
for compiler in self.COMPILERS:
match = compiler.value_re.match(expr)
if match:
compiled_expr = compiler(**match.groupdict())
try:
compiled_expr.validate_range(self.name)
except ValueError as e:
exc = ValueError('Error validating expression {!r}: {}'.format(expr, e))
six.raise_from(exc, None)
self.expressions.append(compiled_expr)
return
raise ValueError('Unrecognized expression "%s" for field "%s"' % (expr, self.name))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.expressions == other.expressions
def __str__(self):
expr_strings = (str(e) for e in self.expressions)
return ','.join(expr_strings)
def __repr__(self):
return "%s('%s', '%s')" % (self.__class__.__name__, self.name, self)
class WeekField(BaseField):
REAL = False
def get_value(self, dateval):
return dateval.isocalendar()[1]
class DayOfMonthField(BaseField):
COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression, LastDayOfMonthExpression]
def get_max(self, dateval):
return monthrange(dateval.year, dateval.month)[1]
class DayOfWeekField(BaseField):
REAL = False
COMPILERS = BaseField.COMPILERS + [WeekdayRangeExpression]
def get_value(self, dateval):
return dateval.weekday()
class MonthField(BaseField):
COMPILERS = BaseField.COMPILERS + [MonthRangeExpression]
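# --- Illustrative sketch (editor's addition, not part of the original file) ---
# get_next_value() returns the next matching value within the current period,
# or None when the field has to roll over.
if __name__ == '__main__':
    from datetime import datetime

    field = DayOfWeekField('day_of_week', 'mon-fri')
    print(field.get_next_value(datetime(2021, 2, 3)))  # Wednesday -> 2 (already valid)
    print(field.get_next_value(datetime(2021, 2, 6)))  # Saturday -> None (roll over)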

View File

@@ -1,51 +0,0 @@
from datetime import datetime
from tzlocal import get_localzone
from apscheduler.triggers.base import BaseTrigger
from apscheduler.util import convert_to_datetime, datetime_repr, astimezone
class DateTrigger(BaseTrigger):
"""
Triggers once on the given datetime. If ``run_date`` is left empty, current time is used.
:param datetime|str run_date: the date/time to run the job at
:param datetime.tzinfo|str timezone: time zone for ``run_date`` if it doesn't have one already
"""
__slots__ = 'run_date'
def __init__(self, run_date=None, timezone=None):
timezone = astimezone(timezone) or get_localzone()
if run_date is not None:
self.run_date = convert_to_datetime(run_date, timezone, 'run_date')
else:
self.run_date = datetime.now(timezone)
def get_next_fire_time(self, previous_fire_time, now):
return self.run_date if previous_fire_time is None else None
def __getstate__(self):
return {
'version': 1,
'run_date': self.run_date
}
def __setstate__(self, state):
# This is for compatibility with APScheduler 3.0.x
if isinstance(state, tuple):
state = state[1]
if state.get('version', 1) > 1:
raise ValueError(
'Got serialized data for version %s of %s, but only version 1 can be handled' %
(state['version'], self.__class__.__name__))
self.run_date = state['run_date']
def __str__(self):
return 'date[%s]' % datetime_repr(self.run_date)
def __repr__(self):
return "<%s (run_date='%s')>" % (self.__class__.__name__, datetime_repr(self.run_date))

View File

@@ -1,106 +0,0 @@
from datetime import timedelta, datetime
from math import ceil
from tzlocal import get_localzone
from apscheduler.triggers.base import BaseTrigger
from apscheduler.util import convert_to_datetime, timedelta_seconds, datetime_repr, astimezone
class IntervalTrigger(BaseTrigger):
"""
Triggers on specified intervals, starting on ``start_date`` if specified, ``datetime.now()`` +
interval otherwise.
:param int weeks: number of weeks to wait
:param int days: number of days to wait
:param int hours: number of hours to wait
:param int minutes: number of minutes to wait
:param int seconds: number of seconds to wait
:param datetime|str start_date: starting point for the interval calculation
:param datetime|str end_date: latest possible date/time to trigger on
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
"""
__slots__ = 'timezone', 'start_date', 'end_date', 'interval', 'interval_length', 'jitter'
def __init__(self, weeks=0, days=0, hours=0, minutes=0, seconds=0, start_date=None,
end_date=None, timezone=None, jitter=None):
self.interval = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes,
seconds=seconds)
self.interval_length = timedelta_seconds(self.interval)
if self.interval_length == 0:
self.interval = timedelta(seconds=1)
self.interval_length = 1
if timezone:
self.timezone = astimezone(timezone)
elif isinstance(start_date, datetime) and start_date.tzinfo:
self.timezone = start_date.tzinfo
elif isinstance(end_date, datetime) and end_date.tzinfo:
self.timezone = end_date.tzinfo
else:
self.timezone = get_localzone()
start_date = start_date or (datetime.now(self.timezone) + self.interval)
self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')
self.jitter = jitter
def get_next_fire_time(self, previous_fire_time, now):
if previous_fire_time:
next_fire_time = previous_fire_time + self.interval
elif self.start_date > now:
next_fire_time = self.start_date
else:
timediff_seconds = timedelta_seconds(now - self.start_date)
next_interval_num = int(ceil(timediff_seconds / self.interval_length))
next_fire_time = self.start_date + self.interval * next_interval_num
if self.jitter is not None:
next_fire_time = self._apply_jitter(next_fire_time, self.jitter, now)
if not self.end_date or next_fire_time <= self.end_date:
return self.timezone.normalize(next_fire_time)
def __getstate__(self):
return {
'version': 2,
'timezone': self.timezone,
'start_date': self.start_date,
'end_date': self.end_date,
'interval': self.interval,
'jitter': self.jitter,
}
def __setstate__(self, state):
# This is for compatibility with APScheduler 3.0.x
if isinstance(state, tuple):
state = state[1]
if state.get('version', 1) > 2:
raise ValueError(
'Got serialized data for version %s of %s, but only versions up to 2 can be '
'handled' % (state['version'], self.__class__.__name__))
self.timezone = state['timezone']
self.start_date = state['start_date']
self.end_date = state['end_date']
self.interval = state['interval']
self.interval_length = timedelta_seconds(self.interval)
self.jitter = state.get('jitter')
def __str__(self):
return 'interval[%s]' % str(self.interval)
def __repr__(self):
options = ['interval=%r' % self.interval, 'start_date=%r' % datetime_repr(self.start_date)]
if self.end_date:
options.append("end_date=%r" % datetime_repr(self.end_date))
if self.jitter:
options.append('jitter=%s' % self.jitter)
return "<%s (%s, timezone='%s')>" % (
self.__class__.__name__, ', '.join(options), self.timezone)
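# --- Usage sketch (editor's addition, not part of the original file) ---
# Fire every 90 minutes, with each run shifted by up to 30 seconds of
# random jitter.
if __name__ == '__main__':
    trigger = IntervalTrigger(hours=1, minutes=30, jitter=30)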

View File

@@ -1,429 +0,0 @@
"""This module contains several handy functions primarily meant for internal use."""
from __future__ import division
from datetime import date, datetime, time, timedelta, tzinfo
from calendar import timegm
from functools import partial
from inspect import isclass, ismethod
import re
from pytz import timezone, utc, FixedOffset
import six
try:
from inspect import signature
except ImportError: # pragma: nocover
from funcsigs import signature
try:
from threading import TIMEOUT_MAX
except ImportError:
TIMEOUT_MAX = 4294967 # Maximum value accepted by Event.wait() on Windows
try:
from asyncio import iscoroutinefunction
except ImportError:
try:
from trollius import iscoroutinefunction
except ImportError:
def iscoroutinefunction(func):
return False
__all__ = ('asint', 'asbool', 'astimezone', 'convert_to_datetime', 'datetime_to_utc_timestamp',
'utc_timestamp_to_datetime', 'timedelta_seconds', 'datetime_ceil', 'get_callable_name',
'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'repr_escape', 'check_callable_args',
'TIMEOUT_MAX')
class _Undefined(object):
def __nonzero__(self):
return False
def __bool__(self):
return False
def __repr__(self):
return '<undefined>'
undefined = _Undefined() #: a unique object that only signifies that no value is defined
def asint(text):
"""
Safely converts a string to an integer, returning ``None`` if the string is ``None``.
:type text: str
:rtype: int
"""
if text is not None:
return int(text)
def asbool(obj):
"""
Interprets an object as a boolean value.
:rtype: bool
"""
if isinstance(obj, str):
obj = obj.strip().lower()
if obj in ('true', 'yes', 'on', 'y', 't', '1'):
return True
if obj in ('false', 'no', 'off', 'n', 'f', '0'):
return False
raise ValueError('Unable to interpret value "%s" as boolean' % obj)
return bool(obj)
def astimezone(obj):
"""
Interprets an object as a timezone.
:rtype: tzinfo
"""
if isinstance(obj, six.string_types):
return timezone(obj)
if isinstance(obj, tzinfo):
if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'):
raise TypeError('Only timezones from the pytz library are supported')
if obj.zone == 'local':
raise ValueError(
'Unable to determine the name of the local timezone -- you must explicitly '
'specify the name of the local timezone. Please refrain from using timezones like '
'EST to prevent problems with daylight saving time. Instead, use a locale based '
'timezone name (such as Europe/Helsinki).')
return obj
if obj is not None:
raise TypeError('Expected tzinfo, got %s instead' % obj.__class__.__name__)
_DATE_REGEX = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
r'(?:[ T](?P<hour>\d{1,2}):(?P<minute>\d{1,2}):(?P<second>\d{1,2})'
r'(?:\.(?P<microsecond>\d{1,6}))?'
r'(?P<timezone>Z|[+-]\d\d:\d\d)?)?$')
def convert_to_datetime(input, tz, arg_name):
"""
Converts the given object to a timezone aware datetime object.
If a timezone aware datetime object is passed, it is returned unmodified.
If a native datetime object is passed, it is given the specified timezone.
If the input is a string, it is parsed as a datetime with the given timezone.
Date strings are accepted in three different forms: date only (Y-m-d), date with time
(Y-m-d H:M:S), or date+time with microseconds (Y-m-d H:M:S.micro). Additionally you can
override the time zone by giving a specific offset in the format specified by ISO 8601:
Z (UTC), +HH:MM or -HH:MM.
:param str|datetime input: the datetime or string to convert to a timezone aware datetime
:param datetime.tzinfo tz: timezone to interpret ``input`` in
:param str arg_name: the name of the argument (used in an error message)
:rtype: datetime
"""
if input is None:
return
elif isinstance(input, datetime):
datetime_ = input
elif isinstance(input, date):
datetime_ = datetime.combine(input, time())
elif isinstance(input, six.string_types):
m = _DATE_REGEX.match(input)
if not m:
raise ValueError('Invalid date string')
values = m.groupdict()
tzname = values.pop('timezone')
if tzname == 'Z':
tz = utc
elif tzname:
hours, minutes = (int(x) for x in tzname[1:].split(':'))
sign = 1 if tzname[0] == '+' else -1
tz = FixedOffset(sign * (hours * 60 + minutes))
values = {k: int(v or 0) for k, v in values.items()}
datetime_ = datetime(**values)
else:
raise TypeError('Unsupported type for %s: %s' % (arg_name, input.__class__.__name__))
if datetime_.tzinfo is not None:
return datetime_
if tz is None:
raise ValueError(
'The "tz" argument must be specified if %s has no timezone information' % arg_name)
if isinstance(tz, six.string_types):
tz = timezone(tz)
try:
return tz.localize(datetime_, is_dst=None)
except AttributeError:
raise TypeError(
'Only pytz timezones are supported (need the localize() and normalize() methods)')
def datetime_to_utc_timestamp(timeval):
"""
Converts a datetime instance to a timestamp.
:type timeval: datetime
:rtype: float
"""
if timeval is not None:
return timegm(timeval.utctimetuple()) + timeval.microsecond / 1000000
def utc_timestamp_to_datetime(timestamp):
"""
Converts the given timestamp to a datetime instance.
:type timestamp: float
:rtype: datetime
"""
if timestamp is not None:
return datetime.fromtimestamp(timestamp, utc)
def timedelta_seconds(delta):
"""
Converts the given timedelta to seconds.
:type delta: timedelta
:rtype: float
"""
return delta.days * 24 * 60 * 60 + delta.seconds + \
delta.microseconds / 1000000.0
def datetime_ceil(dateval):
"""
Rounds the given datetime object upwards.
:type dateval: datetime
"""
if dateval.microsecond > 0:
return dateval + timedelta(seconds=1, microseconds=-dateval.microsecond)
return dateval
def datetime_repr(dateval):
return dateval.strftime('%Y-%m-%d %H:%M:%S %Z') if dateval else 'None'
def get_callable_name(func):
"""
Returns the best available display name for the given function/callable.
:rtype: str
"""
# the easy case (on Python 3.3+)
if hasattr(func, '__qualname__'):
return func.__qualname__
# class methods, bound and unbound methods
f_self = getattr(func, '__self__', None) or getattr(func, 'im_self', None)
if f_self and hasattr(func, '__name__'):
f_class = f_self if isclass(f_self) else f_self.__class__
else:
f_class = getattr(func, 'im_class', None)
if f_class and hasattr(func, '__name__'):
return '%s.%s' % (f_class.__name__, func.__name__)
# class or class instance
if hasattr(func, '__call__'):
# class
if hasattr(func, '__name__'):
return func.__name__
# instance of a class with a __call__ method
return func.__class__.__name__
raise TypeError('Unable to determine a name for %r -- maybe it is not a callable?' % func)
def obj_to_ref(obj):
"""
Returns the path to the given callable.
:rtype: str
:raises TypeError: if the given object is not callable
:raises ValueError: if the given object is a :class:`~functools.partial`, lambda or a nested
function
"""
if isinstance(obj, partial):
raise ValueError('Cannot create a reference to a partial()')
name = get_callable_name(obj)
if '<lambda>' in name:
raise ValueError('Cannot create a reference to a lambda')
if '<locals>' in name:
raise ValueError('Cannot create a reference to a nested function')
if ismethod(obj):
if hasattr(obj, 'im_self') and obj.im_self:
# bound method
module = obj.im_self.__module__
elif hasattr(obj, 'im_class') and obj.im_class:
# unbound method
module = obj.im_class.__module__
else:
module = obj.__module__
else:
module = obj.__module__
return '%s:%s' % (module, name)
def ref_to_obj(ref):
"""
Returns the object pointed to by ``ref``.
:type ref: str
"""
if not isinstance(ref, six.string_types):
raise TypeError('References must be strings')
if ':' not in ref:
raise ValueError('Invalid reference')
modulename, rest = ref.split(':', 1)
try:
obj = __import__(modulename, fromlist=[rest])
except ImportError:
raise LookupError('Error resolving reference %s: could not import module' % ref)
try:
for name in rest.split('.'):
obj = getattr(obj, name)
return obj
except Exception:
raise LookupError('Error resolving reference %s: error looking up object' % ref)
def maybe_ref(ref):
"""
Returns the object that the given reference points to, if it is indeed a reference.
If it is not a reference, the object is returned as-is.
"""
if not isinstance(ref, str):
return ref
return ref_to_obj(ref)
if six.PY2:
def repr_escape(string):
if isinstance(string, six.text_type):
return string.encode('ascii', 'backslashreplace')
return string
else:
def repr_escape(string):
return string
def check_callable_args(func, args, kwargs):
"""
Ensures that the given callable can be called with the given arguments.
:type args: tuple
:type kwargs: dict
"""
pos_kwargs_conflicts = [] # parameters that have a match in both args and kwargs
positional_only_kwargs = [] # positional-only parameters that have a match in kwargs
unsatisfied_args = [] # parameters in signature that don't have a match in args or kwargs
unsatisfied_kwargs = [] # keyword-only arguments that don't have a match in kwargs
unmatched_args = list(args) # args that didn't match any of the parameters in the signature
# kwargs that didn't match any of the parameters in the signature
unmatched_kwargs = list(kwargs)
# indicates if the signature defines *args and **kwargs respectively
has_varargs = has_var_kwargs = False
try:
sig = signature(func)
except ValueError:
# signature() doesn't work against every kind of callable
return
for param in six.itervalues(sig.parameters):
if param.kind == param.POSITIONAL_OR_KEYWORD:
if param.name in unmatched_kwargs and unmatched_args:
pos_kwargs_conflicts.append(param.name)
elif unmatched_args:
del unmatched_args[0]
elif param.name in unmatched_kwargs:
unmatched_kwargs.remove(param.name)
elif param.default is param.empty:
unsatisfied_args.append(param.name)
elif param.kind == param.POSITIONAL_ONLY:
if unmatched_args:
del unmatched_args[0]
elif param.name in unmatched_kwargs:
unmatched_kwargs.remove(param.name)
positional_only_kwargs.append(param.name)
elif param.default is param.empty:
unsatisfied_args.append(param.name)
elif param.kind == param.KEYWORD_ONLY:
if param.name in unmatched_kwargs:
unmatched_kwargs.remove(param.name)
elif param.default is param.empty:
unsatisfied_kwargs.append(param.name)
elif param.kind == param.VAR_POSITIONAL:
has_varargs = True
elif param.kind == param.VAR_KEYWORD:
has_var_kwargs = True
# Make sure there are no conflicts between args and kwargs
if pos_kwargs_conflicts:
raise ValueError('The following arguments are supplied in both args and kwargs: %s' %
', '.join(pos_kwargs_conflicts))
# Check if keyword arguments are being fed to positional-only parameters
if positional_only_kwargs:
raise ValueError('The following arguments cannot be given as keyword arguments: %s' %
', '.join(positional_only_kwargs))
# Check that the number of positional arguments minus the number of matched kwargs matches the
# argspec
if unsatisfied_args:
raise ValueError('The following arguments have not been supplied: %s' %
', '.join(unsatisfied_args))
# Check that all keyword-only arguments have been supplied
if unsatisfied_kwargs:
raise ValueError(
'The following keyword-only arguments have not been supplied in kwargs: %s' %
', '.join(unsatisfied_kwargs))
# Check that the callable can accept the given number of positional arguments
if not has_varargs and unmatched_args:
raise ValueError(
'The list of positional arguments is longer than the target callable can handle '
'(allowed: %d, given in args: %d)' % (len(args) - len(unmatched_args), len(args)))
# Check that the callable can accept the given keyword arguments
if not has_var_kwargs and unmatched_kwargs:
raise ValueError(
'The target callable does not accept the following keyword arguments: %s' %
', '.join(unmatched_kwargs))
def iscoroutinefunction_partial(f):
while isinstance(f, partial):
f = f.func
# The asyncio version of iscoroutinefunction includes testing for @coroutine
# decorations vs. the inspect version which does not.
return iscoroutinefunction(f)
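# --- Illustrative sketch (editor's addition, not part of the original file) ---
# obj_to_ref() and ref_to_obj() round-trip a callable through its textual
# "module:qualname" reference, which is how job targets are serialized.
if __name__ == '__main__':
    ref = obj_to_ref(datetime_ceil)  # e.g. 'apscheduler.util:datetime_ceil' when imported
    assert ref_to_obj(ref) is datetime_ceil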

File diff suppressed because it is too large

View File

@@ -1,8 +0,0 @@
# -*- coding: utf-8 -*-
from .arrow import Arrow
from .factory import ArrowFactory
from .api import get, now, utcnow
__version__ = '0.10.0'
VERSION = __version__

View File

@@ -1,55 +0,0 @@
# -*- coding: utf-8 -*-
'''
Provides the default implementation of :class:`ArrowFactory <arrow.factory.ArrowFactory>`
methods for use as a module API.
'''
from __future__ import absolute_import
from arrow.factory import ArrowFactory
# internal default factory.
_factory = ArrowFactory()
def get(*args, **kwargs):
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
``get`` method.
'''
return _factory.get(*args, **kwargs)
def utcnow():
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
``utcnow`` method.
'''
return _factory.utcnow()
def now(tz=None):
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
``now`` method.
'''
return _factory.now(tz)
def factory(type):
''' Returns an :class:`.ArrowFactory` for the specified :class:`Arrow <arrow.arrow.Arrow>`
or derived type.
:param type: the type, :class:`Arrow <arrow.arrow.Arrow>` or derived.
'''
return ArrowFactory(type)
__all__ = ['get', 'utcnow', 'now', 'factory']
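# --- Usage sketch (editor's addition, not part of the original file) ---
# The module-level helpers simply defer to the shared factory:
if __name__ == '__main__':
    print(utcnow())                          # current UTC time as an Arrow object
    print(get('2021-02-05T16:30:46+01:00'))  # parse an ISO 8601 string
    print(now('Europe/Stockholm'))           # current time in a named timezone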

Some files were not shown because too many files have changed in this diff