Compare commits
88 Commits
v2.6.0-beta
...
v2.6.3
366823cee9
40e1eb9a49
1af419a860
397f18c435
2e5dd05a6c
a9fb8ddfb8
562c726787
5f82c1dc17
222800bdb6
5dd3636571
2296a9fbb3
63b5a7c036
b74ca2670e
393f4e0e58
3a9ca29e99
32995fef24
a73c99fc64
a5834470ba
da3bc127dc
0dddc4d58f
a4d5d9157b
c70d5d4398
7c08b07ef5
e426b5dd35
2fdf619582
d9eed14b7a
8230ffb8a4
7098930b19
56244245a4
dd2f12fa8e
9598247a0d
230ee90b1c
e705bedc91
b5ebe7590c
6d0831ceaa
19e00ee2f2
80723d224e
0c82bb023a
0a86f24095
b41249cfa8
6659802689
964c503223
15568bf20a
d10cd324bb
2a22ab8c33
ca736cdae2
d589c57dd2
9b0caf2a47
f8b00bbd67
91a8c0e7a0
2089172384
1ab87e5334
b5e6861032
189930918a
ff1bd0a4b8
e544d0dd07
3e0b240154
199119cafb
89ab665923
dfb60de6d2
da8d41868d
e9db43ebf6
c0453eae47
a8863a5aeb
a8adad7dbb
4cfa5ac10b
55090ddeaa
14346b0e69
ac24acf9ce
4cde62fde9
7489bc8d98
cde9287d85
558023e18e
8157ee7811
d746d2913f
0136fc6436
7ce280cb92
0209fa87aa
62cc2f769f
a49d44c880
dab288380a
2ac5c35065
ec9e2fe0f0
ecbe79b5b9
c4ac03738b
352dbd9bc8
393b395df0
1a96da04a1
@@ -5,6 +5,7 @@ contrib
init-scripts
package
pylintrc
snap
*.md
!CHANGELOG*.md
start.bat
20 .github/pull_request_template.md vendored Normal file
@@ -0,0 +1,20 @@
## Description

Please include a summary of the change and which issue is fixed.

Fixes Tautulli/Tautulli-Issues#(issue)

## Type of change

Please delete options that are not relevant.

- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)

## Checklist:

- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have added or updated the docstring for new or existing methods
65 .github/workflows/publish-docker.yml vendored
@@ -1,10 +1,14 @@
name: Publish Docker
on:
push:
branches: [master, beta, nightly, python3]
branches: [master, beta, nightly]
tags: [v*]
pull_request: ~
jobs:
build:
build-docker:
name: Build Docker Image
runs-on: ubuntu-latest
steps:
- name: Checkout Code
@@ -20,7 +24,9 @@ jobs:
else
echo ::set-output name=tag::${GITHUB_REF#refs/heads/}
fi
if [[ $GITHUB_REF == refs/tags/* ]]; then
if [[ $GITHUB_REF == refs/tags/*-beta ]]; then
echo ::set-output name=branch::beta
elif [[ $GITHUB_REF == refs/tags/* ]]; then
echo ::set-output name=branch::master
else
echo ::set-output name=branch::${GITHUB_REF#refs/heads/}
@@ -30,14 +36,12 @@ jobs:
echo ::set-output name=docker_platforms::linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6
echo ::set-output name=docker_image::${{ secrets.DOCKER_REPO }}/tautulli
- name: Set up QEMU
- name: Set Up QEMU
uses: docker/setup-qemu-action@v1
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
id: buildx
with:
version: latest
@@ -49,40 +53,63 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Docker Login
- name: Login to DockerHub
uses: docker/login-action@v1
if: success()
if: success() && github.event_name != 'pull_request'
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
if: success() && github.event_name != 'pull_request'
with:
registry: ghcr.io
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Docker Build and Push
uses: docker/build-push-action@v2
if: success()
with:
context: .
file: ./Dockerfile
push: true
push: ${{ github.event_name != 'pull_request' }}
platforms: ${{ steps.prepare.outputs.docker_platforms }}
build-args: |
TAG=${{ steps.prepare.outputs.tag }},
BRANCH=${{ steps.prepare.outputs.branch }},
COMMIT=${{ steps.prepare.outputs.commit }},
TAG=${{ steps.prepare.outputs.tag }}
BRANCH=${{ steps.prepare.outputs.branch }}
COMMIT=${{ steps.prepare.outputs.commit }}
BUILD_DATE=${{ steps.prepare.outputs.build_date }}
tags: ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
tags: |
${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Clear
if: always()
discord:
name: Discord Notification
needs: build-docker
if: always() && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v1
- name: Combine Job Status
id: status
run: |
rm -f ${HOME}/.docker/config.json
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
fi
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true
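The prepare step above derives the publish channel from the Git ref: a `*-beta` tag publishes to `beta`, any other tag to `master`, and branch pushes use the branch name. A minimal Python sketch of that mapping, for illustration only (the function name and assertions are not part of the workflow):

```python
def release_channel(github_ref: str) -> str:
    # Same tag/branch mapping as the workflow's prepare step (a sketch, not the workflow itself).
    if github_ref.startswith("refs/tags/"):
        return "beta" if github_ref.endswith("-beta") else "master"
    # Branch pushes (refs/heads/<name>) publish under the branch name.
    return github_ref.rsplit("/", 1)[-1]

assert release_channel("refs/tags/v2.6.0-beta") == "beta"
assert release_channel("refs/tags/v2.6.3") == "master"
assert release_channel("refs/heads/nightly") == "nightly"
```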
@@ -1,12 +1,26 @@
name: Publish Release
name: Publish Installers
on:
push:
branches: [master, beta, nightly, python3]
branches: [master, beta, nightly]
tags: [v*]
pull_request: ~
jobs:
build-windows:
runs-on: windows-latest
build-installer:
name: Build ${{ matrix.os_upper }} Installer
runs-on: ${{ matrix.os }}-latest
strategy:
fail-fast: false
matrix:
include:
- os: 'windows'
os_upper: 'Windows'
ext: 'exe'
- os: 'macos'
os_upper: 'MacOS'
ext: 'pkg'
steps:
- name: Checkout Code
uses: actions/checkout@v2
@@ -16,11 +30,13 @@ jobs:
shell: bash
run: |
if [[ $GITHUB_REF == refs/tags/* ]]; then
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV
VERSION_NSIS=${GITHUB_REF#refs/tags/v}.1
echo ::set-output name=VERSION_NSIS::${VERSION_NSIS/%-beta.1/.0}
echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/v}
echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
else
echo "VERSION=0.0.0" >> $GITHUB_ENV
echo ::set-output name=VERSION_NSIS::0.0.0.0
echo ::set-output name=VERSION::0.0.0
echo ::set-output name=RELEASE_VERSION::${GITHUB_SHA::7}
@@ -33,110 +49,79 @@ jobs:
python-version: 3.8
- name: Cache Dependencies
id: cache_dependencies
uses: actions/cache@v2
with:
path: ~\AppData\Local\pip\Cache
key: ${{ runner.os }}-pip-${{ hashFiles('package/requirements-windows.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles(format('package/requirements-{0}.txt', matrix.os)) }}
restore-keys: ${{ runner.os }}-pip-
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r package/requirements-windows.txt
pip install -r package/requirements-${{ matrix.os }}.txt
- name: Build Package
run: |
pyinstaller -y ./package/Tautulli-windows.spec
pyinstaller -y ./package/Tautulli-${{ matrix.os }}.spec
- name: Create Installer
- name: Create Windows Installer
uses: joncloud/makensis-action@v1.2
if: matrix.os == 'windows'
with:
script-file: ./package/Tautulli.nsi
arguments: /DVERSION=${{ steps.get_version.outputs.VERSION_NSIS }} /DINSTALLER_NAME=..\Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
arguments: >
/DVERSION=${{ steps.get_version.outputs.VERSION_NSIS }}
/DINSTALLER_NAME=..\Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
include-more-plugins: true
include-custom-plugins-path: package/nsis-plugins
- name: Create MacOS Installer
if: matrix.os == 'macos'
run: |
sudo pkgbuild \
--install-location /Applications \
--version ${{ steps.get_version.outputs.VERSION }} \
--component ./dist/Tautulli.app \
--scripts ./package/macos-scripts \
Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
- name: Upload Installer
uses: actions/upload-artifact@v2
with:
name: Tautulli-windows-installer
path: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
name: Tautulli-${{ matrix.os }}-installer
path: Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.${{ matrix.ext }}
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
title: Build Windows Installer
nofail: true
build-macos:
runs-on: macos-latest
discord:
name: Discord Notification
needs: build-installer
if: always() && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v1
- name: Set Release Version
id: get_version
shell: bash
- name: Combine Job Status
id: status
run: |
if [[ $GITHUB_REF == refs/tags/* ]]; then
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV
echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/v}
echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
else
echo "VERSION=0.0.0" >> $GITHUB_ENV
echo ::set-output name=VERSION::0.0.0
echo ::set-output name=RELEASE_VERSION::${GITHUB_SHA::7}
echo ::set-output name=status::$WORKFLOW_CONCLUSION
fi
echo $GITHUB_SHA > version.txt
- name: Set Up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Cache Dependencies
id: cache_dependencies
uses: actions/cache@v2
with:
path: ~/Library/Caches/pip
key: ${{ runner.os }}-pip-${{ hashFiles('package/requirements-macos.txt') }}
restore-keys: ${{ runner.os }}-pip-
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r package/requirements-macos.txt
- name: Build Package
run: |
pyinstaller -y ./package/Tautulli-macos.spec
- name: Create Installer
run: |
sudo pkgbuild --install-location /Applications --version ${{ steps.get_version.outputs.VERSION }} --component ./dist/Tautulli.app --scripts ./package/macos-scripts Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
- name: Upload Installer
uses: actions/upload-artifact@v2
with:
name: Tautulli-macos-installer
path: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
title: Build MacOS Installer
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true
release:
needs: [build-windows, build-macos]
if: startsWith(github.ref, 'refs/tags/') && always()
name: Release Installers
needs: build-installer
if: always() && startsWith(github.ref, 'refs/tags/') && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Get Build Job Status
@@ -150,25 +135,20 @@ jobs:
run: |
echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
- name: Download Windows Installer
- name: Download Installers
if: env.WORKFLOW_CONCLUSION == 'success'
uses: actions/download-artifact@v2
with:
name: Tautulli-windows-installer
- name: Download MacOS Installer
if: env.WORKFLOW_CONCLUSION == 'success'
uses: actions/download-artifact@v2
with:
name: Tautulli-macos-installer
- name: Get Changelog
id: get_changelog
run: echo ::set-output name=CHANGELOG::"$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md | sed '$d' | sed '$d' | sed '$d' | sed ':a;N;$!ba;s/\n/%0A/g' )"
run: |
changelog=$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md \
| sed '$d' | sed '$d' | sed '$d' | sed ':a;N;$!ba;s/\n/%0A/g' )
echo ::set-output name=CHANGELOG::$changelog
- name: Create Release
id: create_release
uses: actions/create-release@v1
id: create_release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
@@ -182,8 +162,8 @@ jobs:
prerelease: ${{ endsWith(steps.get_version.outputs.RELEASE_VERSION, '-beta') }}
- name: Upload Windows Installer
if: env.WORKFLOW_CONCLUSION == 'success'
uses: actions/upload-release-asset@v1
if: env.WORKFLOW_CONCLUSION == 'success'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
@@ -193,8 +173,8 @@ jobs:
asset_content_type: application/vnd.microsoft.portable-executable
- name: Upload MacOS Installer
if: env.WORKFLOW_CONCLUSION == 'success'
uses: actions/upload-release-asset@v1
if: env.WORKFLOW_CONCLUSION == 'success'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
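The Set Release Version step above turns the Git tag into the four-part version string that NSIS expects (VERSION_NSIS). A small Python sketch of that string manipulation, under the assumption that only plain and `-beta` tags occur (requires Python 3.9+ for removeprefix):

```python
def nsis_version(github_ref: str) -> str:
    # "v2.6.3" -> "2.6.3.1"; "v2.6.0-beta" -> "2.6.0.0" (mirrors ${VERSION_NSIS/%-beta.1/.0}).
    version = github_ref.removeprefix("refs/tags/v") + ".1"
    return version.replace("-beta.1", ".0")

assert nsis_version("refs/tags/v2.6.3") == "2.6.3.1"
assert nsis_version("refs/tags/v2.6.0-beta") == "2.6.0.0"
```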
104 .github/workflows/publish-snap.yml vendored Normal file
@@ -0,0 +1,104 @@
name: Publish Snap
on:
push:
branches: [master, beta, nightly]
tags: [v*]
pull_request: ~
jobs:
build-snap:
name: Build Snap Package (${{ matrix.architecture }})
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
architecture:
- i386
- amd64
- arm64
- armhf
- ppc64el
#- s390x # broken at the moment
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Prepare
id: prepare
run: |
git fetch --prune --unshallow --tags
if [[ $GITHUB_REF == refs/tags/*-beta || $GITHUB_REF == refs/heads/beta ]]; then
echo ::set-output name=RELEASE::beta
elif [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
echo ::set-output name=RELEASE::stable
else
echo ::set-output name=RELEASE::edge
fi
- name: Set Up QEMU
uses: docker/setup-qemu-action@v1
- name: Build Snap Package
uses: diddlesnaps/snapcraft-multiarch-action@v1
id: build
with:
architecture: ${{ matrix.architecture }}
- name: Upload Snap Package
uses: actions/upload-artifact@v2
with:
name: Tautulli-snap-package-${{ matrix.architecture }}
path: ${{ steps.build.outputs.snap }}
- name: Review Snap Package
uses: diddlesnaps/snapcraft-review-tools-action@v1
with:
snap: ${{ steps.build.outputs.snap }}
- name: Publish Snap Package
uses: snapcore/action-publish@v1
if: >
github.event_name != 'pull_request' &&
(startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/nightly')
with:
store_login: ${{ secrets.SNAP_LOGIN }}
snap: ${{ steps.build.outputs.snap }}
release: ${{ steps.prepare.outputs.RELEASE }}
- name: Publish Snap Package (beta)
uses: snapcore/action-publish@v1
if: >
github.event_name != 'pull_request' &&
startsWith(github.ref, 'refs/tags/') && !endsWith(github.ref, '-beta')
with:
store_login: ${{ secrets.SNAP_LOGIN }}
snap: ${{ steps.build.outputs.snap }}
release: beta
discord:
name: Discord Notification
needs: build-snap
if: always() && github.event_name != 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v1
- name: Combine Job Status
id: status
run: |
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
fi
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true
13 .gitignore vendored
@@ -81,3 +81,16 @@ _ReSharper*/
#Ignore files generated by pyinstaller
/build
/dist
#snapcraft specifics
/parts/
/stage/
/prime/
*.snap
.snapcraft
__pycache__
*.pyc
*_source.tar.bz2
snap/.snapcraft
45 CHANGELOG.md
@@ -1,24 +1,67 @@
# Changelog

## v2.6.0-beta (2020-10-16)
## v2.6.3 (2020-12-19)

* Announcements:
  * This is the last Tautulli version to support Python 2. Python 3 will be required to continue receiving updates. You can check your Python version on the settings page.
* Exporter:
  * Fix: Accessible and exists attributes were blank for media info export level 9.
* UI:
  * Fix: Guest usernames were not masked on mouse hover.
* Other:
  * Fix: macOS menu bar icon for light and dark mode.
  * New: Tautulli can officially be installed on Linux using a Snap package. See the installation wiki for details.


## v2.6.2 (2020-12-05)

* Notifications:
  * Change: Send a notification for a user's new device the first time only. This can be toggled off in the settings.
* Exporter:
  * Fix: Allow exporting child fields only without requiring the parent fields as well.
  * Fix: Exporting an individual collection would fail.
  * Change: Remove the accessible and exists fields from the default media info export levels. This prevents the Plex server from reading the media files unnecessarily.
* Other:
  * Fix: Enable high resolution for the macOS system tray icon and menu.
  * New: Added rate limiting for failed login attempts.
  * Change: Use a white logo for the macOS system tray icon.
* API:
  * New: Added machine_id to the get_history API response.
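
For context, get_history is one of the API v2 commands; a minimal sketch of reading the new machine_id field with Python requests is below. The host, port, and API key are placeholders, and the exact response layout can differ between Tautulli versions:

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"   # placeholder host/port
API_KEY = "YOUR_API_KEY"                        # placeholder API key

resp = requests.get(TAUTULLI_URL, params={
    "apikey": API_KEY,
    "cmd": "get_history",
    "length": 10,
})
resp.raise_for_status()
history = resp.json()["response"]["data"]
# Each history row should now carry a machine_id alongside the usual fields.
for row in history.get("data", []):
    print(row.get("title"), row.get("machine_id"))
```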

## v2.6.1 (2020-11-03)

* Other:
  * Fix: High CPU/memory usage in some instances.
  * Fix: Logger error preventing Tautulli from starting.
  * Fix: Database issue with non-unique image hashes.


## v2.6.0 (2020-10-31)

* Exporter:
  * New: New exporter feature that allows you to export the metadata and images for any library, collection, playlist, or media item to csv, json, xml, or m3u8. Refer to the Exporter Guide in the wiki for more details.
* UI:
  * Fix: Margin on the homepage activity and statistic/library cards. (Thanks @dotsam)
  * Fix: Movie ratings not showing on the info page for the new Plex Movie agent.
  * New: Added ability to browse collections and playlists from the library and user pages.
  * Change: Updated platform brand logos and colours.
* API:
  * New: Added export_metadata, download_export, and delete_export API commands.
  * New: Added get_collections_table and get_playlists_table API commands.
  * New: Added min_version parameter to the register_device API command.
  * New: Added include_activity parameter to the get_history API command.
  * New: Added sync_id parameter to the get_metadata API command.
  * New: Added delete_synced_item API command.
  * New: Added stat_id and stats_start parameters to the get_home_stats API command.
  * New: Allow deleting a mobile device using the registration device_id for the delete_mobile_device API command.
  * Change: Return Plex server info and Tautulli info from the register_device command.
* Other:
  * New: The Docker container is now also built for the arm32v6 architecture.
  * New: The Docker container is also published to the GitHub Container Registry at ghcr.io/tautulli/tautulli.
  * Change: Tautulli is now using a forked version of plexapi 3.6.0. This is to support the exporter feature while still maintaining Python 2 compatibility.
  * Change: Updated systemd script to remove process forking. (Thanks @MichaIng)
  * Change: Cache GitHub update check on startup.
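
As a rough illustration of the new exporter commands, the sketch below calls export_metadata through the API v2 endpoint. The parameter names (rating_key, file_format) are assumptions inferred from the changelog wording, not taken from the diff; check the API documentation of your Tautulli version before relying on them:

```python
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"   # placeholder host/port
API_KEY = "YOUR_API_KEY"                        # placeholder API key

# Start a metadata export for a single item (parameter names are assumptions).
resp = requests.get(TAUTULLI_URL, params={
    "apikey": API_KEY,
    "cmd": "export_metadata",
    "rating_key": 12345,        # hypothetical library item
    "file_format": "json",
})
print(resp.json()["response"]["result"])
```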

## v2.5.6 (2020-10-02)
@@ -11,13 +11,16 @@ ENV TZ=UTC
WORKDIR /app

RUN \
groupadd -g 1000 tautulli && \
useradd -u 1000 -g 1000 tautulli && \
echo ${BRANCH} > /app/branch.txt && \
echo ${COMMIT} > /app/version.txt

COPY . /app

CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
ENTRYPOINT [ "./start.sh" ]

VOLUME /config
EXPOSE 8181
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
@@ -38,7 +38,8 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
| --- | --- | --- | --- |
| Release | [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/releases/latest) | [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/commits/beta) | [](https://github.com/Tautulli/Tautulli/commits/nightly) <br> [](https://github.com/Tautulli/Tautulli/commits/nightly) |
| Docker | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Amaster) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Abeta) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Anightly) |
| Installer | [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Release"+branch%3Amaster) | [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Release"+branch%3Abeta) | [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Release"+branch%3Anightly) |
| Snap | [](https://snapcraft.io/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Snap"+branch%3Amaster) | [](https://snapcraft.io/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Snap"+branch%3Abeta) | [](https://snapcraft.io/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Snap"+branch%3Anightly) |
| Installer | [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Installers"+branch%3Amaster) | [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Installers"+branch%3Abeta) | [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Installers"+branch%3Anightly) |
[](https://github.com/Tautulli/Tautulli-Wiki/wiki)
[](https://tautulli.com/discord)
@@ -124,6 +124,8 @@ def main():

if helpers.bool_true(os.getenv('TAUTULLI_DOCKER', False)):
    plexpy.DOCKER = True
if helpers.bool_true(os.getenv('TAUTULLI_SNAP', False)):
    plexpy.SNAP = True

if args.dev:
    plexpy.DEV = True
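The hunk above switches Tautulli's install-type flags based on environment variables set by the Docker image and the Snap wrapper. A minimal sketch of the same pattern follows; env_flag is a hypothetical stand-in for helpers.bool_true, whose exact truthy set is an assumption here:

```python
import os

TRUTHY = {"1", "true", "on", "yes"}

def env_flag(name):
    # Hypothetical stand-in for helpers.bool_true(); treats common truthy strings as True.
    return str(os.getenv(name, "")).strip().lower() in TRUTHY

# Mirrors the detection in Tautulli.py: the Docker image sets TAUTULLI_DOCKER,
# and the Snap package would set TAUTULLI_SNAP.
IS_DOCKER = env_flag("TAUTULLI_DOCKER")
IS_SNAP = env_flag("TAUTULLI_SNAP")
```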
@@ -24,21 +24,21 @@
${next.headIncludes()}
<!-- Favicons -->
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.0.5">
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.0.5">
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.0.5">
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.6.0">
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.6.0">
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.6.0">
<!-- ICONS -->
<!-- Android -->
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.0.5" crossorigin="use-credentials">
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.6.0" crossorigin="use-credentials">
<meta name="theme-color" content="#282a2d">
<!-- Apple -->
<link rel="apple-touch-icon" sizes="180x180" href="${http_root}images/favicon/apple-touch-icon.png?v=2.0.5">
<link rel="mask-icon" href="${http_root}images/favicon/safari-pinned-tab.svg?v=2.0.5" color="#282a2d">
<link rel="apple-touch-icon" sizes="180x180" href="${http_root}images/favicon/apple-touch-icon.png?v=2.6.0">
<link rel="mask-icon" href="${http_root}images/favicon/safari-pinned-tab.svg?v=2.6.0" color="#282a2d">
<meta name="apple-mobile-web-app-title" content="Tautulli">
<!-- Microsoft -->
<meta name="application-name" content="Tautulli">
<meta name="msapplication-config" content="${http_root}images/favicon/browserconfig.xml?v=2.0.5">
<meta name="msapplication-config" content="${http_root}images/favicon/browserconfig.xml?v=2.6.0">
</head>
<body class="content">
@@ -59,6 +59,8 @@
% endif
% if plexpy.INSTALL_TYPE == 'docker':
Update your Docker container or <a href="#" id="updateDismiss">Dismiss</a>
% elif plexpy.INSTALL_TYPE == 'snap':
Update your Snap package or <a href="#" id="updateDismiss">Dismiss</a>
% elif plexpy.INSTALL_TYPE in ('windows', 'macos'):
<a href="${anon_url('https://github.com/%s/%s/releases/tag/%s' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO, plexpy.LATEST_RELEASE))}" target="_blank" rel="noreferrer">Download</a> and install the latest version or <a href="#" id="updateDismiss">Dismiss</a>
% else:
@@ -204,7 +206,7 @@ ${next.modalIncludes()}
</div>
</div>
<div class="modal-footer">
<span id="incorrect-login" style="padding-right: 25px; display: none;">Incorrect username or password.</span>
<span id="sign-in-alert" style="padding-right: 25px; display: none;"></span>
<button id="sign-in" type="submit" class="btn btn-bright login-button"><i class="fa fa-sign-in"></i> Sign In</button>
</div>
<input type="hidden" id="admin_login" name="admin_login" value="1" />
@@ -337,6 +339,8 @@ ${next.modalIncludes()}
}
if (result.install_type === 'docker') {
msg += 'Update your Docker container or <a href="#" id="updateDismiss">Dismiss</a>';
} else if (result.install_type === 'snap') {
msg += 'Update your Snap package or <a href="#" id="updateDismiss">Dismiss</a>';
} else if (result.install_type === 'windows' || result.install_type === 'macos') {
msg += '<a href="' + result.release_url + '" target="_blank" rel="noreferrer">Download</a> and install the latest version or <a href="#" id="updateDismiss">Dismiss</a>'
} else {
@@ -446,12 +450,16 @@ ${next.modalIncludes()}
data: $(this).serialize(),
dataType: 'json',
statusCode: {
200: function() {
200: function(xhr, status) {
window.location = "${http_root}";
},
401: function() {
$('#incorrect-login').show();
$('#username').focus();
401: function(xhr, status) {
$('#sign-in-alert').text('Incorrect username or password.').show();
$('#username').focus();
},
429: function(xhr, status) {
var retry = Math.ceil(xhr.getResponseHeader('Retry-After') / 60)
$('#sign-in-alert').text('Too many login attempts. Try again in ' + retry + ' minute(s).').show();
}
},
complete: function() {
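The new 429 handler reads the Retry-After header that the login rate limiter sends after too many failed attempts. A generic Python sketch of honouring that header on the client side is shown below; the URL and form fields are placeholders rather than anything defined in the diff:

```python
import time
import requests

def post_with_retry(url, data, max_attempts=3):
    """POST to a rate-limited endpoint, waiting out Retry-After on HTTP 429."""
    resp = None
    for _ in range(max_attempts):
        resp = requests.post(url, data=data)
        if resp.status_code != 429:
            break
        # Retry-After is given in seconds; fall back to 60 if the header is missing.
        time.sleep(int(resp.headers.get("Retry-After", 60)))
    return resp

# Hypothetical usage against a login form:
# post_with_retry("http://localhost:8181/auth/login", {"username": "u", "password": "p"})
```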
@@ -750,7 +750,9 @@ a .users-poster-face:hover {
|
||||
position: relative;
|
||||
}
|
||||
#dashboard-checking-activity,
|
||||
#dashboard-no-activity {
|
||||
#dashboard-no-activity,
|
||||
#dashboard-checking-recently-added,
|
||||
#dashboard-no-recently-added {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
.dashboard-activity-instance {
|
||||
@@ -1446,9 +1448,6 @@ a:hover .dashboard-stats-square {
|
||||
-moz-box-shadow: inset 0 0 0 2px #e9a049;
|
||||
box-shadow: inset 0 0 0 2px #e9a049;
|
||||
}
|
||||
#dashboard-no-recently-added {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
.dashboard-recent-media-row {
|
||||
width: 100%;
|
||||
margin: 0 auto;
|
||||
@@ -3850,19 +3849,19 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-position: center !important;
|
||||
}
|
||||
.platform-android {
|
||||
background-color: #a4ca39;
|
||||
background-color: #3ddc84;
|
||||
background-image: url(../images/platforms/android.svg);
|
||||
}
|
||||
.platform-atv {
|
||||
background-color: #858487;
|
||||
background-color: #a2aaad;
|
||||
background-image: url(../images/platforms/atv.svg);
|
||||
}
|
||||
.platform-chrome {
|
||||
background-color: #ed5e50;
|
||||
background-color: #db4437;
|
||||
background-image: url(../images/platforms/chrome.svg);
|
||||
}
|
||||
.platform-chromecast {
|
||||
background-color: #10a4e8;
|
||||
background-color: #4285f4;
|
||||
background-image: url(../images/platforms/chromecast.svg);
|
||||
}
|
||||
.platform-default {
|
||||
@@ -3870,11 +3869,11 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-image: url(../images/platforms/default.svg);
|
||||
}
|
||||
.platform-dlna {
|
||||
background-color: #0cb14b;
|
||||
background-color: #4ba32f;
|
||||
background-image: url(../images/platforms/dlna.svg);
|
||||
}
|
||||
.platform-firefox {
|
||||
background-color: #e67817;
|
||||
background-color: #ff7139;
|
||||
background-image: url(../images/platforms/firefox.svg);
|
||||
}
|
||||
.platform-gtv {
|
||||
@@ -3882,27 +3881,27 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-image: url(../images/platforms/gtv.svg);
|
||||
}
|
||||
.platform-ie {
|
||||
background-color: #00599e;
|
||||
background-color: #18bcef;
|
||||
background-image: url(../images/platforms/ie.svg);
|
||||
}
|
||||
.platform-ios {
|
||||
background-color: #858487;
|
||||
background-color: #a2aaad;
|
||||
background-image: url(../images/platforms/ios.svg);
|
||||
}
|
||||
.platform-kodi {
|
||||
background-color: #31afe1;
|
||||
background-color: #30aada;
|
||||
background-image: url(../images/platforms/kodi.svg);
|
||||
}
|
||||
.platform-lg {
|
||||
background-color: #a50034;
|
||||
background-color: #990033;
|
||||
background-image: url(../images/platforms/lg.svg);
|
||||
}
|
||||
.platform-linux {
|
||||
background-color: #1793d0;
|
||||
background-color: #0099cc;
|
||||
background-image: url(../images/platforms/linux.svg);
|
||||
}
|
||||
.platform-macos {
|
||||
background-color: #858487;
|
||||
background-color: #a2aaad;
|
||||
background-image: url(../images/platforms/macos.svg);
|
||||
}
|
||||
.platform-msedge {
|
||||
@@ -3910,11 +3909,11 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-image: url(../images/platforms/msedge.svg);
|
||||
}
|
||||
.platform-opera {
|
||||
background-color: #ff1b2d;
|
||||
background-color: #fa1e4e;
|
||||
background-image: url(../images/platforms/opera.svg);
|
||||
}
|
||||
.platform-playstation {
|
||||
background-color: #034da2;
|
||||
background-color: #003087;
|
||||
background-image: url(../images/platforms/playstation.svg);
|
||||
}
|
||||
.platform-plex {
|
||||
@@ -3926,11 +3925,11 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-image: url(../images/platforms/plexamp.svg);
|
||||
}
|
||||
.platform-roku {
|
||||
background-color: #6d3c97;
|
||||
background-color: #673293;
|
||||
background-image: url(../images/platforms/roku.svg);
|
||||
}
|
||||
.platform-safari {
|
||||
background-color: #00a9ec;
|
||||
background-color: #00d3f9;
|
||||
background-image: url(../images/platforms/safari.svg);
|
||||
}
|
||||
.platform-samsung {
|
||||
@@ -3950,7 +3949,7 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-image: url(../images/platforms/wiiu.svg);
|
||||
}
|
||||
.platform-windows {
|
||||
background-color: #2fc0f5;
|
||||
background-color: #0078d7;
|
||||
background-image: url(../images/platforms/windows.svg);
|
||||
}
|
||||
.platform-wp {
|
||||
@@ -3966,55 +3965,55 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-image: url(../images/platforms/xbox.svg);
|
||||
}
|
||||
.platform-android-rgba {
|
||||
background-color: rgba(164, 202, 57, 0.40);
|
||||
background-color: rgba(61, 220, 132, 0.40);
|
||||
}
|
||||
.platform-atv-rgba {
|
||||
background-color: rgba(133, 132, 135, 0.40);
|
||||
background-color: rgba(162, 170, 173, 0.40);
|
||||
}
|
||||
.platform-chrome-rgba {
|
||||
background-color: rgba(237, 94, 80, 0.40);
|
||||
background-color: rgba(219, 68, 55, 0.40);
|
||||
}
|
||||
.platform-chromecast-rgba {
|
||||
background-color: rgba(16, 164, 232, 0.40);
|
||||
background-color: rgba(66, 133, 244, 0.40);
|
||||
}
|
||||
.platform-default-rgba {
|
||||
background-color: rgba(229, 160, 13, 0.40);
|
||||
}
|
||||
.platform-dlna-rgba {
|
||||
background-color: rgba(12, 177, 75, 0.40);
|
||||
background-color: rgba(75, 163, 47, 0.40);
|
||||
}
|
||||
.platform-firefox-rgba {
|
||||
background-color: rgba(230, 120, 23, 0.40);
|
||||
background-color: rgba(255, 113, 57, 0.40);
|
||||
}
|
||||
.platform-gtv-rgba {
|
||||
background-color: rgba(0, 139, 207, 0.40);
|
||||
}
|
||||
.platform-ie-rgba {
|
||||
background-color: rgba(0, 89, 158, 0.40);
|
||||
background-color: rgba(24, 188, 239, 0.40);
|
||||
}
|
||||
.platform-ios-rgba {
|
||||
background-color: rgba(133, 132, 135, 0.40);
|
||||
background-color: rgba(162, 170, 173, 0.40);
|
||||
}
|
||||
.platform-kodi-rgba {
|
||||
background-color: rgba(49, 175, 225, 0.40);
|
||||
background-color: rgba(48, 170, 218, 0.40);
|
||||
}
|
||||
.platform-lg-rgba {
|
||||
background-color: rgba(165, 0, 52, 0.40);
|
||||
background-color: rgba(153, 0, 51, 0.40);
|
||||
}
|
||||
.platform-linux-rgba {
|
||||
background-color: rgba(23, 147, 208, 0.40);
|
||||
background-color: rgba(0, 153, 204, 0.40);
|
||||
}
|
||||
.platform-macos-rgba {
|
||||
background-color: rgba(133, 132, 135, 0.40);
|
||||
background-color: rgba(162, 170, 173, 0.40);
|
||||
}
|
||||
.platform-msedge-rgba {
|
||||
background-color: rgba(0, 120, 215, 0.40);
|
||||
}
|
||||
.platform-opera-rgba {
|
||||
background-color: rgba(255, 27, 45, 0.40);
|
||||
background-color: rgba(250, 30, 78, 0.40);
|
||||
}
|
||||
.platform-playstation-rgba {
|
||||
background-color: rgba(3, 77, 162, 0.40);
|
||||
background-color: rgba(0, 48, 135, 0.40);
|
||||
}
|
||||
.platform-plex-rgba {
|
||||
background-color: rgba(229, 160, 13, 0.40);
|
||||
@@ -4023,10 +4022,10 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-color: rgba(229, 160, 13, 0.40);
|
||||
}
|
||||
.platform-roku-rgba {
|
||||
background-color: rgba(109, 60, 151, 0.40);
|
||||
background-color: rgba(103, 50, 147, 0.40);
|
||||
}
|
||||
.platform-safari-rgba {
|
||||
background-color: rgba(0, 169, 236, 0.40);
|
||||
background-color: rgba(0, 211, 249, 0.40);
|
||||
}
|
||||
.platform-samsung-rgba {
|
||||
background-color: rgba(3, 78, 162, 0.40);
|
||||
@@ -4041,7 +4040,7 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-color: rgba(3, 169, 244, 0.40);
|
||||
}
|
||||
.platform-windows-rgba {
|
||||
background-color: rgba(47, 192, 245, 0.40);
|
||||
background-color: rgba(0, 120, 215, 0.40);
|
||||
}
|
||||
.platform-wp-rgba {
|
||||
background-color: rgba(104, 33, 122, 0.40);
|
||||
@@ -2,7 +2,7 @@
<browserconfig>
<msapplication>
<tile>
<square150x150logo src="mstile-150x150.png?v=2.0.5"/>
<square150x150logo src="mstile-150x150.png?v=2.6.0"/>
<TileColor>#282a2d</TileColor>
</tile>
</msapplication>
@@ -6,12 +6,12 @@
"scope": "../../",
"icons": [
{
"src": "android-chrome-192x192.png?v=2.0.5",
"src": "android-chrome-192x192.png?v=2.6.0",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "android-chrome-256x256.png?v=2.0.5",
"src": "android-chrome-256x256.png?v=2.6.0",
"sizes": "256x256",
"type": "image/png"
}
@@ -1 +1,32 @@
|
||||
<svg version="1" xmlns="http://www.w3.org/2000/svg" width="700.000000pt" height="700.000000pt" viewBox="0 0 700.000000 700.000000" preserveAspectRatio="xMidYMid meet"><g transform="translate(0.000000,700.000000) scale(0.100000,-0.100000)" fill="#000000" stroke="none"><path d="M5695 6555 c-135 -34 -244 -94 -342 -189 -40 -39 -73 -76 -73 -83 0 -7 -4 -13 -10 -13 -14 0 -87 -156 -106 -225 -22 -83 -26 -234 -8 -320 17 -79 86 -230 133 -288 l30 -39 -48 -71 c-39 -57 -159 -228 -251 -357 -69 -97 -398 -564 -416 -590 -13 -19 -60 -87 -105 -150 -45 -63 -107 -151 -138 -195 -30 -44 -59 -84 -63 -90 -7 -9 -251 -354 -346 -490 -92 -131 -173 -245 -175 -245 -1 0 -34 9 -72 21 -130 38 -325 31 -454 -18 -168 -63 -313 -196 -385 -354 -39 -87 -65 -183 -68 -256 0 -24 -3 -43 -4 -43 -2 0 -43 46 -91 102 -49 57 -100 117 -115 133 -14 17 -128 149 -253 295 -125 146 -251 292 -279 324 -56 65 -77 89 -108 126 -58 68 -152 178 -172 200 -12 14 -50 57 -83 96 l-61 71 27 44 c58 93 91 217 92 342 2 161 -38 294 -125 412 -133 181 -316 279 -542 292 -470 27 -833 -434 -699 -887 74 -251 275 -437 530 -490 132 -28 334 -6 421 45 l42 24 173 -197 c96 -108 186 -210 200 -227 15 -16 163 -187 330 -380 458 -529 491 -567 526 -605 18 -19 31 -35 30 -36 -6 -5 -265 -161 -277 -167 -8 -4 -34 -20 -58 -35 -194 -124 -634 -382 -651 -382 -12 0 -46 20 -75 44 -60 49 -180 112 -242 127 -21 5 -48 12 -59 15 -11 4 -65 9 -121 11 -81 4 -117 1 -182 -15 -261 -66 -462 -270 -528 -537 -10 -40 -11 -217 -2 -258 5 -23 11 -51 14 -61 29 -145 147 -312 284 -403 123 -82 224 -114 370 -118 83 -3 124 2 240 29 36 9 133 57 187 94 60 41 111 91 153 152 14 19 28 37 32 40 19 15 71 140 89 217 17 73 20 107 16 198 -4 61 -7 121 -9 134 -3 28 -46 0 482 321 179 108 379 228 444 265 104 59 120 65 133 52 13 -13 12 -22 -10 -78 -49 -123 -58 -165 -62 -262 -7 -149 25 -286 89 -383 47 -72 91 -128 125 -158 19 -17 39 -36 45 -42 27 -25 136 -94 150 -94 8 0 17 -4 20 -9 3 -5 16 -11 28 -14 13 -3 50 -12 83 -21 74 -19 278 -15 345 7 198 65 358 196 435 358 16 34 20 36 49 28 17 -4 49 -10 71 -14 22 -3 99 -16 170 -30 72 -13 144 -26 160 -29 28 -5 101 -18 170 -31 17 -3 80 -14 140 -25 61 -11 124 -22 140 -25 17 -4 49 -9 72 -12 40 -5 42 -7 48 -47 14 -98 29 -147 73 -235 36 -75 61 -110 121 -171 154 -154 280 -210 480 -213 134 -2 180 5 273 40 212 83 371 262 427 481 24 93 25 255 2 342 -64 241 -245 428 -481 501 -62 18 -97 23 -200 22 -107 0 -136 -4 -205 -26 -44 -15 -109 -43 -145 -64 -83 -48 -208 -171 -250 -245 -17 -32 -35 -60 -38 -61 -4 -2 -46 4 -93 13 -48 10 -104 20 -125 23 -22 3 -46 8 -54 11 -8 3 -33 7 -55 10 -38 5 -58 9 -122 21 -16 3 -53 10 -83 15 -30 6 -66 12 -79 15 -13 2 -103 19 -200 36 -169 30 -207 42 -196 60 10 16 -28 155 -62 224 -19 39 -54 96 -78 127 l-45 58 40 52 c96 125 143 266 143 433 1 164 -27 263 -108 391 -19 30 -35 57 -35 61 0 3 31 49 69 102 57 81 450 638 625 889 28 40 62 88 76 107 14 18 194 274 400 568 291 414 379 534 393 531 10 -2 27 -6 37 -9 78 -25 240 -29 338 -9 433 87 677 573 489 974 -93 200 -255 332 -478 389 -87 22 -227 25 -304 6z"/></g></svg>
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
|
||||
width="350.000000pt" height="350.000000pt" viewBox="0 0 350.000000 350.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
<metadata>
|
||||
Created by potrace 1.11, written by Peter Selinger 2001-2013
|
||||
</metadata>
|
||||
<g transform="translate(0.000000,350.000000) scale(0.100000,-0.100000)"
|
||||
fill="#000000" stroke="none">
|
||||
<path d="M1566 3489 c-433 -46 -867 -274 -1141 -601 -404 -481 -526 -1100
|
||||
-334 -1688 91 -278 283 -569 498 -756 676 -589 1646 -589 2322 0 215 187 407
|
||||
478 498 756 142 436 113 895 -84 1305 -320 666 -1027 1061 -1759 984z m1147
|
||||
-604 c87 -36 146 -118 154 -214 10 -111 -39 -203 -137 -254 -49 -26 -63 -28
|
||||
-131 -25 l-76 3 -109 -154 c-60 -85 -190 -269 -290 -409 l-181 -255 26 -46
|
||||
c22 -38 26 -59 26 -121 0 -63 -5 -84 -29 -132 -27 -54 -28 -59 -13 -76 22 -24
|
||||
47 -86 47 -117 0 -14 6 -28 13 -30 6 -3 91 -16 187 -30 157 -23 175 -24 183
|
||||
-10 38 68 115 118 199 130 103 15 220 -51 268 -151 26 -52 29 -154 6 -207 -19
|
||||
-48 -82 -114 -129 -138 -151 -77 -346 22 -373 189 -7 46 15 39 -222 74 -142
|
||||
20 -155 21 -163 6 -65 -116 -225 -163 -347 -102 -116 58 -167 187 -126 323 8
|
||||
29 13 55 11 57 -3 3 -65 -33 -138 -79 -74 -46 -162 -100 -196 -120 l-62 -38 6
|
||||
-47 c11 -100 -46 -207 -136 -254 -43 -23 -66 -28 -121 -28 -77 0 -124 16 -175
|
||||
62 -48 41 -76 99 -82 167 -7 72 9 129 50 183 85 112 256 132 372 44 l31 -24
|
||||
174 109 c96 60 180 111 185 113 6 2 -2 16 -16 32 -35 39 -412 468 -414 471 0
|
||||
1 -21 -5 -45 -13 -57 -20 -142 -14 -196 14 -162 84 -197 288 -71 419 102 108
|
||||
291 101 386 -14 62 -75 78 -185 40 -273 l-21 -49 23 -28 c13 -16 102 -118 198
|
||||
-227 l175 -198 20 61 c26 78 64 125 124 155 63 31 117 39 177 26 49 -11 51
|
||||
-11 72 17 21 26 533 749 548 773 4 6 -4 28 -17 48 -88 133 -44 307 94 376 61
|
||||
31 163 36 221 11z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
BIN data/interfaces/default/images/logo-flat-white.ico Normal file
BIN data/interfaces/default/images/logo-flat-white.png Normal file
@@ -1,8 +1,5 @@
|
||||
<!-- Generated by IcoMoon.io -->
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="0 0 64 64">
|
||||
<title>android</title>
|
||||
<path fill="#fff" d="M31.944 21.318c5.556 0 11.113 0 16.67 0 0.042 0 0.084-0 0.126 0.001 0.548 0.012 0.554 0.012 0.554 0.555 0.002 2.526 0.001 5.052 0.001 7.577 0 5.789 0.003 11.577-0.002 17.365-0.001 1.197-0.344 2.274-1.205 3.155-0.759 0.777-1.671 1.191-2.753 1.22-0.757 0.019-1.515 0.011-2.273 0.016-0.772 0.005-0.774 0.006-0.774 0.751-0.001 2.505-0.032 5.010 0.013 7.514 0.024 1.305-0.386 2.363-1.302 3.29-1.214 1.23-3.457 1.485-4.769 0.396-1.051-0.873-1.725-1.978-1.715-3.423 0.019-2.547 0.010-5.093 0.003-7.64-0.003-1.010 0.144-0.869-0.858-0.876-1.158-0.008-2.315-0.005-3.473-0.001-0.829 0.003-0.76-0.103-0.76 0.794-0.002 2.505-0.027 5.010 0.010 7.514 0.019 1.278-0.377 2.325-1.281 3.235-1.199 1.208-3.371 1.494-4.716 0.437-1.067-0.838-1.779-1.932-1.77-3.386 0.017-2.61 0.005-5.219 0.005-7.829 0-0.147-0.008-0.295 0-0.442 0.013-0.24-0.092-0.339-0.334-0.335-0.736 0.012-1.473 0.002-2.209 0.022-0.575 0.015-1.129-0.058-1.673-0.251-1.682-0.597-2.691-2.017-2.737-3.858-0.063-2.566-0.031-5.135-0.035-7.703-0.007-5.304-0.010-10.608-0.016-15.912-0.001-0.568-0.017-1.136-0.018-1.704-0-0.464 0.006-0.472 0.494-0.479 0.989-0.013 1.978-0.023 2.968-0.023 4.609-0.002 9.219-0.001 13.829-0.001-0.001 0.006-0.001 0.014-0.001 0.021z"></path>
|
||||
<path fill="#fff" d="M31.944 19.89c-5.535 0-11.071 0.002-16.606-0.002-0.717-0-0.772 0.153-0.687-0.747 0.189-2.003 0.58-3.948 1.437-5.784 1.041-2.228 2.47-4.152 4.433-5.648 0.864-0.658 1.646-1.43 2.624-1.932 0.216-0.111 0.25-0.23 0.129-0.443-0.363-0.64-0.715-1.286-1.059-1.937-0.441-0.835-0.877-1.674-1.302-2.518-0.247-0.491-0.206-0.765 0.103-0.941 0.342-0.194 0.625-0.077 0.892 0.415 0.721 1.329 1.429 2.664 2.142 3.997 0.069 0.13 0.141 0.258 0.215 0.386 0.226 0.39 0.228 0.394 0.671 0.218 2.478-0.987 5.051-1.43 7.715-1.338 2.143 0.074 4.214 0.501 6.214 1.273 0.118 0.045 0.241 0.081 0.35 0.142 0.186 0.102 0.303 0.067 0.405-0.126 0.534-1.023 1.075-2.043 1.617-3.062 0.297-0.557 0.592-1.115 0.908-1.66 0.189-0.325 0.514-0.408 0.809-0.253 0.292 0.153 0.366 0.43 0.175 0.817-0.39 0.79-0.791 1.575-1.204 2.353-0.383 0.725-0.789 1.438-1.18 2.159-0.19 0.351-0.181 0.348 0.158 0.573 1.666 1.102 3.266 2.297 4.577 3.814 1.895 2.192 3.115 4.723 3.574 7.598 0.119 0.746 0.175 1.503 0.266 2.254 0.038 0.311-0.097 0.421-0.393 0.394-0.146-0.014-0.295-0.002-0.442-0.002-5.514 0-11.028 0-16.543 0zM25.561 12.038c-0.063-1.117-0.623-1.553-1.433-1.566-0.833-0.014-1.419 0.462-1.455 1.603-0.025 0.776 0.66 1.407 1.463 1.409 0.79 0.001 1.421-0.64 1.424-1.445zM39.872 13.483c0.788-0.007 1.497-0.676 1.439-1.441-0.076-0.997-0.486-1.549-1.506-1.576-0.841-0.022-1.403 0.67-1.386 1.605 0.016 0.816 0.635 1.418 1.453 1.411z"></path>
|
||||
<path fill="#fff" d="M50.587 32.655c0-2.715-0.003-5.429 0.001-8.143 0.003-1.77 0.853-2.959 2.453-3.698 0.717-0.331 1.433-0.52 2.172-0.287 0.794 0.251 1.537 0.649 2.123 1.273 0.519 0.552 0.839 1.207 0.944 1.957 0.052 0.374 0.082 0.754 0.083 1.131 0.005 5.282-0.005 10.564 0.010 15.846 0.004 1.249-0.402 2.288-1.278 3.179-1.245 1.267-3.35 1.546-4.76 0.479-1.076-0.815-1.719-1.943-1.745-3.342-0.019-1.010-0.013-2.020-0.014-3.030-0.002-1.789-0.001-3.578-0.001-5.366 0.004-0 0.008-0 0.012-0z"></path>
|
||||
<path fill="#fff" d="M13.369 32.464c0 2.335-0.001 4.669 0.001 7.004 0 0.63 0.047 1.263 0.002 1.889-0.072 1.003-0.541 1.811-1.23 2.554-0.931 1.004-2.059 1.18-3.323 1.058-1.55-0.15-3.156-2.028-3.181-3.665-0.004-0.231-0.015-0.462-0.014-0.694 0.003-5.406 0.007-10.812 0.011-16.218 0.001-1.655 0.863-2.749 2.268-3.501 0.683-0.366 1.397-0.602 2.158-0.402 1.622 0.427 3.305 1.697 3.292 3.834-0.016 2.713-0.004 5.427-0.004 8.141 0.007-0 0.013-0 0.020 0z"></path>
|
||||
<path fill="#fff" d="M46.73 40.88c-0.003 0-0.007 0-0.010 0-1.475 0-2.67-1.195-2.67-2.67s1.195-2.67 2.67-2.67c1.475 0 2.67 1.195 2.67 2.67v0c0 0 0 0 0 0 0 1.471-1.19 2.664-2.659 2.67h-0.001zM17.27 40.88c-1.475 0-2.67-1.195-2.67-2.67s1.195-2.67 2.67-2.67c1.475 0 2.67 1.195 2.67 2.67v0c0 0.003 0 0.007 0 0.010 0 1.469-1.191 2.66-2.66 2.66-0.003 0-0.007 0-0.011 0h0.001zM47.68 24.83l5.32-9.23c0.095-0.159 0.151-0.351 0.151-0.557 0-0.405-0.219-0.76-0.546-0.951l-0.005-0.003c-0.16-0.095-0.354-0.152-0.56-0.152-0.407 0-0.764 0.22-0.957 0.547l-0.003 0.005-5.38 9.34c-4.027-1.851-8.738-2.93-13.7-2.93s-9.673 1.079-13.909 3.016l0.209-0.086-5.39-9.34c-0.204-0.28-0.531-0.46-0.9-0.46-0.613 0-1.11 0.497-1.11 1.11 0 0.167 0.037 0.325 0.103 0.467l-0.003-0.007 5.33 9.23c-9.153 5.047-15.453 14.286-16.323 25.059l-0.007 0.111h64c-0.875-10.883-7.171-20.121-16.158-25.088l-0.162-0.082z"></path>
|
||||
</svg>
|
||||
@@ -24,17 +24,13 @@
|
||||
</div>
|
||||
<div id="currentActivity">
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted" id="dashboard-checking-activity"><i class="fa fa-refresh fa-spin"></i> Checking for activity...</div>
|
||||
<div class="text-muted" id="dashboard-checking-activity"><i class="fa fa-refresh fa-spin"></i> Checking for activity...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div id="dashboard-no-activity" class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% elif not config['first_run_complete']:
|
||||
<div id="dashboard-no-activity" class="text-muted">The Tautulli setup wizard has not been completed. Please click <a href="welcome">here</a> to go to the setup wizard.</div>
|
||||
% else:
|
||||
<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
Check the <a href="logs">logs</a> and verify your server connection in the <a href="settings#tab_tabs-plex_media_server">settings</a>.
|
||||
% endif
|
||||
</div>
|
||||
<div class="text-muted" id="dashboard-checking-activity"><i class="fa fa-refresh fa-spin"></i> Tautulli is connecting to the Plex server...</div>
|
||||
% endif
|
||||
</div>
|
||||
</div>
|
||||
@@ -65,7 +61,7 @@
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div id="home-stats" class="home-platforms">
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Loading stats...</div>
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Loading stats...</div>
|
||||
<br>
|
||||
</div>
|
||||
</div>
|
||||
@@ -84,7 +80,7 @@
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div id="library-stats" class="library-platforms">
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Loading stats...</div>
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Loading stats...</div>
|
||||
<br>
|
||||
</div>
|
||||
</div>
|
||||
@@ -132,17 +128,12 @@
|
||||
<div class="col-md-12">
|
||||
<div id="recentlyAdded" style="margin-right: -15px;">
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>
|
||||
<div id="dashboard-checking-recently-added" class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% else:
|
||||
<div class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
Check the <a href="logs">logs</a> and verify your server connection in the <a href="settings#tab_tabs-plex_media_server">settings</a>.
|
||||
% endif
|
||||
</div>
|
||||
<div id="dashboard-no-recently-added" class="text-muted"><i class="fa fa-refresh fa-spin"></i> Tautulli is connecting to your Plex server...</div>
|
||||
% endif
|
||||
<br>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -221,6 +212,28 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<% from plexpy.helpers import anon_url %>
|
||||
<div id="python2-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="python2-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">Unable to Update</h4>
|
||||
</div>
|
||||
<div class="modal-body" style="text-align: center;">
|
||||
<p>Tautulli is still running using Python 2 and cannot be updated past v2.6.3.</p>
|
||||
<p>Python 3 is required to continue receiving updates.</p>
|
||||
<p>
|
||||
<strong>Please see the <a href="${anon_url('https://github.com/Tautulli/Tautulli-Wiki/wiki/Upgrading-to-Python-3-%28Tautulli-v2.5%29')}" target="_blank" rel="noreferrer">wiki</a>
|
||||
for instructions on how to upgrade to Python 3.</strong>
|
||||
</p>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<input type="button" class="btn btn-bright" data-dismiss="modal" value="Close">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% endif
|
||||
|
||||
<div class="modal fade" id="ip-info-modal" tabindex="-1" role="dialog" aria-labelledby="ip-info-modal">
|
||||
@@ -229,7 +242,6 @@
|
||||
</%def>
|
||||
|
||||
<%def name="javascriptIncludes()">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
<script src="${http_root}js/jquery.scrollbar.min.js"></script>
|
||||
<script src="${http_root}js/jquery.mousewheel.min.js"></script>
|
||||
<script>
|
||||
@@ -259,8 +271,33 @@
}
});
}

% if _session['user_group'] == 'admin':
var msg_settings = ' Check the <a href="logs">logs</a> and verify your server connection in the <a href="settings#tab_tabs-plex_media_server">settings</a>.';
% else:
var msg_settings = '';
% endif

var error_msg = 'There was an error communicating with your Plex Server.' + msg_settings;

var server_status;
server_status = setInterval(function() {
$.getJSON('server_status', function (data) {
if (data.connected === true) {
clearInterval(server_status);
$('#currentActivity').html('<div id="dashboard-checking-activity" class="text-muted"><i class="fa fa-refresh fa-spin"></i> Checking for activity...</div>');
$('#recentlyAdded').html('<div id="dashboard-checking-recently-added" class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>');
activityConnected();
recentlyAddedConnected();
} else if (data.connected === false) {
clearInterval(server_status);
$('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">' + error_msg + '</div>');
$('#recentlyAdded').html('<div id="dashboard-no-recently-added" class="text-muted">' + error_msg + '</div>');
}
});
}, 1000);
</script>
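The script block above is new in this release: rather than deciding at render time whether the Plex server is up (the PLEX_SERVER_UP checks are dropped in the surrounding hunks), the dashboard now polls a server_status endpoint once per second and swaps the activity and recently-added panels in once data.connected is known. A minimal sketch of the same polling loop from outside the web UI follows; the base URL, port, and the use of the requests library are assumptions for illustration, and any authentication the endpoint needs is omitted. Only the endpoint name and the connected field come from the script above.

import time

import requests  # assumption: third-party client used only for this sketch

TAUTULLI_URL = "http://localhost:8181"  # assumption: default Tautulli address


def wait_for_plex_connection(timeout=60, interval=1):
    """Poll server_status, mirroring the dashboard's setInterval loop."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        data = requests.get(TAUTULLI_URL + "/server_status").json()
        if data.get("connected") is True:
            return True   # the page would call activityConnected()/recentlyAddedConnected()
        if data.get("connected") is False:
            return False  # the page would render error_msg in both panels
        time.sleep(interval)  # connection state not decided yet; keep polling
    raise TimeoutError("server_status never reported a connection state")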
% if 'current_activity' in config['home_sections'] and PLEX_SERVER_UP:
% if 'current_activity' in config['home_sections']:
<script>
|
||||
var defaultHandler = {
|
||||
get: function(target, name) {
|
||||
@@ -271,7 +308,7 @@
|
||||
var create_instances = [];
|
||||
var activity_ready = true;
|
||||
|
||||
$('#currentActivityHeader-bandwidth-tooltip').tooltip({ container: 'body', placement: 'right', delay: 50 });
|
||||
$('#currentActivityHeader-bandwidth-tooltip').tooltip({ container: 'body', placement: 'right', delay: 50 });
|
||||
|
||||
function getCurrentActivity() {
|
||||
activity_ready = false;
|
||||
@@ -297,13 +334,8 @@
|
||||
}
|
||||
|
||||
if (!(current_activity)) {
|
||||
% if _session['user_group'] == 'admin':
|
||||
var msg_settings = ' Check the <a href="logs">logs</a> and verify your server connection in the <a href="settings#tab_tabs-plex_media_server">settings</a>.';
|
||||
% else:
|
||||
var msg_settings = '';
|
||||
% endif
|
||||
$('#currentActivityHeader').hide();
|
||||
$('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.' + msg_settings + '</div>');
|
||||
$('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">' + error_msg + '</div>');
|
||||
return
|
||||
}
|
||||
|
||||
@@ -548,7 +580,7 @@
|
||||
}
|
||||
|
||||
// Update the progress bars, percent - 3 because of 3px padding-right
|
||||
$('#buffer-bar-' + key).width(parseInt(s.transcode_progress) - 3 + '%').html(s.transcode_progress + '%')
|
||||
$('#buffer-bar-' + key).css({width: parseInt(s.transcode_progress) - 3 + '%'}).html(s.transcode_progress + '%')
|
||||
.attr('data-original-title', 'Transcoder Progress ' + s.transcode_progress + '%');
|
||||
if (s.live !== 1) {
|
||||
var progress_bar = $('#progress-bar-' + key);
|
||||
@@ -625,34 +657,36 @@
|
||||
});
|
||||
}
|
||||
|
||||
getCurrentActivity();
|
||||
setInterval(function () {
|
||||
if (!(create_instances.length) && activity_ready) {
|
||||
getCurrentActivity();
|
||||
}
|
||||
}, ${config['home_refresh_interval'] * 1000});
|
||||
function activityConnected() {
|
||||
getCurrentActivity();
|
||||
setInterval(function () {
|
||||
if (!(create_instances.length) && activity_ready) {
|
||||
getCurrentActivity();
|
||||
}
|
||||
}, ${config['home_refresh_interval'] * 1000});
|
||||
|
||||
setInterval(function(){
|
||||
$('.progress_time_offset').each(function () {
|
||||
if ($(this).data('state') === 'playing' && $(this).data('view_offset') >= 0) {
|
||||
var view_offset = parseInt($(this).data('view_offset'));
|
||||
var stream_duration = parseInt($(this).data('stream_duration'));
|
||||
var timestamp = millisecondsToMinutes(Math.min(view_offset, stream_duration), false);
|
||||
$(this).html(timestamp).data('view_offset', Math.min(view_offset + 1000, stream_duration))
|
||||
}
|
||||
});
|
||||
$('.progress-bar').each(function () {
|
||||
if ($(this).data('state') === 'playing' && $(this).data('view_offset') >= 0) {
|
||||
var view_offset = parseInt($(this).data('view_offset'));
|
||||
var stream_duration = parseInt($(this).data('stream_duration'));
|
||||
var progress_percent = Math.floor(view_offset / stream_duration * 100);
|
||||
progress_percent = (progress_percent >= 0) ? Math.min(progress_percent, 100) : 100;
|
||||
$(this).width(progress_percent - 3 + '%').html(progress_percent + '%')
|
||||
.attr('data-original-title', 'Stream Progress ' + progress_percent + '%')
|
||||
.data('view_offset', Math.min(view_offset + 1000, stream_duration));
|
||||
}
|
||||
});
|
||||
}, 1000);
|
||||
setInterval(function(){
|
||||
$('.progress_time_offset').each(function () {
|
||||
if ($(this).data('state') === 'playing' && $(this).data('view_offset') >= 0) {
|
||||
var view_offset = parseInt($(this).data('view_offset'));
|
||||
var stream_duration = parseInt($(this).data('stream_duration'));
|
||||
var timestamp = millisecondsToMinutes(Math.min(view_offset, stream_duration), false);
|
||||
$(this).html(timestamp).data('view_offset', Math.min(view_offset + 1000, stream_duration))
|
||||
}
|
||||
});
|
||||
$('.progress-bar').each(function () {
|
||||
if ($(this).data('state') === 'playing' && $(this).data('view_offset') >= 0) {
|
||||
var view_offset = parseInt($(this).data('view_offset'));
|
||||
var stream_duration = parseInt($(this).data('stream_duration'));
|
||||
var progress_percent = Math.floor(view_offset / stream_duration * 100);
|
||||
progress_percent = (progress_percent >= 0) ? Math.min(progress_percent, 100) : 100;
|
||||
$(this).css({width: progress_percent - 3 + '%'}).html(progress_percent + '%')
|
||||
.attr('data-original-title', 'Stream Progress ' + progress_percent + '%')
|
||||
.data('view_offset', Math.min(view_offset + 1000, stream_duration));
|
||||
}
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
$('#currentActivity').on('click', '.external_ip-modal', function () {
|
||||
$.get('get_ip_address_details', {
|
||||
@@ -876,7 +910,7 @@
|
||||
getLibraryStats();
|
||||
</script>
|
||||
% endif
|
||||
% if 'recently_added' in config['home_sections'] and PLEX_SERVER_UP:
|
||||
% if 'recently_added' in config['home_sections']:
|
||||
<script>
|
||||
function recentlyAdded(recently_added_count, recently_added_type) {
|
||||
showMsg("Loading recently added items...", true, false, 0);
|
||||
@@ -904,7 +938,9 @@
|
||||
$('#recently-added-toggle-' + recently_added_type).closest('label').addClass('active');
|
||||
$('#recently-added-count').val(recently_added_count);
|
||||
|
||||
recentlyAdded(recently_added_count, recently_added_type);
|
||||
function recentlyAddedConnected() {
|
||||
recentlyAdded(recently_added_count, recently_added_type);
|
||||
}
|
||||
|
||||
function highlightAddedScrollerButton() {
|
||||
var scroller = $("#recently-added-row-scroller");
|
||||
@@ -996,4 +1032,16 @@
|
||||
});
|
||||
</script>
|
||||
% endif
|
||||
% if _session['user_group'] == 'admin':
|
||||
<script>
|
||||
const queryString = window.location.search;
|
||||
const urlParams = new URLSearchParams(queryString);
|
||||
if (urlParams.get('update') === 'python2') {
|
||||
$("#python2-modal").modal({
|
||||
backdrop: 'static',
|
||||
keyboard: false
|
||||
});
|
||||
}
|
||||
</script>
|
||||
% endif
|
||||
</%def>
|
@@ -303,16 +303,17 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
<div class="summary-content">
|
||||
<div class="summary-content-details-wrapper">
|
||||
% if data['rating']:
|
||||
% if data['rating_image']:
|
||||
% if data['rating_image'].startswith('imdb://'):
|
||||
<div class="critic-rating hidden-xs hidden-sm" title="${data['rating']}">
|
||||
<span class="rating-image rating-imdb"><strong>${data['rating']}</strong></span>
|
||||
<% rating = data['rating'] or data['audience_rating'] %>
|
||||
% if rating:
|
||||
% if data['audience_rating_image']:
|
||||
% if data['audience_rating_image'].startswith('imdb://'):
|
||||
<div class="critic-rating hidden-xs hidden-sm" title="${rating}">
|
||||
<span class="rating-image rating-imdb"><strong>${rating}</strong></span>
|
||||
</div>
|
||||
% endif
|
||||
% if data['rating_image'].startswith('themoviedb://'):
|
||||
<div class="critic-rating hidden-xs hidden-sm" title="${data['rating']}">
|
||||
<span class="rating-image rating-themoviedb"><strong>${get_percent(data['rating'], 10)}%</strong></span>
|
||||
% if data['audience_rating_image'].startswith('themoviedb://'):
|
||||
<div class="critic-rating hidden-xs hidden-sm" title="${rating}">
|
||||
<span class="rating-image rating-themoviedb"><strong>${get_percent(rating, 10)}%</strong></span>
|
||||
</div>
|
||||
% endif
|
||||
% if data['audience_rating_image'].startswith('rottentomatoes://'):
|
||||
@@ -326,8 +327,8 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
% endif
|
||||
% else:
|
||||
<div class="critic-rating hidden-xs hidden-sm" title="${data['rating']}">
|
||||
<i class="star-icon fa fa-star"></i> <strong>${get_percent(data['rating'], 10)}%</strong>
|
||||
<div class="critic-rating hidden-xs hidden-sm" title="${rating}">
|
||||
<i class="star-icon fa fa-star"></i> <strong>${get_percent(rating, 10)}%</strong>
|
||||
</div>
|
||||
% endif
|
||||
% endif
|
||||
|
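The info page hunk above replaces a hard dependency on data['rating'] with a fallback: the critic rating is used when present, otherwise the audience rating, and the same value feeds the percentage badge. In plain Python terms (get_percent below is a stand-in for the template helper, assumed to scale a 0-10 rating to a whole percentage):

def get_percent(value, total):
    # Stand-in for the helper used in the template; assumed behaviour only.
    return int(round(float(value) / float(total) * 100)) if value else 0

data = {'rating': None, 'audience_rating': 7.5}
rating = data['rating'] or data['audience_rating']  # falls back to 7.5
print(get_percent(rating, 10))  # 75, rendered as "75%"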
@@ -18,21 +18,21 @@
|
||||
<link href="${http_root}css/font-awesome.v4-shims.min.css" rel="stylesheet">
|
||||
|
||||
<!-- Favicons -->
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.0.5">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.0.5">
|
||||
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.0.5">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.6.0">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.6.0">
|
||||
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.6.0">
|
||||
|
||||
<!-- ICONS -->
|
||||
<!-- Android -->
|
||||
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.0.5" crossorigin="use-credentials">
|
||||
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.6.0" crossorigin="use-credentials">
|
||||
<meta name="theme-color" content="#282a2d">
|
||||
<!-- Apple -->
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${http_root}images/favicon/apple-touch-icon.png?v=2.0.5">
|
||||
<link rel="mask-icon" href="${http_root}images/favicon/safari-pinned-tab.svg?v=2.0.5" color="#282a2d">
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${http_root}images/favicon/apple-touch-icon.png?v=2.6.0">
|
||||
<link rel="mask-icon" href="${http_root}images/favicon/safari-pinned-tab.svg?v=2.6.0" color="#282a2d">
|
||||
<meta name="apple-mobile-web-app-title" content="Tautulli">
|
||||
<!-- Microsoft -->
|
||||
<meta name="application-name" content="Tautulli">
|
||||
<meta name="msapplication-config" content="${http_root}images/favicon/browserconfig.xml?v=2.0.5">
|
||||
<meta name="msapplication-config" content="${http_root}images/favicon/browserconfig.xml?v=2.6.0">
|
||||
</head>
|
||||
|
||||
<body style="margin: 0; overflow: auto;">
|
||||
@@ -159,16 +159,20 @@
|
||||
data: data,
|
||||
dataType: 'json',
|
||||
statusCode: {
|
||||
200: function() {
|
||||
200: function(xhr, status) {
|
||||
window.location = "${redirect_uri or http_root}";
|
||||
},
|
||||
401: function() {
|
||||
401: function(xhr, status) {
|
||||
if (plex) {
|
||||
$('#sign-in-alert').text('Invalid Plex Login.').show();
|
||||
} else {
|
||||
$('#sign-in-alert').text('Incorrect username or password.').show();
|
||||
$('#username').focus();
|
||||
}
|
||||
},
|
||||
429: function(xhr, status) {
|
||||
var retry = Math.ceil(xhr.getResponseHeader('Retry-After') / 60)
|
||||
$('#sign-in-alert').text('Too many login attempts. Try again in ' + retry + ' minute(s).').show();
|
||||
}
|
||||
},
|
||||
complete: function() {
|
||||
|
@@ -8,9 +8,9 @@
|
||||
<meta charset="utf-8">
|
||||
<title>Tautulli - ${title} | ${server_name}</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.0.5">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.0.5">
|
||||
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.0.5">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.6.0">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.6.0">
|
||||
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.6.0">
|
||||
<link href="${http_root}css/tautulli.css${cache_param}" rel="stylesheet">
|
||||
<style>
|
||||
* {
|
||||
|
@@ -220,7 +220,7 @@
|
||||
<p class="help-block">Check for Tautulli updates periodically.</p>
|
||||
</div>
|
||||
<div id="git_update_options">
|
||||
% if not plexpy.FROZEN:
|
||||
% if not plexpy.SNAP and not plexpy.FROZEN:
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="plexpy_auto_update" name="plexpy_auto_update" value="1" ${config['plexpy_auto_update']} ${docker_setting}> Update Automatically ${docker_msg | n}
|
||||
@@ -977,15 +977,21 @@
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_consecutive" id="notify_consecutive" value="1" ${config['notify_consecutive']}> Allow Consecutive Notifications
|
||||
<input type="checkbox" name="notify_consecutive" id="notify_consecutive" value="1" ${config['notify_consecutive']}> Allow Playback Stop Notifications Exceeding Watched Percent
|
||||
</label>
|
||||
<p class="help-block">Enable to allow sending of consecutive notifications (i.e. both watched & stopped notifications).</p>
|
||||
<p class="help-block">
|
||||
Enable to allow sending of playback stop notifications after the watched percent is exceeded.
|
||||
Disable to only send playback stop notifications below the watched percent.
|
||||
</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_concurrent_by_ip" id="notify_concurrent_by_ip" value="1" ${config['notify_concurrent_by_ip']}> User Concurrent Streams Notifications by IP Address
|
||||
</label>
|
||||
<p class="help-block">Enable to only send a notification of concurrent streams by a single user from different IP addresses.</p>
|
||||
<p class="help-block">
|
||||
Enable to only send a concurrent streams notification by a single user from different IP addresses.
|
||||
Disable to send a concurrent streams notification anytime the concurrent stream threshold is exceeded regardless of IP address.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_concurrent_threshold">User Concurrent Stream Threshold</label>
|
||||
@@ -997,6 +1003,15 @@
|
||||
</div>
|
||||
<p class="help-block">The number of concurrent streams by a single user for Tautulli to trigger a notification. Minimum 2.</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_new_device_initial_only" id="notify_new_device_initial_only" value="1" ${config['notify_new_device_initial_only']}> User New Device Notification First Time Only
|
||||
</label>
|
||||
<p class="help-block">
|
||||
Enable to only send a new device notification the first time a user streams from a new device.
|
||||
Disable to send a new device notification everytime a user streams from the device until it is recorded in history (i.e. exceeds the ignore interval).
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="notify_concurrent_threshold">Continued Session Threshold</label>
|
||||
<div class="row">
|
||||
@@ -2911,10 +2926,6 @@ $(document).ready(function() {
|
||||
});
|
||||
});
|
||||
|
||||
$('#http_base_url').change(function () {
|
||||
$(this).val($(this).val().replace(/\/*$/, ''));
|
||||
});
|
||||
|
||||
$('#http_root').change(function() {
|
||||
setBaseURLSuffix();
|
||||
});
|
||||
|
@@ -21,21 +21,21 @@
|
||||
<link href="${http_root}css/font-awesome.v4-shims.min.css" rel="stylesheet">
|
||||
|
||||
<!-- Favicons -->
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.0.5">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.0.5">
|
||||
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.0.5">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="${http_root}images/favicon/favicon-32x32.png?v=2.6.0">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="${http_root}images/favicon/favicon-16x16.png?v=2.6.0">
|
||||
<link rel="shortcut icon" href="${http_root}images/favicon/favicon.ico?v=2.6.0">
|
||||
|
||||
<!-- ICONS -->
|
||||
<!-- Android -->
|
||||
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.0.5" crossorigin="use-credentials">
|
||||
<link rel="manifest" href="${http_root}images/favicon/manifest.json?v=2.6.0" crossorigin="use-credentials">
|
||||
<meta name="theme-color" content="#282a2d">
|
||||
<!-- Apple -->
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${http_root}images/favicon/apple-touch-icon.png?v=2.0.5">
|
||||
<link rel="mask-icon" href="${http_root}images/favicon/safari-pinned-tab.svg?v=2.0.5" color="#282a2d">
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${http_root}images/favicon/apple-touch-icon.png?v=2.6.0">
|
||||
<link rel="mask-icon" href="${http_root}images/favicon/safari-pinned-tab.svg?v=2.6.0" color="#282a2d">
|
||||
<meta name="apple-mobile-web-app-title" content="Tautulli">
|
||||
<!-- Microsoft -->
|
||||
<meta name="application-name" content="Tautulli">
|
||||
<meta name="msapplication-config" content="${http_root}images/favicon/browserconfig.xml?v=2.0.5">
|
||||
<meta name="msapplication-config" content="${http_root}images/favicon/browserconfig.xml?v=2.6.0">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
@@ -521,7 +521,7 @@
|
||||
line-height: 100%;
|
||||
}
|
||||
|
||||
.apple-link a {
|
||||
a[x-apple-data-detectors] {
|
||||
color: inherit !important;
|
||||
font-family: inherit !important;
|
||||
font-size: inherit !important;
|
||||
|
@@ -521,7 +521,7 @@
|
||||
line-height: 100%;
|
||||
}
|
||||
|
||||
.apple-link a {
|
||||
a[x-apple-data-detectors] {
|
||||
color: inherit !important;
|
||||
font-family: inherit !important;
|
||||
font-size: inherit !important;
|
||||
|
@@ -1,3 +1,3 @@
from .core import where
from .core import contents, where

__version__ = "2019.11.28"
__version__ = "2020.11.08"

@@ -1,2 +1,12 @@
from certifi import where
print(where())
import argparse

from certifi import contents, where

parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()

if args.contents:
    print(contents())
else:
    print(where())
@@ -58,38 +58,6 @@ AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
|
||||
TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
|
||||
# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
|
||||
# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
|
||||
# Serial: 206684696279472310254277870180966723415
|
||||
# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
|
||||
# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
|
||||
# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
|
||||
CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
|
||||
cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
|
||||
LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
|
||||
aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
|
||||
dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
|
||||
VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
|
||||
aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
|
||||
bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
|
||||
IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
|
||||
LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
|
||||
N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
|
||||
KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
|
||||
kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
|
||||
CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
|
||||
Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
|
||||
imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
|
||||
2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
|
||||
DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
|
||||
/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
|
||||
F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
|
||||
TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
|
||||
# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
|
||||
# Label: "Entrust.net Premium 2048 Secure Server CA"
|
||||
@@ -152,39 +120,6 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
|
||||
R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
|
||||
# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
|
||||
# Label: "AddTrust External Root"
|
||||
# Serial: 1
|
||||
# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
|
||||
# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
|
||||
# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
|
||||
MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
|
||||
IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
|
||||
MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
|
||||
FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
|
||||
bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
|
||||
dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
|
||||
H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
|
||||
uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
|
||||
mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
|
||||
a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
|
||||
E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
|
||||
WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
|
||||
VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
|
||||
Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
|
||||
cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
|
||||
IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
|
||||
AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
|
||||
YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
|
||||
6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
|
||||
Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
|
||||
c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
|
||||
mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
|
||||
# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
|
||||
# Label: "Entrust Root Certification Authority"
|
||||
@@ -640,46 +575,6 @@ VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
|
||||
WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: O=Government Root Certification Authority
|
||||
# Subject: O=Government Root Certification Authority
|
||||
# Label: "Taiwan GRCA"
|
||||
# Serial: 42023070807708724159991140556527066870
|
||||
# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
|
||||
# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
|
||||
# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
|
||||
MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
|
||||
YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
|
||||
PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
|
||||
Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
|
||||
AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
|
||||
IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
|
||||
gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
|
||||
yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
|
||||
F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
|
||||
jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
|
||||
ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
|
||||
VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
|
||||
YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
|
||||
EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
|
||||
Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
|
||||
DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
|
||||
MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
|
||||
UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
|
||||
TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
|
||||
qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
|
||||
ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
|
||||
JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
|
||||
hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
|
||||
EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
|
||||
nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
|
||||
udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
|
||||
ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
|
||||
LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
|
||||
pYYsfPQS
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
|
||||
# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
|
||||
# Label: "DigiCert Assured ID Root CA"
|
||||
@@ -1127,38 +1022,6 @@ fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
|
||||
GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
|
||||
# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
|
||||
# Label: "OISTE WISeKey Global Root GA CA"
|
||||
# Serial: 86718877871133159090080555911823548314
|
||||
# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
|
||||
# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
|
||||
# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
|
||||
ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
|
||||
aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
|
||||
ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
|
||||
NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
|
||||
A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
|
||||
VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
|
||||
SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
|
||||
MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
|
||||
VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
|
||||
w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
|
||||
mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
|
||||
4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
|
||||
4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
|
||||
DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
|
||||
EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
|
||||
SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
|
||||
ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
|
||||
vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
|
||||
hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
|
||||
Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
|
||||
/L7fCg0=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Certigna O=Dhimyotis
|
||||
# Subject: CN=Certigna O=Dhimyotis
|
||||
# Label: "Certigna"
|
||||
@@ -1499,47 +1362,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
|
||||
XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
|
||||
# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
|
||||
# Label: "Staat der Nederlanden Root CA - G2"
|
||||
# Serial: 10000012
|
||||
# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
|
||||
# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
|
||||
# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
|
||||
TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
|
||||
dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
|
||||
DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
|
||||
ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
|
||||
b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
|
||||
qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
|
||||
uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
|
||||
Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
|
||||
pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
|
||||
5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
|
||||
UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
|
||||
GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
|
||||
5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
|
||||
6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
|
||||
eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
|
||||
B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
|
||||
BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
|
||||
L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
|
||||
HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
|
||||
SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
|
||||
CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
|
||||
5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
|
||||
IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
|
||||
gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
|
||||
+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
|
||||
vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
|
||||
bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
|
||||
N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
|
||||
Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
|
||||
ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
|
||||
# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
|
||||
# Label: "Hongkong Post Root CA 1"
|
||||
@@ -2140,6 +1962,45 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
|
||||
SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
|
||||
# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
|
||||
# Label: "EC-ACC"
|
||||
# Serial: -23701579247955709139626555126524820479
|
||||
# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09
|
||||
# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8
|
||||
# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB
|
||||
8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy
|
||||
dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1
|
||||
YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3
|
||||
dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh
|
||||
IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD
|
||||
LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG
|
||||
EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g
|
||||
KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD
|
||||
ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu
|
||||
bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg
|
||||
ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN
|
||||
BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R
|
||||
85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm
|
||||
4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV
|
||||
HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd
|
||||
QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t
|
||||
lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB
|
||||
o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E
|
||||
BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4
|
||||
opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo
|
||||
dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW
|
||||
ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN
|
||||
AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y
|
||||
/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k
|
||||
SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy
|
||||
Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS
|
||||
Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl
|
||||
nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
|
||||
# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
|
||||
# Label: "Hellenic Academic and Research Institutions RootCA 2011"
|
||||
@@ -2352,38 +2213,6 @@ e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
|
||||
TpPDpFQUWw==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
|
||||
# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
|
||||
# Label: "EE Certification Centre Root CA"
|
||||
# Serial: 112324828676200291871926431888494945866
|
||||
# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
|
||||
# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
|
||||
# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
|
||||
MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
|
||||
czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
|
||||
CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
|
||||
MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
|
||||
ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
|
||||
b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
|
||||
AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
|
||||
euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
|
||||
bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
|
||||
WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
|
||||
MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
|
||||
1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
|
||||
VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
|
||||
zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
|
||||
BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
|
||||
BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
|
||||
v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
|
||||
E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
|
||||
uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
|
||||
iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
|
||||
GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
|
||||
# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
|
||||
# Label: "D-TRUST Root Class 3 CA 2 2009"
|
||||
@@ -3749,47 +3578,6 @@ CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
|
||||
1KyLa2tJElMzrdfkviT8tQp21KW8EA==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
|
||||
# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
|
||||
# Label: "LuxTrust Global Root 2"
|
||||
# Serial: 59914338225734147123941058376788110305822489521
|
||||
# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
|
||||
# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
|
||||
# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
|
||||
BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
|
||||
BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
|
||||
MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
|
||||
LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
|
||||
AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
|
||||
ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
|
||||
hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
|
||||
EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
|
||||
Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
|
||||
zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
|
||||
96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
|
||||
j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
|
||||
DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
|
||||
8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
|
||||
X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
|
||||
hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
|
||||
KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
|
||||
Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
|
||||
+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
|
||||
BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
|
||||
BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
|
||||
jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
|
||||
loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
|
||||
qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
|
||||
2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
|
||||
JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
|
||||
zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
|
||||
LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
|
||||
x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
|
||||
oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
|
||||
# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
|
||||
# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
|
||||
@@ -4600,3 +4388,219 @@ IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
|
||||
5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
|
||||
n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
|
||||
# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
|
||||
# Label: "Microsoft ECC Root Certificate Authority 2017"
|
||||
# Serial: 136839042543790627607696632466672567020
|
||||
# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67
|
||||
# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5
|
||||
# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw
|
||||
CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD
|
||||
VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw
|
||||
MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV
|
||||
UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy
|
||||
b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq
|
||||
hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR
|
||||
ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb
|
||||
hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E
|
||||
BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3
|
||||
FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV
|
||||
L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB
|
||||
iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
|
||||
# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
|
||||
# Label: "Microsoft RSA Root Certificate Authority 2017"
|
||||
# Serial: 40975477897264996090493496164228220339
|
||||
# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47
|
||||
# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74
|
||||
# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl
|
||||
MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw
|
||||
NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5
|
||||
IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG
|
||||
EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N
|
||||
aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi
|
||||
MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ
|
||||
Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0
|
||||
ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1
|
||||
HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm
|
||||
gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ
|
||||
jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc
|
||||
aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG
|
||||
YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6
|
||||
W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K
|
||||
UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH
|
||||
+FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q
|
||||
W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/
|
||||
BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC
|
||||
NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC
|
||||
LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC
|
||||
gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6
|
||||
tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh
|
||||
SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2
|
||||
TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3
|
||||
pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR
|
||||
xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp
|
||||
GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9
|
||||
dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN
|
||||
AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB
|
||||
RA+GsCyRxj3qrg+E
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
|
||||
# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
|
||||
# Label: "e-Szigno Root CA 2017"
|
||||
# Serial: 411379200276854331539784714
|
||||
# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98
|
||||
# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1
|
||||
# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV
|
||||
BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk
|
||||
LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv
|
||||
b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ
|
||||
BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg
|
||||
THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v
|
||||
IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv
|
||||
xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H
|
||||
Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
|
||||
A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB
|
||||
eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo
|
||||
jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ
|
||||
+efcMQ==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2
|
||||
# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2
|
||||
# Label: "certSIGN Root CA G2"
|
||||
# Serial: 313609486401300475190
|
||||
# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7
|
||||
# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32
|
||||
# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV
|
||||
BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g
|
||||
Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ
|
||||
BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ
|
||||
R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF
|
||||
dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw
|
||||
vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ
|
||||
uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp
|
||||
n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs
|
||||
cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW
|
||||
xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P
|
||||
rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF
|
||||
DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx
|
||||
DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy
|
||||
LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C
|
||||
eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB
|
||||
/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ
|
||||
d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq
|
||||
kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC
|
||||
b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl
|
||||
qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0
|
||||
OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c
|
||||
NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk
|
||||
ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO
|
||||
pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj
|
||||
03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk
|
||||
PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE
|
||||
1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX
|
||||
QRBdJ3NghVdJIgc=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
|
||||
# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
|
||||
# Label: "Trustwave Global Certification Authority"
|
||||
# Serial: 1846098327275375458322922162
|
||||
# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e
|
||||
# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5
|
||||
# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw
|
||||
CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x
|
||||
ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1
|
||||
c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx
|
||||
OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI
|
||||
SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI
|
||||
b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp
|
||||
Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
|
||||
ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn
|
||||
swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu
|
||||
7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8
|
||||
1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW
|
||||
80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP
|
||||
JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l
|
||||
RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw
|
||||
hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10
|
||||
coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc
|
||||
BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n
|
||||
twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud
|
||||
EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud
|
||||
DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W
|
||||
0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe
|
||||
uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q
|
||||
lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB
|
||||
aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE
|
||||
sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT
|
||||
MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe
|
||||
qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh
|
||||
VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8
|
||||
h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9
|
||||
EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK
|
||||
yeC2nOnOcXHebD8WpHk=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
|
||||
# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
|
||||
# Label: "Trustwave Global ECC P256 Certification Authority"
|
||||
# Serial: 4151900041497450638097112925
|
||||
# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54
|
||||
# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf
|
||||
# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD
|
||||
VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
|
||||
BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
|
||||
YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
|
||||
NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G
|
||||
A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
|
||||
d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
|
||||
Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG
|
||||
SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN
|
||||
FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w
|
||||
DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw
|
||||
CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh
|
||||
DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
|
||||
# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
|
||||
# Label: "Trustwave Global ECC P384 Certification Authority"
|
||||
# Serial: 2704997926503831671788816187
|
||||
# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6
|
||||
# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2
|
||||
# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD
|
||||
VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
|
||||
BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
|
||||
YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
|
||||
NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G
|
||||
A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
|
||||
d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
|
||||
Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB
|
||||
BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ
|
||||
j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF
|
||||
1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G
|
||||
A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3
|
||||
AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC
|
||||
MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu
|
||||
Sw==
|
||||
-----END CERTIFICATE-----
@@ -4,12 +4,57 @@
|
||||
certifi.py
|
||||
~~~~~~~~~~
|
||||
|
||||
This module returns the installation location of cacert.pem.
|
||||
This module returns the installation location of cacert.pem or its contents.
|
||||
"""
|
||||
import os
|
||||
|
||||
try:
|
||||
from importlib.resources import path as get_path, read_text
|
||||
|
||||
def where():
|
||||
f = os.path.dirname(__file__)
|
||||
_CACERT_CTX = None
|
||||
_CACERT_PATH = None
|
||||
|
||||
return os.path.join(f, 'cacert.pem')
|
||||
def where():
|
||||
# This is slightly terrible, but we want to delay extracting the file
|
||||
# in cases where we're inside of a zipimport situation until someone
|
||||
# actually calls where(), but we don't want to re-extract the file
|
||||
# on every call of where(), so we'll do it once then store it in a
|
||||
# global variable.
|
||||
global _CACERT_CTX
|
||||
global _CACERT_PATH
|
||||
if _CACERT_PATH is None:
|
||||
# This is slightly janky, the importlib.resources API wants you to
|
||||
# manage the cleanup of this file, so it doesn't actually return a
|
||||
# path, it returns a context manager that will give you the path
|
||||
# when you enter it and will do any cleanup when you leave it. In
|
||||
# the common case of not needing a temporary file, it will just
|
||||
# return the file system location and the __exit__() is a no-op.
|
||||
#
|
||||
# We also have to hold onto the actual context manager, because
|
||||
# it will do the cleanup whenever it gets garbage collected, so
|
||||
# we will also store that at the global level as well.
|
||||
_CACERT_CTX = get_path("certifi", "cacert.pem")
|
||||
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
||||
|
||||
return _CACERT_PATH
|
||||
|
||||
|
||||
except ImportError:
|
||||
# This fallback will work for Python versions prior to 3.7 that lack the
|
||||
# importlib.resources module but relies on the existing `where` function
|
||||
# so won't address issues with environments like PyOxidizer that don't set
|
||||
# __file__ on modules.
|
||||
def read_text(_module, _path, encoding="ascii"):
|
||||
with open(where(), "r", encoding=encoding) as data:
|
||||
return data.read()
|
||||
|
||||
# If we don't have importlib.resources, then we will just do the old logic
|
||||
# of assuming we're on the filesystem and munge the path directly.
|
||||
def where():
|
||||
f = os.path.dirname(__file__)
|
||||
|
||||
return os.path.join(f, "cacert.pem")
|
||||
|
||||
|
||||
def contents():
|
||||
return read_text("certifi", "cacert.pem", encoding="ascii")
|
||||
|
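For reference, a minimal usage sketch of the two entry points in the hunk above; it assumes only that the bundled certifi package is importable as usual:

    import ssl
    import certifi

    # where() returns a filesystem path to cacert.pem, extracting the file
    # lazily when the package is imported from a zip archive.
    context = ssl.create_default_context(cafile=certifi.where())

    # contents() returns the PEM bundle itself as a string, for callers that
    # want the certificate data rather than a path.
    pem = certifi.contents()
    print(pem.count("BEGIN CERTIFICATE"), "certificates bundled")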
@@ -54,7 +54,7 @@ class AlertListener(threading.Thread):
|
||||
|
||||
def stop(self):
|
||||
""" Stop the AlertListener thread. Once the notifier is stopped, it cannot be directly
|
||||
started again. You must call :func:`plexapi.server.PlexServer.startAlertListener()`
|
||||
started again. You must call :func:`~plexapi.server.PlexServer.startAlertListener`
|
||||
from a PlexServer instance.
|
||||
"""
|
||||
log.info('Stopping AlertListener.')
|
||||
|
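For context, a hedged sketch of the start/stop pair referenced in this docstring; the callback signature below is an assumption (it simply receives the decoded alert payload):

    from plexapi.server import PlexServer

    plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')

    def on_alert(data):
        # data is the parsed websocket notification sent by the Plex server.
        print(data.get('type'))

    listener = plex.startAlertListener(callback=on_alert)
    # ... later; a stopped listener cannot be restarted, a new one must be
    # created via startAlertListener().
    listener.stop()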
@@ -10,6 +10,8 @@ class Audio(PlexPartialObject):
|
||||
|
||||
Attributes:
|
||||
addedAt (datetime): Datetime this item was added to the library.
|
||||
art (str): URL to artwork image.
|
||||
artBlurHash (str): BlurHash string for artwork image.
|
||||
index (sting): Index Number (often the track number).
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
lastViewedAt (datetime): Datetime item was last accessed.
|
||||
@@ -18,6 +20,7 @@ class Audio(PlexPartialObject):
|
||||
ratingKey (int): Unique key identifying this item.
|
||||
summary (str): Summary of the artist, track, or album.
|
||||
thumb (str): URL to thumbnail image.
|
||||
thumbBlurHash (str): BlurHash string for thumbnail image.
|
||||
title (str): Artist, Album or Track title. (Jason Mraz, We Sing, Lucky, etc.)
|
||||
titleSort (str): Title to use when sorting (defaults to title).
|
||||
type (str): 'artist', 'album', or 'track'.
|
||||
@@ -32,6 +35,8 @@ class Audio(PlexPartialObject):
|
||||
self._data = data
|
||||
self.listType = 'audio'
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.art = data.attrib.get('art')
|
||||
self.artBlurHash = data.attrib.get('artBlurHash')
|
||||
self.index = data.attrib.get('index')
|
||||
self.key = data.attrib.get('key')
|
||||
self.lastViewedAt = utils.toDatetime(data.attrib.get('lastViewedAt'))
|
||||
@@ -41,6 +46,7 @@ class Audio(PlexPartialObject):
|
||||
self.ratingKey = utils.cast(int, data.attrib.get('ratingKey'))
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.thumbBlurHash = data.attrib.get('thumbBlurHash')
|
||||
self.title = data.attrib.get('title')
|
||||
self.titleSort = data.attrib.get('titleSort', self.title)
|
||||
self.type = data.attrib.get('type')
|
||||
@@ -69,20 +75,20 @@ class Audio(PlexPartialObject):
|
||||
|
||||
def sync(self, bitrate, client=None, clientId=None, limit=None, title=None):
|
||||
""" Add current audio (artist, album or track) as sync item for specified device.
|
||||
See :func:`plexapi.myplex.MyPlexAccount.sync()` for possible exceptions.
|
||||
See :func:`~plexapi.myplex.MyPlexAccount.sync` for possible exceptions.
|
||||
|
||||
Parameters:
|
||||
bitrate (int): maximum bitrate for synchronized music, better use one of MUSIC_BITRATE_* values from the
|
||||
module :mod:`plexapi.sync`.
|
||||
client (:class:`plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
module :mod:`~plexapi.sync`.
|
||||
client (:class:`~plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
limit (int): maximum count of items to sync, unlimited if `None`.
|
||||
title (str): descriptive title for the new :class:`plexapi.sync.SyncItem`, if empty the value would be
|
||||
title (str): descriptive title for the new :class:`~plexapi.sync.SyncItem`, if empty the value would be
|
||||
generated from metadata of current media.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
"""
|
||||
|
||||
from plexapi.sync import SyncItem, Policy, MediaSettings
|
||||
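A hedged end-to-end sketch of the sync call documented above; the account, server and device names are placeholders, and 192 kbps stands in for one of the MUSIC_BITRATE_* constants mentioned in the docstring:

    from plexapi.myplex import MyPlexAccount

    account = MyPlexAccount('<USERNAME>', '<PASSWORD>')
    plex = account.resource('<SERVERNAME>').connect()

    album = plex.library.section('Music').get('We Sing')
    item = album.sync(bitrate=192, client=account.device('<DEVICENAME>'),
                      title='Road trip')
    print(item.title)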
@@ -111,7 +117,6 @@ class Artist(Audio):
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'artist'
|
||||
art (str): Artist artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
countries (list): List of :class:`~plexapi.media.Country` objects this artist respresents.
|
||||
genres (list): List of :class:`~plexapi.media.Genre` objects this artist respresents.
|
||||
guid (str): Unknown (unique ID; com.plexapp.agents.plexmusic://gracenote/artist/05517B8701668D28?lang=en)
|
||||
@@ -122,17 +127,10 @@ class Artist(Audio):
|
||||
TAG = 'Directory'
|
||||
TYPE = 'artist'
|
||||
|
||||
_include = ('?checkFiles=1&includeExtras=1&includeRelated=1'
|
||||
'&includeOnDeck=1&includeChapters=1&includePopularLeaves=1'
|
||||
'&includeMarkers=1&includeConcerts=1&includePreferences=1'
|
||||
'&includeBandwidths=1&includeLoudnessRamps=1')
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Audio._loadData(self, data)
|
||||
self.key = self.key.replace('/children', '') # FIX_BUG_50
|
||||
self._details_key = self.key + self._include
|
||||
self.art = data.attrib.get('art')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.locations = self.listAttrs(data, 'path', etag='Location')
|
||||
self.countries = self.findItems(data, media.Country)
|
||||
@@ -187,7 +185,7 @@ class Artist(Audio):
|
||||
keep_original_name (bool): Set True to keep the original filename as stored in
|
||||
the Plex server. False will create a new filename with the format
|
||||
"<Atrist> - <Album> <Track>".
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL()` will
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL` will
|
||||
be returned and the additional arguments passed in will be sent to that
|
||||
function. If kwargs is not specified, the media items will be downloaded
|
||||
and saved to disk.
|
||||
@@ -206,7 +204,6 @@ class Album(Audio):
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'album'
|
||||
art (str): Album artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
genres (list): List of :class:`~plexapi.media.Genre` objects this album represents.
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
originallyAvailableAt (datetime): Datetime this album was released.
|
||||
@@ -227,11 +224,10 @@ class Album(Audio):
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Audio._loadData(self, data)
|
||||
self.art = data.attrib.get('art')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.leafCount = utils.cast(int, data.attrib.get('leafCount'))
|
||||
self.loudnessAnalysisVersion = utils.cast(int, data.attrib.get('loudnessAnalysisVersion'))
|
||||
self.key = self.key.replace('/children', '') # fixes bug #50
|
||||
self.key = self.key.replace('/children', '') # FIX_BUG_50
|
||||
self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d')
|
||||
self.parentGuid = data.attrib.get('parentGuid')
|
||||
self.parentKey = data.attrib.get('parentKey')
|
||||
@@ -279,7 +275,7 @@ class Album(Audio):
|
||||
keep_original_name (bool): Set True to keep the original filename as stored in
|
||||
the Plex server. False will create a new filename with the format
|
||||
"<Atrist> - <Album> <Track>".
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL()` will
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL` will
|
||||
be returned and the additional arguments passed in will be sent to that
|
||||
function. If kwargs is not specified, the media items will be downloaded
|
||||
and saved to disk.
|
||||
@@ -301,7 +297,6 @@ class Track(Audio, Playable):
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'track'
|
||||
art (str): Track artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
chapterSource (TYPE): Unknown
|
||||
duration (int): Length of this album in seconds.
|
||||
grandparentArt (str): Album artist artwork.
|
||||
@@ -332,17 +327,10 @@ class Track(Audio, Playable):
|
||||
TAG = 'Track'
|
||||
TYPE = 'track'
|
||||
|
||||
_include = ('?checkFiles=1&includeExtras=1&includeRelated=1'
|
||||
'&includeOnDeck=1&includeChapters=1&includePopularLeaves=1'
|
||||
'&includeMarkers=1&includeConcerts=1&includePreferences=1'
|
||||
'&includeBandwidths=1&includeLoudnessRamps=1')
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Audio._loadData(self, data)
|
||||
Playable._loadData(self, data)
|
||||
self._details_key = self.key + self._include
|
||||
self.art = data.attrib.get('art')
|
||||
self.chapterSource = data.attrib.get('chapterSource')
|
||||
self.duration = utils.cast(int, data.attrib.get('duration'))
|
||||
self.grandparentArt = data.attrib.get('grandparentArt')
|
||||
|
@@ -44,9 +44,9 @@ class PlexObject(object):
|
||||
self._server = server
|
||||
self._data = data
|
||||
self._initpath = initpath or self.key
|
||||
self._details_key = ''
|
||||
if data is not None:
|
||||
self._loadData(data)
|
||||
self._details_key = self._buildDetailsKey()
|
||||
|
||||
def __repr__(self):
|
||||
uid = self._clean(self.firstAttr('_baseurl', 'key', 'id', 'playQueueID', 'uri'))
|
||||
@@ -81,7 +81,7 @@ class PlexObject(object):
|
||||
raise UnknownType("Unknown library type <%s type='%s'../>" % (elem.tag, etype))
|
||||
|
||||
def _buildItemOrNone(self, elem, cls=None, initpath=None):
|
||||
""" Calls :func:`~plexapi.base.PlexObject._buildItem()` but returns
|
||||
""" Calls :func:`~plexapi.base.PlexObject._buildItem` but returns
|
||||
None if elem is an unknown type.
|
||||
"""
|
||||
try:
|
||||
@@ -89,6 +89,22 @@ class PlexObject(object):
|
||||
except UnknownType:
|
||||
return None
|
||||
|
||||
def _buildDetailsKey(self, **kwargs):
|
||||
""" Builds the details key with the XML include parameters.
|
||||
All parameters are included by default with the option to override each parameter
|
||||
or disable each parameter individually by setting it to False or 0.
|
||||
"""
|
||||
details_key = self.key
|
||||
if hasattr(self, '_INCLUDES'):
|
||||
includes = {}
|
||||
for k, v in self._INCLUDES.items():
|
||||
value = kwargs.get(k, v)
|
||||
if value not in [False, 0, '0']:
|
||||
includes[k] = 1 if value is True else value
|
||||
if includes:
|
||||
details_key += '?' + urlencode(sorted(includes.items()))
|
||||
return details_key
|
||||
|
||||
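To make the key construction above concrete, a standalone sketch of the same filtering logic using the standard-library urlencode and a made-up rating key:

    from urllib.parse import urlencode

    includes = {'checkFiles': 1, 'includeExtras': 1, 'includeMarkers': 1}
    overrides = {'checkFiles': False}          # kwargs passed by the caller

    kept = {k: (1 if v is True else v)
            for k, v in includes.items()
            if overrides.get(k, v) not in (False, 0, '0')}
    print('/library/metadata/1234?' + urlencode(sorted(kept.items())))
    # -> /library/metadata/1234?includeExtras=1&includeMarkers=1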
def fetchItem(self, ekey, cls=None, **kwargs):
|
||||
""" Load the specified key to find and build the first item with the
|
||||
specified tag and attrs. If no tag or attrs are specified then
|
||||
@@ -203,9 +219,39 @@ class PlexObject(object):
|
||||
results.append(elem.attrib.get(attr))
|
||||
return results
|
||||
|
||||
def reload(self, key=None):
|
||||
""" Reload the data for this object from self.key. """
|
||||
key = key or self._details_key or self.key
|
||||
def reload(self, key=None, **kwargs):
|
||||
""" Reload the data for this object from self.key.
|
||||
|
||||
Parameters:
|
||||
key (string, optional): Override the key to reload.
|
||||
**kwargs (dict): A dictionary of XML include parameters to exclude or override.
|
||||
All parameters are included by default with the option to override each parameter
|
||||
or disable each parameter individually by setting it to False or 0.
|
||||
See :class:`~plexapi.base.PlexPartialObject` for all the available include parameters.
|
||||
|
||||
Example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from plexapi.server import PlexServer
|
||||
plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
|
||||
movie = plex.library.section('Movies').get('Cars')
|
||||
|
||||
# Partial reload of the movie without the `checkFiles` parameter.
|
||||
# Excluding `checkFiles` will prevent the Plex server from reading the
|
||||
# file to check if the file still exists and is accessible.
|
||||
# The movie object will remain as a partial object.
|
||||
movie.reload(checkFiles=False)
|
||||
movie.isPartialObject() # Returns True
|
||||
|
||||
# Full reload of the movie with all include parameters.
|
||||
# The movie object will be a full object.
|
||||
movie.reload()
|
||||
movie.isFullObject() # Returns True
|
||||
|
||||
"""
|
||||
details_key = self._buildDetailsKey(**kwargs) if kwargs else self._details_key
|
||||
key = key or details_key or self.key
|
||||
if not key:
|
||||
raise Unsupported('Cannot reload an object not built from a URL.')
|
||||
self._initpath = key
|
||||
@@ -281,6 +327,27 @@ class PlexPartialObject(PlexObject):
|
||||
and if the specified value you request is None it will fetch the full object
|
||||
automatically and update itself.
|
||||
"""
|
||||
_INCLUDES = {
|
||||
'checkFiles': 1,
|
||||
'includeAllConcerts': 1,
|
||||
'includeBandwidths': 1,
|
||||
'includeChapters': 1,
|
||||
'includeChildren': 1,
|
||||
'includeConcerts': 1,
|
||||
'includeExternalMedia': 1,
|
||||
'includeExtras': 1,
|
||||
'includeFields': 'thumbBlurHash,artBlurHash',
|
||||
'includeGeolocation': 1,
|
||||
'includeLoudnessRamps': 1,
|
||||
'includeMarkers': 1,
|
||||
'includeOnDeck': 1,
|
||||
'includePopularLeaves': 1,
|
||||
'includePreferences': 1,
|
||||
'includeRelated': 1,
|
||||
'includeRelatedCount': 1,
|
||||
'includeReviews': 1,
|
||||
'includeStations': 1
|
||||
}
|
||||
|
||||
def __eq__(self, other):
|
||||
return other is not None and self.key == other.key
|
||||
@@ -332,9 +399,9 @@ class PlexPartialObject(PlexObject):
|
||||
""" Retruns True if this is already a full object. A full object means all attributes
|
||||
were populated from the api path representing only this item. For example, the
|
||||
search result for a movie often only contains a portion of the attributes a full
|
||||
object (main url) for that movie contain.
|
||||
object (main url) for that movie would contain.
|
||||
"""
|
||||
return not self.key or self.key == self._initpath
|
||||
return not self.key or (self._details_key or self.key) == self._initpath
|
||||
|
||||
def isPartialObject(self):
|
||||
""" Returns True if this is not a full object. """
|
||||
@@ -608,14 +675,6 @@ class Playable(object):
|
||||
self.accountID = utils.cast(int, data.attrib.get('accountID')) # history
|
||||
self.playlistItemID = utils.cast(int, data.attrib.get('playlistItemID')) # playlist
|
||||
|
||||
def isFullObject(self):
|
||||
""" Retruns True if this is already a full object. A full object means all attributes
|
||||
were populated from the api path representing only this item. For example, the
|
||||
search result for a movie often only contain a portion of the attributes a full
|
||||
object (main url) for that movie contain.
|
||||
"""
|
||||
return self._details_key == self._initpath or not self.key
|
||||
|
||||
def getStreamURL(self, **params):
|
||||
""" Returns a stream url that may be used by external applications such as VLC.
|
||||
|
||||
@@ -625,7 +684,7 @@ class Playable(object):
|
||||
offset, copyts, protocol, mediaIndex, platform.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.Unsupported`: When the item doesn't support fetching a stream URL.
|
||||
:exc:`plexapi.exceptions.Unsupported`: When the item doesn't support fetching a stream URL.
|
||||
"""
|
||||
if self.TYPE not in ('movie', 'episode', 'track'):
|
||||
raise Unsupported('Fetching stream URL for %s is unsupported.' % self.TYPE)
|
||||
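A hedged example of the stream URL call above; the server address and titles are placeholders, and videoResolution is just one of the optional transcode parameters:

    from plexapi.server import PlexServer

    plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
    movie = plex.library.section('Movies').get('Cars')

    # Only movie, episode and track items support this; other types raise
    # plexapi.exceptions.Unsupported.
    url = movie.getStreamURL(videoResolution='800x600')
    print(url)  # hand the URL to VLC or any player that accepts HTTP streams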
@@ -690,7 +749,7 @@ class Playable(object):
|
||||
keep_original_name (bool): Set True to keep the original filename as stored in
|
||||
the Plex server. False will create a new filename with the format
|
||||
"<Artist> - <Album> <Track>".
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL()` will
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL` will
|
||||
be returned and the additional arguments passed in will be sent to that
|
||||
function. If kwargs is not specified, the media items will be downloaded
|
||||
and saved to disk.
|
||||
|
@@ -53,7 +53,7 @@ class PlexClient(PlexObject):
|
||||
_token (str): Token used to access this client.
|
||||
_session (obj): Requests session object used to access this client.
|
||||
_proxyThroughServer (bool): Set to True after calling
|
||||
:func:`~plexapi.client.PlexClient.proxyThroughServer()` (default False).
|
||||
:func:`~plexapi.client.PlexClient.proxyThroughServer` (default False).
|
||||
"""
|
||||
TAG = 'Player'
|
||||
key = '/resources'
|
||||
@@ -138,7 +138,7 @@ class PlexClient(PlexObject):
|
||||
value (bool): Enable or disable proxying (optional, default True).
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.Unsupported`: Cannot use client proxy with unknown server.
|
||||
:exc:`plexapi.exceptions.Unsupported`: Cannot use client proxy with unknown server.
|
||||
"""
|
||||
if server:
|
||||
self._server = server
|
||||
@@ -171,7 +171,7 @@ class PlexClient(PlexObject):
|
||||
return ElementTree.fromstring(data) if data.strip() else None
|
||||
|
||||
def sendCommand(self, command, proxy=None, **params):
|
||||
""" Convenience wrapper around :func:`~plexapi.client.PlexClient.query()` to more easily
|
||||
""" Convenience wrapper around :func:`~plexapi.client.PlexClient.query` to more easily
|
||||
send simple commands to the client. Returns an ElementTree object containing
|
||||
the response.
|
||||
|
||||
@@ -181,7 +181,7 @@ class PlexClient(PlexObject):
|
||||
**params (dict): Additional GET parameters to include with the command.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.Unsupported`: When we detect the client doesn't support this capability.
|
||||
:exc:`plexapi.exceptions.Unsupported`: When we detect the client doesn't support this capability.
|
||||
"""
|
||||
command = command.strip('/')
|
||||
controller = command.split('/')[0]
|
||||
@@ -296,7 +296,7 @@ class PlexClient(PlexObject):
|
||||
**params (dict): Additional GET parameters to include with the command.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.Unsupported`: When no PlexServer specified in this object.
|
||||
:exc:`plexapi.exceptions.Unsupported`: When no PlexServer specified in this object.
|
||||
"""
|
||||
if not self._server:
|
||||
raise Unsupported('A server must be specified before using this command.')
|
||||
@@ -466,7 +466,7 @@ class PlexClient(PlexObject):
|
||||
also: https://github.com/plexinc/plex-media-player/wiki/Remote-control-API#modified-commands
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.Unsupported`: When no PlexServer specified in this object.
|
||||
:exc:`plexapi.exceptions.Unsupported`: When no PlexServer specified in this object.
|
||||
"""
|
||||
if not self._server:
|
||||
raise Unsupported('A server must be specified before using this command.')
|
||||
|
@@ -1,6 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import X_PLEX_CONTAINER_SIZE, log, utils, media
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.base import PlexObject, PlexPartialObject
|
||||
from plexapi.compat import quote, quote_plus, unquote, urlencode
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.media import MediaTag
|
||||
@@ -455,7 +455,7 @@ class LibrarySection(PlexObject):
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def agents(self):
|
||||
""" Returns a list of available `:class:`~plexapi.media.Agent` for this library section.
|
||||
""" Returns a list of available :class:`~plexapi.media.Agent` for this library section.
|
||||
"""
|
||||
return self._server.agents(utils.searchType(self.type))
|
||||
|
||||
@@ -517,7 +517,7 @@ class LibrarySection(PlexObject):
|
||||
def listChoices(self, category, libtype=None, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.library.FilterChoice` objects for the
|
||||
specified category and libtype. kwargs can be any of the same kwargs in
|
||||
:func:`plexapi.library.LibraySection.search()` to help narrow down the choices
|
||||
:func:`~plexapi.library.LibrarySection.search` to help narrow down the choices
|
||||
to only those that matter in your current context.
|
||||
|
||||
Parameters:
|
||||
@@ -526,7 +526,7 @@ class LibrarySection(PlexObject):
|
||||
**kwargs (dict): Additional kwargs to narrow down the choices.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: Cannot include kwarg equal to specified category.
|
||||
:exc:`plexapi.exceptions.BadRequest`: Cannot include kwarg equal to specified category.
|
||||
"""
|
||||
# TODO: Should this be moved to base?
|
||||
if category in kwargs:
|
||||
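A short, hedged illustration of listChoices(); the section name is a placeholder and `plex` is assumed to be a connected PlexServer instance:

    movies = plex.library.section('Movies')

    # Each FilterChoice can be fed back into search() as a filter value.
    for choice in movies.listChoices('genre'):
        print(choice.title)

    # Narrow the choices with the same kwargs accepted by search().
    for choice in movies.listChoices('genre', year=2020):
        print(choice.title)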
@@ -573,7 +573,7 @@ class LibrarySection(PlexObject):
|
||||
* year: List of years to search within ([yyyy, ...]). [all]
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: when applying unknown filter
|
||||
:exc:`plexapi.exceptions.BadRequest`: when applying unknown filter
|
||||
"""
|
||||
# cleanup the core arguments
|
||||
args = {}
|
||||
@@ -659,20 +659,20 @@ class LibrarySection(PlexObject):
|
||||
def sync(self, policy, mediaSettings, client=None, clientId=None, title=None, sort=None, libtype=None,
|
||||
**kwargs):
|
||||
""" Add current library section as sync item for specified device.
|
||||
See description of :func:`~plexapi.library.LibrarySection.search()` for details about filtering / sorting
|
||||
and :func:`plexapi.myplex.MyPlexAccount.sync()` for possible exceptions.
|
||||
See description of :func:`~plexapi.library.LibrarySection.search` for details about filtering / sorting
|
||||
and :func:`~plexapi.myplex.MyPlexAccount.sync` for possible exceptions.
|
||||
|
||||
Parameters:
|
||||
policy (:class:`plexapi.sync.Policy`): policy of syncing the media (how many items to sync and process
|
||||
policy (:class:`~plexapi.sync.Policy`): policy of syncing the media (how many items to sync and process
|
||||
watched media or not), generated automatically when method
|
||||
called on specific LibrarySection object.
|
||||
mediaSettings (:class:`plexapi.sync.MediaSettings`): Transcoding settings used for the media, generated
|
||||
mediaSettings (:class:`~plexapi.sync.MediaSettings`): Transcoding settings used for the media, generated
|
||||
automatically when method called on specific
|
||||
LibrarySection object.
|
||||
client (:class:`plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
title (str): descriptive title for the new :class:`plexapi.sync.SyncItem`, if empty the value would be
|
||||
client (:class:`~plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
title (str): descriptive title for the new :class:`~plexapi.sync.SyncItem`, if empty the value would be
|
||||
generated from metadata of current media.
|
||||
sort (str): formatted as `column:dir`; column can be any of {`addedAt`, `originallyAvailableAt`,
|
||||
`lastViewedAt`, `titleSort`, `rating`, `mediaHeight`, `duration`}. dir can be `asc` or
|
||||
@@ -681,10 +681,10 @@ class LibrarySection(PlexObject):
|
||||
`track`).
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: when the library is not allowed to sync
|
||||
:exc:`plexapi.exceptions.BadRequest`: when the library is not allowed to sync
|
||||
|
||||
Example:
|
||||
|
||||
@@ -765,10 +765,17 @@ class MovieSection(LibrarySection):
|
||||
METADATA_TYPE = 'movie'
|
||||
CONTENT_TYPE = 'video'
|
||||
|
||||
def all(self, **kwargs):
|
||||
""" Returns a list of all items from this library section.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting.
|
||||
"""
|
||||
return self.search(libtype='movie', **kwargs)
|
||||
|
||||
def collection(self, **kwargs):
|
||||
""" Returns a list of collections from this library section. """
|
||||
key = '/library/sections/%s/collections' % self.key
|
||||
return self.fetchItems(key, **kwargs)
|
||||
""" Returns a list of collections from this library section.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting.
|
||||
"""
|
||||
return self.search(libtype='collection', **kwargs)
|
||||
|
||||
def playlist(self, **kwargs):
|
||||
""" Returns a list of playlists from this library section. """
|
||||
@@ -777,17 +784,17 @@ class MovieSection(LibrarySection):
|
||||
|
||||
def sync(self, videoQuality, limit=None, unwatched=False, **kwargs):
|
||||
""" Add current Movie library section as sync item for specified device.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
|
||||
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
|
||||
See description of :func:`~plexapi.library.LibrarySection.search` for details about filtering / sorting and
|
||||
:func:`~plexapi.library.LibrarySection.sync` for details on syncing libraries and possible exceptions.
|
||||
|
||||
Parameters:
|
||||
videoQuality (int): idx of quality of the video, one of VIDEO_QUALITY_* values defined in
|
||||
:mod:`plexapi.sync` module.
|
||||
:mod:`~plexapi.sync` module.
|
||||
limit (int): maximum count of movies to sync, unlimited if `None`.
|
||||
unwatched (bool): if `True` watched videos wouldn't be synced.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -836,11 +843,11 @@ class ShowSection(LibrarySection):
|
||||
CONTENT_TYPE = 'video'
|
||||
|
||||
def searchShows(self, **kwargs):
|
||||
""" Search for a show. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
""" Search for a show. See :func:`~plexapi.library.LibrarySection.search` for usage. """
|
||||
return self.search(libtype='show', **kwargs)
|
||||
|
||||
def searchEpisodes(self, **kwargs):
|
||||
""" Search for an episode. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
""" Search for an episode. See :func:`~plexapi.library.LibrarySection.search` for usage. """
|
||||
return self.search(libtype='episode', **kwargs)
|
||||
|
||||
def recentlyAdded(self, libtype='episode', maxresults=50):
|
||||
@@ -851,10 +858,17 @@ class ShowSection(LibrarySection):
|
||||
"""
|
||||
return self.search(sort='addedAt:desc', libtype=libtype, maxresults=maxresults)
|
||||
|
||||
def all(self, libtype='show', **kwargs):
|
||||
""" Returns a list of all items from this library section.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting.
|
||||
"""
|
||||
return self.search(libtype=libtype, **kwargs)
|
||||
|
||||
def collection(self, **kwargs):
|
||||
""" Returns a list of collections from this library section. """
|
||||
key = '/library/sections/%s/collections' % self.key
|
||||
return self.fetchItems(key, **kwargs)
|
||||
""" Returns a list of collections from this library section.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting.
|
||||
"""
|
||||
return self.search(libtype='collection', **kwargs)
|
||||
|
||||
def playlist(self, **kwargs):
|
||||
""" Returns a list of playlists from this library section. """
|
||||
@@ -863,17 +877,17 @@ class ShowSection(LibrarySection):
|
||||
|
||||
def sync(self, videoQuality, limit=None, unwatched=False, **kwargs):
|
||||
""" Add current Show library section as sync item for specified device.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
|
||||
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
|
||||
See description of :func:`~plexapi.library.LibrarySection.search` for details about filtering / sorting and
|
||||
:func:`~plexapi.library.LibrarySection.sync` for details on syncing libraries and possible exceptions.
|
||||
|
||||
Parameters:
|
||||
videoQuality (int): idx of quality of the video, one of VIDEO_QUALITY_* values defined in
|
||||
:mod:`plexapi.sync` module.
|
||||
:mod:`~plexapi.sync` module.
|
||||
limit (int): maximum count of episodes to sync, unlimited if `None`.
|
||||
unwatched (bool): if `True` watched videos wouldn't be synced.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -927,21 +941,28 @@ class MusicSection(LibrarySection):
|
||||
return self.fetchItems(key)
|
||||
|
||||
def searchArtists(self, **kwargs):
|
||||
""" Search for an artist. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
""" Search for an artist. See :func:`~plexapi.library.LibrarySection.search` for usage. """
|
||||
return self.search(libtype='artist', **kwargs)
|
||||
|
||||
def searchAlbums(self, **kwargs):
|
||||
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search` for usage. """
|
||||
return self.search(libtype='album', **kwargs)
|
||||
|
||||
def searchTracks(self, **kwargs):
|
||||
""" Search for a track. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
""" Search for a track. See :func:`~plexapi.library.LibrarySection.search` for usage. """
|
||||
return self.search(libtype='track', **kwargs)
|
||||
|
||||
def all(self, libtype='artist', **kwargs):
|
||||
""" Returns a list of all items from this library section.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting.
|
||||
"""
|
||||
return self.search(libtype=libtype, **kwargs)
|
||||
|
||||
def collection(self, **kwargs):
|
||||
""" Returns a list of collections from this library section. """
|
||||
key = '/library/sections/%s/collections' % self.key
|
||||
return self.fetchItems(key, **kwargs)
|
||||
""" Returns a list of collections from this library section.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting.
|
||||
"""
|
||||
return self.search(libtype='collection', **kwargs)
|
||||
|
||||
def playlist(self, **kwargs):
|
||||
""" Returns a list of playlists from this library section. """
|
||||
@@ -950,16 +971,16 @@ class MusicSection(LibrarySection):
|
||||
|
||||
def sync(self, bitrate, limit=None, **kwargs):
|
||||
""" Add current Music library section as sync item for specified device.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
|
||||
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
|
||||
See description of :func:`~plexapi.library.LibrarySection.search` for details about filtering / sorting and
|
||||
:func:`~plexapi.library.LibrarySection.sync` for details on syncing libraries and possible exceptions.
|
||||
|
||||
Parameters:
|
||||
bitrate (int): maximum bitrate for synchronized music, better use one of MUSIC_BITRATE_* values from the
|
||||
module :mod:`plexapi.sync`.
|
||||
module :mod:`~plexapi.sync`.
|
||||
limit (int): maximum count of tracks to sync, unlimited if `None`.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -1002,13 +1023,19 @@ class PhotoSection(LibrarySection):
|
||||
METADATA_TYPE = 'photo'
|
||||
|
||||
def searchAlbums(self, title, **kwargs):
|
||||
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search` for usage. """
|
||||
return self.search(libtype='photoalbum', title=title, **kwargs)
|
||||
|
||||
def searchPhotos(self, title, **kwargs):
|
||||
""" Search for a photo. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
""" Search for a photo. See :func:`~plexapi.library.LibrarySection.search` for usage. """
|
||||
return self.search(libtype='photo', title=title, **kwargs)
|
||||
|
||||
def all(self, libtype='photoalbum', **kwargs):
|
||||
""" Returns a list of all items from this library section.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting.
|
||||
"""
|
||||
return self.search(libtype=libtype, **kwargs)
|
||||
|
||||
def playlist(self, **kwargs):
|
||||
""" Returns a list of playlists from this library section. """
|
||||
key = '/playlists?type=15&playlistType=%s§ionID=%s' % (self.CONTENT_TYPE, self.key)
|
||||
@@ -1016,16 +1043,16 @@ class PhotoSection(LibrarySection):
|
||||
|
||||
def sync(self, resolution, limit=None, **kwargs):
|
||||
""" Add current Music library section as sync item for specified device.
|
||||
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
|
||||
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
|
||||
See description of :func:`~plexapi.library.LibrarySection.search` for details about filtering / sorting and
|
||||
:func:`~plexapi.library.LibrarySection.sync` for details on syncing libraries and possible exceptions.
|
||||
|
||||
Parameters:
|
||||
resolution (str): maximum allowed resolution for synchronized photos, see PHOTO_QUALITY_* values in the
|
||||
module :mod:`plexapi.sync`.
|
||||
module :mod:`~plexapi.sync`.
|
||||
limit (int): maximum count of photos to sync, unlimited if `None`.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -1052,7 +1079,7 @@ class PhotoSection(LibrarySection):
|
||||
class FilterChoice(PlexObject):
|
||||
""" Represents a single filter choice. These objects are gathered when using filters
|
||||
while searching for library items and are the objects returned in the result set of
|
||||
:func:`~plexapi.library.LibrarySection.listChoices()`.
|
||||
:func:`~plexapi.library.LibrarySection.listChoices`.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
@@ -1106,42 +1133,74 @@ class Hub(PlexObject):
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Collections(PlexObject):
|
||||
class Collections(PlexPartialObject):
|
||||
""" Represents a single Collection.
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'collection'
|
||||
ratingKey (int): Unique key identifying this item.
|
||||
addedAt (datetime): Datetime this item was added to the library.
|
||||
art (str): URL to artwork image.
|
||||
artBlurHash (str): BlurHash string for artwork image.
|
||||
childCount (int): Count of child object(s)
|
||||
collectionMode (str): How the items in the collection are displayed.
|
||||
collectionSort (str): How to sort the items in the collection.
|
||||
contentRating (str): Content rating (PG-13; NR; TV-G).
|
||||
fields (list): List of :class:`~plexapi.media.Field`.
|
||||
guid (str): Plex GUID (collection://XXXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXX).
|
||||
index (int): Unknown
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
labels (List<:class:`~plexapi.media.Label`>): List of field objects.
|
||||
librarySectionID (int): :class:`~plexapi.library.LibrarySection` ID.
|
||||
librarySectionKey (str): API URL (/library/sections/<sectionkey>).
|
||||
librarySectionTitle (str): Section Title
|
||||
maxYear (int): Year
|
||||
minYear (int): Year
|
||||
subtype (str): Media type
|
||||
summary (str): Summary of the collection
|
||||
thumb (str): URL to thumbnail image.
|
||||
thumbBlurHash (str): BlurHash string for thumbnail image.
|
||||
title (str): Collection Title
|
||||
titleSort (str): Title to use when sorting (defaults to title).
|
||||
type (str): Hardcoded 'collection'
|
||||
updatedAt (datetime): Datetime this item was updated.
|
||||
"""
|
||||
|
||||
TAG = 'Directory'
|
||||
TYPE = 'collection'
|
||||
_include = "?includeExternalMedia=1&includePreferences=1"
|
||||
|
||||
def _loadData(self, data):
|
||||
self.ratingKey = utils.cast(int, data.attrib.get('ratingKey'))
|
||||
self._details_key = "/library/metadata/%s%s" % (self.ratingKey, self._include)
|
||||
self.key = data.attrib.get('key').replace('/children', '') # FIX_BUG_50
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.art = data.attrib.get('art')
|
||||
self.artBlurHash = data.attrib.get('artBlurHash')
|
||||
self.childCount = utils.cast(int, data.attrib.get('childCount'))
|
||||
self.collectionMode = utils.cast(int, data.attrib.get('collectionMode'))
|
||||
self.collectionSort = utils.cast(int, data.attrib.get('collectionSort'))
|
||||
self.contentRating = data.attrib.get('contentRating')
|
||||
self.fields = self.findItems(data, media.Field)
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.key = data.attrib.get('key')
|
||||
self.index = utils.cast(int, data.attrib.get('index'))
|
||||
self.labels = self.findItems(data, media.Label)
|
||||
self.librarySectionID = data.attrib.get('librarySectionID')
|
||||
self.librarySectionKey = data.attrib.get('librarySectionKey')
|
||||
self.librarySectionTitle = data.attrib.get('librarySectionTitle')
|
||||
self.type = data.attrib.get('type')
|
||||
self.title = data.attrib.get('title')
|
||||
self.titleSort = data.attrib.get('titleSort')
|
||||
self.maxYear = utils.cast(int, data.attrib.get('maxYear'))
|
||||
self.minYear = utils.cast(int, data.attrib.get('minYear'))
|
||||
self.subtype = data.attrib.get('subtype')
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.index = utils.cast(int, data.attrib.get('index'))
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.thumbBlurHash = data.attrib.get('thumbBlurHash')
|
||||
self.title = data.attrib.get('title')
|
||||
self.titleSort = data.attrib.get('titleSort')
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.childCount = utils.cast(int, data.attrib.get('childCount'))
|
||||
self.minYear = utils.cast(int, data.attrib.get('minYear'))
|
||||
self.maxYear = utils.cast(int, data.attrib.get('maxYear'))
|
||||
self.collectionMode = utils.cast(int, data.attrib.get('collectionMode'))
|
||||
self.collectionSort = utils.cast(int, data.attrib.get('collectionSort'))
|
||||
self.labels = self.findItems(data, media.Label)
|
||||
self.fields = self.findItems(data, media.Field)
|
||||
|
||||
@property
|
||||
def children(self):
|
||||
return self.fetchItems(self.key)
|
||||
""" Returns a list of all items in the collection. """
|
||||
key = '/library/metadata/%s/children' % self.ratingKey
|
||||
return self.fetchItems(key)
|
||||
|
||||
@property
|
||||
def thumbUrl(self):
|
||||
@@ -1162,18 +1221,16 @@ class Collections(PlexObject):
|
||||
|
||||
def modeUpdate(self, mode=None):
|
||||
""" Update Collection Mode
|
||||
|
||||
Parameters:
|
||||
mode: default (Library default)
|
||||
hide (Hide Collection)
|
||||
hideItems (Hide Items in this Collection)
|
||||
showItems (Show this Collection and its Items)
|
||||
Example:
|
||||
|
||||
collection = 'plexapi.library.Collections'
|
||||
collection.modeUpdate(mode="hide")
|
||||
"""
|
||||
mode_dict = {'default': '-2',
|
||||
mode_dict = {'default': '-1',
|
||||
'hide': '0',
|
||||
'hideItems': '1',
|
||||
'showItems': '2'}
|
||||
@@ -1185,13 +1242,10 @@ class Collections(PlexObject):
|
||||
|
||||
def sortUpdate(self, sort=None):
|
||||
""" Update Collection Sorting
|
||||
|
||||
Parameters:
|
||||
sort: release (Order Collection by release dates)
|
||||
alpha (Order Collection Alphabetically)
|
||||
|
||||
Example:
|
||||
|
||||
collection = 'plexapi.library.Collections'
|
||||
collection.sortUpdate(sort="alpha")
|
||||
"""
|
||||
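Putting the collection helpers together, a hedged end-to-end sketch; the server address and titles are placeholders:

    from plexapi.server import PlexServer

    plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
    movies = plex.library.section('Movies')

    # collection() now routes through search(libtype='collection'), so the
    # usual search filters apply.
    collection = movies.collection(title='Marvel')[0]

    collection.modeUpdate(mode='hide')    # hide the collection in library views
    collection.sortUpdate(sort='alpha')   # sort its items alphabetically
    for child in collection.children:     # children fetches the collection items
        print(child.title)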
@@ -1243,3 +1297,54 @@ class Collections(PlexObject):
|
||||
|
||||
# def edit(self, **kwargs):
|
||||
# TODO
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Path(PlexObject):
|
||||
""" Represents a single directory Path.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Path'
|
||||
|
||||
home (bool): True if the path is the home directory
|
||||
key (str): API URL (/services/browse/<base64path>)
|
||||
network (bool): True if path is a network location
|
||||
path (str): Full path to folder
|
||||
title (str): Folder name
|
||||
"""
|
||||
TAG = 'Path'
|
||||
|
||||
def _loadData(self, data):
|
||||
self.home = utils.cast(bool, data.attrib.get('home'))
|
||||
self.key = data.attrib.get('key')
|
||||
self.network = utils.cast(bool, data.attrib.get('network'))
|
||||
self.path = data.attrib.get('path')
|
||||
self.title = data.attrib.get('title')
|
||||
|
||||
def browse(self, includeFiles=True):
|
||||
""" Alias for :func:`~plexapi.server.PlexServer.browse`. """
|
||||
return self._server.browse(self, includeFiles)
|
||||
|
||||
def walk(self):
|
||||
""" Alias for :func:`~plexapi.server.PlexServer.walk`. """
|
||||
for path, paths, files in self._server.walk(self):
|
||||
yield path, paths, files
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class File(PlexObject):
|
||||
""" Represents a single File.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'File'
|
||||
|
||||
key (str): API URL (/services/browse/<base64path>)
|
||||
path (str): Full path to file
|
||||
title (str): File name
|
||||
"""
|
||||
TAG = 'File'
|
||||
|
||||
def _loadData(self, data):
|
||||
self.key = data.attrib.get('key')
|
||||
self.path = data.attrib.get('path')
|
||||
self.title = data.attrib.get('title')
|
||||
|
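A hedged sketch of how these two new classes are reached in practice, assuming the PlexServer.browse()/walk() helpers that the aliases above point to accept these arguments; the server address is a placeholder:

    from plexapi.server import PlexServer

    plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')

    # browse() on the server returns Path and File objects for its filesystem.
    for entry in plex.browse():
        print(entry.title, getattr(entry, 'path', ''))

    # Path.walk() mirrors os.walk(), yielding (path, paths, files) tuples.
    root = plex.browse(includeFiles=False)[0]
    for path, paths, files in root.walk():
        print(path.path, len(paths), len(files))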
@@ -210,7 +210,6 @@ class MediaPartStream(PlexObject):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.codec = data.attrib.get('codec')
|
||||
self.codecID = data.attrib.get('codecID')
|
||||
self.default = cast(bool, data.attrib.get('selected', '0'))
|
||||
self.displayTitle = data.attrib.get('displayTitle')
|
||||
self.extendedDisplayTitle = data.attrib.get('extendedDisplayTitle')
|
||||
@@ -268,6 +267,7 @@ class VideoStream(MediaPartStream):
|
||||
self.cabac = cast(int, data.attrib.get('cabac'))
|
||||
self.chromaLocation = data.attrib.get('chromaLocation')
|
||||
self.chromaSubsampling = data.attrib.get('chromaSubsampling')
|
||||
self.codecID = data.attrib.get('codecID')
|
||||
self.codedHeight = data.attrib.get('codedHeight')
|
||||
self.codedWidth = data.attrib.get('codedWidth')
|
||||
self.colorPrimaries = data.attrib.get('colorPrimaries')
|
||||
@@ -326,11 +326,11 @@ class AudioStream(MediaPartStream):
|
||||
self.bitrate = cast(int, data.attrib.get('bitrate'))
|
||||
self.bitrateMode = data.attrib.get('bitrateMode')
|
||||
self.channels = cast(int, data.attrib.get('channels'))
|
||||
self.dialogNorm = cast(int, data.attrib.get('dialogNorm'))
|
||||
self.duration = cast(int, data.attrib.get('duration'))
|
||||
self.profile = data.attrib.get('profile')
|
||||
self.requiredBandwidths = data.attrib.get('requiredBandwidths')
|
||||
self.samplingRate = cast(int, data.attrib.get('samplingRate'))
|
||||
self.streamIdentifier = cast(int, data.attrib.get('streamIdentifier'))
|
||||
|
||||
# For Track only
|
||||
self.albumGain = cast(float, data.attrib.get('albumGain'))
|
||||
@@ -368,6 +368,7 @@ class SubtitleStream(MediaPartStream):
|
||||
self.headerCompression = data.attrib.get('headerCompression')
|
||||
self.key = data.attrib.get('key')
|
||||
self.requiredBandwidths = data.attrib.get('requiredBandwidths')
|
||||
self.transient = data.attrib.get('transient')
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
@@ -561,7 +562,7 @@ class MediaTag(PlexObject):
|
||||
tag (str): Name of the tag. This will be Animation, SciFi etc for Genres. The name of
|
||||
person for Directors and Roles (ex: Animation, Stephen Graham, etc).
|
||||
<Hub_Search_Attributes>: Attributes only applicable in search results from
|
||||
PlexServer :func:`~plexapi.server.PlexServer.search()`. They provide details of which
|
||||
PlexServer :func:`~plexapi.server.PlexServer.search`. They provide details of which
|
||||
library section the tag was found as well as the url to dig deeper into the results.
|
||||
|
||||
* key (str): API URL to dig deeper into this tag (ex: /library/sections/1/all?actor=9081).
|
||||
@@ -588,7 +589,7 @@ class MediaTag(PlexObject):
|
||||
|
||||
def items(self, *args, **kwargs):
|
||||
""" Return the list of items within this tag. This function is only applicable
|
||||
in search results from PlexServer :func:`~plexapi.server.PlexServer.search()`.
|
||||
in search results from PlexServer :func:`~plexapi.server.PlexServer.search`.
|
||||
"""
|
||||
if not self.key:
|
||||
raise BadRequest('Key is not defined for this tag: %s' % self.tag)
|
||||
@@ -821,6 +822,27 @@ class Chapter(PlexObject):
|
||||
self.end = cast(int, data.attrib.get('endTimeOffset'))
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Marker(PlexObject):
|
||||
""" Represents a single Marker media tag.
|
||||
Attributes:
|
||||
TAG (str): 'Marker'
|
||||
"""
|
||||
TAG = 'Marker'
|
||||
|
||||
def __repr__(self):
|
||||
name = self._clean(self.firstAttr('type'))
|
||||
start = utils.millisecondToHumanstr(self._clean(self.firstAttr('start')))
|
||||
end = utils.millisecondToHumanstr(self._clean(self.firstAttr('end')))
|
||||
return '<%s:%s %s - %s>' % (self.__class__.__name__, name, start, end)
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.type = data.attrib.get('type')
|
||||
self.start = cast(int, data.attrib.get('startTimeOffset'))
|
||||
self.end = cast(int, data.attrib.get('endTimeOffset'))
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Field(PlexObject):
|
||||
""" Represents a single Field.
|
||||
|
@@ -544,7 +544,7 @@ class MyPlexAccount(PlexObject):
|
||||
return self.query(url, method=self._session.put, data=params)
|
||||
|
||||
def syncItems(self, client=None, clientId=None):
|
||||
""" Returns an instance of :class:`plexapi.sync.SyncList` for specified client.
|
||||
""" Returns an instance of :class:`~plexapi.sync.SyncList` for specified client.
|
||||
|
||||
Parameters:
|
||||
client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
|
||||
@@ -564,22 +564,22 @@ class MyPlexAccount(PlexObject):
|
||||
|
||||
def sync(self, sync_item, client=None, clientId=None):
|
||||
""" Adds specified sync item for the client. It's always easier to use methods defined directly in the media
|
||||
objects, e.g. :func:`plexapi.video.Video.sync`, :func:`plexapi.audio.Audio.sync`.
|
||||
objects, e.g. :func:`~plexapi.video.Video.sync`, :func:`~plexapi.audio.Audio.sync`.
|
||||
|
||||
Parameters:
|
||||
client (:class:`~plexapi.myplex.MyPlexDevice`): a client for which you need to add SyncItem to.
|
||||
clientId (str): an identifier of a client for which you need to add SyncItem to.
|
||||
sync_item (:class:`plexapi.sync.SyncItem`): prepared SyncItem object with all fields set.
|
||||
sync_item (:class:`~plexapi.sync.SyncItem`): prepared SyncItem object with all fields set.
|
||||
|
||||
If both `client` and `clientId` provided the client would be preferred.
|
||||
If neither `client` nor `clientId` provided the clientId would be set to current clients`s identifier.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: when client with provided clientId wasn`t found.
|
||||
:class:`plexapi.exceptions.BadRequest`: provided client doesn`t provides `sync-target`.
|
||||
:exc:`plexapi.exceptions.BadRequest`: when a client with the provided clientId wasn't found.
|
||||
:exc:`plexapi.exceptions.BadRequest`: when the provided client doesn't provide `sync-target`.
|
||||
"""
|
||||
if not client and not clientId:
|
||||
clientId = X_PLEX_IDENTIFIER
|
||||
@@ -686,7 +686,7 @@ class MyPlexAccount(PlexObject):
|
||||
|
||||
class MyPlexUser(PlexObject):
|
||||
""" This object represents non-signed in users such as friends and linked
|
||||
accounts. NOTE: This should not be confused with the :class:`~myplex.MyPlexAccount`
|
||||
accounts. NOTE: This should not be confused with the :class:`~plexapi.myplex.MyPlexAccount`
|
||||
which is your specific account. The raw xml for the data presented here
|
||||
can be found at: https://plex.tv/api/users/
|
||||
|
||||
@@ -885,7 +885,7 @@ class MyPlexResource(PlexObject):
|
||||
key (str): 'https://plex.tv/api/resources?includeHttps=1&includeRelay=1'
|
||||
accessToken (str): This resources accesstoken.
|
||||
clientIdentifier (str): Unique ID for this resource.
|
||||
connections (list): List of :class:`~myplex.ResourceConnection` objects
|
||||
connections (list): List of :class:`~plexapi.myplex.ResourceConnection` objects
|
||||
for this resource.
|
||||
createdAt (datetime): Timestamp this resource first connected to your server.
|
||||
device (str): Best guess on the type of device this is (PS, iPhone, Linux, etc).
|
||||
@@ -930,7 +930,7 @@ class MyPlexResource(PlexObject):
|
||||
self.sourceTitle = data.attrib.get('sourceTitle') # owners plex username.
|
||||
|
||||
def connect(self, ssl=None, timeout=None):
|
||||
""" Returns a new :class:`~server.PlexServer` or :class:`~client.PlexClient` object.
|
||||
""" Returns a new :class:`~plexapi.server.PlexServer` or :class:`~plexapi.client.PlexClient` object.
|
||||
Oftentimes there is more than one address specified for a server or client.
|
||||
This function will prioritize local connections before remote and HTTPS before HTTP.
|
||||
After trying to connect to all available addresses for this resource and
|
||||
@@ -942,7 +942,7 @@ class MyPlexResource(PlexObject):
|
||||
HTTP or HTTPS connection.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.NotFound`: When unable to connect to any addresses for this resource.
|
||||
:exc:`plexapi.exceptions.NotFound`: When unable to connect to any addresses for this resource.
|
||||
"""
|
||||
# Sort connections from (https, local) to (http, remote)
|
||||
# Only check non-local connections unless we own the resource
|
||||
@@ -965,7 +965,7 @@ class MyPlexResource(PlexObject):
|
||||
|
||||
class ResourceConnection(PlexObject):
|
||||
""" Represents a Resource Connection object found within the
|
||||
:class:`~myplex.MyPlexResource` objects.
|
||||
:class:`~plexapi.myplex.MyPlexResource` objects.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Connection'
|
||||
@@ -1049,7 +1049,7 @@ class MyPlexDevice(PlexObject):
|
||||
at least one connection was successful, the PlexClient object is built and returned.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.NotFound`: When unable to connect to any addresses for this device.
|
||||
:exc:`plexapi.exceptions.NotFound`: When unable to connect to any addresses for this device.
|
||||
"""
|
||||
cls = PlexServer if 'server' in self.provides else PlexClient
|
||||
listargs = [[cls, url, self.token, timeout] for url in self.connections]
|
||||
@@ -1063,10 +1063,10 @@ class MyPlexDevice(PlexObject):
|
||||
self._server.query(key, self._server._session.delete)
|
||||
|
||||
def syncItems(self):
|
||||
""" Returns an instance of :class:`plexapi.sync.SyncList` for current device.
|
||||
""" Returns an instance of :class:`~plexapi.sync.SyncList` for current device.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: when the device doesn`t provides `sync-target`.
|
||||
:exc:`plexapi.exceptions.BadRequest`: when the device doesn't provide `sync-target`.
|
||||
"""
|
||||
if 'sync-target' not in self.provides:
|
||||
raise BadRequest('Requested syncList for device which does not provide sync-target')
|
||||
@@ -1082,12 +1082,12 @@ class MyPlexPinLogin(object):
|
||||
This helper class supports a polling, threaded and callback approach.
|
||||
|
||||
- The polling approach expects the developer to periodically check if the PIN login was
|
||||
successful using :func:`plexapi.myplex.MyPlexPinLogin.checkLogin`.
|
||||
successful using :func:`~plexapi.myplex.MyPlexPinLogin.checkLogin`.
|
||||
- The threaded approach expects the developer to call
|
||||
:func:`plexapi.myplex.MyPlexPinLogin.run` and then at a later time call
|
||||
:func:`plexapi.myplex.MyPlexPinLogin.waitForLogin` to wait for and check the result.
|
||||
:func:`~plexapi.myplex.MyPlexPinLogin.run` and then at a later time call
|
||||
:func:`~plexapi.myplex.MyPlexPinLogin.waitForLogin` to wait for and check the result.
|
||||
- The callback approach is an extension of the threaded approach and expects the developer
|
||||
to pass the `callback` parameter to the call to :func:`plexapi.myplex.MyPlexPinLogin.run`.
|
||||
to pass the `callback` parameter to the call to :func:`~plexapi.myplex.MyPlexPinLogin.run`.
|
||||
The callback will be called when the thread waiting for the PIN login to succeed either
|
||||
finishes or expires. The parameter passed to the callback is the received authentication
|
||||
token or `None` if the login expired.
|
||||
|
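A hedged sketch of the polling approach described above; the `pin` and `token` attribute names are assumptions based on the class's usual interface, and the sleep interval is arbitrary:

    import time
    from plexapi.myplex import MyPlexPinLogin

    pinlogin = MyPlexPinLogin()
    print('Enter this PIN at https://plex.tv/link :', pinlogin.pin)

    # Poll until the PIN has been claimed; expiry handling omitted for brevity.
    while not pinlogin.checkLogin():
        time.sleep(2)

    print('Authenticated, token:', pinlogin.token)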
@@ -38,7 +38,7 @@ class Photoalbum(PlexPartialObject):
|
||||
self.composite = data.attrib.get('composite')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.index = utils.cast(int, data.attrib.get('index'))
|
||||
self.key = data.attrib.get('key', '').replace('/children', '')
|
||||
self.key = data.attrib.get('key', '').replace('/children', '') # FIX_BUG_50
|
||||
self.librarySectionID = data.attrib.get('librarySectionID')
|
||||
self.librarySectionKey = data.attrib.get('librarySectionKey')
|
||||
self.librarySectionTitle = data.attrib.get('librarySectionTitle')
|
||||
@@ -168,20 +168,20 @@ class Photo(PlexPartialObject):
|
||||
|
||||
def sync(self, resolution, client=None, clientId=None, limit=None, title=None):
|
||||
""" Add current photo as sync item for specified device.
|
||||
See :func:`plexapi.myplex.MyPlexAccount.sync()` for possible exceptions.
|
||||
See :func:`~plexapi.myplex.MyPlexAccount.sync` for possible exceptions.
|
||||
|
||||
Parameters:
|
||||
resolution (str): maximum allowed resolution for synchronized photos, see PHOTO_QUALITY_* values in the
|
||||
module :mod:`plexapi.sync`.
|
||||
client (:class:`plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
module :mod:`~plexapi.sync`.
|
||||
client (:class:`~plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
limit (int): maximum count of items to sync, unlimited if `None`.
|
||||
title (str): descriptive title for the new :class:`plexapi.sync.SyncItem`, if empty the value would be
|
||||
title (str): descriptive title for the new :class:`~plexapi.sync.SyncItem`, if empty the value would be
|
||||
generated from metadata of current photo.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
"""
|
||||
|
||||
from plexapi.sync import SyncItem, Policy, MediaSettings
|
||||
|
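A hedged sketch of the call documented above; `photo` stands for a Photo object already fetched from a library, the clientId is a placeholder device identifier, and '1080' is only assumed to be a valid PHOTO_QUALITIES resolution key:

# 'photo' is assumed to be a plexapi.photo.Photo fetched from a photo library section
item = photo.sync(resolution='1080',             # assumption: valid key of plexapi.sync.PHOTO_QUALITIES
                  clientId='device-client-id',   # placeholder clientIdentifier of a sync-capable device
                  title='Holiday photos')
print(item.title)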
@@ -163,7 +163,7 @@ class Playlist(PlexPartialObject, Playable):
|
||||
**kwargs (dict): is passed to the filters. For an example see the search method.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.playlist.Playlist`: an instance of created Playlist.
|
||||
:class:`~plexapi.playlist.Playlist`: an instance of created Playlist.
|
||||
"""
|
||||
if smart:
|
||||
return cls._createSmart(server, title, section, limit, **kwargs)
|
||||
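A short sketch of both creation modes; `plex` stands for a connected PlexServer, the section and titles are placeholders, and the `year` kwarg is just one example of a search filter forwarded to the smart playlist:

from plexapi.playlist import Playlist

movies = plex.library.section('Movies')                      # placeholder section name
items = movies.search(title='Batman')                        # placeholder filter
regular = Playlist.create(plex, 'Batman Night', items=items)
smart = Playlist.create(plex, 'Movies from 1999', section=movies,
                        smart=True, limit=25, year=1999)     # kwargs passed to the filters
print(regular.title, smart.title)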
@@ -217,29 +217,29 @@ class Playlist(PlexPartialObject, Playable):
|
||||
def sync(self, videoQuality=None, photoResolution=None, audioBitrate=None, client=None, clientId=None, limit=None,
|
||||
unwatched=False, title=None):
|
||||
""" Add current playlist as sync item for specified device.
|
||||
See :func:`plexapi.myplex.MyPlexAccount.sync()` for possible exceptions.
|
||||
See :func:`~plexapi.myplex.MyPlexAccount.sync` for possible exceptions.
|
||||
|
||||
Parameters:
|
||||
videoQuality (int): idx of quality of the video, one of VIDEO_QUALITY_* values defined in
|
||||
:mod:`plexapi.sync` module. Used only when playlist contains video.
|
||||
:mod:`~plexapi.sync` module. Used only when playlist contains video.
|
||||
photoResolution (str): maximum allowed resolution for synchronized photos, see PHOTO_QUALITY_* values in
|
||||
the module :mod:`plexapi.sync`. Used only when playlist contains photos.
|
||||
the module :mod:`~plexapi.sync`. Used only when playlist contains photos.
|
||||
audioBitrate (int): maximum bitrate for synchronized music, better use one of MUSIC_BITRATE_* values
|
||||
from the module :mod:`plexapi.sync`. Used only when playlist contains audio.
|
||||
client (:class:`plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
from the module :mod:`~plexapi.sync`. Used only when playlist contains audio.
|
||||
client (:class:`~plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
limit (int): maximum count of items to sync, unlimited if `None`.
|
||||
unwatched (bool): if `True` watched videos wouldn't be synced.
|
||||
title (str): descriptive title for the new :class:`plexapi.sync.SyncItem`, if empty the value would be
|
||||
title (str): descriptive title for the new :class:`~plexapi.sync.SyncItem`, if empty the value would be
|
||||
generated from metadata of current playlist.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: when playlist is not allowed to sync.
|
||||
:class:`plexapi.exceptions.Unsupported`: when playlist content is unsupported.
|
||||
:exc:`plexapi.exceptions.BadRequest`: when playlist is not allowed to sync.
|
||||
:exc:`plexapi.exceptions.Unsupported`: when playlist content is unsupported.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
"""
|
||||
|
||||
if not self.allowSync:
|
||||
|
@@ -8,7 +8,7 @@ from plexapi.base import PlexObject
|
||||
from plexapi.client import PlexClient
|
||||
from plexapi.compat import ElementTree, urlencode
|
||||
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
|
||||
from plexapi.library import Library, Hub
|
||||
from plexapi.library import Hub, Library, Path, File
|
||||
from plexapi.settings import Settings
|
||||
from plexapi.playlist import Playlist
|
||||
from plexapi.playqueue import PlayQueue
|
||||
@@ -185,7 +185,7 @@ class PlexServer(PlexObject):
|
||||
return Account(self, data)
|
||||
|
||||
def agents(self, mediaType=None):
|
||||
""" Returns the `:class:`~plexapi.media.Agent` objects this server has available. """
|
||||
""" Returns the :class:`~plexapi.media.Agent` objects this server has available. """
|
||||
key = '/system/agents'
|
||||
if mediaType:
|
||||
key += '?mediaType=%s' % mediaType
|
||||
@@ -233,6 +233,53 @@ class PlexServer(PlexObject):
|
||||
log.warning('Unable to fetch client ports from myPlex: %s', err)
|
||||
return ports
|
||||
|
||||
def browse(self, path=None, includeFiles=True):
|
||||
""" Browse the system file path using the Plex API.
|
||||
Returns list of :class:`~plexapi.library.Path` and :class:`~plexapi.library.File` objects.
|
||||
|
||||
Parameters:
|
||||
path (:class:`~plexapi.library.Path` or str, optional): Full path to browse.
|
||||
includeFiles (bool): True to include files when browsing (Default).
|
||||
False to only return folders.
|
||||
"""
|
||||
if isinstance(path, Path):
|
||||
key = path.key
|
||||
elif path is not None:
|
||||
base64path = utils.base64str(path)
|
||||
key = '/services/browse/%s' % base64path
|
||||
else:
|
||||
key = '/services/browse'
|
||||
if includeFiles:
|
||||
key += '?includeFiles=1'
|
||||
return self.fetchItems(key)
|
||||
|
||||
def walk(self, path=None):
|
||||
""" Walk the system file tree using the Plex API similar to `os.walk`.
|
||||
Yields a 3-tuple `(path, paths, files)` where
|
||||
`path` is a string of the directory path,
|
||||
`paths` is a list of :class:`~plexapi.library.Path` objects, and
|
||||
`files` is a list of :class:`~plexapi.library.File` objects.
|
||||
|
||||
Parameters:
|
||||
path (:class:`~plexapi.library.Path` or str, optional): Full path to walk.
|
||||
"""
|
||||
paths = []
|
||||
files = []
|
||||
for item in self.browse(path):
|
||||
if isinstance(item, Path):
|
||||
paths.append(item)
|
||||
elif isinstance(item, File):
|
||||
files.append(item)
|
||||
|
||||
if isinstance(path, Path):
|
||||
path = path.path
|
||||
|
||||
yield path or '', paths, files
|
||||
|
||||
for _path in paths:
|
||||
for path, paths, files in self.walk(_path):
|
||||
yield path, paths, files
|
||||
|
||||
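A small sketch of the two helpers added above; `plex` stands for a connected PlexServer and the server-side paths are placeholders:

# List folders (and files) at a path on the server machine
for entry in plex.browse('/media/movies'):          # placeholder server-side path
    print(entry)

# Recursively walk the server file tree, os.walk-style
for path, paths, files in plex.walk('/media'):      # placeholder server-side path
    print(path, len(paths), 'folders,', len(files), 'files')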
def clients(self):
|
||||
""" Returns list of all :class:`~plexapi.client.PlexClient` objects connected to server. """
|
||||
items = []
|
||||
@@ -256,7 +303,7 @@ class PlexServer(PlexObject):
|
||||
name (str): Name of the client to return.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.NotFound`: Unknown client name
|
||||
:exc:`plexapi.exceptions.NotFound`: Unknown client name
|
||||
"""
|
||||
for client in self.clients():
|
||||
if client and client.title == name:
|
||||
@@ -379,7 +426,7 @@ class PlexServer(PlexObject):
|
||||
title (str): Title of the playlist to return.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.NotFound`: Invalid playlist title
|
||||
:exc:`plexapi.exceptions.NotFound`: Invalid playlist title
|
||||
"""
|
||||
return self.fetchItem('/playlists', title=title)
|
||||
|
||||
@@ -480,8 +527,8 @@ class PlexServer(PlexObject):
|
||||
Parameters:
|
||||
callback (func): Callback function to call on received messages.
|
||||
|
||||
raises:
|
||||
:class:`plexapi.exceptions.Unsupported`: Websocket-client not installed.
|
||||
Raises:
|
||||
:exc:`plexapi.exceptions.Unsupported`: Websocket-client not installed.
|
||||
"""
|
||||
notifier = AlertListener(self, callback)
|
||||
notifier.start()
|
||||
|
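A hedged sketch of listening for alerts; `plex` stands for a connected PlexServer, websocket-client must be installed, and the 'type' key shown is an assumption about the payload shape:

def on_alert(data):
    # 'data' is the decoded notification payload; 'type' is assumed to be present
    print(data.get('type'))

notifier = plex.startAlertListener(callback=on_alert)
# ... later, stop the background websocket thread
notifier.stop()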
@@ -21,7 +21,10 @@ class Settings(PlexObject):
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr.startswith('_'):
|
||||
return self.__dict__[attr]
|
||||
try:
|
||||
return self.__dict__[attr]
|
||||
except KeyError:
|
||||
raise AttributeError
|
||||
return self.get(attr).value
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
|
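Roughly, the __getattr__ change above lets settings be read as plain attributes; `plex` stands for a connected PlexServer and 'FriendlyName' is assumed to be a valid setting id:

settings = plex.settings
print(settings.FriendlyName)                 # attribute access resolves to self.get('FriendlyName').value
print(settings.get('FriendlyName').value)    # equivalent explicit form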
@@ -78,7 +78,7 @@ class SyncItem(PlexObject):
|
||||
self.location = data.find('Location').attrib.get('uri', '')
|
||||
|
||||
def server(self):
|
||||
""" Returns :class:`plexapi.myplex.MyPlexResource` with server of current item. """
|
||||
""" Returns :class:`~plexapi.myplex.MyPlexResource` with server of current item. """
|
||||
server = [s for s in self._server.resources() if s.clientIdentifier == self.machineIdentifier]
|
||||
if len(server) == 0:
|
||||
raise NotFound('Unable to find server with uuid %s' % self.machineIdentifier)
|
||||
@@ -201,7 +201,7 @@ class MediaSettings(object):
|
||||
videoQuality (int): idx of quality of the video, one of VIDEO_QUALITY_* values defined in this module.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: when provided unknown video quality.
|
||||
:exc:`plexapi.exceptions.BadRequest`: when provided unknown video quality.
|
||||
"""
|
||||
if videoQuality == VIDEO_QUALITY_ORIGINAL:
|
||||
return MediaSettings('', '', '')
|
||||
@@ -231,7 +231,7 @@ class MediaSettings(object):
|
||||
module.
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest` when provided unknown video quality.
|
||||
:exc:`plexapi.exceptions.BadRequest` when provided unknown video quality.
|
||||
"""
|
||||
if resolution in PHOTO_QUALITIES:
|
||||
return MediaSettings(photoQuality=PHOTO_QUALITIES[resolution], photoResolution=resolution)
|
||||
|
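A sketch of building sync media settings with the helpers above; VIDEO_QUALITY_ORIGINAL appears in the surrounding hunk, while '720' is only assumed to be a valid PHOTO_QUALITIES key:

from plexapi.sync import MediaSettings, VIDEO_QUALITY_ORIGINAL

video_settings = MediaSettings.createVideo(VIDEO_QUALITY_ORIGINAL)   # original quality, no transcoding
photo_settings = MediaSettings.createPhoto('720')                    # assumption: '720' is in PHOTO_QUALITIES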
@@ -1,4 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
@@ -147,7 +148,7 @@ def searchType(libtype):
|
||||
libtype (str): LibType to lookup (movie, show, season, episode, artist, album, track,
|
||||
collection)
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.NotFound`: Unknown libtype
|
||||
:exc:`plexapi.exceptions.NotFound`: Unknown libtype
|
||||
"""
|
||||
libtype = compat.ustr(libtype)
|
||||
if libtype in [compat.ustr(v) for v in SEARCHTYPES.values()]:
|
||||
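A tiny sketch of the lookup documented above; the numeric codes come from utils.SEARCHTYPES, so the value noted in the comment is illustrative:

from plexapi import utils
from plexapi.exceptions import NotFound

print(utils.searchType('movie'))        # numeric code from utils.SEARCHTYPES (movie maps to 1 at time of writing)
try:
    utils.searchType('not-a-type')
except NotFound:
    print('unknown libtype raises NotFound, as documented above')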
@@ -399,3 +400,7 @@ def getAgentIdentifier(section, agent):
|
||||
agents += identifiers
|
||||
raise NotFound('Couldnt find "%s" in agents list (%s)' %
|
||||
(agent, ', '.join(agents)))
|
||||
|
||||
|
||||
def base64str(text):
|
||||
return base64.b64encode(text.encode('utf-8')).decode('utf-8')
|
||||
|
@@ -13,6 +13,8 @@ class Video(PlexPartialObject):
|
||||
|
||||
Attributes:
|
||||
addedAt (datetime): Datetime this item was added to the library.
|
||||
art (str): URL to artwork image.
|
||||
artBlurHash (str): BlurHash string for artwork image.
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
lastViewedAt (datetime): Datetime item was last accessed.
|
||||
librarySectionID (int): :class:`~plexapi.library.LibrarySection` ID.
|
||||
@@ -20,6 +22,7 @@ class Video(PlexPartialObject):
|
||||
ratingKey (int): Unique key identifying this item.
|
||||
summary (str): Summary of the artist, track, or album.
|
||||
thumb (str): URL to thumbnail image.
|
||||
thumbBlurHash (str): BlurHash string for thumbnail image.
|
||||
title (str): Artist, Album or Track title. (Jason Mraz, We Sing, Lucky, etc.)
|
||||
titleSort (str): Title to use when sorting (defaults to title).
|
||||
type (str): 'artist', 'album', or 'track'.
|
||||
@@ -32,6 +35,8 @@ class Video(PlexPartialObject):
|
||||
self._data = data
|
||||
self.listType = 'video'
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.art = data.attrib.get('art')
|
||||
self.artBlurHash = data.attrib.get('artBlurHash')
|
||||
self.key = data.attrib.get('key', '')
|
||||
self.lastViewedAt = utils.toDatetime(data.attrib.get('lastViewedAt'))
|
||||
self.librarySectionID = data.attrib.get('librarySectionID')
|
||||
@@ -40,6 +45,7 @@ class Video(PlexPartialObject):
|
||||
self.ratingKey = utils.cast(int, data.attrib.get('ratingKey'))
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.thumbBlurHash = data.attrib.get('thumbBlurHash')
|
||||
self.title = data.attrib.get('title')
|
||||
self.titleSort = data.attrib.get('titleSort', self.title)
|
||||
self.type = data.attrib.get('type')
|
||||
@@ -201,21 +207,21 @@ class Video(PlexPartialObject):
|
||||
|
||||
def sync(self, videoQuality, client=None, clientId=None, limit=None, unwatched=False, title=None):
|
||||
""" Add current video (movie, tv-show, season or episode) as sync item for specified device.
|
||||
See :func:`plexapi.myplex.MyPlexAccount.sync()` for possible exceptions.
|
||||
See :func:`~plexapi.myplex.MyPlexAccount.sync` for possible exceptions.
|
||||
|
||||
Parameters:
|
||||
videoQuality (int): idx of quality of the video, one of VIDEO_QUALITY_* values defined in
|
||||
:mod:`plexapi.sync` module.
|
||||
client (:class:`plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`plexapi.myplex.MyPlexAccount.sync`.
|
||||
:mod:`~plexapi.sync` module.
|
||||
client (:class:`~plexapi.myplex.MyPlexDevice`): sync destination, see
|
||||
:func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
clientId (str): sync destination, see :func:`~plexapi.myplex.MyPlexAccount.sync`.
|
||||
limit (int): maximum count of items to sync, unlimited if `None`.
|
||||
unwatched (bool): if `True` watched videos wouldn't be synced.
|
||||
title (str): descriptive title for the new :class:`plexapi.sync.SyncItem`, if empty the value would be
|
||||
title (str): descriptive title for the new :class:`~plexapi.sync.SyncItem`, if empty the value would be
|
||||
generated from metadata of current media.
|
||||
|
||||
Returns:
|
||||
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
|
||||
"""
|
||||
|
||||
from plexapi.sync import SyncItem, Policy, MediaSettings
|
||||
@@ -277,17 +283,12 @@ class Movie(Playable, Video):
|
||||
TAG = 'Video'
|
||||
TYPE = 'movie'
|
||||
METADATA_TYPE = 'movie'
|
||||
_include = ('?checkFiles=1&includeExtras=1&includeRelated=1'
|
||||
'&includeOnDeck=1&includeChapters=1&includePopularLeaves=1'
|
||||
'&includeConcerts=1&includePreferences=1'
|
||||
'&includeBandwidths=1')
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Video._loadData(self, data)
|
||||
Playable._loadData(self, data)
|
||||
|
||||
self._details_key = self.key + self._include
|
||||
self.art = data.attrib.get('art')
|
||||
self.audienceRating = utils.cast(float, data.attrib.get('audienceRating'))
|
||||
self.audienceRatingImage = data.attrib.get('audienceRatingImage')
|
||||
@@ -343,7 +344,7 @@ class Movie(Playable, Video):
|
||||
savepath (str): Defaults to current working dir.
|
||||
keep_original_name (bool): True to keep the original file name otherwise
|
||||
a friendlier name is generated.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL()`.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL`.
|
||||
"""
|
||||
filepaths = []
|
||||
locations = [i for i in self.iterParts() if i]
|
||||
@@ -402,7 +403,7 @@ class Show(Video):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Video._loadData(self, data)
|
||||
# fix key if loaded from search
|
||||
self.key = self.key.replace('/children', '')
|
||||
self.key = self.key.replace('/children', '') # FIX_BUG_50
|
||||
self.art = data.attrib.get('art')
|
||||
self.banner = data.attrib.get('banner')
|
||||
self.childCount = utils.cast(int, data.attrib.get('childCount'))
|
||||
@@ -460,14 +461,14 @@ class Show(Video):
|
||||
def episode(self, title=None, season=None, episode=None):
|
||||
""" Find a episode using a title or season and episode.
|
||||
|
||||
Parameters:
|
||||
Parameters:
|
||||
title (str): Title of the episode to return
|
||||
season (int): Season number (default:None; required if title not specified).
|
||||
episode (int): Episode number (default:None; required if title not specified).
|
||||
|
||||
Raises:
|
||||
:class:`plexapi.exceptions.BadRequest`: If season and episode is missing.
|
||||
:class:`plexapi.exceptions.NotFound`: If the episode is missing.
|
||||
Raises:
|
||||
:exc:`plexapi.exceptions.BadRequest`: If season and episode is missing.
|
||||
:exc:`plexapi.exceptions.NotFound`: If the episode is missing.
|
||||
"""
|
||||
if title:
|
||||
key = '/library/metadata/%s/allLeaves' % self.ratingKey
|
||||
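A quick sketch of the two lookup styles the docstring describes; `plex` stands for a connected PlexServer and the section, show, and episode names are placeholders:

show = plex.library.section('TV Shows').get('The Office')   # placeholder section and show title
ep_by_title = show.episode(title='Pilot')                    # lookup by episode title
ep_by_number = show.episode(season=1, episode=1)             # lookup by season and episode number
print(ep_by_number.seasonEpisode, ep_by_number.title)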
@@ -488,7 +489,7 @@ class Show(Video):
|
||||
return self.episodes(viewCount=0)
|
||||
|
||||
def get(self, title=None, season=None, episode=None):
|
||||
""" Alias to :func:`~plexapi.video.Show.episode()`. """
|
||||
""" Alias to :func:`~plexapi.video.Show.episode`. """
|
||||
return self.episode(title, season, episode)
|
||||
|
||||
def download(self, savepath=None, keep_original_name=False, **kwargs):
|
||||
@@ -498,7 +499,7 @@ class Show(Video):
|
||||
savepath (str): Defaults to current working dir.
|
||||
keep_original_name (bool): True to keep the original file name otherwise
|
||||
a friendlier name is generated.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL()`.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL`.
|
||||
"""
|
||||
filepaths = []
|
||||
for episode in self.episodes():
|
||||
@@ -585,7 +586,7 @@ class Season(Video):
|
||||
return self.fetchItem(key, parentIndex=self.index, index=episode)
|
||||
|
||||
def get(self, title=None, episode=None):
|
||||
""" Alias to :func:`~plexapi.video.Season.episode()`. """
|
||||
""" Alias to :func:`~plexapi.video.Season.episode`. """
|
||||
return self.episode(title, episode)
|
||||
|
||||
def show(self):
|
||||
@@ -607,7 +608,7 @@ class Season(Video):
|
||||
savepath (str): Defaults to current working dir.
|
||||
keep_original_name (bool): True to keep the original file name otherwise
|
||||
a friendlier name is generated.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL()`.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL`.
|
||||
"""
|
||||
filepaths = []
|
||||
for episode in self.episodes():
|
||||
@@ -656,16 +657,10 @@ class Episode(Playable, Video):
|
||||
TYPE = 'episode'
|
||||
METADATA_TYPE = 'episode'
|
||||
|
||||
_include = ('?checkFiles=1&includeExtras=1&includeRelated=1'
|
||||
'&includeOnDeck=1&includeChapters=1&includePopularLeaves=1'
|
||||
'&includeMarkers=1&includeConcerts=1&includePreferences=1'
|
||||
'&includeBandwidths=1')
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Video._loadData(self, data)
|
||||
Playable._loadData(self, data)
|
||||
self._details_key = self.key + self._include
|
||||
self._seasonNumber = None # cached season number
|
||||
art = data.attrib.get('art')
|
||||
self.art = art if art and str(self.ratingKey) in art else None
|
||||
@@ -699,6 +694,7 @@ class Episode(Playable, Video):
|
||||
self.labels = self.findItems(data, media.Label)
|
||||
self.collections = self.findItems(data, media.Collection)
|
||||
self.chapters = self.findItems(data, media.Chapter)
|
||||
self.markers = self.findItems(data, media.Marker)
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s>' % ':'.join([p for p in [
|
||||
@@ -730,6 +726,13 @@ class Episode(Playable, Video):
|
||||
""" Returns the s00e00 string containing the season and episode. """
|
||||
return 's%se%s' % (str(self.seasonNumber).zfill(2), str(self.index).zfill(2))
|
||||
|
||||
@property
|
||||
def hasIntroMarker(self):
|
||||
""" Returns True if this episode has an intro marker in the xml. """
|
||||
if not self.isFullObject():
|
||||
self.reload()
|
||||
return any(marker.type == 'intro' for marker in self.markers)
|
||||
|
||||
def season(self):
|
||||
"""" Return this episodes :func:`~plexapi.video.Season`.. """
|
||||
return self.fetchItem(self.parentKey)
|
||||
|
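A short sketch using the new hasIntroMarker property; `show` stands for a plexapi Show object such as the one fetched in the previous example:

for episode in show.episodes():
    if episode.hasIntroMarker:           # reload()s partial objects before checking markers
        print(episode.seasonEpisode, 'has a skippable intro')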
@@ -1,34 +0,0 @@
|
||||
from ._tqdm import tqdm
|
||||
from ._tqdm import trange
|
||||
from ._tqdm_gui import tqdm_gui
|
||||
from ._tqdm_gui import tgrange
|
||||
from ._tqdm_pandas import tqdm_pandas
|
||||
from ._main import main
|
||||
from ._monitor import TMonitor, TqdmSynchronisationWarning
|
||||
from ._version import __version__ # NOQA
|
||||
from ._tqdm import TqdmTypeError, TqdmKeyError, TqdmWarning, \
|
||||
TqdmDeprecationWarning, TqdmExperimentalWarning, \
|
||||
TqdmMonitorWarning
|
||||
|
||||
__all__ = ['tqdm', 'tqdm_gui', 'trange', 'tgrange', 'tqdm_pandas',
|
||||
'tqdm_notebook', 'tnrange', 'main', 'TMonitor',
|
||||
'TqdmTypeError', 'TqdmKeyError',
|
||||
'TqdmWarning', 'TqdmDeprecationWarning',
|
||||
'TqdmExperimentalWarning',
|
||||
'TqdmMonitorWarning', 'TqdmSynchronisationWarning',
|
||||
'__version__']
|
||||
|
||||
|
||||
def tqdm_notebook(*args, **kwargs): # pragma: no cover
|
||||
"""See tqdm._tqdm_notebook.tqdm_notebook for full documentation"""
|
||||
from ._tqdm_notebook import tqdm_notebook as _tqdm_notebook
|
||||
return _tqdm_notebook(*args, **kwargs)
|
||||
|
||||
|
||||
def tnrange(*args, **kwargs): # pragma: no cover
|
||||
"""
|
||||
A shortcut for tqdm_notebook(xrange(*args), **kwargs).
|
||||
On Python3+ range is used instead of xrange.
|
||||
"""
|
||||
from ._tqdm_notebook import tnrange as _tnrange
|
||||
return _tnrange(*args, **kwargs)
|
@@ -1,2 +0,0 @@
|
||||
from ._main import main
|
||||
main()
|
@@ -1,207 +0,0 @@
|
||||
from ._tqdm import tqdm, TqdmTypeError, TqdmKeyError
|
||||
from ._version import __version__ # NOQA
|
||||
import sys
|
||||
import re
|
||||
import logging
|
||||
__all__ = ["main"]
|
||||
|
||||
|
||||
def cast(val, typ):
|
||||
log = logging.getLogger(__name__)
|
||||
log.debug((val, typ))
|
||||
if " or " in typ:
|
||||
for t in typ.split(" or "):
|
||||
try:
|
||||
return cast(val, t)
|
||||
except TqdmTypeError:
|
||||
pass
|
||||
raise TqdmTypeError(val + ' : ' + typ)
|
||||
|
||||
# sys.stderr.write('\ndebug | `val:type`: `' + val + ':' + typ + '`.\n')
|
||||
if typ == 'bool':
|
||||
if (val == 'True') or (val == ''):
|
||||
return True
|
||||
elif val == 'False':
|
||||
return False
|
||||
else:
|
||||
raise TqdmTypeError(val + ' : ' + typ)
|
||||
try:
|
||||
return eval(typ + '("' + val + '")')
|
||||
except:
|
||||
if typ == 'chr':
|
||||
return chr(ord(eval('"' + val + '"')))
|
||||
else:
|
||||
raise TqdmTypeError(val + ' : ' + typ)
|
||||
|
||||
|
||||
def posix_pipe(fin, fout, delim='\n', buf_size=256,
|
||||
callback=lambda int: None # pragma: no cover
|
||||
):
|
||||
"""
|
||||
Params
|
||||
------
|
||||
fin : file with `read(buf_size : int)` method
|
||||
fout : file with `write` (and optionally `flush`) methods.
|
||||
callback : function(int), e.g.: `tqdm.update`
|
||||
"""
|
||||
fp_write = fout.write
|
||||
|
||||
# tmp = ''
|
||||
if not delim:
|
||||
while True:
|
||||
tmp = fin.read(buf_size)
|
||||
|
||||
# flush at EOF
|
||||
if not tmp:
|
||||
getattr(fout, 'flush', lambda: None)() # pragma: no cover
|
||||
return
|
||||
|
||||
fp_write(tmp)
|
||||
callback(len(tmp))
|
||||
# return
|
||||
|
||||
buf = ''
|
||||
# n = 0
|
||||
while True:
|
||||
tmp = fin.read(buf_size)
|
||||
|
||||
# flush at EOF
|
||||
if not tmp:
|
||||
if buf:
|
||||
fp_write(buf)
|
||||
callback(1 + buf.count(delim)) # n += 1 + buf.count(delim)
|
||||
getattr(fout, 'flush', lambda: None)() # pragma: no cover
|
||||
return # n
|
||||
|
||||
while True:
|
||||
try:
|
||||
i = tmp.index(delim)
|
||||
except ValueError:
|
||||
buf += tmp
|
||||
break
|
||||
else:
|
||||
fp_write(buf + tmp[:i + len(delim)])
|
||||
callback(1) # n += 1
|
||||
buf = ''
|
||||
tmp = tmp[i + len(delim):]
|
||||
|
||||
|
||||
# ((opt, type), ... )
|
||||
RE_OPTS = re.compile(r'\n {8}(\S+)\s{2,}:\s*([^,]+)')
|
||||
# better split method assuming no positional args
|
||||
RE_SHLEX = re.compile(r'\s*(?<!\S)--?([^\s=]+)(?:\s*|=|$)')
|
||||
|
||||
# TODO: add custom support for some of the following?
|
||||
UNSUPPORTED_OPTS = ('iterable', 'gui', 'out', 'file')
|
||||
|
||||
# The 8 leading spaces are required for consistency
|
||||
CLI_EXTRA_DOC = r"""
|
||||
Extra CLI Options
|
||||
-----------------
|
||||
name : type, optional
|
||||
TODO: find out why this is needed.
|
||||
delim : chr, optional
|
||||
Delimiting character [default: '\n']. Use '\0' for null.
|
||||
N.B.: on Windows systems, Python converts '\n' to '\r\n'.
|
||||
buf_size : int, optional
|
||||
String buffer size in bytes [default: 256]
|
||||
used when `delim` is specified.
|
||||
bytes : bool, optional
|
||||
If true, will count bytes, ignore `delim`, and default
|
||||
`unit_scale` to True, `unit_divisor` to 1024, and `unit` to 'B'.
|
||||
log : str, optional
|
||||
CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET.
|
||||
"""
|
||||
|
||||
|
||||
def main(fp=sys.stderr):
|
||||
"""
|
||||
Parameters (internal use only)
|
||||
---------
|
||||
fp : file-like object for tqdm
|
||||
"""
|
||||
try:
|
||||
log = sys.argv.index('--log')
|
||||
except ValueError:
|
||||
logLevel = 'INFO'
|
||||
else:
|
||||
# sys.argv.pop(log)
|
||||
# logLevel = sys.argv.pop(log)
|
||||
logLevel = sys.argv[log + 1]
|
||||
logging.basicConfig(level=getattr(logging, logLevel),
|
||||
format="%(levelname)s:%(module)s:%(lineno)d:%(message)s")
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
d = tqdm.__init__.__doc__ + CLI_EXTRA_DOC
|
||||
|
||||
opt_types = dict(RE_OPTS.findall(d))
|
||||
# opt_types['delim'] = 'chr'
|
||||
|
||||
for o in UNSUPPORTED_OPTS:
|
||||
opt_types.pop(o)
|
||||
|
||||
log.debug(sorted(opt_types.items()))
|
||||
|
||||
# d = RE_OPTS.sub(r' --\1=<\1> : \2', d)
|
||||
split = RE_OPTS.split(d)
|
||||
opt_types_desc = zip(split[1::3], split[2::3], split[3::3])
|
||||
d = ''.join('\n --{0}=<{0}> : {1}{2}'.format(*otd)
|
||||
for otd in opt_types_desc if otd[0] not in UNSUPPORTED_OPTS)
|
||||
|
||||
d = """Usage:
|
||||
tqdm [--help | options]
|
||||
|
||||
Options:
|
||||
-h, --help Print this help and exit
|
||||
-v, --version Print version and exit
|
||||
|
||||
""" + d.strip('\n') + '\n'
|
||||
|
||||
# opts = docopt(d, version=__version__)
|
||||
if any(v in sys.argv for v in ('-v', '--version')):
|
||||
sys.stdout.write(__version__ + '\n')
|
||||
sys.exit(0)
|
||||
elif any(v in sys.argv for v in ('-h', '--help')):
|
||||
sys.stdout.write(d + '\n')
|
||||
sys.exit(0)
|
||||
|
||||
argv = RE_SHLEX.split(' '.join(["tqdm"] + sys.argv[1:]))
|
||||
opts = dict(zip(argv[1::2], argv[2::2]))
|
||||
|
||||
log.debug(opts)
|
||||
opts.pop('log', True)
|
||||
|
||||
tqdm_args = {'file': fp}
|
||||
try:
|
||||
for (o, v) in opts.items():
|
||||
try:
|
||||
tqdm_args[o] = cast(v, opt_types[o])
|
||||
except KeyError as e:
|
||||
raise TqdmKeyError(str(e))
|
||||
log.debug('args:' + str(tqdm_args))
|
||||
except:
|
||||
fp.write('\nError:\nUsage:\n tqdm [--help | options]\n')
|
||||
for i in sys.stdin:
|
||||
sys.stdout.write(i)
|
||||
raise
|
||||
else:
|
||||
buf_size = tqdm_args.pop('buf_size', 256)
|
||||
delim = tqdm_args.pop('delim', '\n')
|
||||
delim_per_char = tqdm_args.pop('bytes', False)
|
||||
if delim_per_char:
|
||||
tqdm_args.setdefault('unit', 'B')
|
||||
tqdm_args.setdefault('unit_scale', True)
|
||||
tqdm_args.setdefault('unit_divisor', 1024)
|
||||
log.debug(tqdm_args)
|
||||
with tqdm(**tqdm_args) as t:
|
||||
posix_pipe(sys.stdin, sys.stdout,
|
||||
'', buf_size, t.update)
|
||||
elif delim == '\n':
|
||||
log.debug(tqdm_args)
|
||||
for i in tqdm(sys.stdin, **tqdm_args):
|
||||
sys.stdout.write(i)
|
||||
else:
|
||||
log.debug(tqdm_args)
|
||||
with tqdm(**tqdm_args) as t:
|
||||
posix_pipe(sys.stdin, sys.stdout,
|
||||
delim, buf_size, t.update)
|
@@ -1,93 +0,0 @@
|
||||
from threading import Event, Thread
|
||||
from time import time
|
||||
from warnings import warn
|
||||
__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
|
||||
|
||||
|
||||
class TqdmSynchronisationWarning(RuntimeWarning):
|
||||
"""tqdm multi-thread/-process errors which may cause incorrect nesting
|
||||
but otherwise no adverse effects"""
|
||||
pass
|
||||
|
||||
|
||||
class TMonitor(Thread):
|
||||
"""
|
||||
Monitoring thread for tqdm bars.
|
||||
Monitors if tqdm bars are taking too much time to display
|
||||
and readjusts miniters automatically if necessary.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tqdm_cls : class
|
||||
tqdm class to use (can be core tqdm or a submodule).
|
||||
sleep_interval : float
|
||||
Time to sleep between monitoring checks.
|
||||
"""
|
||||
|
||||
# internal vars for unit testing
|
||||
_time = None
|
||||
_event = None
|
||||
|
||||
def __init__(self, tqdm_cls, sleep_interval):
|
||||
Thread.__init__(self)
|
||||
self.daemon = True # kill thread when main killed (KeyboardInterrupt)
|
||||
self.was_killed = Event()
|
||||
self.woken = 0 # last time woken up, to sync with monitor
|
||||
self.tqdm_cls = tqdm_cls
|
||||
self.sleep_interval = sleep_interval
|
||||
if TMonitor._time is not None:
|
||||
self._time = TMonitor._time
|
||||
else:
|
||||
self._time = time
|
||||
if TMonitor._event is not None:
|
||||
self._event = TMonitor._event
|
||||
else:
|
||||
self._event = Event
|
||||
self.start()
|
||||
|
||||
def exit(self):
|
||||
self.was_killed.set()
|
||||
self.join()
|
||||
return self.report()
|
||||
|
||||
def run(self):
|
||||
cur_t = self._time()
|
||||
while True:
|
||||
# After processing and before sleeping, notify that we woke
|
||||
# Need to be done just before sleeping
|
||||
self.woken = cur_t
|
||||
# Sleep some time...
|
||||
self.was_killed.wait(self.sleep_interval)
|
||||
# Quit if killed
|
||||
if self.was_killed.is_set():
|
||||
return
|
||||
# Then monitor!
|
||||
# Acquire lock (to access _instances)
|
||||
with self.tqdm_cls.get_lock():
|
||||
cur_t = self._time()
|
||||
# Check tqdm instances are waiting too long to print
|
||||
instances = self.tqdm_cls._instances.copy()
|
||||
for instance in instances:
|
||||
# Check event in loop to reduce blocking time on exit
|
||||
if self.was_killed.is_set():
|
||||
return
|
||||
# Avoid race by checking that the instance started
|
||||
if not hasattr(instance, 'start_t'): # pragma: nocover
|
||||
continue
|
||||
# Only if mininterval > 1 (else iterations are just slow)
|
||||
# and last refresh exceeded maxinterval
|
||||
if instance.miniters > 1 and \
|
||||
(cur_t - instance.last_print_t) >= \
|
||||
instance.maxinterval:
|
||||
# force bypassing miniters on next iteration
|
||||
# (dynamic_miniters adjusts mininterval automatically)
|
||||
instance.miniters = 1
|
||||
# Refresh now! (works only for manual tqdm)
|
||||
instance.refresh(nolock=True)
|
||||
if instances != self.tqdm_cls._instances: # pragma: nocover
|
||||
warn("Set changed size during iteration" +
|
||||
" (see https://github.com/tqdm/tqdm/issues/481)",
|
||||
TqdmSynchronisationWarning)
|
||||
|
||||
def report(self):
|
||||
return not self.was_killed.is_set()
|
lib/tqdm/_tqdm.py
@@ -1,351 +0,0 @@
|
||||
"""
|
||||
GUI progressbar decorator for iterators.
|
||||
Includes a default (x)range iterator printing to stderr.
|
||||
|
||||
Usage:
|
||||
>>> from tqdm_gui import tgrange[, tqdm_gui]
|
||||
>>> for i in tgrange(10): #same as: for i in tqdm_gui(xrange(10))
|
||||
... ...
|
||||
"""
|
||||
# future division is important to divide integers and get as
|
||||
# a result precise floating numbers (instead of truncated int)
|
||||
from __future__ import division, absolute_import
|
||||
# import compatibility functions and utilities
|
||||
# import sys
|
||||
from time import time
|
||||
from ._utils import _range
|
||||
# to inherit from the tqdm class
|
||||
from ._tqdm import tqdm, TqdmExperimentalWarning
|
||||
from warnings import warn
|
||||
|
||||
|
||||
__author__ = {"github.com/": ["casperdcl", "lrq3000"]}
|
||||
__all__ = ['tqdm_gui', 'tgrange']
|
||||
|
||||
|
||||
class tqdm_gui(tqdm): # pragma: no cover
|
||||
"""
|
||||
Experimental GUI version of tqdm!
|
||||
"""
|
||||
|
||||
# TODO: @classmethod: write() on GUI?
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
import matplotlib as mpl
|
||||
import matplotlib.pyplot as plt
|
||||
from collections import deque
|
||||
kwargs['gui'] = True
|
||||
|
||||
super(tqdm_gui, self).__init__(*args, **kwargs)
|
||||
|
||||
# Initialize the GUI display
|
||||
if self.disable or not kwargs['gui']:
|
||||
return
|
||||
|
||||
warn('GUI is experimental/alpha', TqdmExperimentalWarning)
|
||||
self.mpl = mpl
|
||||
self.plt = plt
|
||||
self.sp = None
|
||||
|
||||
# Remember if external environment uses toolbars
|
||||
self.toolbar = self.mpl.rcParams['toolbar']
|
||||
self.mpl.rcParams['toolbar'] = 'None'
|
||||
|
||||
self.mininterval = max(self.mininterval, 0.5)
|
||||
self.fig, ax = plt.subplots(figsize=(9, 2.2))
|
||||
# self.fig.subplots_adjust(bottom=0.2)
|
||||
if self.total:
|
||||
self.xdata = []
|
||||
self.ydata = []
|
||||
self.zdata = []
|
||||
else:
|
||||
self.xdata = deque([])
|
||||
self.ydata = deque([])
|
||||
self.zdata = deque([])
|
||||
self.line1, = ax.plot(self.xdata, self.ydata, color='b')
|
||||
self.line2, = ax.plot(self.xdata, self.zdata, color='k')
|
||||
ax.set_ylim(0, 0.001)
|
||||
if self.total:
|
||||
ax.set_xlim(0, 100)
|
||||
ax.set_xlabel('percent')
|
||||
self.fig.legend((self.line1, self.line2), ('cur', 'est'),
|
||||
loc='center right')
|
||||
# progressbar
|
||||
self.hspan = plt.axhspan(0, 0.001,
|
||||
xmin=0, xmax=0, color='g')
|
||||
else:
|
||||
# ax.set_xlim(-60, 0)
|
||||
ax.set_xlim(0, 60)
|
||||
ax.invert_xaxis()
|
||||
ax.set_xlabel('seconds')
|
||||
ax.legend(('cur', 'est'), loc='lower left')
|
||||
ax.grid()
|
||||
# ax.set_xlabel('seconds')
|
||||
ax.set_ylabel((self.unit if self.unit else 'it') + '/s')
|
||||
if self.unit_scale:
|
||||
plt.ticklabel_format(style='sci', axis='y',
|
||||
scilimits=(0, 0))
|
||||
ax.yaxis.get_offset_text().set_x(-0.15)
|
||||
|
||||
# Remember if external environment is interactive
|
||||
self.wasion = plt.isinteractive()
|
||||
plt.ion()
|
||||
self.ax = ax
|
||||
|
||||
def __iter__(self):
|
||||
# TODO: somehow allow the following:
|
||||
# if not self.gui:
|
||||
# return super(tqdm_gui, self).__iter__()
|
||||
iterable = self.iterable
|
||||
if self.disable:
|
||||
for obj in iterable:
|
||||
yield obj
|
||||
return
|
||||
|
||||
# ncols = self.ncols
|
||||
mininterval = self.mininterval
|
||||
maxinterval = self.maxinterval
|
||||
miniters = self.miniters
|
||||
dynamic_miniters = self.dynamic_miniters
|
||||
unit = self.unit
|
||||
unit_scale = self.unit_scale
|
||||
ascii = self.ascii
|
||||
start_t = self.start_t
|
||||
last_print_t = self.last_print_t
|
||||
last_print_n = self.last_print_n
|
||||
n = self.n
|
||||
# dynamic_ncols = self.dynamic_ncols
|
||||
smoothing = self.smoothing
|
||||
avg_time = self.avg_time
|
||||
bar_format = self.bar_format
|
||||
|
||||
plt = self.plt
|
||||
ax = self.ax
|
||||
xdata = self.xdata
|
||||
ydata = self.ydata
|
||||
zdata = self.zdata
|
||||
line1 = self.line1
|
||||
line2 = self.line2
|
||||
|
||||
for obj in iterable:
|
||||
yield obj
|
||||
# Update and print the progressbar.
|
||||
# Note: does not call self.update(1) for speed optimisation.
|
||||
n += 1
|
||||
delta_it = n - last_print_n
|
||||
# check the counter first (avoid calls to time())
|
||||
if delta_it >= miniters:
|
||||
cur_t = time()
|
||||
delta_t = cur_t - last_print_t
|
||||
if delta_t >= mininterval:
|
||||
elapsed = cur_t - start_t
|
||||
# EMA (not just overall average)
|
||||
if smoothing and delta_t:
|
||||
avg_time = delta_t / delta_it \
|
||||
if avg_time is None \
|
||||
else smoothing * delta_t / delta_it + \
|
||||
(1 - smoothing) * avg_time
|
||||
|
||||
# Inline due to multiple calls
|
||||
total = self.total
|
||||
# instantaneous rate
|
||||
y = delta_it / delta_t
|
||||
# overall rate
|
||||
z = n / elapsed
|
||||
# update line data
|
||||
xdata.append(n * 100.0 / total if total else cur_t)
|
||||
ydata.append(y)
|
||||
zdata.append(z)
|
||||
|
||||
# Discard old values
|
||||
# xmin, xmax = ax.get_xlim()
|
||||
# if (not total) and elapsed > xmin * 1.1:
|
||||
if (not total) and elapsed > 66:
|
||||
xdata.popleft()
|
||||
ydata.popleft()
|
||||
zdata.popleft()
|
||||
|
||||
ymin, ymax = ax.get_ylim()
|
||||
if y > ymax or z > ymax:
|
||||
ymax = 1.1 * y
|
||||
ax.set_ylim(ymin, ymax)
|
||||
ax.figure.canvas.draw()
|
||||
|
||||
if total:
|
||||
line1.set_data(xdata, ydata)
|
||||
line2.set_data(xdata, zdata)
|
||||
try:
|
||||
poly_lims = self.hspan.get_xy()
|
||||
except AttributeError:
|
||||
self.hspan = plt.axhspan(0, 0.001, xmin=0,
|
||||
xmax=0, color='g')
|
||||
poly_lims = self.hspan.get_xy()
|
||||
poly_lims[0, 1] = ymin
|
||||
poly_lims[1, 1] = ymax
|
||||
poly_lims[2] = [n / total, ymax]
|
||||
poly_lims[3] = [poly_lims[2, 0], ymin]
|
||||
if len(poly_lims) > 4:
|
||||
poly_lims[4, 1] = ymin
|
||||
self.hspan.set_xy(poly_lims)
|
||||
else:
|
||||
t_ago = [cur_t - i for i in xdata]
|
||||
line1.set_data(t_ago, ydata)
|
||||
line2.set_data(t_ago, zdata)
|
||||
|
||||
ax.set_title(self.format_meter(
|
||||
n, total, elapsed, 0,
|
||||
self.desc, ascii, unit, unit_scale,
|
||||
1 / avg_time if avg_time else None, bar_format),
|
||||
fontname="DejaVu Sans Mono", fontsize=11)
|
||||
plt.pause(1e-9)
|
||||
|
||||
# If no `miniters` was specified, adjust automatically
|
||||
# to the maximum iteration rate seen so far.
|
||||
if dynamic_miniters:
|
||||
if maxinterval and delta_t > maxinterval:
|
||||
# Set miniters to correspond to maxinterval
|
||||
miniters = delta_it * maxinterval / delta_t
|
||||
elif mininterval and delta_t:
|
||||
# EMA-weight miniters to converge
|
||||
# towards the timeframe of mininterval
|
||||
miniters = smoothing * delta_it * mininterval \
|
||||
/ delta_t + (1 - smoothing) * miniters
|
||||
else:
|
||||
miniters = smoothing * delta_it + \
|
||||
(1 - smoothing) * miniters
|
||||
|
||||
# Store old values for next call
|
||||
last_print_n = n
|
||||
last_print_t = cur_t
|
||||
|
||||
# Closing the progress bar.
|
||||
# Update some internal variables for close().
|
||||
self.last_print_n = last_print_n
|
||||
self.n = n
|
||||
self.close()
|
||||
|
||||
def update(self, n=1):
|
||||
# if not self.gui:
|
||||
# return super(tqdm_gui, self).close()
|
||||
if self.disable:
|
||||
return
|
||||
|
||||
if n < 0:
|
||||
n = 1
|
||||
self.n += n
|
||||
|
||||
delta_it = self.n - self.last_print_n # should be n?
|
||||
if delta_it >= self.miniters:
|
||||
# We check the counter first, to reduce the overhead of time()
|
||||
cur_t = time()
|
||||
delta_t = cur_t - self.last_print_t
|
||||
if delta_t >= self.mininterval:
|
||||
elapsed = cur_t - self.start_t
|
||||
# EMA (not just overall average)
|
||||
if self.smoothing and delta_t:
|
||||
self.avg_time = delta_t / delta_it \
|
||||
if self.avg_time is None \
|
||||
else self.smoothing * delta_t / delta_it + \
|
||||
(1 - self.smoothing) * self.avg_time
|
||||
|
||||
# Inline due to multiple calls
|
||||
total = self.total
|
||||
ax = self.ax
|
||||
|
||||
# instantaneous rate
|
||||
y = delta_it / delta_t
|
||||
# smoothed rate
|
||||
z = self.n / elapsed
|
||||
# update line data
|
||||
self.xdata.append(self.n * 100.0 / total
|
||||
if total else cur_t)
|
||||
self.ydata.append(y)
|
||||
self.zdata.append(z)
|
||||
|
||||
# Discard old values
|
||||
if (not total) and elapsed > 66:
|
||||
self.xdata.popleft()
|
||||
self.ydata.popleft()
|
||||
self.zdata.popleft()
|
||||
|
||||
ymin, ymax = ax.get_ylim()
|
||||
if y > ymax or z > ymax:
|
||||
ymax = 1.1 * y
|
||||
ax.set_ylim(ymin, ymax)
|
||||
ax.figure.canvas.draw()
|
||||
|
||||
if total:
|
||||
self.line1.set_data(self.xdata, self.ydata)
|
||||
self.line2.set_data(self.xdata, self.zdata)
|
||||
try:
|
||||
poly_lims = self.hspan.get_xy()
|
||||
except AttributeError:
|
||||
self.hspan = self.plt.axhspan(0, 0.001, xmin=0,
|
||||
xmax=0, color='g')
|
||||
poly_lims = self.hspan.get_xy()
|
||||
poly_lims[0, 1] = ymin
|
||||
poly_lims[1, 1] = ymax
|
||||
poly_lims[2] = [self.n / total, ymax]
|
||||
poly_lims[3] = [poly_lims[2, 0], ymin]
|
||||
if len(poly_lims) > 4:
|
||||
poly_lims[4, 1] = ymin
|
||||
self.hspan.set_xy(poly_lims)
|
||||
else:
|
||||
t_ago = [cur_t - i for i in self.xdata]
|
||||
self.line1.set_data(t_ago, self.ydata)
|
||||
self.line2.set_data(t_ago, self.zdata)
|
||||
|
||||
ax.set_title(self.format_meter(
|
||||
self.n, total, elapsed, 0,
|
||||
self.desc, self.ascii, self.unit, self.unit_scale,
|
||||
1 / self.avg_time if self.avg_time else None,
|
||||
self.bar_format),
|
||||
fontname="DejaVu Sans Mono", fontsize=11)
|
||||
self.plt.pause(1e-9)
|
||||
|
||||
# If no `miniters` was specified, adjust automatically to the
|
||||
# maximum iteration rate seen so far.
|
||||
# e.g.: After running `tqdm.update(5)`, subsequent
|
||||
# calls to `tqdm.update()` will only cause an update after
|
||||
# at least 5 more iterations.
|
||||
if self.dynamic_miniters:
|
||||
if self.maxinterval and delta_t > self.maxinterval:
|
||||
self.miniters = self.miniters * self.maxinterval \
|
||||
/ delta_t
|
||||
elif self.mininterval and delta_t:
|
||||
self.miniters = self.smoothing * delta_it \
|
||||
* self.mininterval / delta_t + \
|
||||
(1 - self.smoothing) * self.miniters
|
||||
else:
|
||||
self.miniters = self.smoothing * delta_it + \
|
||||
(1 - self.smoothing) * self.miniters
|
||||
|
||||
# Store old values for next call
|
||||
self.last_print_n = self.n
|
||||
self.last_print_t = cur_t
|
||||
|
||||
def close(self):
|
||||
# if not self.gui:
|
||||
# return super(tqdm_gui, self).close()
|
||||
if self.disable:
|
||||
return
|
||||
|
||||
self.disable = True
|
||||
|
||||
self._instances.remove(self)
|
||||
|
||||
# Restore toolbars
|
||||
self.mpl.rcParams['toolbar'] = self.toolbar
|
||||
# Return to non-interactive mode
|
||||
if not self.wasion:
|
||||
self.plt.ioff()
|
||||
if not self.leave:
|
||||
self.plt.close(self.fig)
|
||||
|
||||
|
||||
def tgrange(*args, **kwargs):
|
||||
"""
|
||||
A shortcut for tqdm_gui(xrange(*args), **kwargs).
|
||||
On Python3+ range is used instead of xrange.
|
||||
"""
|
||||
return tqdm_gui(_range(*args), **kwargs)
|
@@ -1,236 +0,0 @@
|
||||
"""
|
||||
IPython/Jupyter Notebook progressbar decorator for iterators.
|
||||
Includes a default (x)range iterator printing to stderr.
|
||||
|
||||
Usage:
|
||||
>>> from tqdm_notebook import tnrange[, tqdm_notebook]
|
||||
>>> for i in tnrange(10): #same as: for i in tqdm_notebook(xrange(10))
|
||||
... ...
|
||||
"""
|
||||
# future division is important to divide integers and get as
|
||||
# a result precise floating numbers (instead of truncated int)
|
||||
from __future__ import division, absolute_import
|
||||
# import compatibility functions and utilities
|
||||
import sys
|
||||
from ._utils import _range
|
||||
# to inherit from the tqdm class
|
||||
from ._tqdm import tqdm
|
||||
|
||||
|
||||
if True: # pragma: no cover
|
||||
# import IPython/Jupyter base widget and display utilities
|
||||
try: # IPython 4.x
|
||||
import ipywidgets
|
||||
IPY = 4
|
||||
except ImportError: # IPython 3.x / 2.x
|
||||
IPY = 32
|
||||
import warnings
|
||||
with warnings.catch_warnings():
|
||||
ipy_deprecation_msg = "The `IPython.html` package" \
|
||||
" has been deprecated"
|
||||
warnings.filterwarnings('error',
|
||||
message=".*" + ipy_deprecation_msg + ".*")
|
||||
try:
|
||||
import IPython.html.widgets as ipywidgets
|
||||
except Warning as e:
|
||||
if ipy_deprecation_msg not in str(e):
|
||||
raise
|
||||
warnings.simplefilter('ignore')
|
||||
try:
|
||||
import IPython.html.widgets as ipywidgets # NOQA
|
||||
except ImportError:
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try: # IPython 4.x / 3.x
|
||||
if IPY == 32:
|
||||
from IPython.html.widgets import IntProgress, HBox, HTML
|
||||
IPY = 3
|
||||
else:
|
||||
from ipywidgets import IntProgress, HBox, HTML
|
||||
except ImportError:
|
||||
try: # IPython 2.x
|
||||
from IPython.html.widgets import IntProgressWidget as IntProgress
|
||||
from IPython.html.widgets import ContainerWidget as HBox
|
||||
from IPython.html.widgets import HTML
|
||||
IPY = 2
|
||||
except ImportError:
|
||||
IPY = 0
|
||||
|
||||
try:
|
||||
from IPython.display import display # , clear_output
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# HTML encoding
|
||||
try: # Py3
|
||||
from html import escape
|
||||
except ImportError: # Py2
|
||||
from cgi import escape
|
||||
|
||||
|
||||
__author__ = {"github.com/": ["lrq3000", "casperdcl", "alexanderkuk"]}
|
||||
__all__ = ['tqdm_notebook', 'tnrange']
|
||||
|
||||
|
||||
class tqdm_notebook(tqdm):
|
||||
"""
|
||||
Experimental IPython/Jupyter Notebook widget using tqdm!
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def status_printer(_, total=None, desc=None):
|
||||
"""
|
||||
Manage the printing of an IPython/Jupyter Notebook progress bar widget.
|
||||
"""
|
||||
# Fallback to text bar if there's no total
|
||||
# DEPRECATED: replaced with an 'info' style bar
|
||||
# if not total:
|
||||
# return super(tqdm_notebook, tqdm_notebook).status_printer(file)
|
||||
|
||||
# fp = file
|
||||
|
||||
# Prepare IPython progress bar
|
||||
if total:
|
||||
pbar = IntProgress(min=0, max=total)
|
||||
else: # No total? Show info style bar with no progress tqdm status
|
||||
pbar = IntProgress(min=0, max=1)
|
||||
pbar.value = 1
|
||||
pbar.bar_style = 'info'
|
||||
if desc:
|
||||
pbar.description = desc
|
||||
# Prepare status text
|
||||
ptext = HTML()
|
||||
# Only way to place text to the right of the bar is to use a container
|
||||
container = HBox(children=[pbar, ptext])
|
||||
display(container)
|
||||
|
||||
def print_status(s='', close=False, bar_style=None, desc=None):
|
||||
# Note: contrary to native tqdm, s='' does NOT clear bar
|
||||
# goal is to keep all infos if error happens so user knows
|
||||
# at which iteration the loop failed.
|
||||
|
||||
# Clear previous output (really necessary?)
|
||||
# clear_output(wait=1)
|
||||
|
||||
# Get current iteration value from format_meter string
|
||||
if total:
|
||||
# n = None
|
||||
if s:
|
||||
npos = s.find(r'/|/') # cause we use bar_format=r'{n}|...'
|
||||
# Check that n can be found in s (else n > total)
|
||||
if npos >= 0:
|
||||
n = int(s[:npos]) # get n from string
|
||||
s = s[npos + 3:] # remove from string
|
||||
|
||||
# Update bar with current n value
|
||||
if n is not None:
|
||||
pbar.value = n
|
||||
|
||||
# Print stats
|
||||
if s: # never clear the bar (signal: s='')
|
||||
s = s.replace('||', '') # remove inesthetical pipes
|
||||
s = escape(s) # html escape special characters (like '?')
|
||||
ptext.value = s
|
||||
|
||||
# Change bar style
|
||||
if bar_style:
|
||||
# Hack-ish way to avoid the danger bar_style being overridden by
|
||||
# success because the bar gets closed after the error...
|
||||
if not (pbar.bar_style == 'danger' and bar_style == 'success'):
|
||||
pbar.bar_style = bar_style
|
||||
|
||||
# Special signal to close the bar
|
||||
if close and pbar.bar_style != 'danger': # hide only if no error
|
||||
try:
|
||||
container.close()
|
||||
except AttributeError:
|
||||
container.visible = False
|
||||
|
||||
# Update description
|
||||
if desc:
|
||||
pbar.description = desc
|
||||
|
||||
return print_status
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Setup default output
|
||||
if kwargs.get('file', sys.stderr) is sys.stderr:
|
||||
kwargs['file'] = sys.stdout # avoid the red block in IPython
|
||||
|
||||
# Remove the bar from the printed string, only print stats
|
||||
if not kwargs.get('bar_format', None):
|
||||
kwargs['bar_format'] = r'{n}/|/{l_bar}{r_bar}'
|
||||
|
||||
# Initialize parent class + avoid printing by using gui=True
|
||||
kwargs['gui'] = True
|
||||
super(tqdm_notebook, self).__init__(*args, **kwargs)
|
||||
if self.disable or not kwargs['gui']:
|
||||
return
|
||||
|
||||
# Delete first pbar generated from super() (wrong total and text)
|
||||
# DEPRECATED by using gui=True
|
||||
# self.sp('', close=True)
|
||||
# Replace with IPython progress bar display (with correct total)
|
||||
self.sp = self.status_printer(self.fp, self.total, self.desc)
|
||||
self.desc = None # trick to place description before the bar
|
||||
|
||||
# Print initial bar state
|
||||
if not self.disable:
|
||||
self.sp(self.__repr__()) # same as self.refresh without clearing
|
||||
|
||||
def __iter__(self, *args, **kwargs):
|
||||
try:
|
||||
for obj in super(tqdm_notebook, self).__iter__(*args, **kwargs):
|
||||
# return super(tqdm...) will not catch exception
|
||||
yield obj
|
||||
# NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
|
||||
except:
|
||||
self.sp(bar_style='danger')
|
||||
raise
|
||||
|
||||
def update(self, *args, **kwargs):
|
||||
try:
|
||||
super(tqdm_notebook, self).update(*args, **kwargs)
|
||||
except Exception as exc:
|
||||
# cannot catch KeyboardInterrupt when using manual tqdm
|
||||
# as the interrupt will most likely happen on another statement
|
||||
self.sp(bar_style='danger')
|
||||
raise exc
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
super(tqdm_notebook, self).close(*args, **kwargs)
|
||||
# If it was not run in a notebook, sp is not assigned, check for it
|
||||
if hasattr(self, 'sp'):
|
||||
# Try to detect if there was an error or KeyboardInterrupt
|
||||
# in manual mode: if n < total, things probably got wrong
|
||||
if self.total and self.n < self.total:
|
||||
self.sp(bar_style='danger')
|
||||
else:
|
||||
if self.leave:
|
||||
self.sp(bar_style='success')
|
||||
else:
|
||||
self.sp(close=True)
|
||||
|
||||
def moveto(self, *args, **kwargs):
|
||||
# void -> avoid extraneous `\n` in IPython output cell
|
||||
return
|
||||
|
||||
def set_description(self, desc=None, **_):
|
||||
"""
|
||||
Set/modify description of the progress bar.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
desc : str, optional
|
||||
"""
|
||||
self.sp(desc=desc)
|
||||
|
||||
|
||||
def tnrange(*args, **kwargs):
|
||||
"""
|
||||
A shortcut for tqdm_notebook(xrange(*args), **kwargs).
|
||||
On Python3+ range is used instead of xrange.
|
||||
"""
|
||||
return tqdm_notebook(_range(*args), **kwargs)
|
@@ -1,46 +0,0 @@
|
||||
import sys
|
||||
|
||||
__author__ = "github.com/casperdcl"
|
||||
__all__ = ['tqdm_pandas']
|
||||
|
||||
|
||||
def tqdm_pandas(tclass, *targs, **tkwargs):
|
||||
"""
|
||||
Registers the given `tqdm` instance with
|
||||
`pandas.core.groupby.DataFrameGroupBy.progress_apply`.
|
||||
It will even close() the `tqdm` instance upon completion.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tclass : tqdm class you want to use (eg, tqdm, tqdm_notebook, etc)
|
||||
targs and tkwargs : arguments for the tqdm instance
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import pandas as pd
|
||||
>>> import numpy as np
|
||||
>>> from tqdm import tqdm, tqdm_pandas
|
||||
>>>
|
||||
>>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
|
||||
>>> tqdm_pandas(tqdm, leave=True) # can use tqdm_gui, optional kwargs, etc
|
||||
>>> # Now you can use `progress_apply` instead of `apply`
|
||||
>>> df.groupby(0).progress_apply(lambda x: x**2)
|
||||
|
||||
References
|
||||
----------
|
||||
https://stackoverflow.com/questions/18603270/
|
||||
progress-indicator-during-pandas-operations-python
|
||||
"""
|
||||
from tqdm import TqdmDeprecationWarning
|
||||
|
||||
if isinstance(tclass, type) or (getattr(tclass, '__name__', '').startswith(
|
||||
'tqdm_')): # delayed adapter case
|
||||
TqdmDeprecationWarning("""\
|
||||
Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm, ...)`.
|
||||
""", fp_write=getattr(tkwargs.get('file', None), 'write', sys.stderr.write))
|
||||
tclass.pandas(*targs, **tkwargs)
|
||||
else:
|
||||
TqdmDeprecationWarning("""\
|
||||
Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm(...))`.
|
||||
""", fp_write=getattr(tclass.fp, 'write', sys.stderr.write))
|
||||
type(tclass).pandas(deprecated_t=tclass)
|
@@ -1,215 +0,0 @@
|
||||
import os
|
||||
import subprocess
|
||||
from platform import system as _curos
|
||||
CUR_OS = _curos()
|
||||
IS_WIN = CUR_OS in ['Windows', 'cli']
|
||||
IS_NIX = (not IS_WIN) and any(
|
||||
CUR_OS.startswith(i) for i in
|
||||
['CYGWIN', 'MSYS', 'Linux', 'Darwin', 'SunOS', 'FreeBSD', 'NetBSD'])
|
||||
|
||||
|
||||
# Py2/3 compat. Empty conditional to avoid coverage
|
||||
if True: # pragma: no cover
|
||||
try:
|
||||
_range = xrange
|
||||
except NameError:
|
||||
_range = range
|
||||
|
||||
try:
|
||||
_unich = unichr
|
||||
except NameError:
|
||||
_unich = chr
|
||||
|
||||
try:
|
||||
_unicode = unicode
|
||||
except NameError:
|
||||
_unicode = str
|
||||
|
||||
try:
|
||||
if IS_WIN:
|
||||
import colorama
|
||||
colorama.init()
|
||||
else:
|
||||
colorama = None
|
||||
except ImportError:
|
||||
colorama = None
|
||||
|
||||
try:
|
||||
from weakref import WeakSet
|
||||
except ImportError:
|
||||
WeakSet = set
|
||||
|
||||
try:
|
||||
_basestring = basestring
|
||||
except NameError:
|
||||
_basestring = str
|
||||
|
||||
try: # py>=2.7,>=3.1
|
||||
from collections import OrderedDict as _OrderedDict
|
||||
except ImportError:
|
||||
try: # older Python versions with backported ordereddict lib
|
||||
from ordereddict import OrderedDict as _OrderedDict
|
||||
except ImportError: # older Python versions without ordereddict lib
|
||||
# Py2.6,3.0 compat, from PEP 372
|
||||
from collections import MutableMapping
|
||||
|
||||
class _OrderedDict(dict, MutableMapping):
|
||||
# Methods with direct access to underlying attributes
|
||||
def __init__(self, *args, **kwds):
|
||||
if len(args) > 1:
|
||||
raise TypeError('expected at most 1 argument, got %d',
|
||||
len(args))
|
||||
if not hasattr(self, '_keys'):
|
||||
self._keys = []
|
||||
self.update(*args, **kwds)
|
||||
|
||||
def clear(self):
|
||||
del self._keys[:]
|
||||
dict.clear(self)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if key not in self:
|
||||
self._keys.append(key)
|
||||
dict.__setitem__(self, key, value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
dict.__delitem__(self, key)
|
||||
self._keys.remove(key)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._keys)
|
||||
|
||||
def __reversed__(self):
|
||||
return reversed(self._keys)
|
||||
|
||||
def popitem(self):
|
||||
if not self:
|
||||
raise KeyError
|
||||
key = self._keys.pop()
|
||||
value = dict.pop(self, key)
|
||||
return key, value
|
||||
|
||||
def __reduce__(self):
|
||||
items = [[k, self[k]] for k in self]
|
||||
inst_dict = vars(self).copy()
|
||||
inst_dict.pop('_keys', None)
|
||||
return self.__class__, (items,), inst_dict
|
||||
|
||||
# Methods with indirect access via the above methods
|
||||
setdefault = MutableMapping.setdefault
|
||||
update = MutableMapping.update
|
||||
pop = MutableMapping.pop
|
||||
keys = MutableMapping.keys
|
||||
values = MutableMapping.values
|
||||
items = MutableMapping.items
|
||||
|
||||
def __repr__(self):
|
||||
pairs = ', '.join(map('%r: %r'.__mod__, self.items()))
|
||||
return '%s({%s})' % (self.__class__.__name__, pairs)
|
||||
|
||||
def copy(self):
|
||||
return self.__class__(self)
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, value=None):
|
||||
d = cls()
|
||||
for key in iterable:
|
||||
d[key] = value
|
||||
return d


def _is_utf(encoding):
    try:
        u'\u2588\u2589'.encode(encoding)
    except UnicodeEncodeError:  # pragma: no cover
        return False
    except Exception:  # pragma: no cover
        try:
            return encoding.lower().startswith('utf-') or ('U8' == encoding)
        except:
            return False
    else:
        return True


def _supports_unicode(fp):
    try:
        return _is_utf(fp.encoding)
    except AttributeError:
        return False


def _environ_cols_wrapper():  # pragma: no cover
    """
    Return a function which gets width and height of console
    (linux,osx,windows,cygwin).
    """
    _environ_cols = None
    if IS_WIN:
        _environ_cols = _environ_cols_windows
        if _environ_cols is None:
            _environ_cols = _environ_cols_tput
    if IS_NIX:
        _environ_cols = _environ_cols_linux
    return _environ_cols


def _environ_cols_windows(fp):  # pragma: no cover
    try:
        from ctypes import windll, create_string_buffer
        import struct
        from sys import stdin, stdout

        io_handle = -12  # assume stderr
        if fp == stdin:
            io_handle = -10
        elif fp == stdout:
            io_handle = -11

        h = windll.kernel32.GetStdHandle(io_handle)
        csbi = create_string_buffer(22)
        res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
        if res:
            (_bufx, _bufy, _curx, _cury, _wattr, left, _top, right, _bottom,
             _maxx, _maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
            # nlines = bottom - top + 1
            return right - left  # +1
    except:
        pass
    return None


def _environ_cols_tput(*_):  # pragma: no cover
    """cygwin xterm (windows)"""
    try:
        import shlex
        cols = int(subprocess.check_call(shlex.split('tput cols')))
        # rows = int(subprocess.check_call(shlex.split('tput lines')))
        return cols
    except:
        pass
    return None


def _environ_cols_linux(fp):  # pragma: no cover

    try:
        from termios import TIOCGWINSZ
        from fcntl import ioctl
        from array import array
    except ImportError:
        return None
    else:
        try:
            return array('h', ioctl(fp, TIOCGWINSZ, '\0' * 8))[1]
        except:
            try:
                from os import environ
            except ImportError:
                return None
            else:
                return int(environ.get('COLUMNS', 1)) - 1


def _term_move_up():  # pragma: no cover
    return '' if (os.name == 'nt') and (colorama is None) else '\x1b[A'
@@ -1,59 +0,0 @@
# Definition of the version number
import os
from io import open as io_open

__all__ = ["__version__"]

# major, minor, patch, -extra
version_info = 4, 21, 0

# Nice string for the version
__version__ = '.'.join(map(str, version_info))
|
||||
|
||||
|
||||
# auto -extra based on commit hash (if not tagged as release)
|
||||
scriptdir = os.path.dirname(__file__)
|
||||
gitdir = os.path.abspath(os.path.join(scriptdir, "..", ".git"))
|
||||
if os.path.isdir(gitdir): # pragma: nocover
|
||||
extra = None
|
||||
# Open config file to check if we are in tqdm project
|
||||
with io_open(os.path.join(gitdir, "config"), 'r') as fh_config:
|
||||
if 'tqdm' in fh_config.read():
|
||||
# Open the HEAD file
|
||||
with io_open(os.path.join(gitdir, "HEAD"), 'r') as fh_head:
|
||||
extra = fh_head.readline().strip()
|
||||
# in a branch => HEAD points to file containing last commit
|
||||
if 'ref:' in extra:
|
||||
# reference file path
|
||||
ref_file = extra[5:]
|
||||
branch_name = ref_file.rsplit('/', 1)[-1]
|
||||
|
||||
ref_file_path = os.path.abspath(os.path.join(gitdir, ref_file))
|
||||
# check that we are in git folder
|
||||
# (by stripping the git folder from the ref file path)
|
||||
if os.path.relpath(
|
||||
ref_file_path, gitdir).replace('\\', '/') != ref_file:
|
||||
# out of git folder
|
||||
extra = None
|
||||
else:
|
||||
# open the ref file
|
||||
with io_open(ref_file_path, 'r') as fh_branch:
|
||||
commit_hash = fh_branch.readline().strip()
|
||||
extra = commit_hash[:8]
|
||||
if branch_name != "master":
|
||||
extra += '.' + branch_name
|
||||
|
||||
# detached HEAD mode, already have commit hash
|
||||
else:
|
||||
extra = extra[:8]
|
||||
|
||||
# Append commit hash (and branch) to version string if not tagged
|
||||
if extra is not None:
|
||||
try:
|
||||
with io_open(os.path.join(gitdir, "refs", "tags",
|
||||
'v' + __version__)) as fdv:
|
||||
if fdv.readline().strip()[:8] != extra[:8]:
|
||||
__version__ += '-' + extra
|
||||
except Exception as e:
|
||||
if "No such file" not in str(e):
|
||||
raise
@@ -1,94 +0,0 @@
import sys
import subprocess
from tqdm import main, TqdmKeyError, TqdmTypeError

from tests_tqdm import with_setup, pretest, posttest, _range, closing, \
    UnicodeIO, StringIO


def _sh(*cmd, **kwargs):
    return subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            **kwargs).communicate()[0].decode('utf-8')
|
||||
|
||||
|
||||
# WARNING: this should be the last test as it messes with sys.stdin, argv
|
||||
@with_setup(pretest, posttest)
|
||||
def test_main():
|
||||
"""Test command line pipes"""
|
||||
ls_out = _sh('ls').replace('\r\n', '\n')
|
||||
ls = subprocess.Popen('ls', stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT)
|
||||
res = _sh(sys.executable, '-c', 'from tqdm import main; main()',
|
||||
stdin=ls.stdout, stderr=subprocess.STDOUT)
|
||||
ls.wait()
|
||||
|
||||
# actual test:
|
||||
|
||||
assert (ls_out in res.replace('\r\n', '\n'))
|
||||
|
||||
# semi-fake test which gets coverage:
|
||||
_SYS = sys.stdin, sys.argv
|
||||
|
||||
with closing(StringIO()) as sys.stdin:
|
||||
sys.argv = ['', '--desc', 'Test CLI-delims',
|
||||
'--ascii', 'True', '--delim', r'\0', '--buf_size', '64']
|
||||
sys.stdin.write('\0'.join(map(str, _range(int(1e3)))))
|
||||
sys.stdin.seek(0)
|
||||
main()
|
||||
|
||||
IN_DATA_LIST = map(str, _range(int(1e3)))
|
||||
sys.stdin = IN_DATA_LIST
|
||||
sys.argv = ['', '--desc', 'Test CLI pipes',
|
||||
'--ascii', 'True', '--unit_scale', 'True']
|
||||
import tqdm.__main__ # NOQA
|
||||
|
||||
IN_DATA = '\0'.join(IN_DATA_LIST)
|
||||
with closing(StringIO()) as sys.stdin:
|
||||
sys.stdin.write(IN_DATA)
|
||||
sys.stdin.seek(0)
|
||||
sys.argv = ['', '--ascii', '--bytes', '--unit_scale', 'False']
|
||||
with closing(UnicodeIO()) as fp:
|
||||
main(fp=fp)
|
||||
assert (str(len(IN_DATA)) in fp.getvalue())
|
||||
|
||||
sys.stdin = IN_DATA_LIST
|
||||
sys.argv = ['', '-ascii', '--unit_scale', 'False',
|
||||
'--desc', 'Test CLI errors']
|
||||
main()
|
||||
|
||||
sys.argv = ['', '-ascii', '-unit_scale', '--bad_arg_u_ment', 'foo']
|
||||
try:
|
||||
main()
|
||||
except TqdmKeyError as e:
|
||||
if 'bad_arg_u_ment' not in str(e):
|
||||
raise
|
||||
else:
|
||||
raise TqdmKeyError('bad_arg_u_ment')
|
||||
|
||||
sys.argv = ['', '-ascii', '-unit_scale', 'invalid_bool_value']
|
||||
try:
|
||||
main()
|
||||
except TqdmTypeError as e:
|
||||
if 'invalid_bool_value' not in str(e):
|
||||
raise
|
||||
else:
|
||||
raise TqdmTypeError('invalid_bool_value')
|
||||
|
||||
sys.argv = ['', '-ascii', '--total', 'invalid_int_value']
|
||||
try:
|
||||
main()
|
||||
except TqdmTypeError as e:
|
||||
if 'invalid_int_value' not in str(e):
|
||||
raise
|
||||
else:
|
||||
raise TqdmTypeError('invalid_int_value')
|
||||
|
||||
for i in ('-h', '--help', '-v', '--version'):
|
||||
sys.argv = ['', i]
|
||||
try:
|
||||
main()
|
||||
except SystemExit:
|
||||
pass
|
||||
|
||||
# clean up
|
||||
sys.stdin, sys.argv = _SYS
@@ -1,207 +0,0 @@
from nose.plugins.skip import SkipTest

from tqdm import tqdm
from tests_tqdm import with_setup, pretest, posttest, StringIO, closing
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_series():
|
||||
"""Test pandas.Series.progress_apply and .progress_map"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
|
||||
series = pd.Series(randint(0, 50, (123,)))
|
||||
res1 = series.progress_apply(lambda x: x + 10)
|
||||
res2 = series.apply(lambda x: x + 10)
|
||||
assert res1.equals(res2)
|
||||
|
||||
res3 = series.progress_map(lambda x: x + 10)
|
||||
res4 = series.map(lambda x: x + 10)
|
||||
assert res3.equals(res4)
|
||||
|
||||
expects = ['100%', '123/123']
|
||||
for exres in expects:
|
||||
our_file.seek(0)
|
||||
if our_file.getvalue().count(exres) < 2:
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:\n{1}\n".format(
|
||||
exres + " at least twice.", our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_data_frame():
|
||||
"""Test pandas.DataFrame.progress_apply and .progress_applymap"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
df = pd.DataFrame(randint(0, 50, (100, 200)))
|
||||
|
||||
def task_func(x):
|
||||
return x + 1
|
||||
|
||||
# applymap
|
||||
res1 = df.progress_applymap(task_func)
|
||||
res2 = df.applymap(task_func)
|
||||
assert res1.equals(res2)
|
||||
|
||||
# apply
|
||||
for axis in [0, 1]:
|
||||
res3 = df.progress_apply(task_func, axis=axis)
|
||||
res4 = df.apply(task_func, axis=axis)
|
||||
assert res3.equals(res4)
|
||||
|
||||
our_file.seek(0)
|
||||
if our_file.read().count('100%') < 3:
|
||||
our_file.seek(0)
|
||||
raise AssertionError("\nExpected:\n{0}\nIn:\n{1}\n".format(
|
||||
'100% at least three times', our_file.read()))
|
||||
|
||||
# applymap, apply axis=0, apply axis=1
|
||||
expects = ['20000/20000', '200/200', '100/100']
|
||||
for exres in expects:
|
||||
our_file.seek(0)
|
||||
if our_file.getvalue().count(exres) < 1:
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:\n {1}\n".format(
|
||||
exres + " at least once.", our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_groupby_apply():
|
||||
"""Test pandas.DataFrame.groupby(...).progress_apply"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=False, ascii=True)
|
||||
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
|
||||
dfs = pd.DataFrame(randint(0, 50, (500, 3)), columns=list('abc'))
|
||||
dfs.groupby(['a']).progress_apply(lambda x: None)
|
||||
|
||||
our_file.seek(0)
|
||||
|
||||
# don't expect final output since no `leave` and
|
||||
# high dynamic `miniters`
|
||||
nexres = '100%|##########|'
|
||||
if nexres in our_file.read():
|
||||
our_file.seek(0)
|
||||
raise AssertionError("\nDid not expect:\n{0}\nIn:{1}\n".format(
|
||||
nexres, our_file.read()))
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
|
||||
dfs = pd.DataFrame(randint(0, 50, (500, 3)), columns=list('abc'))
|
||||
dfs.loc[0] = [2, 1, 1]
|
||||
dfs['d'] = 100
|
||||
|
||||
expects = ['500/500', '1/1', '4/4', '2/2']
|
||||
dfs.groupby(dfs.index).progress_apply(lambda x: None)
|
||||
dfs.groupby('d').progress_apply(lambda x: None)
|
||||
dfs.groupby(dfs.columns, axis=1).progress_apply(lambda x: None)
|
||||
dfs.groupby([2, 2, 1, 1], axis=1).progress_apply(lambda x: None)
|
||||
|
||||
our_file.seek(0)
|
||||
if our_file.read().count('100%') < 4:
|
||||
our_file.seek(0)
|
||||
raise AssertionError("\nExpected:\n{0}\nIn:\n{1}\n".format(
|
||||
'100% at least four times', our_file.read()))
|
||||
|
||||
for exres in expects:
|
||||
our_file.seek(0)
|
||||
if our_file.getvalue().count(exres) < 1:
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:\n {1}\n".format(
|
||||
exres + " at least once.", our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_leave():
|
||||
"""Test pandas with `leave=True`"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
df = pd.DataFrame(randint(0, 100, (1000, 6)))
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
|
||||
our_file.seek(0)
|
||||
|
||||
exres = '100%|##########| 100/100'
|
||||
if exres not in our_file.read():
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:{1}\n".format(exres, our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_apply_args_deprecation():
|
||||
"""Test warning info in
|
||||
`pandas.Dataframe(Series).progress_apply(func, *args)`"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
from tqdm import tqdm_pandas
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm_pandas(tqdm(file=our_file, leave=False, ascii=True, ncols=20))
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.progress_apply(lambda x: None, 1) # 1 shall cause a warning
|
||||
# Check deprecation message
|
||||
res = our_file.getvalue()
|
||||
assert all([i in res for i in (
|
||||
"TqdmDeprecationWarning", "not supported",
|
||||
"keyword arguments instead")])
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_deprecation():
|
||||
"""Test bar object instance as argument deprecation"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
from tqdm import tqdm_pandas
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm_pandas(tqdm(file=our_file, leave=False, ascii=True, ncols=20))
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
# Check deprecation message
|
||||
assert "TqdmDeprecationWarning" in our_file.getvalue()
|
||||
assert "instead of `tqdm_pandas(tqdm(...))`" in our_file.getvalue()
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm_pandas(tqdm, file=our_file, leave=False, ascii=True, ncols=20)
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
# Check deprecation message
|
||||
assert "TqdmDeprecationWarning" in our_file.getvalue()
|
||||
assert "instead of `tqdm_pandas(tqdm, ...)`" in our_file.getvalue()
@@ -1,336 +0,0 @@
from __future__ import print_function, division

from nose.plugins.skip import SkipTest

from contextlib import contextmanager

import sys
from time import sleep, time

from tqdm import trange
from tqdm import tqdm

from tests_tqdm import with_setup, pretest, posttest, StringIO, closing, _range

# Use relative/cpu timer to have reliable timings when there is a sudden load
try:
    from time import process_time
except ImportError:
    from time import clock
    process_time = clock
|
||||
|
||||
|
||||
def get_relative_time(prevtime=0):
|
||||
return process_time() - prevtime
|
||||
|
||||
|
||||
def cpu_sleep(t):
    """Sleep the given amount of cpu time"""
    start = process_time()
    while (process_time() - start) < t:
        pass
|
||||
|
||||
|
||||
def checkCpuTime(sleeptime=0.2):
|
||||
"""Check if cpu time works correctly"""
|
||||
if checkCpuTime.passed:
|
||||
return True
|
||||
# First test that sleeping does not consume cputime
|
||||
start1 = process_time()
|
||||
sleep(sleeptime)
|
||||
t1 = process_time() - start1
|
||||
|
||||
# secondly check by comparing to cpusleep (where we actually do something)
|
||||
start2 = process_time()
|
||||
cpu_sleep(sleeptime)
|
||||
t2 = process_time() - start2
|
||||
|
||||
if abs(t1) < 0.0001 and (t1 < t2 / 10):
|
||||
return True
|
||||
raise SkipTest
|
||||
|
||||
|
||||
checkCpuTime.passed = False
|
||||
|
||||
|
||||
@contextmanager
|
||||
def relative_timer():
|
||||
start = process_time()
|
||||
|
||||
def elapser():
|
||||
return process_time() - start
|
||||
|
||||
yield lambda: elapser()
|
||||
spent = process_time() - start
|
||||
|
||||
def elapser(): # NOQA
|
||||
return spent
|
||||
|
||||
|
||||
def retry_on_except(n=3):
|
||||
def wrapper(fn):
|
||||
def test_inner():
|
||||
for i in range(1, n + 1):
|
||||
try:
|
||||
checkCpuTime()
|
||||
fn()
|
||||
except SkipTest:
|
||||
if i >= n:
|
||||
raise
|
||||
else:
|
||||
return
|
||||
|
||||
test_inner.__doc__ = fn.__doc__
|
||||
return test_inner
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class MockIO(StringIO):
|
||||
"""Wraps StringIO to mock a file with no I/O"""
|
||||
|
||||
def write(self, data):
|
||||
return
|
||||
|
||||
|
||||
def simple_progress(iterable=None, total=None, file=sys.stdout, desc='',
|
||||
leave=False, miniters=1, mininterval=0.1, width=60):
|
||||
"""Simple progress bar reproducing tqdm's major features"""
|
||||
n = [0] # use a closure
|
||||
start_t = [time()]
|
||||
last_n = [0]
|
||||
last_t = [0]
|
||||
if iterable is not None:
|
||||
total = len(iterable)
|
||||
|
||||
def format_interval(t):
|
||||
mins, s = divmod(int(t), 60)
|
||||
h, m = divmod(mins, 60)
|
||||
if h:
|
||||
return '{0:d}:{1:02d}:{2:02d}'.format(h, m, s)
|
||||
else:
|
||||
return '{0:02d}:{1:02d}'.format(m, s)
|
||||
|
||||
def update_and_print(i=1):
|
||||
n[0] += i
|
||||
if (n[0] - last_n[0]) >= miniters:
|
||||
last_n[0] = n[0]
|
||||
|
||||
if (time() - last_t[0]) >= mininterval:
|
||||
last_t[0] = time() # last_t[0] == current time
|
||||
|
||||
spent = last_t[0] - start_t[0]
|
||||
spent_fmt = format_interval(spent)
|
||||
rate = n[0] / spent if spent > 0 else 0
|
||||
if 0.0 < rate < 1.0:
|
||||
rate_fmt = "%.2fs/it" % (1.0 / rate)
|
||||
else:
|
||||
rate_fmt = "%.2fit/s" % rate
|
||||
|
||||
frac = n[0] / total
|
||||
percentage = int(frac * 100)
|
||||
eta = (total - n[0]) / rate if rate > 0 else 0
|
||||
eta_fmt = format_interval(eta)
|
||||
|
||||
# bar = "#" * int(frac * width)
|
||||
barfill = " " * int((1.0 - frac) * width)
|
||||
bar_length, frac_bar_length = divmod(int(frac * width * 10), 10)
|
||||
bar = '#' * bar_length
|
||||
frac_bar = chr(48 + frac_bar_length) if frac_bar_length \
|
||||
else ' '
|
||||
|
||||
file.write("\r%s %i%%|%s%s%s| %i/%i [%s<%s, %s]" %
|
||||
(desc, percentage, bar, frac_bar, barfill, n[0],
|
||||
total, spent_fmt, eta_fmt, rate_fmt))
|
||||
|
||||
if n[0] == total and leave:
|
||||
file.write("\n")
|
||||
file.flush()
|
||||
|
||||
def update_and_yield():
|
||||
for elt in iterable:
|
||||
yield elt
|
||||
update_and_print()
|
||||
|
||||
update_and_print(0)
|
||||
if iterable is not None:
|
||||
return update_and_yield()
|
||||
else:
|
||||
return update_and_print
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_iter_overhead():
|
||||
"""Test overhead of iteration based tqdm"""
|
||||
|
||||
total = int(1e6)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
a = 0
|
||||
with trange(total, file=our_file) as t:
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in t:
|
||||
a += i
|
||||
assert (a == (total * total - total) / 2.0)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(a)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
if time_tqdm() > 9 * time_bench():
|
||||
raise AssertionError('trange(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_manual_overhead():
|
||||
"""Test overhead of manual tqdm"""
|
||||
|
||||
total = int(1e6)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
with tqdm(total=total * 10, file=our_file, leave=True) as t:
|
||||
a = 0
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
t.update(10)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(a)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
if time_tqdm() > 10 * time_bench():
|
||||
raise AssertionError('tqdm(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_iter_overhead_hard():
|
||||
"""Test overhead of iteration based tqdm (hard)"""
|
||||
|
||||
total = int(1e5)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
a = 0
|
||||
with trange(total, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0) as t:
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in t:
|
||||
a += i
|
||||
assert (a == (total * total - total) / 2.0)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(("%i" % a) * 40)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 60 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('trange(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_manual_overhead_hard():
|
||||
"""Test overhead of manual tqdm (hard)"""
|
||||
|
||||
total = int(1e5)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
t = tqdm(total=total * 10, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0)
|
||||
a = 0
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
t.update(10)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(("%i" % a) * 40)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 100 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('tqdm(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_iter_overhead_simplebar_hard():
|
||||
"""Test overhead of iteration based tqdm vs simple progress bar (hard)"""
|
||||
|
||||
total = int(1e4)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
a = 0
|
||||
with trange(total, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0) as t:
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in t:
|
||||
a += i
|
||||
assert (a == (total * total - total) / 2.0)
|
||||
|
||||
a = 0
|
||||
s = simple_progress(_range(total), file=our_file, leave=True,
|
||||
miniters=1, mininterval=0)
|
||||
with relative_timer() as time_bench:
|
||||
for i in s:
|
||||
a += i
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 2.5 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('trange(%g): %f, simple_progress(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_manual_overhead_simplebar_hard():
|
||||
"""Test overhead of manual tqdm vs simple progress bar (hard)"""
|
||||
|
||||
total = int(1e4)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
t = tqdm(total=total * 10, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0)
|
||||
a = 0
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
t.update(10)
|
||||
|
||||
simplebar_update = simple_progress(
|
||||
total=total, file=our_file, leave=True, miniters=1, mininterval=0)
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
simplebar_update(10)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 2.5 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('tqdm(%g): %f, simple_progress(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
@@ -1,164 +0,0 @@
from __future__ import division
from tqdm import tqdm
from tests_tqdm import with_setup, pretest, posttest, StringIO, closing
from tests_tqdm import DiscreteTimer, cpu_timify

from time import sleep
from threading import Event
from tqdm import TMonitor


class FakeSleep(object):
    """Wait until the discrete timer reached the required time"""
    def __init__(self, dtimer):
        self.dtimer = dtimer

    def sleep(self, t):
        end = t + self.dtimer.t
        while self.dtimer.t < end:
            sleep(0.0000001)  # sleep a bit to interrupt (instead of pass)
|
||||
|
||||
|
||||
class FakeTqdm(object):
|
||||
_instances = []
|
||||
|
||||
|
||||
def make_create_fake_sleep_event(sleep):
|
||||
def wait(self, timeout=None):
|
||||
if timeout is not None:
|
||||
sleep(timeout)
|
||||
return self.is_set()
|
||||
|
||||
def create_fake_sleep_event():
|
||||
event = Event()
|
||||
event.wait = wait
|
||||
return event
|
||||
|
||||
return create_fake_sleep_event
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_monitor_thread():
|
||||
"""Test dummy monitoring thread"""
|
||||
maxinterval = 10
|
||||
|
||||
# Setup a discrete timer
|
||||
timer = DiscreteTimer()
|
||||
TMonitor._time = timer.time
|
||||
# And a fake sleeper
|
||||
sleeper = FakeSleep(timer)
|
||||
TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
|
||||
|
||||
# Instantiate the monitor
|
||||
monitor = TMonitor(FakeTqdm, maxinterval)
|
||||
# Test if alive, then killed
|
||||
assert monitor.report()
|
||||
monitor.exit()
|
||||
timer.sleep(maxinterval * 2) # need to go out of the sleep to die
|
||||
assert not monitor.report()
|
||||
# assert not monitor.is_alive() # not working dunno why, thread not killed
|
||||
del monitor
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_monitoring_and_cleanup():
|
||||
"""Test for stalled tqdm instance and monitor deletion"""
|
||||
# Note: should fix miniters for these tests, else with dynamic_miniters
|
||||
# it's too complicated to handle with monitoring update and maxinterval...
|
||||
maxinterval = 2
|
||||
|
||||
total = 1000
|
||||
# Setup a discrete timer
|
||||
timer = DiscreteTimer()
|
||||
# And a fake sleeper
|
||||
sleeper = FakeSleep(timer)
|
||||
# Setup TMonitor to use the timer
|
||||
TMonitor._time = timer.time
|
||||
TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
|
||||
# Set monitor interval
|
||||
tqdm.monitor_interval = maxinterval
|
||||
with closing(StringIO()) as our_file:
|
||||
with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
|
||||
maxinterval=maxinterval) as t:
|
||||
cpu_timify(t, timer)
|
||||
# Do a lot of iterations in a small timeframe
|
||||
# (smaller than monitor interval)
|
||||
timer.sleep(maxinterval / 2) # monitor won't wake up
|
||||
t.update(500)
|
||||
# check that our fixed miniters is still there
|
||||
assert t.miniters == 500
|
||||
# Then do 1 it after monitor interval, so that monitor kicks in
|
||||
timer.sleep(maxinterval * 2)
|
||||
t.update(1)
|
||||
# Wait for the monitor to get out of sleep's loop and update tqdm..
|
||||
timeend = timer.time()
|
||||
while not (t.monitor.woken >= timeend and t.miniters == 1):
|
||||
timer.sleep(1)  # Force monitor to wake up if it woke up too soon
|
||||
sleep(0.000001) # sleep to allow interrupt (instead of pass)
|
||||
assert t.miniters == 1 # check that monitor corrected miniters
|
||||
# Note: at this point, there may be a race condition: monitor saved
|
||||
# current woken time but timer.sleep() happen just before monitor
|
||||
# sleep. To fix that, either sleep here or increase time in a loop
|
||||
# to ensure that monitor wakes up at some point.
|
||||
|
||||
# Try again but already at miniters = 1 so nothing will be done
|
||||
timer.sleep(maxinterval * 2)
|
||||
t.update(2)
|
||||
timeend = timer.time()
|
||||
while not (t.monitor.woken >= timeend):
|
||||
timer.sleep(1)  # Force monitor to wake up if it woke up too soon
|
||||
sleep(0.000001)
|
||||
# Wait for the monitor to get out of sleep's loop and update tqdm..
|
||||
assert t.miniters == 1 # check that monitor corrected miniters
|
||||
|
||||
# Check that class var monitor is deleted if no instance left
|
||||
tqdm.monitor_interval = 10
|
||||
assert tqdm.monitor is None
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_monitoring_multi():
|
||||
"""Test on multiple bars, one not needing miniters adjustment"""
|
||||
# Note: should fix miniters for these tests, else with dynamic_miniters
|
||||
# it's too complicated to handle with monitoring update and maxinterval...
|
||||
maxinterval = 2
|
||||
|
||||
total = 1000
|
||||
# Setup a discrete timer
|
||||
timer = DiscreteTimer()
|
||||
# And a fake sleeper
|
||||
sleeper = FakeSleep(timer)
|
||||
# Setup TMonitor to use the timer
|
||||
TMonitor._time = timer.time
|
||||
TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
|
||||
# Set monitor interval
|
||||
tqdm.monitor_interval = maxinterval
|
||||
with closing(StringIO()) as our_file:
|
||||
with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
|
||||
maxinterval=maxinterval) as t1:
|
||||
# Set high maxinterval for t2 so monitor does not need to adjust it
|
||||
with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
|
||||
maxinterval=1E5) as t2:
|
||||
cpu_timify(t1, timer)
|
||||
cpu_timify(t2, timer)
|
||||
# Do a lot of iterations in a small timeframe
|
||||
timer.sleep(maxinterval / 2)
|
||||
t1.update(500)
|
||||
t2.update(500)
|
||||
assert t1.miniters == 500
|
||||
assert t2.miniters == 500
|
||||
# Then do 1 it after monitor interval, so that monitor kicks in
|
||||
timer.sleep(maxinterval * 2)
|
||||
t1.update(1)
|
||||
t2.update(1)
|
||||
# Wait for the monitor to get out of sleep and update tqdm
|
||||
timeend = timer.time()
|
||||
while not (t1.monitor.woken >= timeend and t1.miniters == 1):
|
||||
timer.sleep(1)
|
||||
sleep(0.000001)
|
||||
assert t1.miniters == 1 # check that monitor corrected miniters
|
||||
assert t2.miniters == 500 # check that t2 was not adjusted
|
||||
|
||||
# Check that class var monitor is deleted if no instance left
|
||||
tqdm.monitor_interval = 10
|
||||
assert tqdm.monitor is None
@@ -1,12 +0,0 @@
import re


def test_version():
    """Test version string"""
    from tqdm import __version__
    version_parts = re.split('[.-]', __version__)
    assert 3 <= len(version_parts)  # must have at least Major.minor.patch
    try:
        map(int, version_parts[:3])
    except ValueError:
        raise TypeError('Version Major.minor.patch must be 3 integers')
@@ -49,7 +49,7 @@ app = BUNDLE(
|
||||
bundle_identifier='com.Tautulli.Tautulli',
|
||||
version=VERSION,
|
||||
info_plist={
|
||||
'LSBackgroundOnly': True,
|
||||
'LSUIElement': True
|
||||
'LSUIElement': True,
|
||||
'NSHighResolutionCapable': True
|
||||
}
|
||||
)
|
||||
|
@@ -1,4 +1,5 @@
|
||||
pyinstaller==3.6
|
||||
pyopenssl
|
||||
pycryptodomex
|
||||
pyobjc-framework-Cocoa
|
||||
pyopenssl==20.0.0
|
||||
pycryptodomex==3.9.9
|
||||
pyobjc-framework-Cocoa==6.2.2
|
||||
pyobjc-core==6.2.2
|
||||
|
@@ -1,4 +1,4 @@
|
||||
pyinstaller==3.6
|
||||
pyopenssl
|
||||
pycryptodomex
|
||||
pywin32
|
||||
pyopenssl==20.0.0
|
||||
pycryptodomex==3.9.9
|
||||
pywin32==300
|
||||
|
@@ -98,6 +98,7 @@ CREATEPID = False
|
||||
PIDFILE = None
|
||||
NOFORK = False
|
||||
DOCKER = False
|
||||
SNAP = False
|
||||
FROZEN = False
|
||||
|
||||
SCHED = None
|
||||
@@ -194,6 +195,8 @@ def initialize(config_file):
|
||||
|
||||
if DOCKER:
|
||||
build = '[Docker] '
|
||||
elif SNAP:
|
||||
build = '[Snap] '
|
||||
elif FROZEN:
|
||||
build = '[Bundle] '
|
||||
else:
|
||||
@@ -300,7 +303,7 @@ def initialize(config_file):
|
||||
# Check for new versions
|
||||
if CONFIG.CHECK_GITHUB_ON_STARTUP and CONFIG.CHECK_GITHUB:
|
||||
try:
|
||||
versioncheck.check_update()
|
||||
versioncheck.check_update(use_cache=True)
|
||||
except:
|
||||
logger.exception("Unhandled exception")
|
||||
LATEST_VERSION = CURRENT_VERSION
|
||||
@@ -334,18 +337,6 @@ def initialize(config_file):
|
||||
logger.error("Unable to write current release to file '%s': %s" %
|
||||
(release_file, e))
|
||||
|
||||
# Get the real PMS urls for SSL and remote access
|
||||
if CONFIG.PMS_TOKEN and CONFIG.PMS_IP and CONFIG.PMS_PORT:
|
||||
plextv.get_server_resources()
|
||||
|
||||
# Refresh the users list on startup
|
||||
if CONFIG.PMS_TOKEN and CONFIG.REFRESH_USERS_ON_STARTUP:
|
||||
users.refresh_users()
|
||||
|
||||
# Refresh the libraries list on startup
|
||||
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
|
||||
libraries.refresh_libraries()
|
||||
|
||||
# Store the original umask
|
||||
UMASK = os.umask(0)
|
||||
os.umask(UMASK)
|
||||
@@ -523,6 +514,9 @@ def start():
|
||||
global _STARTED
|
||||
|
||||
if _INITIALIZED:
|
||||
# Start refreshes on a separate thread
|
||||
threading.Thread(target=startup_refresh).start()
|
||||
|
||||
global SCHED
|
||||
SCHED = BackgroundScheduler(timezone=pytz.UTC)
|
||||
activity_handler.ACTIVITY_SCHED = BackgroundScheduler(timezone=pytz.UTC)
|
||||
@@ -535,12 +529,13 @@ def start():
|
||||
notification_handler.start_threads(num_threads=CONFIG.NOTIFICATION_THREADS)
|
||||
notifiers.check_browser_enabled()
|
||||
|
||||
# Schedule newsletters
|
||||
newsletter_handler.NEWSLETTER_SCHED.start()
|
||||
newsletter_handler.schedule_newsletters()
|
||||
|
||||
# Cancel processing exports
|
||||
exporter.cancel_exports()
|
||||
|
||||
if CONFIG.FIRST_RUN_COMPLETE:
|
||||
activity_pinger.connect_server(log=True, startup=True)
|
||||
|
||||
if CONFIG.SYSTEM_ANALYTICS:
|
||||
global TRACKER
|
||||
TRACKER = initialize_tracker()
|
||||
@@ -554,13 +549,27 @@ def start():
|
||||
|
||||
analytics_event(category='system', action='start')
|
||||
|
||||
# Schedule newsletters
|
||||
newsletter_handler.NEWSLETTER_SCHED.start()
|
||||
newsletter_handler.schedule_newsletters()
|
||||
|
||||
_STARTED = True
|
||||
|
||||
|
||||
def startup_refresh():
|
||||
# Get the real PMS urls for SSL and remote access
|
||||
if CONFIG.PMS_TOKEN and CONFIG.PMS_IP and CONFIG.PMS_PORT:
|
||||
plextv.get_server_resources()
|
||||
|
||||
# Connect server after server resource is refreshed
|
||||
if CONFIG.FIRST_RUN_COMPLETE:
|
||||
activity_pinger.connect_server(log=True, startup=True)
|
||||
|
||||
# Refresh the users list on startup
|
||||
if CONFIG.PMS_TOKEN and CONFIG.REFRESH_USERS_ON_STARTUP:
|
||||
users.refresh_users()
|
||||
|
||||
# Refresh the libraries list on startup
|
||||
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
|
||||
libraries.refresh_libraries()
|
||||
|
||||
|
||||
def sig_handler(signum=None, frame=None):
|
||||
if signum is not None:
|
||||
logger.info("Signal %i caught, saving and exiting...", signum)
|
||||
@@ -778,7 +787,7 @@ def dbcheck():
|
||||
# image_hash_lookup table :: This table keeps record of the image hash lookups
|
||||
c_db.execute(
|
||||
'CREATE TABLE IF NOT EXISTS image_hash_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||
'img_hash TEXT, img TEXT, rating_key INTEGER, width INTEGER, height INTEGER, '
|
||||
'img_hash TEXT UNIQUE, img TEXT, rating_key INTEGER, width INTEGER, height INTEGER, '
|
||||
'opacity INTEGER, background TEXT, blur INTEGER, fallback TEXT)'
|
||||
)
|
||||
|
||||
@@ -2043,7 +2052,7 @@ def dbcheck():
|
||||
# Update official mobile device flag
|
||||
for device_id, in c_db.execute('SELECT device_id FROM mobile_devices').fetchall():
|
||||
c_db.execute('UPDATE mobile_devices SET official = ? WHERE device_id = ?',
|
||||
[mobile_app.validate_device_id(device_id), device_id])
|
||||
[mobile_app.validate_onesignal_id(device_id), device_id])
|
||||
|
||||
# Upgrade mobile_devices table from earlier versions
|
||||
try:
|
||||
@@ -2204,6 +2213,13 @@ def dbcheck():
|
||||
'ALTER TABLE exports ADD COLUMN total_items INTEGER DEFAULT 0'
|
||||
)
|
||||
|
||||
# Upgrade image_hash_lookup table from earlier versions
|
||||
try:
|
||||
c_db.execute('DELETE FROM image_hash_lookup '
|
||||
'WHERE id NOT IN (SELECT MIN(id) FROM image_hash_lookup GROUP BY img_hash)')
|
||||
except sqlite3.OperationalError:
|
||||
pass
|
||||
|
||||
# Add "Local" user to database as default unauthenticated user.
|
||||
result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
|
||||
if not result.fetchone():
|
||||
|
@@ -501,7 +501,8 @@ NOTIFICATION_PARAMETERS = [
|
||||
{'name': 'Tagline', 'type': 'str', 'value': 'tagline', 'description': 'A tagline for the media item.'},
|
||||
{'name': 'Rating', 'type': 'float', 'value': 'rating', 'description': 'The rating (out of 10) for the item.'},
|
||||
{'name': 'Critic Rating', 'type': 'int', 'value': 'critic_rating', 'description': 'The critic rating (%) for the item.', 'help_text': 'Ratings source must be Rotten Tomatoes for the Plex Movie agent'},
|
||||
{'name': 'Audience Rating', 'type': 'int', 'value': 'audience_rating', 'description': 'The audience rating (%) for the item.', 'help_text': 'Ratings source must be Rotten Tomatoes for the Plex Movie agent'},
|
||||
{'name': 'Audience Rating', 'type': 'float', 'value': 'audience_rating', 'description': 'The audience rating for the item.', 'help_text': 'Rating out of 10 for IMDB, percentage (%) for Rotten Tomatoes and TMDB.'},
|
||||
{'name': 'User Rating', 'type': 'float', 'value': 'user_rating', 'description': 'The user (star) rating (out of 10) for the item.'},
|
||||
{'name': 'Duration', 'type': 'int', 'value': 'duration', 'description': 'The duration (in minutes) for the item.'},
|
||||
{'name': 'Poster URL', 'type': 'str', 'value': 'poster_url', 'description': 'A URL for the movie, TV show, or album poster.'},
|
||||
{'name': 'Plex ID', 'type': 'str', 'value': 'plex_id', 'description': 'The Plex ID for the item.', 'example': 'e.g. 5d7769a9594b2b001e6a6b7e'},
|
||||
|
@@ -89,6 +89,7 @@ _CONFIG_DEFINITIONS = {
|
||||
'CHECK_GITHUB': (int, 'General', 1),
|
||||
'CHECK_GITHUB_INTERVAL': (int, 'General', 360),
|
||||
'CHECK_GITHUB_ON_STARTUP': (int, 'General', 1),
|
||||
'CHECK_GITHUB_CACHE_SECONDS': (int, 'Advanced', 3600),
|
||||
'CLEANUP_FILES': (int, 'General', 0),
|
||||
'CLOUDINARY_CLOUD_NAME': (str, 'Cloudinary', ''),
|
||||
'CLOUDINARY_API_KEY': (str, 'Cloudinary', ''),
|
||||
@@ -133,6 +134,9 @@ _CONFIG_DEFINITIONS = {
|
||||
'HTTP_USERNAME': (str, 'General', ''),
|
||||
'HTTP_PLEX_ADMIN': (int, 'General', 0),
|
||||
'HTTP_BASE_URL': (str, 'General', ''),
|
||||
'HTTP_RATE_LIMIT_ATTEMPTS': (int, 'General', 10),
|
||||
'HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL': (int, 'General', 300),
|
||||
'HTTP_RATE_LIMIT_LOCKOUT_TIME': (int, 'General', 300),
|
||||
'INTERFACE': (str, 'General', 'default'),
|
||||
'IMGUR_CLIENT_ID': (str, 'Monitoring', ''),
|
||||
'JOURNAL_MODE': (str, 'Advanced', 'WAL'),
|
||||
@@ -166,6 +170,7 @@ _CONFIG_DEFINITIONS = {
|
||||
'NOTIFY_REMOTE_ACCESS_THRESHOLD': (int, 'Monitoring', 60),
|
||||
'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0),
|
||||
'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
|
||||
'NOTIFY_NEW_DEVICE_INITIAL_ONLY': (int, 'Monitoring', 1),
|
||||
'PLEXPY_AUTO_UPDATE': (int, 'General', 0),
|
||||
'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12),
|
||||
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
|
||||
@@ -252,7 +257,7 @@ def import_tautulli_config(config=None, backup=False):
|
||||
# Remove keys that should not be imported
|
||||
for key in _DO_NOT_IMPORT_KEYS:
|
||||
delattr(imported_config, key)
|
||||
if plexpy.DOCKER:
|
||||
if plexpy.DOCKER or plexpy.SNAP:
|
||||
for key in _DO_NOT_IMPORT_KEYS_DOCKER:
|
||||
delattr(imported_config, key)
|
||||
|
||||
@@ -535,3 +540,9 @@ class Config(object):
|
||||
self.JWT_UPDATE_SECRET = True
|
||||
|
||||
self.CONFIG_VERSION = 16
|
||||
|
||||
if self.CONFIG_VERSION == 16:
|
||||
if plexpy.SNAP:
|
||||
self.PLEXPY_AUTO_UPDATE = 0
|
||||
|
||||
self.CONFIG_VERSION = 17
|
||||
|
@@ -99,6 +99,7 @@ class DataFactory(object):
|
||||
'product',
|
||||
'player',
|
||||
'ip_address',
|
||||
'machine_id',
|
||||
'session_history.media_type',
|
||||
'session_history_metadata.rating_key',
|
||||
'session_history_metadata.parent_rating_key',
|
||||
@@ -151,6 +152,7 @@ class DataFactory(object):
|
||||
'product',
|
||||
'player',
|
||||
'ip_address',
|
||||
'machine_id',
|
||||
'media_type',
|
||||
'rating_key',
|
||||
'parent_rating_key',
|
||||
@@ -263,6 +265,7 @@ class DataFactory(object):
|
||||
'player': item['player'],
|
||||
'ip_address': item['ip_address'],
|
||||
'live': item['live'],
|
||||
'machine_id': item['machine_id'],
|
||||
'media_type': item['media_type'],
|
||||
'rating_key': item['rating_key'],
|
||||
'parent_rating_key': item['parent_rating_key'],
|
||||
@@ -1838,12 +1841,22 @@ class DataFactory(object):
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_newsletter_log: %s." % e)
|
||||
return False
|
||||
|
||||
def get_user_devices(self, user_id=''):
|
||||
def get_user_devices(self, user_id='', history_only=True):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
if user_id:
|
||||
if history_only:
|
||||
query = 'SELECT machine_id FROM session_history ' \
|
||||
'WHERE user_id = ? ' \
|
||||
'GROUP BY machine_id'
|
||||
else:
|
||||
query = 'SELECT * FROM (' \
|
||||
'SELECT user_id, machine_id FROM session_history ' \
|
||||
'UNION SELECT user_id, machine_id from sessions_continued) ' \
|
||||
'WHERE user_id = ? ' \
|
||||
'GROUP BY machine_id'
|
||||
|
||||
try:
|
||||
query = 'SELECT machine_id FROM session_history WHERE user_id = ? GROUP BY machine_id'
|
||||
result = monitor_db.select(query=query, args=[user_id])
|
||||
except Exception as e:
|
||||
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_user_devices: %s." % e)
|
||||
|
@@ -92,6 +92,14 @@ class Export(object):
|
||||
'collection': ['children'],
|
||||
'playlist': ['item']
|
||||
}
|
||||
TREE_MEDIA_TYPES = [
|
||||
('episode', 'season', 'show'),
|
||||
('track', 'album', 'artist'),
|
||||
('photo', 'photoalbum'),
|
||||
('clip', 'photoalbum'),
|
||||
('children', 'collection'),
|
||||
('item', 'playlist')
|
||||
]
|
||||
METADATA_LEVELS = (0, 1, 2, 3, 9)
|
||||
MEDIA_INFO_LEVELS = (0, 1, 2, 3, 9)
|
||||
IMAGE_LEVELS = (0, 1, 2, 9)
|
||||
@@ -128,6 +136,7 @@ class Export(object):
|
||||
self.file_size = 0
|
||||
self.exported_thumb = False
|
||||
self.exported_art = False
|
||||
self._reload_check_files = False
|
||||
|
||||
self.total_items = 0
|
||||
self.exported_items = 0
|
||||
@@ -288,7 +297,6 @@ class Export(object):
|
||||
},
|
||||
'audioStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -305,15 +313,14 @@ class Export(object):
|
||||
'bitrate': None,
|
||||
'bitrateMode': None,
|
||||
'channels': None,
|
||||
'dialogNorm': None,
|
||||
'duration': None,
|
||||
'profile': None,
|
||||
'requiredBandwidths': None,
|
||||
'samplingRate': None
|
||||
'samplingRate': None,
|
||||
'streamIdentifier': None
|
||||
},
|
||||
'subtitleStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -329,7 +336,8 @@ class Export(object):
|
||||
'forced': None,
|
||||
'format': None,
|
||||
'headerCompression': None,
|
||||
'key': None
|
||||
'key': None,
|
||||
'transient': None
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -492,6 +500,7 @@ class Export(object):
|
||||
'grandparentThumb': None,
|
||||
'grandparentTitle': None,
|
||||
'guid': None,
|
||||
'hasIntroMarker': None,
|
||||
'index': None,
|
||||
'key': None,
|
||||
'lastViewedAt': helpers.datetime_to_iso,
|
||||
@@ -499,6 +508,11 @@ class Export(object):
|
||||
'librarySectionKey': None,
|
||||
'librarySectionTitle': None,
|
||||
'locations': None,
|
||||
'markers': {
|
||||
'end': None,
|
||||
'start': None,
|
||||
'type': None
|
||||
},
|
||||
'media': {
|
||||
'aspectRatio': None,
|
||||
'audioChannels': None,
|
||||
@@ -589,7 +603,6 @@ class Export(object):
|
||||
},
|
||||
'audioStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -606,15 +619,14 @@ class Export(object):
|
||||
'bitrate': None,
|
||||
'bitrateMode': None,
|
||||
'channels': None,
|
||||
'dialogNorm': None,
|
||||
'duration': None,
|
||||
'profile': None,
|
||||
'requiredBandwidths': None,
|
||||
'samplingRate': None
|
||||
'samplingRate': None,
|
||||
'streamIdentifier': None
|
||||
},
|
||||
'subtitleStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -630,7 +642,8 @@ class Export(object):
|
||||
'forced': None,
|
||||
'format': None,
|
||||
'headerCompression': None,
|
||||
'key': None
|
||||
'key': None,
|
||||
'transient': None
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -823,7 +836,6 @@ class Export(object):
|
||||
'syncState': None,
|
||||
'audioStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -851,7 +863,6 @@ class Export(object):
|
||||
},
|
||||
'lyricStreams': {
|
||||
'codec': None,
|
||||
'codecID': None,
|
||||
'default': None,
|
||||
'displayTitle': None,
|
||||
'extendedDisplayTitle': None,
|
||||
@@ -1091,7 +1102,7 @@ class Export(object):
|
||||
'media.optimizedVersion', 'media.hdr'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.container', 'media.parts.indexes', 'media.parts.size', 'media.parts.sizeHuman',
|
||||
'media.parts.audioProfile', 'media.parts.videoProfile',
|
||||
'media.parts.optimizedForStreaming', 'media.parts.deepAnalysisVersion'
|
||||
@@ -1179,11 +1190,12 @@ class Export(object):
|
||||
'rating', 'userRating', 'contentRating',
|
||||
'summary', 'guid', 'duration', 'durationHuman', 'type', 'index',
|
||||
'parentTitle', 'parentRatingKey', 'parentGuid', 'parentIndex',
|
||||
'grandparentTitle', 'grandparentRatingKey', 'grandparentGuid'
|
||||
'grandparentTitle', 'grandparentRatingKey', 'grandparentGuid', 'hasIntroMarker'
|
||||
],
|
||||
2: [
|
||||
'directors.tag', 'writers.tag',
|
||||
'fields.name', 'fields.locked'
|
||||
'fields.name', 'fields.locked',
|
||||
'markers.type', 'markers.start', 'markers.end'
|
||||
],
|
||||
3: [
|
||||
'art', 'thumb', 'key', 'chapterSource',
|
||||
@@ -1201,7 +1213,7 @@ class Export(object):
|
||||
'media.optimizedVersion', 'media.hdr'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.container', 'media.parts.indexes', 'media.parts.size', 'media.parts.sizeHuman',
|
||||
'media.parts.audioProfile', 'media.parts.videoProfile',
|
||||
'media.parts.optimizedForStreaming', 'media.parts.deepAnalysisVersion'
|
||||
@@ -1310,7 +1322,7 @@ class Export(object):
|
||||
'media.bitrate', 'media.container', 'media.duration'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.file', 'media.parts.duration',
|
||||
'media.parts.container', 'media.parts.size', 'media.parts.sizeHuman',
|
||||
'media.parts.audioProfile',
|
||||
'media.parts.deepAnalysisVersion', 'media.parts.hasThumbnail'
|
||||
@@ -1385,7 +1397,7 @@ class Export(object):
|
||||
'media.iso', 'media.lens', 'media.make', 'media.model'
|
||||
],
|
||||
2: [
|
||||
'media.parts.accessible', 'media.parts.exists', 'media.parts.file',
|
||||
'media.parts.file',
|
||||
'media.parts.container', 'media.parts.size', 'media.parts.sizeHuman'
|
||||
],
|
||||
3: [
|
||||
@@ -1815,6 +1827,15 @@ class Export(object):
|
||||
else:
|
||||
self._custom_fields[media_type] = {field}
|
||||
|
||||
for tree in self.TREE_MEDIA_TYPES:
|
||||
for child_media_type, parent_media_type in zip(tree[:-1], tree[1:]):
|
||||
if child_media_type in self._custom_fields:
|
||||
plural_child_media_type = self.PLURAL_MEDIA_TYPES[child_media_type]
|
||||
if parent_media_type in self._custom_fields:
|
||||
self._custom_fields[parent_media_type].add(plural_child_media_type)
|
||||
else:
|
||||
self._custom_fields[parent_media_type] = {plural_child_media_type}
|
||||
|
||||
def _parse_custom_field(self, media_type, field):
|
||||
for child_media_type in self.CHILD_MEDIA_TYPES.get(media_type, []):
|
||||
plural_key = self.PLURAL_MEDIA_TYPES[child_media_type]
|
||||
@@ -1864,6 +1885,10 @@ class Export(object):
|
||||
elif self.media_type == 'playlist' and 'item' in self._custom_fields:
|
||||
export_attrs_set.update(self._custom_fields['item'])
|
||||
|
||||
if 'media.parts.accessible' in export_attrs_set or 'media.parts.exists' in export_attrs_set or \
|
||||
self.media_info_level == 9:
|
||||
self._reload_check_files = True
|
||||
|
||||
for attr in export_attrs_set:
|
||||
try:
|
||||
value = helpers.get_dict_value_by_path(media_attrs, attr)
|
||||
@@ -1951,12 +1976,13 @@ class Export(object):
|
||||
pass
|
||||
|
||||
def export_obj(self, obj):
|
||||
# Reload ~plexapi.base.PlexPartialObject
|
||||
if hasattr(obj, 'isPartialObject') and obj.isPartialObject():
|
||||
obj = obj.reload()
|
||||
|
||||
media_type = self._media_type(obj)
|
||||
export_attrs = self._get_export_attrs(media_type)
|
||||
|
||||
# Reload ~plexapi.base.PlexPartialObject
|
||||
if hasattr(obj, 'isPartialObject') and obj.isPartialObject():
|
||||
obj = obj.reload(checkFiles=self._reload_check_files)
|
||||
|
||||
return helpers.get_attrs_to_dict(obj, attrs=export_attrs)
|
||||
|
||||
def get_any_hdr(self, item, media_type):
|
||||
|
@@ -19,11 +19,11 @@ from __future__ import unicode_literals
|
||||
from future.builtins import object
|
||||
from future.builtins import str
|
||||
|
||||
from functools import partial
|
||||
from multiprocessing.dummy import Pool as ThreadPool
|
||||
from future.moves.urllib.parse import urljoin
|
||||
|
||||
import certifi
|
||||
import requests
|
||||
import urllib3
|
||||
|
||||
import plexpy
|
||||
@@ -41,6 +41,7 @@ class HTTPHandler(object):
|
||||
"""
|
||||
|
||||
def __init__(self, urls, headers=None, token=None, timeout=10, ssl_verify=True, silent=False):
|
||||
self._valid_request_types = {'GET', 'POST', 'PUT', 'DELETE'}
|
||||
self._silent = silent
|
||||
|
||||
if isinstance(urls, str):
|
||||
@@ -51,24 +52,34 @@ class HTTPHandler(object):
|
||||
if headers:
|
||||
self.headers = headers
|
||||
else:
|
||||
self.headers = {'X-Plex-Product': plexpy.common.PRODUCT,
|
||||
'X-Plex-Version': plexpy.common.RELEASE,
|
||||
'X-Plex-Client-Identifier': plexpy.CONFIG.PMS_UUID,
|
||||
'X-Plex-Platform': plexpy.common.PLATFORM,
|
||||
'X-Plex-Platform-Version': plexpy.common.PLATFORM_RELEASE,
|
||||
'X-Plex-Device': '{} {}'.format(plexpy.common.PLATFORM,
|
||||
plexpy.common.PLATFORM_RELEASE),
|
||||
'X-Plex-Device-Name': plexpy.common.PLATFORM_DEVICE_NAME
|
||||
}
|
||||
self.headers = {
|
||||
'X-Plex-Product': plexpy.common.PRODUCT,
|
||||
'X-Plex-Version': plexpy.common.RELEASE,
|
||||
'X-Plex-Client-Identifier': plexpy.CONFIG.PMS_UUID,
|
||||
'X-Plex-Platform': plexpy.common.PLATFORM,
|
||||
'X-Plex-Platform-Version': plexpy.common.PLATFORM_RELEASE,
|
||||
'X-Plex-Device': '{} {}'.format(plexpy.common.PLATFORM,
|
||||
plexpy.common.PLATFORM_RELEASE),
|
||||
'X-Plex-Device-Name': plexpy.common.PLATFORM_DEVICE_NAME
|
||||
}
|
||||
|
||||
self.token = token
|
||||
if self.token:
|
||||
self.headers['X-Plex-Token'] = self.token
|
||||
|
||||
self._session = requests.Session()
|
||||
self.timeout = timeout
|
||||
self.ssl_verify = ssl_verify
|
||||
self.ssl_verify = certifi.where() if ssl_verify else False
|
||||
if not self.ssl_verify:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
self.valid_request_types = ('GET', 'POST', 'PUT', 'DELETE')
|
||||
self.uri = None
|
||||
self.data = None
|
||||
self.request_type = 'GET'
|
||||
self.output_format = 'raw'
|
||||
self.return_type = False
|
||||
self.callback = None
|
||||
self.request_kwargs = {}
|
||||
|
||||
def make_request(self,
|
||||
uri=None,
|
||||
@@ -96,7 +107,7 @@ class HTTPHandler(object):
|
||||
self.timeout = timeout or self.timeout
|
||||
self.request_kwargs = request_kwargs
|
||||
|
||||
if self.request_type not in self.valid_request_types:
|
||||
if self.request_type not in self._valid_request_types:
|
||||
logger.debug("HTTP request made but unsupported request type given.")
|
||||
return None
|
||||
|
||||
@@ -115,7 +126,7 @@ class HTTPHandler(object):
|
||||
return responses[0]
|
||||
|
||||
else:
|
||||
logger.debug("HTTP request made but no enpoint given.")
|
||||
logger.debug("HTTP request made but no uri endpoint provided.")
|
||||
return None
|
||||
|
||||
def _http_requests_pool(self, urls, workers=10, chunk=None):
|
||||
@@ -128,20 +139,13 @@ class HTTPHandler(object):
|
||||
if len(urls) == 0:
|
||||
chunk = 0
|
||||
|
||||
if self.ssl_verify:
|
||||
session = urllib3.PoolManager(cert_reqs=2, ca_certs=certifi.where()) # ssl.CERT_REQUIRED = 2
|
||||
else:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
session = urllib3.PoolManager()
|
||||
part = partial(self._http_requests_urllib3, session=session)
|
||||
|
||||
if len(urls) == 1:
|
||||
yield part(urls[0])
|
||||
yield self._http_requests_single(urls[0])
|
||||
else:
|
||||
pool = ThreadPool(workers)
|
||||
|
||||
try:
|
||||
for work in pool.imap_unordered(part, urls, chunk):
|
||||
for work in pool.imap_unordered(self._http_requests_single, urls, chunk):
|
||||
yield work
|
||||
except Exception as e:
|
||||
if not self._silent:
|
||||
@@ -150,34 +154,40 @@ class HTTPHandler(object):
|
||||
pool.close()
|
||||
pool.join()
|
||||
|
||||
def _http_requests_urllib3(self, url, session):
|
||||
def _http_requests_single(self, url):
|
||||
"""Request the data from the url"""
|
||||
error_msg = "Failed to access uri endpoint %s. " % self.uri
|
||||
try:
|
||||
r = session.request(self.request_type, url, headers=self.headers, fields=self.data,
|
||||
timeout=self.timeout, **self.request_kwargs)
|
||||
except IOError as e:
|
||||
r = self._session.request(self.request_type, url, headers=self.headers, data=self.data,
|
||||
timeout=self.timeout, verify=self.ssl_verify, **self.request_kwargs)
|
||||
r.raise_for_status()
|
||||
except requests.exceptions.Timeout as e:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s with error %s" % (self.uri, e))
|
||||
logger.error(error_msg + "Request timed out: %s", e)
|
||||
return None
|
||||
except Exception as e:
|
||||
except requests.exceptions.SSLError as e:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (self.uri, e))
|
||||
logger.error(error_msg + "Is your server maybe accepting SSL connections only? %s", e)
|
||||
return None
|
||||
except:
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s with Uncaught exception." % self.uri)
|
||||
logger.error(error_msg + "Status code %s", e)
|
||||
return None
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
if not self._silent:
|
||||
logger.error(error_msg + "Connection error: %s", e)
|
||||
return None
|
||||
except requests.exceptions.RequestException as e:
|
||||
if not self._silent:
|
||||
logger.error(error_msg + "Uncaught exception: %s", e)
|
||||
return None
|
||||
|
||||
response_status = r.status
|
||||
response_content = r.data
|
||||
response_status = r.status_code
|
||||
response_content = r.content
|
||||
response_headers = r.headers
|
||||
|
||||
if response_status in (200, 201):
|
||||
return self._http_format_output(response_content, response_headers)
|
||||
else:
|
||||
if not self._silent:
|
||||
logger.warn("Failed to access uri endpoint %s. Status code %r" % (self.uri, response_status))
|
||||
return None
|
||||
|
||||
def _http_format_output(self, response_content, response_headers):
|
||||
"""Formats the request response to the desired type"""
|
||||
|
@@ -122,7 +122,8 @@ def add_live_tv_library(refresh=False):
if result and not refresh or not result and refresh:
return

logger.info("Tautulli Libraries :: Adding Live TV library to the database.")
if not refresh:
logger.info("Tautulli Libraries :: Adding Live TV library to the database.")

section_keys = {'server_id': plexpy.CONFIG.PMS_IDENTIFIER,
'section_id': common.LIVE_TV_SECTION_ID}

@@ -74,14 +74,6 @@ def blacklist_config(config):
_BLACKLIST_WORDS.update(blacklist)


class CherrypyEngineFilter(logging.Filter):
"""
Log filter for the Cherrypy Engine serving message
"""
def filter(self, record):
return 'ENGINE Serving on' not in record.msg


class NoThreadFilter(logging.Filter):
"""
Log filter for the current thread
@@ -352,9 +344,6 @@ def initLogger(console=False, log_dir=False, verbose=False):
handler.addFilter(EmailFilter())
handler.addFilter(PlexTokenFilter())

for handler in cherrypy.log.error_log.handlers:
handler.addFilter(CherrypyEngineFilter())

# Install exception hooks
initHooks()

@@ -44,11 +44,12 @@ else:
class MacOSSystemTray(object):
def __init__(self):
self.image_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/', plexpy.CONFIG.INTERFACE, 'images')
self.icon = os.path.join(self.image_dir, 'logo-flat-white.ico')

if plexpy.UPDATE_AVAILABLE:
self.icon = os.path.join(self.image_dir, 'logo-circle-update.ico')
self.update_title = 'Check for Updates - Update Available!'
else:
self.icon = os.path.join(self.image_dir, 'logo-circle.ico')
self.update_title = 'Check for Updates'

self.menu = [
rumps.MenuItem('Open Tautulli', callback=self.tray_open),
@@ -56,7 +57,7 @@ class MacOSSystemTray(object):
rumps.MenuItem('Start Tautulli at Login', callback=self.tray_startup),
rumps.MenuItem('Open Browser when Tautulli Starts', callback=self.tray_browser),
None,
rumps.MenuItem('Check for Updates', callback=self.tray_check_update),
rumps.MenuItem(self.update_title, callback=self.tray_check_update),
rumps.MenuItem('Restart', callback=self.tray_restart),
rumps.MenuItem('Quit', callback=self.tray_quit)
]
@@ -65,7 +66,8 @@ class MacOSSystemTray(object):
self.menu[2].state = plexpy.CONFIG.LAUNCH_STARTUP
self.menu[3].state = plexpy.CONFIG.LAUNCH_BROWSER

self.tray_icon = rumps.App(common.PRODUCT, icon=self.icon, menu=self.menu, quit_button=None)
self.tray_icon = rumps.App(common.PRODUCT, icon=self.icon, template=True,
menu=self.menu, quit_button=None)

def start(self):
logger.info("Launching MacOS menu bar icon.")
@@ -98,6 +100,9 @@ class MacOSSystemTray(object):
def tray_update(self, tray_icon):
if plexpy.UPDATE_AVAILABLE:
plexpy.SIGNAL = 'update'
else:
self.update_title = 'Check for Updates - No Update Available'
self.menu[5].title = self.update_title

def tray_restart(self, tray_icon):
plexpy.SIGNAL = 'restart'
@@ -107,10 +112,10 @@ class MacOSSystemTray(object):

def change_tray_update_icon(self):
if plexpy.UPDATE_AVAILABLE:
self.icon = os.path.join(self.image_dir, 'logo-circle-update.ico')
self.update_title = 'Check for Updates - Update Available!'
else:
self.icon = os.path.join(self.image_dir, 'logo-circle.ico')
self.update(icon=self.icon)
self.update_title = 'Check for Updates'
self.menu[5].title = self.update_title

def change_tray_icons(self):
self.tray_icon.menu['Start Tautulli at Login'].state = plexpy.CONFIG.LAUNCH_STARTUP

@@ -193,7 +193,8 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):

elif notify_action == 'on_newdevice':
data_factory = datafactory.DataFactory()
user_devices = data_factory.get_user_devices(user_id=stream_data['user_id'])
user_devices = data_factory.get_user_devices(user_id=stream_data['user_id'],
history_only=not plexpy.CONFIG.NOTIFY_NEW_DEVICE_INITIAL_ONLY)
evaluated = stream_data['machine_id'] not in user_devices

elif stream_data['media_type'] in ('movie', 'episode', 'clip'):
@@ -831,12 +832,16 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
child_count = 1
grandchild_count = 1

rating = notify_params['rating'] or notify_params['audience_rating']

critic_rating = ''
if notify_params['rating_image'].startswith('rottentomatoes://') and notify_params['rating']:
if notify_params['rating_image'].startswith('rottentomatoes://') \
and notify_params['rating']:
critic_rating = helpers.get_percent(notify_params['rating'], 10)

audience_rating = ''
if notify_params['audience_rating']:
audience_rating = notify_params['audience_rating']
if notify_params['audience_rating_image'].startswith(('rottentomatoes://', 'themoviedb://')) \
and audience_rating:
audience_rating = helpers.get_percent(notify_params['audience_rating'], 10)

now = arrow.now()
@@ -1013,9 +1018,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'collections': ', '.join(notify_params['collections']),
'summary': notify_params['summary'],
'tagline': notify_params['tagline'],
'rating': notify_params['rating'],
'rating': rating,
'critic_rating': critic_rating,
'audience_rating': audience_rating,
'user_rating': notify_params['user_rating'],
'duration': duration,
'poster_title': notify_params['poster_title'],
'poster_url': notify_params['poster_url'],

@@ -854,8 +854,8 @@ class Notifier(object):

else:
verify_msg = ""
if response is not None and response.status_code >= 400 and response.status_code < 500:
verify_msg = " Verify you notification agent settings are correct."
if response is not None and 400 <= response.status_code < 500:
verify_msg = " Verify your notification agent settings are correct."

logger.error("Tautulli Notifiers :: {name} notification failed.{msg}".format(msg=verify_msg, name=self.NAME))

@@ -1819,9 +1819,6 @@ class GROWL(Notifier):
logger.error("Tautulli Notifiers :: {name} notification failed: authentication error".format(name=self.NAME))
return False

# Fix message
body = body.encode(plexpy.SYS_ENCODING, "replace")

# Send it, including an image
image_file = os.path.join(str(plexpy.PROG_DIR),
"data/interfaces/default/images/logo-circle.png")

@@ -213,6 +213,7 @@ def mask_session_info(list_of_dicts, mask_metadata=True):

keys_to_mask = {'user_id': '',
'user': 'Plex User',
'username': 'Plex User',
'friendly_name': 'Plex User',
'user_thumb': common.DEFAULT_USER_THUMB,
'ip_address': 'N/A',

@@ -48,6 +48,11 @@ def refresh_users():
logger.info("Tautulli Users :: Requesting users list refresh...")
result = plextv.PlexTV().get_full_users_list()

server_id = plexpy.CONFIG.PMS_IDENTIFIER
if not server_id:
logger.error("Tautulli Users :: No PMS identifier, cannot refresh users. Verify server in settings.")
return

if result:
monitor_db = database.MonitorDatabase()

@@ -17,5 +17,5 @@

from __future__ import unicode_literals

PLEXPY_BRANCH = "beta"
PLEXPY_RELEASE_VERSION = "v2.6.0-beta"
PLEXPY_BRANCH = "master"
PLEXPY_RELEASE_VERSION = "v2.6.3"

@@ -20,6 +20,7 @@ from __future__ import unicode_literals
from future.builtins import next
from future.builtins import str

import json
import os
import platform
import re
@@ -29,10 +30,12 @@ import tarfile
import plexpy
if plexpy.PYTHON2:
import common
import helpers
import logger
import request
else:
from plexpy import common
from plexpy import helpers
from plexpy import logger
from plexpy import request

@@ -130,7 +133,13 @@ def get_version():
return cur_commit_hash, remote_name, branch_name

else:
plexpy.INSTALL_TYPE = 'docker' if plexpy.DOCKER else 'source'
if plexpy.DOCKER:
plexpy.INSTALL_TYPE = 'docker'
elif plexpy.SNAP:
plexpy.INSTALL_TYPE = 'snap'
else:
plexpy.INSTALL_TYPE = 'source'

current_version, current_branch = get_version_from_file()
return current_version, 'origin', current_branch

@@ -154,15 +163,18 @@ def get_version_from_file():
return current_version, current_branch


def check_update(scheduler=False, notify=False):
check_github(scheduler=scheduler, notify=notify)
def check_update(scheduler=False, notify=False, use_cache=False):
check_github(scheduler=scheduler, notify=notify, use_cache=use_cache)

if not plexpy.CURRENT_VERSION:
plexpy.UPDATE_AVAILABLE = None
elif plexpy.COMMITS_BEHIND > 0 and (plexpy.common.BRANCH in ('master', 'beta') or plexpy.FROZEN) and \
elif plexpy.COMMITS_BEHIND > 0 and \
(plexpy.common.BRANCH in ('master', 'beta') or plexpy.SNAP or plexpy.FROZEN) and \
plexpy.common.RELEASE != plexpy.LATEST_RELEASE:
plexpy.UPDATE_AVAILABLE = 'release'
elif plexpy.COMMITS_BEHIND > 0 and plexpy.CURRENT_VERSION != plexpy.LATEST_VERSION and not plexpy.FROZEN:
elif plexpy.COMMITS_BEHIND > 0 and \
not plexpy.SNAP and not plexpy.FROZEN and \
plexpy.CURRENT_VERSION != plexpy.LATEST_VERSION:
plexpy.UPDATE_AVAILABLE = 'commit'
else:
plexpy.UPDATE_AVAILABLE = False
@@ -173,7 +185,7 @@ def check_update(scheduler=False, notify=False):
plexpy.MAC_SYS_TRAY_ICON.change_tray_update_icon()


def check_github(scheduler=False, notify=False):
def check_github(scheduler=False, notify=False, use_cache=False):
plexpy.COMMITS_BEHIND = 0

if plexpy.CONFIG.GIT_TOKEN:
@@ -181,12 +193,16 @@ def check_github(scheduler=False, notify=False):
else:
headers = {}

# Get the latest version available from github
logger.info('Retrieving latest version information from GitHub')
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO,
plexpy.CONFIG.GIT_BRANCH)
version = request.request_json(url, headers=headers, timeout=20, validator=lambda x: type(x) == dict)
version = github_cache('version', use_cache=use_cache)
if not version:
# Get the latest version available from github
logger.info('Retrieving latest version information from GitHub')
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO,
plexpy.CONFIG.GIT_BRANCH)
version = request.request_json(url, headers=headers, timeout=20,
validator=lambda x: type(x) == dict)
github_cache('version', github_data=version)

if version is None:
logger.warn('Could not get the latest version from GitHub. Are you running a local development version?')
@@ -204,13 +220,16 @@ def check_github(scheduler=False, notify=False):
logger.info('Tautulli is up to date')
return plexpy.LATEST_VERSION

logger.info('Comparing currently installed version with latest GitHub version')
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO,
plexpy.LATEST_VERSION,
plexpy.CURRENT_VERSION)
commits = request.request_json(url, headers=headers, timeout=20, whitelist_status_code=404,
validator=lambda x: type(x) == dict)
commits = github_cache('commits', use_cache=use_cache)
if not commits:
logger.info('Comparing currently installed version with latest GitHub version')
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO,
plexpy.LATEST_VERSION,
plexpy.CURRENT_VERSION)
commits = request.request_json(url, headers=headers, timeout=20, whitelist_status_code=404,
validator=lambda x: type(x) == dict)
github_cache('commits', github_data=commits)

if commits is None:
logger.warn('Could not get commits behind from GitHub.')
@@ -226,8 +245,13 @@ def check_github(scheduler=False, notify=False):
if plexpy.COMMITS_BEHIND > 0:
logger.info('New version is available. You are %s commits behind' % plexpy.COMMITS_BEHIND)

url = 'https://api.github.com/repos/%s/%s/releases' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO)
releases = request.request_json(url, timeout=20, whitelist_status_code=404, validator=lambda x: type(x) == list)
releases = github_cache('releases', use_cache=use_cache)
if not releases:
url = 'https://api.github.com/repos/%s/%s/releases' % (plexpy.CONFIG.GIT_USER,
plexpy.CONFIG.GIT_REPO)
releases = request.request_json(url, timeout=20, whitelist_status_code=404,
validator=lambda x: type(x) == list)
github_cache('releases', github_data=releases)

if releases is None:
logger.warn('Could not get releases from GitHub.')
@@ -250,7 +274,11 @@ def check_github(scheduler=False, notify=False):
'plexpy_update_commit': plexpy.LATEST_VERSION,
'plexpy_update_behind': plexpy.COMMITS_BEHIND})

if scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and not plexpy.DOCKER and not plexpy.FROZEN:
if plexpy.PYTHON2:
logger.warn('Tautulli is running using Python 2. Unable to run automatic update.')

elif scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and \
not plexpy.DOCKER and not plexpy.SNAP and not plexpy.FROZEN:
logger.info('Running automatic update.')
plexpy.shutdown(restart=True, update=True)

@@ -261,10 +289,14 @@ def check_github(scheduler=False, notify=False):


def update():
if plexpy.PYTHON2:
logger.warn('Tautulli is running using Python 2. Unable to update.')
return

if not plexpy.UPDATE_AVAILABLE:
return

if plexpy.INSTALL_TYPE in ('docker', 'windows', 'macos'):
if plexpy.INSTALL_TYPE in ('docker', 'snap', 'windows', 'macos'):
return

elif plexpy.INSTALL_TYPE == 'git':
@@ -391,6 +423,30 @@ def checkout_git_branch():
plexpy.CONFIG.GIT_BRANCH))


def github_cache(cache, github_data=None, use_cache=True):
timestamp = helpers.timestamp()
cache_filepath = os.path.join(plexpy.CONFIG.CACHE_DIR, 'github_{}.json'.format(cache))

if github_data:
cache_data = {'github_data': github_data, '_cache_time': timestamp}
try:
with open(cache_filepath, 'w', encoding='utf-8') as cache_file:
json.dump(cache_data, cache_file)
except:
pass
else:
if not use_cache:
return
try:
with open(cache_filepath, 'r', encoding='utf-8') as cache_file:
cache_data = json.load(cache_file)
if timestamp - cache_data['_cache_time'] < plexpy.CONFIG.CHECK_GITHUB_CACHE_SECONDS:
logger.debug('Using cached GitHub %s data', cache)
return cache_data['github_data']
except:
pass


def read_changelog(latest_only=False, since_prev_release=False):
changelog_file = os.path.join(plexpy.PROG_DIR, 'CHANGELOG.md')

@@ -16,7 +16,7 @@
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.


# http://tools.cherrypy.org/wiki/AuthenticationAndAccessRestrictions
# https://github.com/cherrypy/tools/blob/master/AuthenticationAndAccessRestrictions
# Form based authentication for CherryPy. Requires the
# Session tool to be loaded.

@@ -33,11 +33,13 @@ import plexpy
if plexpy.PYTHON2:
import logger
from database import MonitorDatabase
from helpers import timestamp
from users import Users, refresh_users
from plextv import PlexTV
else:
from plexpy import logger
from plexpy.database import MonitorDatabase
from plexpy.helpers import timestamp
from plexpy.users import Users, refresh_users
from plexpy.plextv import PlexTV

@@ -246,6 +248,33 @@ def all_of(*conditions):
return check


def check_rate_limit(ip_address):
monitor_db = MonitorDatabase()
result = monitor_db.select('SELECT timestamp, success FROM user_login '
'WHERE ip_address = ? '
'AND timestamp >= ( '
'SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END '
'FROM user_login WHERE ip_address = ? AND success = 1) '
'ORDER BY timestamp DESC',
[ip_address, ip_address])

try:
last_timestamp = result[0]['timestamp']
except IndexError:
last_timestamp = 0

try:
last_success = max(login['timestamp'] for login in result if login['success'])
except ValueError:
last_success = 0

max_timestamp = max(last_success, last_timestamp - plexpy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL)
attempts = [login for login in result if login['timestamp'] >= max_timestamp and not login['success']]

if len(attempts) >= plexpy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS:
return max(last_timestamp - (timestamp() - plexpy.CONFIG.HTTP_RATE_LIMIT_LOCKOUT_TIME), 0)


# Controller to provide login and logout actions

class AuthController(object):
@@ -325,6 +354,16 @@ class AuthController(object):
cherrypy.response.status = 405
return {'status': 'error', 'message': 'Sign in using POST.'}

ip_address = cherrypy.request.remote.ip
rate_limit = check_rate_limit(ip_address)

if rate_limit:
logger.debug("Tautulli WebAuth :: Too many incorrect login attempts from '%s'." % ip_address)
error_message = {'status': 'error', 'message': 'Too many login attempts.'}
cherrypy.response.status = 429
cherrypy.response.headers['Retry-After'] = rate_limit
return error_message

error_message = {'status': 'error', 'message': 'Invalid credentials.'}

valid_login, user_details, user_group = check_credentials(username=username,

@@ -1871,6 +1871,7 @@ class WebInterface(object):
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"ip_address": "xxx.xxx.xxx.xxx",
"live": 0,
"machine_id": "lmd93nkn12k29j2lnm",
"media_index": 17,
"media_type": "episode",
"originally_available_at": "2016-04-24",
@@ -2649,13 +2650,28 @@ class WebInterface(object):
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
def delete_sync_rows(self, client_id, sync_id, **kwargs):
@addtoapi("delete_synced_item")
def delete_sync_rows(self, client_id=None, sync_id=None, **kwargs):
""" Delete a synced item from a device.

```
Required parameters:
client_id (str): The client ID of the device to delete from
sync_id (str): The sync ID of the synced item

Optional parameters:
None

Returns:
None
```
"""
if client_id and sync_id:
plex_tv = plextv.PlexTV()
delete_row = plex_tv.delete_sync(client_id=client_id, sync_id=sync_id)
return {'message': 'Sync deleted'}
return {'result': 'success', 'message': 'Synced item deleted successfully.'}
else:
return {'message': 'no data received'}
return {'result': 'error', 'message': 'Missing client ID and sync ID.'}


##### Logs #####
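The `delete_sync_rows` method above is exposed on the v2 API as the `delete_synced_item` command. A minimal usage sketch follows; it is not part of this change set, and the base URL, API key, `client_id`, and `sync_id` values are placeholders to replace with your own.

```python
# Sketch only: placeholder URL, API key, and IDs.
import requests

TAUTULLI_URL = 'http://localhost:8181/api/v2'  # assumes the default port and no custom URL base
API_KEY = 'YOUR_API_KEY'                       # found in the Tautulli web interface settings

params = {
    'apikey': API_KEY,
    'cmd': 'delete_synced_item',
    'client_id': 'abc123devicexyz',  # hypothetical device client ID
    'sync_id': '123456',             # hypothetical sync ID
}
response = requests.get(TAUTULLI_URL, params=params, timeout=10)
# Expect a success message when both IDs are supplied, otherwise the
# 'Missing client ID and sync ID.' error introduced in this change.
print(response.json())
```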
@@ -3120,6 +3136,7 @@ class WebInterface(object):
"notify_concurrent_by_ip": checked(plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP),
"notify_concurrent_threshold": plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD,
"notify_continued_session_threshold": plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD,
"notify_new_device_initial_only": checked(plexpy.CONFIG.NOTIFY_NEW_DEVICE_INITIAL_ONLY),
"home_sections": json.dumps(plexpy.CONFIG.HOME_SECTIONS),
"home_stats_cards": json.dumps(plexpy.CONFIG.HOME_STATS_CARDS),
"home_library_cards": json.dumps(plexpy.CONFIG.HOME_LIBRARY_CARDS),
@@ -3183,6 +3200,7 @@ class WebInterface(object):
"refresh_libraries_on_startup", "refresh_users_on_startup",
"notify_consecutive", "notify_recently_added_upgrade",
"notify_group_recently_added_grandparent", "notify_group_recently_added_parent",
"notify_new_device_initial_only",
"monitor_pms_updates", "get_file_sizes", "log_blacklist", "http_hash_password",
"allow_guest_access", "cache_images", "http_proxy", "http_basic_auth", "notify_concurrent_by_ip",
"history_table_activity", "plexpy_auto_update",
@@ -4291,7 +4309,7 @@ class WebInterface(object):
plexpy.CONFIG.GIT_REPO,
plexpy.CURRENT_VERSION,
plexpy.LATEST_VERSION))
}
}

else:
update = {'result': 'success',
@@ -4299,7 +4317,7 @@ class WebInterface(object):
'message': 'Tautulli is up to date.'
}

if plexpy.DOCKER or plexpy.FROZEN:
if plexpy.DOCKER or plexpy.SNAP or plexpy.FROZEN:
update['install_type'] = plexpy.INSTALL_TYPE

return update
@@ -4333,7 +4351,9 @@ class WebInterface(object):
@cherrypy.expose
@requireAuth(member_of("admin"))
def update(self, **kwargs):
if plexpy.DOCKER:
if plexpy.PYTHON2:
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "home?update=python2")
if plexpy.DOCKER or plexpy.SNAP:
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "home")

# Show changelog after updating
@@ -5032,12 +5052,13 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi("get_metadata")
def get_metadata_details(self, rating_key='', **kwargs):
def get_metadata_details(self, rating_key='', sync_id='', **kwargs):
""" Get the metadata for a media item.

```
Required parameters:
rating_key (str): Rating key of the item
rating_key (str): Rating key of the item, OR
sync_id (str): Sync ID of a synced item

Optional parameters:
None
@@ -5188,7 +5209,8 @@ class WebInterface(object):
```
"""
pms_connect = pmsconnect.PmsConnect()
metadata = pms_connect.get_metadata_details(rating_key=rating_key)
metadata = pms_connect.get_metadata_details(rating_key=rating_key,
sync_id=sync_id)

if metadata:
return metadata
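With the change above, `get_metadata` accepts either a `rating_key` or a `sync_id`. The sketch below shows both call styles; it is illustrative only, and the URL, API key, and key values are placeholders.

```python
# Sketch only: placeholder URL, API key, rating_key, and sync_id.
import requests

TAUTULLI_URL = 'http://localhost:8181/api/v2'
API_KEY = 'YOUR_API_KEY'

# Existing behaviour: look up metadata by rating key.
by_rating_key = requests.get(TAUTULLI_URL, timeout=10, params={
    'apikey': API_KEY, 'cmd': 'get_metadata', 'rating_key': '153037'})

# New in this change: look up metadata for a synced item by its sync ID.
by_sync_id = requests.get(TAUTULLI_URL, timeout=10, params={
    'apikey': API_KEY, 'cmd': 'get_metadata', 'sync_id': '123456'})

print(by_rating_key.json())
print(by_sync_id.json())
```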
@@ -6528,6 +6550,31 @@ class WebInterface(object):

return status

@cherrypy.expose
@cherrypy.tools.json_out()
@addtoapi()
def server_status(self, *args, **kwargs):
""" Get the current status of Tautulli's connection to the Plex server.

```
Required parameters:
None

Optional parameters:
None

Returns:
json:
{"result": "success",
"connected": true,
}
```
"""
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
status = {'result': 'success', 'connected': plexpy.PLEX_SERVER_UP}

return status

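The new `server_status` command gives a lightweight way to poll whether Tautulli can reach the Plex server. A placeholder-value sketch of calling it through the v2 API:

```python
# Sketch only: placeholder URL and API key.
import requests

response = requests.get('http://localhost:8181/api/v2', timeout=10,
                        params={'apikey': 'YOUR_API_KEY', 'cmd': 'server_status'})
# The docstring above documents the payload as {"result": "success", "connected": true}.
print(response.json())
```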
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))

@@ -43,13 +43,14 @@ class WindowsSystemTray(object):
class WindowsSystemTray(object):
def __init__(self):
self.image_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/', plexpy.CONFIG.INTERFACE, 'images')
self.icon = os.path.join(self.image_dir, 'logo-circle.ico')

if plexpy.UPDATE_AVAILABLE:
self.icon = os.path.join(self.image_dir, 'logo-circle-update.ico')
self.hover_text = common.PRODUCT + ' - Update Available!'
self.update_title = 'Check for Updates - Update Available!'
else:
self.icon = os.path.join(self.image_dir, 'logo-circle.ico')
self.hover_text = common.PRODUCT
self.update_title = 'Check for Updates'

if plexpy.CONFIG.LAUNCH_STARTUP:
launch_start_icon = os.path.join(self.image_dir, 'check-solid.ico')
@@ -66,7 +67,7 @@ class WindowsSystemTray(object):
['Start Tautulli at Login', launch_start_icon, self.tray_startup, None],
['Open Browser when Tautulli Starts', launch_browser_icon, self.tray_browser, None],
['', None, 'separator', None],
['Check for Updates', None, self.tray_check_update, None],
[self.update_title, None, self.tray_check_update, None],
['Restart', None, self.tray_restart, None]
]
if not plexpy.FROZEN:
@@ -105,8 +106,10 @@ class WindowsSystemTray(object):
if plexpy.UPDATE_AVAILABLE:
plexpy.SIGNAL = 'update'
else:
hover_text = common.PRODUCT + ' - No Update Available'
self.update(hover_text=hover_text)
self.hover_text = common.PRODUCT + ' - No Update Available'
self.update_title = 'Check for Updates - No Update Available'
self.menu[5][0] = self.update_title
self.update(hover_text=self.hover_text, menu_options=self.menu)

def tray_restart(self, tray_icon):
plexpy.SIGNAL = 'restart'
@@ -116,12 +119,13 @@ class WindowsSystemTray(object):

def change_tray_update_icon(self):
if plexpy.UPDATE_AVAILABLE:
self.icon = os.path.join(self.image_dir, 'logo-circle-update.ico')
self.hover_text = common.PRODUCT + ' - Update Available!'
self.update_title = 'Check for Updates - Update Available!'
else:
self.icon = os.path.join(self.image_dir, 'logo-circle.ico')
self.hover_text = common.PRODUCT + ' - No Update Available'
self.update(icon=self.icon, hover_text=self.hover_text)
self.update_title = 'Check for Updates'
self.menu[5][0] = self.update_title
self.update(hover_text=self.hover_text, menu_options=self.menu)

def change_tray_icons(self):
if plexpy.CONFIG.LAUNCH_STARTUP:

65
snap/snapcraft.yaml
Normal file
@@ -0,0 +1,65 @@
name: tautulli
adopt-info: tautulli
summary: A Python based monitoring and tracking tool for Plex Media Server.
description: >
Tautulli is a 3rd party application that you can run alongside your Plex Media Server to monitor activity and track various statistics.
Most importantly, these statistics include what has been watched, who watched it, when and where they watched it, and how it was watched.
The only thing missing is "why they watched it", but who am I to question your 42 plays of Frozen.
All statistics are presented in a nice and clean interface with many tables and graphs, which makes it easy to brag about your server to everyone else.

base: core18
confinement: strict

parts:
tautulli:
plugin: dump
source: .
stage-packages:
- python3
- python3-openssl
- python3-pycryptodome
- python3-setuptools
build-packages:
- git
override-pull: |
snapcraftctl pull
TAG_FULL=$(git describe --tag)
TAG=$(echo $TAG_FULL | grep -oP '(v\d+\.\d+\.\d+(?>-beta)?)')
BRANCH=$(git rev-parse --abbrev-ref HEAD)
COMMIT=$(git rev-parse HEAD)
if [ "$TAG" = "$TAG_FULL" ]; then
VERSION=$TAG
else
VERSION=$(echo $COMMIT | head -c 7)
fi
if [ ! "$VERSION" = "$TAG" ] || [ ! "$VERSION" = "${VERSION%-beta}" ]; then
GRADE=devel
else
GRADE=stable
fi
if [ "$VERSION" = "$TAG" ] && [ ! "$VERSION" = "${VERSION%-beta}" ]; then
BRANCH=beta
elif [ "$VERSION" = "$TAG" ]; then
BRANCH=master
fi
echo $BRANCH > branch.txt
echo $COMMIT > version.txt
snapcraftctl set-version "$VERSION"
snapcraftctl set-grade "$GRADE"

apps:
tautulli:
command: >
usr/bin/python3 $SNAP/Tautulli.py
--datadir $SNAP_USER_COMMON/Tautulli
--config $SNAP_USER_COMMON/Tautulli/config.ini
--quiet
--nolaunch
daemon: simple
restart-condition: on-abnormal
restart-delay: 5s
plugs:
- network
- network-bind
environment:
TAUTULLI_SNAP: "True"
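The snap definition above sets `TAUTULLI_SNAP: "True"` in the app environment, and elsewhere in this change set the code branches on a `plexpy.SNAP` flag (for example in `get_version()` and `check_update()`). As a rough illustration of how such a flag can be derived from that environment variable, here is a minimal sketch; the exact way Tautulli populates `plexpy.SNAP` is not shown in this diff and is assumed here.

```python
# Sketch only: detecting a snap install from the environment variable set in snapcraft.yaml.
# How the real plexpy.SNAP flag is populated is an assumption, not part of this diff.
import os

SNAP = bool(os.getenv('TAUTULLI_SNAP'))

if SNAP:
    install_type = 'snap'    # mirrors the INSTALL_TYPE branch added to get_version()
else:
    install_type = 'source'

print(install_type)
```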
19
start.sh
@@ -1,21 +1,16 @@
#!/usr/bin/env bash

if [[ "$TAUTULLI_DOCKER" == "True" ]]; then
if [[ -n $PUID && -n $PGID ]]; then
getent group "$PGID" 2>&1 > /dev/null || groupadd -g "$PGID" tautulli
getent passwd "$PUID" 2>&1 > /dev/null || useradd -r -u "$PUID" -g "$PGID" tautulli
PUID=${PUID:-1000}
PGID=${PGID:-1000}

user=$(getent passwd "$PUID" | cut -d: -f1)
group=$(getent group "$PGID" | cut -d: -f1)
usermod -a -G root "$user"
groupmod -o -g "$PGID" tautulli
usermod -o -u "$PUID" tautulli

chown -R "$user":"$group" /config
chown -R tautulli:tautulli /config

echo "Running Tautulli using user $user (uid=$PUID) and group $group (gid=$PGID)"
su "$user" -g "$group" -c "python /app/Tautulli.py --datadir /config"
else
python Tautulli.py --datadir /config
fi
echo "Running Tautulli using user tautulli (uid=$(id -u tautulli)) and group tautulli (gid=$(id -g tautulli))"
exec gosu tautulli "$@"
else
python_versions=("python3" "python3.8" "python3.7" "python3.6" "python" "python2" "python2.7")
for cmd in "${python_versions[@]}"; do