Compare commits

...

87 Commits

Author SHA1 Message Date
JonnyWong16
35a0242037 v2.2.3 2020-05-01 09:22:35 -07:00
JonnyWong16
e2e7063a29 Expose remote access check settings in the UI 2020-04-28 18:39:12 -07:00
JonnyWong16
03035d0eac Prevent spamming the logs with remote access failures 2020-04-28 18:04:05 -07:00
JonnyWong16
7ce9283421 Fix geoip lookup for IPv6 addresses 2020-04-28 08:42:00 -07:00
JonnyWong16
fc2faa247a v2.2.3-beta 2020-04-27 18:04:27 -07:00
JonnyWong16
9b11fd4f18 Remove all MaxMind/GeoLite2 settings 2020-04-27 17:43:00 -07:00
JonnyWong16
ccac7d1bd4 Remove maxminddb library 2020-04-27 17:34:51 -07:00
JonnyWong16
5494d1e7bf Remove geoip2 library 2020-04-27 17:33:05 -07:00
JonnyWong16
1ab407eb38 Update API to keep backwards compatibility for geoip lookup 2020-04-27 17:29:48 -07:00
JonnyWong16
82ab732144 Use Plex.tv for geoip lookup instead of MaxMind database 2020-04-27 17:28:44 -07:00
JonnyWong16
2162210393 Add remote access notification parameters 2020-04-24 18:03:27 -07:00
JonnyWong16
54a7839421 Add remote access failure reason 2020-04-24 18:03:05 -07:00
JonnyWong16
576ac88a6a Add advanced remote access ping interval 2020-04-24 18:01:27 -07:00
JonnyWong16
426fc09b17 Check continued session greater than or equal to 2020-04-23 23:18:55 -07:00
JonnyWong16
22bc0b3f9a Rename continued session to initial stream 2020-04-23 23:03:20 -07:00
JonnyWong16
4ece976dc8 Add continued session threshold setting and notification parameter 2020-04-23 22:47:19 -07:00
JonnyWong16
3ff0b4a256 Add method to check if a stream is a continued session 2020-04-23 22:34:30 -07:00
JonnyWong16
ecfc3ed74f Use server_id when retrieving library details 2020-04-23 22:30:32 -07:00
JonnyWong16
976154ed6c Add episode count to season and year to album Plex Mobile App notifications 2020-04-23 19:05:13 -07:00
JonnyWong16
c108765857 Add id parameter to get_history API for backwards compatibility 2020-04-19 23:01:58 -07:00
JonnyWong16
96438e1e15 Add id parameter to get_stream_data API for backwards compatibility
* Fixes Tautulli/Tautulli-Issues#239
2020-04-19 14:06:05 -07:00
JonnyWong16
0afd77fb2f Test all Plex mobile app triggers 2020-04-18 20:56:06 -07:00
JonnyWong16
a6cd512ebf Rename Plex Mobile App to Plex Android / iOS App 2020-04-18 20:27:23 -07:00
JonnyWong16
fb5d97a627 Refactor some notifiers code 2020-04-18 19:59:30 -07:00
JonnyWong16
231d439ef8 Remove plex_logs volume from Dockerfile 2020-04-18 19:24:52 -07:00
JonnyWong16
28e48e6b2f Fix MusicBrainz lookup missing artist 2020-04-18 19:24:20 -07:00
JonnyWong16
89c1ec8d21 Fix history table refreshing after deleting 2020-04-18 17:11:44 -07:00
JonnyWong16
3270a60bd7 Add Plex Mobile App notification agent 2020-04-18 17:06:23 -07:00
JonnyWong16
6ccf801ee6 Add code to filter available triggers for notification agents 2020-04-18 15:45:44 -07:00
JonnyWong16
79cd2ca9b9 Add user_thumb to notification parameters 2020-04-18 14:30:03 -07:00
JonnyWong16
063271aabb Fix notification rating key being overwritten when retrieving lookup info 2020-04-18 14:29:49 -07:00
JonnyWong16
e6c2133bf5 Fix auto-updater not working after enabling unless Tautulli was restarted 2020-04-17 18:23:47 -07:00
JonnyWong16
63e056987a Add bandwidth notification parameters 2020-04-17 18:19:27 -07:00
JonnyWong16
df35689c35 Fix typo in CHANGELOG 2020-04-13 11:43:23 -07:00
JonnyWong16
b66e845c6e Fix typo in README 2020-04-13 11:43:04 -07:00
JonnyWong16
6d5c320701 v2.2.2-beta 2020-04-12 21:27:01 -07:00
JonnyWong16
400a189455 Invalidate temporary mobile app token after 5 minutes 2020-04-12 21:20:14 -07:00
JonnyWong16
b7d03a4f31 Fix refreshing libraries and users table after deleting 2020-04-12 20:51:22 -07:00
JonnyWong16
523e6421be Don't delete library history if server_id doesn't match 2020-04-12 20:49:45 -07:00
JonnyWong16
e0cd6f7071 Rename docker build arg VERSION to TAG 2020-04-12 18:18:08 -07:00
JonnyWong16
38db0b7a70 Rename VERSION to COMMIT in Dockerfile 2020-04-12 18:15:14 -07:00
JonnyWong16
f39ecd89a7 Update Docker build badges on README 2020-04-12 18:03:40 -07:00
JonnyWong16
f7f76d82b6 Add Docker buildx GitHub workflow 2020-04-12 17:56:15 -07:00
JonnyWong16
9097e79e4f Change web app manifest start url to relative path 2020-04-12 11:33:23 -07:00
JonnyWong16
88711e7601 Add more info to manifest.json 2020-04-12 10:43:30 -07:00
JonnyWong16
d0fa83bb8c Use Kodi platform image for xbmc (Fixes Tautulli/Tautulli-Issues#231) 2020-04-11 20:39:39 -07:00
JonnyWong16
1271458f83 Fix web app manifest file (Fixes Tautulli/Tautulli-Issues#232) 2020-04-11 12:40:51 -07:00
JonnyWong16
2ae09a07e6 Some css syntax fixes 2020-04-11 12:39:12 -07:00
JonnyWong16
33d860384c Add Tautulli database corruption notification trigger 2020-04-10 15:11:32 -07:00
JonnyWong16
a4eda99a4a Update API docs to mention enabling api_sql while Tautulli is shut down 2020-04-10 14:44:14 -07:00
JonnyWong16
752c7badd2 Make inactive icon larger on library/user page 2020-04-10 14:41:14 -07:00
JonnyWong16
6399c90642 Fix platform icon size on activity card 2020-04-10 14:36:51 -07:00
JonnyWong16
97089846e9 Make inactive user/library triangle always orange 2020-04-10 14:31:12 -07:00
JonnyWong16
4de7884e39 Update API docs with all delete function changes 2020-04-10 14:14:34 -07:00
JonnyWong16
440adfb914 Fix missing page functions in library table 2020-04-10 14:12:38 -07:00
JonnyWong16
5f26d0085d Simplify library undelete function 2020-04-10 14:07:15 -07:00
JonnyWong16
f484604c69 Simplify user undelete function 2020-04-10 14:07:03 -07:00
JonnyWong16
899d2fbf9d Make library delete server_id optional 2020-04-10 14:03:32 -07:00
JonnyWong16
6a87dc9c40 Improve library delete/purge function 2020-04-10 14:00:19 -07:00
JonnyWong16
104e2929df Simplify user delete loop 2020-04-10 13:27:26 -07:00
JonnyWong16
faac6b11c2 Improve user delete/purge function 2020-04-10 13:15:29 -07:00
JonnyWong16
377a23478e Rename history id to row_id 2020-04-10 12:56:50 -07:00
JonnyWong16
c979e78802 Refactor database delete_session_history_rows ids 2020-04-10 12:40:35 -07:00
JonnyWong16
38f64c7d85 Improve delete history using list of row ids 2020-04-10 11:45:20 -07:00
JonnyWong16
1091a64863 Update API docs with library is_active 2020-04-10 00:20:25 -07:00
JonnyWong16
23de9616f1 Show library active status on Libraries table 2020-04-10 00:17:52 -07:00
JonnyWong16
198e7767dc Add library is_active to database 2020-04-10 00:12:38 -07:00
JonnyWong16
aa31bf1a19 Update API docs with user is_active 2020-04-10 00:11:43 -07:00
JonnyWong16
f366304c50 Show user active status on Users table 2020-04-10 00:02:30 -07:00
JonnyWong16
ce289995ff Add user is_active to database 2020-04-09 23:15:08 -07:00
JonnyWong16
ca2b4085c9 Fix if bad query_days passed to API 2020-04-09 18:33:02 -07:00
JonnyWong16
1d08069162 Rename time_queries to query_days 2020-04-09 18:30:46 -07:00
JonnyWong16
bcbfaae630 Merge pull request #1372 from KaasKop97/custom_time_queries
Allow custom time_queries for get_watch_stats (Closes #1345)
2020-04-09 18:21:35 -07:00
JonnyWong16
ae9df92d28 Divide file size by 2^10 but display SI units 2020-04-08 22:55:56 -07:00
JonnyWong16
47610323b0 Fix API grouping parameter not defaulting to match setting 2020-04-07 18:18:16 -07:00
Mitch
d1f1763919 Allow custom time_queries for get_watch_stats 2020-04-05 20:03:57 +02:00
JonnyWong16
1326ad8708 Add TAUTULLI_PYTHON_VERSION to script environment variables
* Period separated string (e.g. 2.7.17 or 3.8.2)
2020-04-04 08:12:42 -07:00
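
For context, a script notification can read this variable directly from its environment. The sketch below is only an illustration; the variable name comes from the commit above, everything else is hypothetical.

```python
#!/usr/bin/env python
# Hypothetical Tautulli script-notification snippet: TAUTULLI_PYTHON_VERSION
# is a period-separated version string (e.g. "2.7.17" or "3.8.2").
import os

version = os.environ.get("TAUTULLI_PYTHON_VERSION", "")
major = version.split(".")[0] if version else "unknown"
print("Tautulli is running under Python %s (major %s)" % (version, major))
```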
JonnyWong16
6e09e509bd Remove duplicate dictionary key in top movie stats 2020-04-03 21:07:00 -07:00
JonnyWong16
e8d0557852 Fix typo in send newsletter argument 2020-04-03 21:06:10 -07:00
JonnyWong16
aac705f465 Put import OpenSSL in try/except block for self-signed certificates 2020-04-03 21:05:44 -07:00
JonnyWong16
009971901b Fix delete lookup info by rating key 2020-04-03 20:53:35 -07:00
JonnyWong16
1ffd6c0ea1 Encode API XML output to UTF-8 2020-03-30 13:55:17 -07:00
JonnyWong16
50ce29cc64 Fix enable notification grouping by default again 2020-03-29 21:14:17 -07:00
JonnyWong16
e4ec24be26 Reorder git pull command for update 2020-03-29 10:28:42 -07:00
JonnyWong16
04765288d7 Change default file size on media info tables to SI units 2020-03-29 10:27:56 -07:00
JonnyWong16
8fdd0ba0d9 Merge pull request #1363 from aaronldunlap/master
Change humanFileSize to default to SI notation
2020-03-29 10:18:14 -07:00
aaronldunlap
aa5affe366 Change humanFileSize to default to SI notation 2020-01-23 17:09:39 -06:00
66 changed files with 1454 additions and 3730 deletions

View File

@@ -1,5 +1,8 @@
.git
.github
.gitignore
contrib
init-scripts
pylintrc
*.md
!CHANGELOG*.md

.github/workflows/publish-docker.yml (new file)
View File

@@ -0,0 +1,83 @@
name: Publish Docker
on:
  push:
    branches: [master, beta, nightly]
    tags: [v*]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Prepare
        id: prepare
        run: |
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
          elif [[ $GITHUB_REF == refs/heads/master ]]; then
            echo ::set-output name=tag::latest
          else
            echo ::set-output name=tag::${GITHUB_REF#refs/heads/}
          fi
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            echo ::set-output name=branch::master
          else
            echo ::set-output name=branch::${GITHUB_REF#refs/heads/}
          fi
          echo ::set-output name=commit::${GITHUB_SHA}
          echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
          echo ::set-output name=docker_platforms::linux/amd64,linux/arm64,linux/arm
          echo ::set-output name=docker_image::tautulli/tautulli
      - name: Set up Docker Buildx
        id: buildx
        uses: crazy-max/ghaction-docker-buildx@v1
        with:
          version: latest
      - name: Checkout
        uses: actions/checkout@v2
      - name: Docker Buildx (no push)
        run: |
          docker buildx build \
            --platform ${{ steps.prepare.outputs.docker_platforms }} \
            --output "type=image,push=false" \
            --build-arg "TAG=${{ steps.prepare.outputs.tag }}" \
            --build-arg "BRANCH=${{ steps.prepare.outputs.branch }}" \
            --build-arg "COMMIT=${{ steps.prepare.outputs.commit }}" \
            --build-arg "BUILD_DATE=${{ steps.prepare.outputs.build_date }}" \
            --tag "${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}" \
            --file Dockerfile .
      - name: Docker Login
        if: success()
        env:
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
        run: |
          echo "${DOCKER_PASSWORD}" | docker login --username "${{ secrets.DOCKER_USERNAME }}" --password-stdin
      - name: Docker Buildx (push)
        if: success()
        run: |
          docker buildx build \
            --platform ${{ steps.prepare.outputs.docker_platforms }} \
            --output "type=image,push=true" \
            --build-arg "TAG=${{ steps.prepare.outputs.tag }}" \
            --build-arg "BRANCH=${{ steps.prepare.outputs.branch }}" \
            --build-arg "COMMIT=${{ steps.prepare.outputs.commit }}" \
            --build-arg "BUILD_DATE=${{ steps.prepare.outputs.build_date }}" \
            --tag "${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}" \
            --file Dockerfile .
      - name: Clear
        if: always()
        run: |
          rm -f ${HOME}/.docker/config.json
      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
          job: ${{ github.workflow }}
          nofail: true
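
The Prepare step's ref-to-tag mapping is the heart of this workflow. The Python sketch below merely restates that shell logic for readability; it is not part of the workflow itself.

```python
def docker_tag_and_branch(github_ref):
    """Restate the Prepare step: map a Git ref to a Docker tag and branch.

    refs/tags/v2.2.3  -> ("v2.2.3", "master")
    refs/heads/master -> ("latest", "master")
    refs/heads/beta   -> ("beta", "beta")
    """
    if github_ref.startswith("refs/tags/"):
        return github_ref[len("refs/tags/"):], "master"
    branch = github_ref[len("refs/heads/"):]
    tag = "latest" if branch == "master" else branch
    return tag, branch
```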

View File

@@ -1,8 +1,7 @@
name: Create Release
name: Publish Release
on:
  push:
    tags:
      - 'v*'
    tags: [v*]
jobs:
  build:
    runs-on: ubuntu-latest
@@ -10,7 +9,7 @@ jobs:
      - name: Checkout Code
        uses: actions/checkout@master
      - name: Get Release Version
        run: echo ::set-env name=RELEASE_VERSION::${GITHUB_REF/refs\/tags\//}
        run: echo ::set-env name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
      - name: Get Changelog
        run: echo ::set-env name=CHANGELOG::"$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md | sed '$d' | sed '$d' | sed '$d' | sed ':a;N;$!ba;s/\n/%0A/g' )"
      - name: Create Release
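
The Get Changelog sed pipeline is dense: it prints from the first `## ` heading through the next one, trims the trailing heading and blank lines, and encodes newlines as `%0A` so the value survives `::set-env`. A rough Python equivalent, for readability only (the trailing trim count is an assumption matching the three `sed '$d'` calls):

```python
def extract_latest_changelog(path="CHANGELOG.md"):
    # Collect the first "## " section, including the heading that starts
    # the next section (the sed loop prints it before quitting).
    out, started = [], False
    for line in open(path).read().splitlines():
        if line.startswith("## ") and started:
            out.append(line)
            break
        if line.startswith("## "):
            started = True
        if started:
            out.append(line)
    out = out[:-3]  # mirror the three `sed '$d'` calls trimming the tail
    return "%0A".join(out)  # mirror the final newline-to-%0A substitution
```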

View File

@@ -1,30 +0,0 @@
name: Publish Docker Branch
on:
  push:
    branches: [master, beta, nightly]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@master
      - name: Get Branch
        run: echo ::set-env name=BRANCH::${GITHUB_REF#refs/heads/}
      - name: Publish to Registry
        uses: elgohr/Publish-Docker-Github-Action@master
        env:
          VERSION: ${{ github.sha }}
        with:
          name: tautulli/tautulli
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          dockerfile: Dockerfile
          buildargs: VERSION, BRANCH
      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
          job: ${{ github.workflow }}
          nofail: true

View File

@@ -1,32 +0,0 @@
name: Publish Docker Release
on:
  push:
    tags:
      - 'v*'
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@master
      - name: Get Branch
        run: echo ::set-env name=BRANCH::${GITHUB_REF/refs\/tags\//}
      - name: Publish to Registry
        uses: elgohr/Publish-Docker-Github-Action@master
        env:
          VERSION: ${{ github.sha }}
        with:
          name: tautulli/tautulli
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          dockerfile: Dockerfile
          buildargs: VERSION, BRANCH
          tags: ${{ env.BRANCH }}
      - name: Post Status to Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          status: ${{ job.status }}
          job: ${{ github.workflow }}
          nofail: true

API.md
View File

@@ -88,7 +88,8 @@ Required parameters:
section_id (str): The id of the Plex library section
Optional parameters:
None
server_id (str): The Plex server identifier of the library section
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
@@ -103,7 +104,7 @@ Required parameters:
user_id (str): The id of the Plex user
Optional parameters:
None
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
@@ -114,6 +115,21 @@ Returns:
Delete and recreate the cache directory.
### delete_history
Delete history rows from Tautulli.
```
Required parameters:
row_ids (str): Comma separated row ids to delete, e.g. "65,110,2,3645"
Optional parameters:
None
Returns:
None
```
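
As a usage illustration of the new command (the `/api/v2` endpoint shape follows standard Tautulli API conventions; host and API key are placeholders):

```python
import requests

# Hypothetical host and key; "delete_history" and "row_ids" are documented above.
params = {
    "apikey": "YOUR_API_KEY",
    "cmd": "delete_history",
    "row_ids": "65,110,2,3645",
}
resp = requests.get("http://localhost:8181/api/v2", params=params)
print(resp.json())
```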
### delete_hosted_images
Delete the images uploaded to image hosting services.
@@ -146,7 +162,8 @@ Required parameters:
section_id (str): The id of the Plex library section
Optional parameters:
None
server_id (str): The Plex server identifier of the library section
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
@@ -294,7 +311,7 @@ Required parameters:
user_id (str): The id of the Plex user
Optional parameters:
None
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
@@ -655,7 +672,7 @@ Returns:
### get_geoip_lookup
Get the geolocation info for an IP address. The GeoLite2 database must be installed.
Get the geolocation info for an IP address.
```
Required parameters:
@@ -666,7 +683,7 @@ Optional parameters:
Returns:
json:
{"continent": "North America",
{"code": 'US",
"country": "United States",
"region": "California",
"city": "Mountain View",
@@ -676,9 +693,6 @@ Returns:
"longitude": -122.0838,
"accuracy": 1000
}
json:
{"error": "The address 127.0.0.1 is not in the database."
}
```
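
Since the lookup no longer requires a local GeoLite2 database, a lookup is now just a plain API request. A hedged usage sketch (host and key are placeholders; the command name and JSON shape come from the docs above):

```python
import requests

params = {
    "apikey": "YOUR_API_KEY",     # placeholder
    "cmd": "get_geoip_lookup",
    "ip_address": "8.8.8.8",      # any public address
}
geo = requests.get("http://localhost:8181/api/v2", params=params).json()
print(geo)
```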
@@ -726,7 +740,6 @@ Returns:
"group_count": 1,
"group_ids": "1124",
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"id": 1124,
"ip_address": "xxx.xxx.xxx.xxx",
"live": 0,
"media_index": 17,
@@ -742,6 +755,7 @@ Returns:
"player": "Castle-PC",
"rating_key": 4348,
"reference_id": 1123,
"row_id": 1124,
"session_key": null,
"started": 1462688107,
"state": null,
@@ -853,6 +867,7 @@ Returns:
[{"art": "/:/resources/show-fanart.jpg",
"child_count": "3745",
"count": "62",
"is_active": 1,
"parent_count": "240",
"section_id": "2",
"section_name": "TV Shows",
@@ -894,7 +909,8 @@ Returns:
"do_notify_created": "Checked",
"duration": 1578037,
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"id": 1128,
"histroy_row_id": 1128,
"is_active": 1,
"keep_history": "Checked",
"labels": [],
"last_accessed": 1462693216,
@@ -910,9 +926,11 @@ Returns:
"parent_title": "",
"plays": 772,
"rating_key": 153037,
"row_id": 1,
"section_id": 2,
"section_name": "TV Shows",
"section_type": "Show",
"server_id": "ds48g4r354a8v9byrrtr697g3g79w",
"thumb": "/library/metadata/153036/thumb/1462175062",
"year": 2016
},
@@ -940,13 +958,16 @@ Returns:
"deleted_section": 0,
"do_notify": 1,
"do_notify_created": 1,
"is_active": 1,
"keep_history": 1,
"library_art": "/:/resources/movie-fanart.jpg",
"library_thumb": "/:/resources/movie.png",
"parent_count": null,
"row_id": 1,
"section_id": 1,
"section_name": "Movies",
"section_type": "movie"
"section_type": "movie",
"server_id": "ds48g4r354a8v9byrrtr697g3g79w"
}
```
@@ -1066,6 +1087,7 @@ Required parameters:
Optional parameters:
grouping (int): 0 or 1
query_days (str): Comma separated days, e.g. "1,7,30,0"
Returns:
json:
@@ -2222,10 +2244,13 @@ Returns:
"do_notify": 1,
"email": "Jon.Snow.1337@CastleBlack.com",
"friendly_name": "Jon Snow",
"is_active": 1,
"is_admin": 0,
"is_allow_sync": 1,
"is_home_user": 1,
"is_restricted": 0,
"keep_history": 1,
"row_id": 1,
"shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
"user_id": 133788,
"user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
@@ -2378,6 +2403,7 @@ Required parameters:
Optional parameters:
grouping (int): 0 or 1
query_days (str): Comma separated days, e.g. "1,7,30,0"
Returns:
json:
@@ -2421,11 +2447,13 @@ Returns:
"filter_music": "",
"filter_photos": "",
"filter_tv": "",
"is_active": 1,
"is_admin": 0,
"is_allow_sync": 1,
"is_home_user": 1,
"is_restricted": 0,
"keep_history": 1,
"row_id": 1,
"server_token": "PU9cMuQZxJKFBtGqHk68",
"shared_libraries": "1;2;3",
"thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
@@ -2465,8 +2493,9 @@ Returns:
"duration": 2998290,
"friendly_name": "Jon Snow",
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"id": 1121,
"history_row_id": 1121,
"ip_address": "xxx.xxx.xxx.xxx",
"is_active": 1,
"keep_history": "Checked",
"last_played": "Game of Thrones - The Red Woman",
"last_seen": 1462591869,
@@ -2480,6 +2509,7 @@ Returns:
"player": "Plex Web (Chrome)",
"plays": 487,
"rating_key": 153037,
"row_id": 1,
"thumb": "/library/metadata/153036/thumb/1462175062",
"transcode_decision": "transcode",
"user_id": 133788,
@@ -2541,10 +2571,6 @@ Returns:
```
### install_geoip_db
Downloads and installs the GeoLite2 database
### notify
Send a notification using Tautulli.
@@ -2741,7 +2767,7 @@ Returns:
### sql
Query the Tautulli database with raw SQL. Automatically makes a backup of
the database if the latest backup is older than 24h. `api_sql` must be
manually enabled in the config file.
manually enabled in the config file while Tautulli is shut down.
```
Required parameters:
@@ -2821,10 +2847,6 @@ Returns:
```
### uninstall_geoip_db
Uninstalls the GeoLite2 database
### update
Update Tautulli.

View File

@@ -1,5 +1,44 @@
# Changelog
## v2.2.3 (2020-05-01)
* Notifications:
* Fix: Notification grouping by season/album and show/artist not enabled by default.
* Fix: The rating key notification parameter was being overwritten when 3rd party lookup was enabled.
* Fix: Missing artist value for Musicbrainz lookup in certain situations.
* New: Added notification trigger for Tautulli database corruption.
* New: Added TAUTULLI_PYTHON_VERSION to script notification environment variables.
* New: Added Plex Android / iOS App notification agent.
* New: Added bandwidth notification parameters.
* New: Added user thumb to notification parameters.
* New: Added initial stream notification parameter and threshold setting to determine if a stream is the first stream of a continuous streaming session.
* New: Added Plex remote access notification parameters.
* Change: The file size notification parameter is now reported in SI units. (Thanks @aaronldunlap)
* UI:
* Fix: Delete lookup info from the media info page failing.
* Fix: XBMC platform icon not being redirected to the Kodi platform icon.
* Fix: History table was not being refreshed after deleting entries.
* New: Added icon on the users table to indicate if the user is not on the Plex server.
* New: Added icon on the libraries table to indicate if the library is not on the Plex server.
* Change: Improved deleting libraries so libraries with the same section ID are not also deleted.
* Mobile App:
* Fix: Temporary device token was not being invalidated after cancelling device registration.
* API:
* Fix: Returning XML from the API failing due to unicode characters.
* Fix: Grouping parameter for various API commands not falling back to default setting.
* New: Added query_days parameter to the get_library_watch_time_stats and get_user_watch_time_stats API commands. (Thanks @KaasKop97)
* New: Added an "is_active" return value to the get_user, get_users, get_library, and get_libraries API commands which indicates if the user or library is on the Plex server.
* New: Added delete_history API command.
* Change: Added optional parameter for row_ids for delete_library, delete_user, delete_all_library_history, and delete_all_user_history API commands.
* Other:
* Fix: Update failing on CentOS due to an older git version.
* Fix: Manifest file for creating a web app had incorrect info.
* Fix: Auto-updater was not scheduled when enabling the setting unless Tautulli was restarted.
* New: Docker images updated to support ARM platforms.
* Change: Remove the unnecessary optional Plex logs volume from the Docker image.
* Change: Use Plex.tv for GeoIP lookup instead of requiring the MaxMind GeoLite2 database.
## v2.2.1 (2020-03-28)
* Notifications:

View File

@@ -1,9 +1,9 @@
FROM python:2.7.17-slim
FROM tautulli/tautulli-baseimage:latest
LABEL maintainer="TheMeanCanEHdian"
LABEL maintainer="Tautulli"
ARG VERSION
ARG BRANCH
ARG COMMIT
ENV TAUTULLI_DOCKER=True
ENV TZ=UTC
@@ -11,21 +11,13 @@ ENV TZ=UTC
WORKDIR /app
RUN \
apt-get -q -y update --no-install-recommends && \
apt-get install -q -y --no-install-recommends \
curl && \
rm -rf /var/lib/apt/lists/* && \
pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir --upgrade \
pycryptodomex \
pyopenssl && \
echo ${VERSION} > /app/version.txt && \
echo ${BRANCH} > /app/branch.txt
echo ${BRANCH} > /app/branch.txt && \
echo ${COMMIT} > /app/version.txt
COPY . /app
CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
VOLUME /config /plex_logs
VOLUME /config
EXPOSE 8181
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1

View File

@@ -35,8 +35,8 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
| Status | Branch: `master` | Branch: `beta` | Branch: `nightly` |
| --- | --- | --- | --- |
| Release | [![Release@master](https://img.shields.io/github/v/release/Tautulli/Tautulli?style=flat-square)](https://github.com/Tautulli/Tautulli/releases/latest) <br> [![Release Date@master](https://img.shields.io/github/release-date/Tautulli/Tautulli?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/releases/latest) | [![Release@beta](https://img.shields.io/github/v/release/Tautulli/Tautulli?include_prereleases&style=flat-square)](https://github.com/Tautulli/Tautulli/releases) <br> [![Commits@nightly](https://img.shields.io/github/commits-since/Tautulli/Tautulli/latest/beta?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/commits/beta) | [![Last Commits@nightly](https://img.shields.io/github/last-commit/Tautulli/Tautulli/nightly?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/commits/nightly) <br> [![Commits@nightly](https://img.shields.io/github/commits-since/Tautulli/Tautulli/latest/nightly?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/commits/nightly) |
| Docker | [![Docker@master](https://img.shields.io/badge/tautulli-tautulli:latest-blue?style=flat-square)](https://hub.docker.com/r/tautulli/tautulli) <br> [![Docker Build@master](https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker%20Branch/master?style=flat-square)](https://github.com/Tautulli/Tautulli/actions?query=branch%3Amaster) | [![Docker@beta](https://img.shields.io/badge/tautulli-tautulli:beta-blue?style=flat-square)](https://hub.docker.com/r/tautulli/tautulli) <br> [![Docker Build@beta](https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker%20Branch/beta?style=flat-square)](https://github.com/Tautulli/Tautulli/actions?query=branch%3Abeta) | [![Docker@nightly](https://img.shields.io/badge/tautulli-tautulli:nightly-blue?style=flat-square)](https://hub.docker.com/r/tautulli/tautulli) <br> [![Docker Build@nightly](https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker%20Branch/nightly?style=flat-square)](https://github.com/Tautulli/Tautulli/actions?query=branch%3Anightly) |
| Release | [![Release@master](https://img.shields.io/github/v/release/Tautulli/Tautulli?style=flat-square)](https://github.com/Tautulli/Tautulli/releases/latest) <br> [![Release Date@master](https://img.shields.io/github/release-date/Tautulli/Tautulli?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/releases/latest) | [![Release@beta](https://img.shields.io/github/v/release/Tautulli/Tautulli?include_prereleases&style=flat-square)](https://github.com/Tautulli/Tautulli/releases) <br> [![Commits@beta](https://img.shields.io/github/commits-since/Tautulli/Tautulli/latest/beta?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/commits/beta) | [![Last Commits@nightly](https://img.shields.io/github/last-commit/Tautulli/Tautulli/nightly?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/commits/nightly) <br> [![Commits@nightly](https://img.shields.io/github/commits-since/Tautulli/Tautulli/latest/nightly?style=flat-square&color=blue)](https://github.com/Tautulli/Tautulli/commits/nightly) |
| Docker | [![Docker@master](https://img.shields.io/badge/tautulli-tautulli:latest-blue?style=flat-square)](https://hub.docker.com/r/tautulli/tautulli) <br> [![Docker Build@master](https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/master?style=flat-square)](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Amaster) | [![Docker@beta](https://img.shields.io/badge/tautulli-tautulli:beta-blue?style=flat-square)](https://hub.docker.com/r/tautulli/tautulli) <br> [![Docker Build@beta](https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/beta?style=flat-square)](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Abeta) | [![Docker@nightly](https://img.shields.io/badge/tautulli-tautulli:nightly-blue?style=flat-square)](https://hub.docker.com/r/tautulli/tautulli) <br> [![Docker Build@nightly](https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/nightly?style=flat-square)](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Anightly) |
[![Wiki](https://img.shields.io/badge/github-wiki-black?style=flat-square)](https://github.com/Tautulli/Tautulli-Wiki/wiki)
[![Discord](https://img.shields.io/discord/183396325142822912?label=discord&style=flat-square&color=7289DA)](https://tautulli.com/discord)

View File

@@ -711,7 +711,6 @@ fieldset[disabled] .form-control {
box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
}
.users-poster-face {
overflow: hidden;
float: left;
background-size: cover;
background-position: center;
@@ -857,7 +856,6 @@ a .users-poster-face:hover {
z-index: 2;
}
.dashboard-activity-info-platform {
padding: 6px !important;
background-position: center;
background-size: cover;
width: 50px;
@@ -1036,13 +1034,13 @@ a .users-poster-face:hover {
}
.dashboard-activity-container:hover .progress-bar {
color: rgba(255, 255, 255, 1);
background-image: -webkit-linear-gradient(left,rgba(0,0,0,0.25),0%,rgba(0,0,0,0),50px);
background-image: -webkit-linear-gradient(left,rgba(0,0,0,0.25) 0%,rgba(0,0,0,0) 50px);
background-image: -moz-linear-gradient(left,rgba(0,0,0,0.25) 0%,rgba(0,0,0,0) 50px);
background-image: linear-gradient(to left,rgba(0,0,0,0.25) 0%,rgba(0,0,0,0) 50px);
}
.dashboard-activity-container:hover .buffer-bar {
color: rgba(255, 255, 255, 1);
background-image: -webkit-linear-gradient(left,rgba(0,0,0,0.25),0%,rgba(0,0,0,0),50px);
background-image: -webkit-linear-gradient(left,rgba(0,0,0,0.25) 0%,rgba(0,0,0,0) 50px);
background-image: -moz-linear-gradient(left,rgba(0,0,0,0.25) 0%,rgba(0,0,0,0) 50px);
background-image: linear-gradient(to left,rgba(0,0,0,0.25) 0%,rgba(0,0,0,0) 50px);
}
@@ -1742,7 +1740,7 @@ a:hover .dashboard-recent-media-cover {
top: 0;
bottom: 0;
background-image: -webkit-gradient(linear,left 0,left 100%,from(rgba(0,0,0,.7)),to(rgba(0,0,0,.9)));
background-image: -webkit-linear-gradient(top,rgba(0,0,0,.7),0,rgba(0,0,0,.9),100%);
background-image: -webkit-linear-gradient(top,rgba(0,0,0,.7) 0,rgba(0,0,0,.9) 100%);
background-image: -moz-linear-gradient(top,rgba(0,0,0,.7) 0,rgba(0,0,0,.9) 100%);
background-image: linear-gradient(to bottom,rgba(0,0,0,.7) 0,rgba(0,0,0,.9) 100%);
background-repeat: repeat-x;
@@ -3119,6 +3117,21 @@ div.dataTables_info {
font-weight: bold;
border-radius: 2px;
}
.inactive-library-tooltip,
.inactive-user-tooltip {
display: inline-block;
position: relative;
width: 100%;
height: 100%;
}
.inactive-library-tooltip i.fa,
.inactive-user-tooltip i.fa {
color: #E5A00D;
position: absolute;
right: 0;
bottom: 0;
text-shadow: 0 0 2px rgba(0,0,0,.5);
}
.history-thumbnail-popover {
z-index: 2000;
padding: 0;
@@ -3808,9 +3821,8 @@ a:hover .overlay-refresh-image:hover {
}
.svg-icon {
padding: 10px;
background-size: calc(100% - 20px) calc(100% - 20px) !important;
background-origin: content-box !important;
background-size: contain !important;
background-repeat: no-repeat !important;
background-position: center !important;
}
@@ -3920,7 +3932,7 @@ a:hover .overlay-refresh-image:hover {
}
.platform-xbmc {
background-color: #3b4872;
background-image: url(../images/platforms/xbmc.svg);
background-image: url(../images/platforms/kodi.svg);
}
.platform-xbox {
background-color: #107c10;

View File

@@ -143,7 +143,7 @@ DOCUMENTATION :: END
<div id="platform-${sk}" class="dashboard-activity-info-platform${no_terminate} svg-icon platform-${data['platform_name']}" title="${data['platform']}"></div>
% if _session['user_group'] == 'admin' and plexpy.CONFIG.PMS_PLEXPASS and data['session_id']:
<div class="dashboard-activity-terminate-session" id="terminate-button-${sk}" data-key="${sk}" data-id="${data['session_id']}" data-toggle="tooltip" title="Terminate Stream">
<i class="fa fa-times" style="padding-top: 8px;"></i>
<i class="fa fa-times" style="padding-top: 10px;"></i>
</div>
% endif
</div>

View File

@@ -185,20 +185,18 @@
$('#deleteCount').text(history_to_delete.length);
$('#confirm-modal-delete').modal();
$('#confirm-modal-delete').one('click', '#confirm-delete', function () {
history_to_delete.forEach(function(row, idx) {
$.ajax({
url: 'delete_history_rows',
type: 'POST',
data: { row_id: row },
data: { row_ids: history_to_delete.join(',') },
async: true,
success: function (data) {
var msg = "History deleted";
showMsg(msg, false, true, 2000);
history_table.draw();
}
});
});
history_table.draw();
});
}
$('.delete-control').each(function () {

View File

@@ -2,7 +2,7 @@
<browserconfig>
<msapplication>
<tile>
<square150x150logo src="${http_root}images/favicon/mstile-150x150.png?v=2.0.5"/>
<square150x150logo src="mstile-150x150.png?v=2.0.5"/>
<TileColor>#282a2d</TileColor>
</tile>
</msapplication>

View File

@@ -1,18 +1,23 @@
{
"name": "Tautulli",
"name": "Tautulli: Monitor your Plex Media Server",
"short_name": "Tautulli",
"Description": "A Python based monitoring and tracking tool for Plex Media Server.",
"start_url": "../../",
"scope": "../../",
"icons": [
{
"src": "${http_root}images/favicon/android-chrome-192x192.png?v=2.0.5",
"src": "android-chrome-192x192.png?v=2.0.5",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "${http_root}images/favicon/android-chrome-256x256.png?v=2.0.5",
"src": "android-chrome-256x256.png?v=2.0.5",
"sizes": "256x256",
"type": "image/png"
}
],
"theme_color": "#282a2d",
"background_color": "#282a2d",
"display": "standalone"
"display": "standalone",
"orientation": "any"
}

View File

@@ -721,20 +721,18 @@ DOCUMENTATION :: END
$('#deleteCount').text(history_to_delete.length);
$('#confirm-modal-delete').modal();
$('#confirm-modal-delete').one('click', '#confirm-delete', function () {
history_to_delete.forEach(function (row, idx) {
$.ajax({
url: 'delete_history_rows',
type: 'POST',
data: { row_id: row },
data: { row_ids: history_to_delete.join(',') },
async: true,
success: function (data) {
var msg = "History deleted";
showMsg(msg, false, true, 2000);
history_table.draw();
}
});
});
history_table.draw();
});
}
$('.delete-control').each(function () {

View File

@@ -24,7 +24,6 @@
<div id="ip_error" class="col-sm-12 text-muted"></div>
<div class="col-sm-6">
<ul class="list-unstyled">
<li>Continent: <strong><span id="continent"></span></strong></li>
<li>Country: <strong><span id="country"></span></strong></li>
<li>Region: <strong><span id="region"></span></strong></li>
<li>City: <strong><span id="city"></span></strong></li>
@@ -36,7 +35,6 @@
<li>Timezone: <strong><span id="timezone"></span></strong></li>
<li>Latitude: <strong><span id="latitude"></span></strong></li>
<li>Longitude: <strong><span id="longitude"></span></strong></li>
<li>Accuracy Radius: <strong><span id="accuracy"></span></strong></li>
</ul>
</div>
<div class="col-sm-12">
@@ -61,8 +59,6 @@
</div>
</div>
<div class="modal-footer">
<% from plexpy.helpers import anon_url %>
<span class="text-muted">GeoLite2 data created by <a href="${anon_url('http://www.maxmind.com')}" target="_blank">MaxMind</a>.</span>
</div>
</div>
</div>
@@ -82,11 +78,11 @@
error: function () {
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> Internal request failed.').show();
},
success: function (data) {
if ('error' in data) {
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error).show();
success: function (result) {
if (result.result === 'error') {
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + result.message).show();
} else {
$('#continent').html(data.continent);
var data = result.data;
$('#country').html(data.country);
$('#region').html(data.region);
$('#city').html(data.city);
@@ -94,7 +90,6 @@
$('#timezone').html(data.timezone);
$('#latitude').html(data.latitude);
$('#longitude').html(data.longitude);
$('#accuracy').html(data.accuracy + ' km');
}
}
});

View File

@@ -461,8 +461,9 @@ $('*').on('click', '.refresh_pms_image', function (e) {
});
// Taken from http://stackoverflow.com/questions/10420352/converting-file-size-in-bytes-to-human-readable#answer-14919494
function humanFileSize(bytes, si) {
var thresh = si ? 1000 : 1024;
function humanFileSize(bytes, si = true) {
//var thresh = si ? 1000 : 1024;
var thresh = 1024; // Always divide by 2^10 but display SI units
if (Math.abs(bytes) < thresh) {
return bytes + ' B';
}
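
With this change a 1,048,576-byte file reports as "1.0 MB": the divisor stays 2^10 while the labels are SI. A Python restatement of the patched behavior, for illustration only:

```python
def human_file_size(num_bytes):
    """Mirror the patched humanFileSize: divide by 2**10 per step,
    but label the result with SI units."""
    thresh = 1024
    if abs(num_bytes) < thresh:
        return "%d B" % num_bytes
    units = ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
    u = -1
    size = float(num_bytes)
    while abs(size) >= thresh and u < len(units) - 1:
        size /= thresh
        u += 1
    return "%.1f %s" % (size, units[u])
```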

View File

@@ -36,10 +36,10 @@ history_table_options = {
"targets": [0],
"data": null,
"createdCell": function (td, cellData, rowData, row, col) {
if (rowData['id'] === null) {
if (rowData['row_id'] === null) {
$(td).html('');
} else {
$(td).html('<button class="btn btn-xs btn-warning" data-id="' + rowData['id'] + '"><i class="fa fa-trash-o fa-fw"></i> Delete</button>');
$(td).html('<button class="btn btn-xs btn-warning" data-id="' + rowData['row_id'] + '"><i class="fa fa-trash-o fa-fw"></i> Delete</button>');
}
},
"width": "5%",
@@ -317,19 +317,19 @@ history_table_options = {
"rowCallback": function (row, rowData, rowIndex) {
if (rowData['group_count'] == 1) {
// if no grouped rows simply toggle the delete button
if ($.inArray(rowData['id'], history_to_delete) !== -1) {
$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
if ($.inArray(rowData['row_id'], history_to_delete) !== -1) {
$(row).find('button[data-id="' + rowData['row_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
}
} else if (rowData['id'] !== null) {
} else if (rowData['row_id'] !== null) {
// if grouped rows
// toggle the parent button to danger
$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
$(row).find('button[data-id="' + rowData['row_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
// check if any child rows are not selected
var group_ids = rowData['group_ids'].split(',').map(Number);
group_ids.forEach(function (id) {
var index = $.inArray(id, history_to_delete);
if (index == -1) {
$(row).find('button[data-id="' + rowData['id'] + '"]').addClass('btn-warning').removeClass('btn-danger');
$(row).find('button[data-id="' + rowData['row_id'] + '"]').addClass('btn-warning').removeClass('btn-danger');
}
});
}
@@ -353,7 +353,7 @@ $('.history_table').on('click', '> tbody > tr > td.modal-control', function () {
var rowData = row.data();
$.get('get_stream_data', {
row_id: rowData['id'],
row_id: rowData['row_id'],
session_key: rowData['session_key'],
user: rowData['friendly_name']
}).then(function (jqXHR) {
@@ -382,9 +382,9 @@ $('.history_table').on('click', '> tbody > tr > td.delete-control > button', fun
if (rowData['group_count'] == 1) {
// if no grouped rows simply add or remove row from history_to_delete
var index = $.inArray(rowData['id'], history_to_delete);
var index = $.inArray(rowData['row_id'], history_to_delete);
if (index === -1) {
history_to_delete.push(rowData['id']);
history_to_delete.push(rowData['row_id']);
} else {
history_to_delete.splice(index, 1);
}
@@ -549,7 +549,7 @@ function createChildTable(row, rowData) {
var childRowData = childRow.data();
$.get('get_stream_data', {
row_id: childRowData['id'],
row_id: childRowData['row_id'],
user: childRowData['friendly_name']
}).then(function (jqXHR) {
$("#info-modal").html(jqXHR);
@@ -576,9 +576,9 @@ function createChildTable(row, rowData) {
var childRowData = childRow.data();
// add or remove row from history_to_delete
var index = $.inArray(childRowData['id'], history_to_delete);
var index = $.inArray(childRowData['row_id'], history_to_delete);
if (index === -1) {
history_to_delete.push(childRowData['id']);
history_to_delete.push(childRowData['row_id']);
} else {
history_to_delete.splice(index, 1);
}

View File

@@ -169,7 +169,7 @@ $('.history_table').on('click', 'td.modal-control', function () {
function showStreamDetails() {
$.ajax({
url: 'get_stream_data',
data: { row_id: rowData['id'], user: rowData['friendly_name'] },
data: { row_id: rowData['row_id'], user: rowData['friendly_name'] },
cache: false,
async: true,
complete: function (xhr, status) {

View File

@@ -27,8 +27,8 @@ libraries_list_table_options = {
"data": null,
"createdCell": function (td, cellData, rowData, row, col) {
$(td).html('<div class="edit-library-toggles">' +
'<button class="btn btn-xs btn-warning delete-library" data-id="' + rowData['section_id'] + '" data-toggle="button"><i class="fa fa-trash-o fa-fw"></i> Delete</button>&nbsp' +
'<button class="btn btn-xs btn-warning purge-library" data-id="' + rowData['section_id'] + '" data-toggle="button"><i class="fa fa-eraser fa-fw"></i> Purge</button>&nbsp&nbsp&nbsp' +
'<button class="btn btn-xs btn-warning delete-library" data-id="' + rowData['row_id'] + '" data-toggle="button"><i class="fa fa-trash-o fa-fw"></i> Delete</button>&nbsp' +
'<button class="btn btn-xs btn-warning purge-library" data-id="' + rowData['row_id'] + '" data-toggle="button"><i class="fa fa-eraser fa-fw"></i> Purge</button>&nbsp&nbsp&nbsp' +
'<input type="checkbox" id="keep_history-' + rowData['section_id'] + '" name="keep_history" value="1" ' + rowData['keep_history'] + '><label class="edit-tooltip" for="keep_history-' + rowData['section_id'] + '" data-toggle="tooltip" title="Toggle History"><i class="fa fa-history fa-lg fa-fw"></i></label>&nbsp' +
'</div>');
},
@@ -41,14 +41,16 @@ libraries_list_table_options = {
"targets": [1],
"data": "library_thumb",
"createdCell": function (td, cellData, rowData, row, col) {
var inactive = '';
if (!rowData['is_active']) { inactive = '<span class="inactive-library-tooltip" data-toggle="tooltip" title="Library not on Plex server"><i class="fa fa-exclamation-triangle"></i></span>'; }
if (cellData !== null && cellData !== '') {
if (rowData['library_thumb'].substring(0, 4) == "http") {
$(td).html('<a href="library?section_id=' + rowData['section_id'] + '"><div class="libraries-poster-face" style="background-image: url(' + rowData['library_thumb'] + ');"></div></a>');
$(td).html('<a href="' + page('library', rowData['section_id']) + '"><div class="libraries-poster-face" style="background-image: url(' + rowData['library_thumb'] + ');">' + inactive + '</div></a>');
} else {
$(td).html('<a href="library?section_id=' + rowData['section_id'] + '"><div class="libraries-poster-face svg-icon library-' + rowData['section_type'] + '"></div></a>');
$(td).html('<a href="' + page('library', rowData['section_id']) + '"><div class="libraries-poster-face svg-icon library-' + rowData['section_type'] + '">' + inactive + '</div></a>');
}
} else {
$(td).html('<a href="library?section_id=' + rowData['section_id'] + '"><div class="libraries-poster-face" style="background-image: url(../../images/cover.png);"></div></a>');
$(td).html('<a href="' + page('library', rowData['section_id']) + '"><div class="libraries-poster-face" style="background-image: url(../../images/cover.png);">' + inactive + '</div></a>');
}
},
"orderable": false,
@@ -61,8 +63,8 @@ libraries_list_table_options = {
"data": "section_name",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
$(td).html('<div data-id="' + rowData['section_id'] + '">' +
'<a href="library?section_id=' + rowData['section_id'] + '">' + cellData + '</a>' +
$(td).html('<div data-id="' + rowData['row_id'] + '">' +
'<a href="' + page('library', rowData['section_id']) + '">' + cellData + '</a>' +
'</div>');
} else {
$(td).html('n/a');
@@ -232,11 +234,11 @@ libraries_list_table_options = {
showMsg(msg, false, false, 0)
},
"rowCallback": function (row, rowData) {
if ($.inArray(rowData['section_id'], libraries_to_delete) !== -1) {
$(row).find('button.delete-library[data-id="' + rowData['section_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
if ($.inArray(rowData['row_id'], libraries_to_delete) !== -1) {
$(row).find('button.delete-library[data-id="' + rowData['row_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
}
if ($.inArray(rowData['section_id'], libraries_to_purge) !== -1) {
$(row).find('button.purge-library[data-id="' + rowData['section_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
if ($.inArray(rowData['row_id'], libraries_to_purge) !== -1) {
$(row).find('button.purge-library[data-id="' + rowData['row_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
}
}
}
@@ -277,11 +279,11 @@ $('#libraries_list_table').on('click', 'td.edit-control > .edit-library-toggles
var row = libraries_list_table.row(tr);
var rowData = row.data();
var index_delete = $.inArray(rowData['section_id'], libraries_to_delete);
var index_purge = $.inArray(rowData['section_id'], libraries_to_purge);
var index_delete = $.inArray(rowData['row_id'], libraries_to_delete);
var index_purge = $.inArray(rowData['row_id'], libraries_to_purge);
if (index_delete === -1) {
libraries_to_delete.push(rowData['section_id']);
libraries_to_delete.push(rowData['row_id']);
if (index_purge === -1) {
tr.find('button.purge-library').click();
}
@@ -300,11 +302,11 @@ $('#libraries_list_table').on('click', 'td.edit-control > .edit-library-toggles
var row = libraries_list_table.row(tr);
var rowData = row.data();
var index_delete = $.inArray(rowData['section_id'], libraries_to_delete);
var index_purge = $.inArray(rowData['section_id'], libraries_to_purge);
var index_delete = $.inArray(rowData['row_id'], libraries_to_delete);
var index_purge = $.inArray(rowData['row_id'], libraries_to_purge);
if (index_purge === -1) {
libraries_to_purge.push(rowData['section_id']);
libraries_to_purge.push(rowData['row_id']);
} else {
libraries_to_purge.splice(index_purge, 1);
if (index_delete != -1) {

View File

@@ -167,7 +167,7 @@ $('.user_ip_table').on('click', 'td.modal-control', function () {
function showStreamDetails() {
$.ajax({
url: 'get_stream_data',
data: { row_id: rowData['id'], user: rowData['friendly_name'] },
data: { row_id: rowData['history_row_id'], user: rowData['friendly_name'] },
cache: false,
async: true,
complete: function (xhr, status) {

View File

@@ -44,8 +44,8 @@ users_list_table_options = {
"data": null,
"createdCell": function (td, cellData, rowData, row, col) {
$(td).html('<div class="edit-user-toggles">' +
'<button class="btn btn-xs btn-warning delete-user" data-id="' + rowData['user_id'] + '" data-toggle="button"><i class="fa fa-trash-o fa-fw"></i> Delete</button>&nbsp' +
'<button class="btn btn-xs btn-warning purge-user" data-id="' + rowData['user_id'] + '" data-toggle="button"><i class="fa fa-eraser fa-fw"></i> Purge</button>&nbsp&nbsp&nbsp' +
'<button class="btn btn-xs btn-warning delete-user" data-id="' + rowData['row_id'] + '" data-toggle="button"><i class="fa fa-trash-o fa-fw"></i> Delete</button>&nbsp' +
'<button class="btn btn-xs btn-warning purge-user" data-id="' + rowData['row_id'] + '" data-toggle="button"><i class="fa fa-eraser fa-fw"></i> Purge</button>&nbsp&nbsp&nbsp' +
'<input type="checkbox" id="keep_history-' + rowData['user_id'] + '" name="keep_history" value="1" ' + rowData['keep_history'] + '><label class="edit-tooltip" for="keep_history-' + rowData['user_id'] + '" data-toggle="tooltip" title="Toggle History"><i class="fa fa-history fa-lg fa-fw"></i></label>&nbsp' +
'<input type="checkbox" id="allow_guest-' + rowData['user_id'] + '" name="allow_guest" value="1" ' + rowData['allow_guest'] + '><label class="edit-tooltip" for="allow_guest-' + rowData['user_id'] + '" data-toggle="tooltip" title="Toggle Guest Access"><i class="fa fa-unlock-alt fa-lg fa-fw"></i></label>&nbsp' +
'</div>');
@@ -59,10 +59,12 @@ users_list_table_options = {
"targets": [1],
"data": "user_thumb",
"createdCell": function (td, cellData, rowData, row, col) {
var inactive = '';
if (!rowData['is_active']) { inactive = '<span class="inactive-user-tooltip" data-toggle="tooltip" title="User not on Plex server"><i class="fa fa-exclamation-triangle"></i></span>'; }
if (cellData === '') {
$(td).html('<a href="' + page('user', rowData['user_id']) + '"><div class="users-poster-face" style="background-image: url(../../images/gravatar-default-80x80.png);"></div></a>');
$(td).html('<a href="' + page('user', rowData['user_id']) + '"><div class="users-poster-face" style="background-image: url(../../images/gravatar-default-80x80.png);">' + inactive + '</div></a>');
} else {
$(td).html('<a href="' + page('user', rowData['user_id']) + '"><div class="users-poster-face" style="background-image: url(' + rowData['user_thumb'] + ');"></div></a>');
$(td).html('<a href="' + page('user', rowData['user_id']) + '"><div class="users-poster-face" style="background-image: url(' + rowData['user_thumb'] + ');">' + inactive + '</div></a>');
}
},
"orderable": false,
@@ -75,7 +77,7 @@ users_list_table_options = {
"data": "friendly_name",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
$(td).html('<div class="edit-user-name" data-id="' + rowData['user_id'] + '">' +
$(td).html('<div class="edit-user-name" data-id="' + rowData['row_id'] + '">' +
'<a href="' + page('user', rowData['user_id']) + '">' + cellData + '</a>' +
'<input type="text" class="hidden" value="' + cellData + '">' +
'</div>');
@@ -254,10 +256,10 @@ users_list_table_options = {
},
"rowCallback": function (row, rowData) {
if ($.inArray(rowData['user_id'], users_to_delete) !== -1) {
$(row).find('button.delete-user[data-id="' + rowData['user_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
$(row).find('button.delete-user[data-id="' + rowData['row_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
}
if ($.inArray(rowData['user_id'], users_to_purge) !== -1) {
$(row).find('button.purge-user[data-id="' + rowData['user_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
$(row).find('button.purge-user[data-id="' + rowData['row_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
}
}
}
@@ -268,7 +270,7 @@ $('#users_list_table').on('click', 'td.modal-control', function () {
var rowData = row.data();
$.get('get_stream_data', {
row_id: rowData['id'],
row_id: rowData['history_row_id'],
user: rowData['friendly_name']
}).then(function (jqXHR) {
$("#info-modal").html(jqXHR);
@@ -326,11 +328,11 @@ $('#users_list_table').on('click', 'td.edit-control > .edit-user-toggles > butto
var row = users_list_table.row(tr);
var rowData = row.data();
var index_delete = $.inArray(rowData['user_id'], users_to_delete);
var index_purge = $.inArray(rowData['user_id'], users_to_purge);
var index_delete = $.inArray(rowData['row_id'], users_to_delete);
var index_purge = $.inArray(rowData['row_id'], users_to_purge);
if (index_delete === -1) {
users_to_delete.push(rowData['user_id']);
users_to_delete.push(rowData['row_id']);
if (index_purge === -1) {
tr.find('button.purge-user').click();
}
@@ -349,11 +351,11 @@ $('#users_list_table').on('click', 'td.edit-control > .edit-user-toggles > butto
var row = users_list_table.row(tr);
var rowData = row.data();
var index_delete = $.inArray(rowData['user_id'], users_to_delete);
var index_purge = $.inArray(rowData['user_id'], users_to_purge);
var index_delete = $.inArray(rowData['row_id'], users_to_delete);
var index_purge = $.inArray(rowData['row_id'], users_to_purge);
if (index_purge === -1) {
users_to_purge.push(rowData['user_id']);
users_to_purge.push(rowData['row_id']);
} else {
users_to_purge.splice(index_purge, 1);
if (index_delete != -1) {

View File

@@ -116,14 +116,14 @@
});
if (libraries_to_delete.length > 0) {
$('#libraries-to-delete').prepend('<p>Are you REALLY sure you want to delete the following libraries:</p>')
$('#libraries-to-delete').prepend('<p>Are you REALLY sure you want to delete the following libraries:</p>');
for (var i = 0; i < libraries_to_delete.length; i++) {
$('#libraries-to-delete').append('<li>' + $('div[data-id=' + libraries_to_delete[i] + ']').text() + '</li>');
}
}
if (libraries_to_purge.length > 0) {
$('#libraries-to-purge').prepend('<p>Are you REALLY sure you want to purge all history for the following libraries:</p>')
$('#libraries-to-purge').prepend('<p>Are you REALLY sure you want to purge all history for the following libraries:</p>');
for (var i = 0; i < libraries_to_purge.length; i++) {
$('#libraries-to-purge').append('<li>' + $('div[data-id=' + libraries_to_purge[i] + ']').text() + '</li>');
}
@@ -131,33 +131,30 @@
$('#confirm-modal-delete').modal();
$('#confirm-modal-delete').one('click', '#confirm-delete', function () {
libraries_to_delete.forEach(function(row, idx) {
$.ajax({
url: 'delete_library',
type: 'POST',
data: { section_id: row },
cache: false,
async: true,
success: function (data) {
var msg = "Library deleted";
showMsg(msg, false, true, 2000);
}
});
});
libraries_to_purge.forEach(function(row, idx) {
$.ajax({
url: 'delete_all_library_history',
type: 'POST',
data: { section_id: row },
data: { row_ids: libraries_to_purge.join(',') },
cache: false,
async: true,
success: function (data) {
var msg = "Library history purged";
showMsg(msg, false, true, 2000);
libraries_list_table.draw();
}
});
});
$.ajax({
url: 'delete_library',
type: 'POST',
data: { row_ids: libraries_to_delete.join(',') },
cache: false,
async: true,
success: function (data) {
var msg = "Library deleted";
showMsg(msg, false, true, 2000);
libraries_list_table.draw();
}
});
});
}
@@ -188,7 +185,7 @@
complete: function (xhr, status) {
var result = $.parseJSON(xhr.responseText);
var msg = result.message;
if (result.result == 'success') {
if (result.result === 'success') {
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 2000, false);
libraries_list_table.draw();
} else {

View File

@@ -62,9 +62,21 @@ DOCUMENTATION :: END
<div class="table-card-back">
<div class="user-info-wrapper">
% if data['library_thumb'].startswith('http'):
<div class="library-info-poster-face" style="background-image: url(${page('pms_image_proxy', data['library_thumb'], None, 80, 80)});"></div>
<div class="library-info-poster-face" style="background-image: url(${page('pms_image_proxy', data['library_thumb'], None, 80, 80)});">
% if not data['is_active']:
<span class="inactive-library-tooltip" data-toggle="tooltip" title="Library not on Plex server">
<i class="fa fa-2x fa-exclamation-triangle"></i>
</span>
% endif
</div>
% else:
<div class="library-info-poster-face svg-icon library-${data['section_type']}"></div>
<div class="library-info-poster-face svg-icon library-${data['section_type']}">
% if not data['is_active']:
<span class="inactive-library-tooltip" data-toggle="tooltip" title="Library not on Plex server">
<i class="fa fa-2x fa-exclamation-triangle"></i>
</span>
% endif
</div>
% endif
<div class="user-info-username">
<span class="set-username">${data['section_name']}</span>
@@ -411,6 +423,8 @@ DOCUMENTATION :: END
history_table.draw();
});
$(".inactive-library-tooltip").tooltip();
% if _session['user_group'] == 'admin':
function loadMediaInfoTable() {
// Build media info table
@@ -471,20 +485,18 @@ DOCUMENTATION :: END
$('#deleteCount').text(history_to_delete.length);
$('#confirm-modal-delete').modal();
$('#confirm-modal-delete').one('click', '#confirm-delete', function () {
history_to_delete.forEach(function(row, idx) {
$.ajax({
url: 'delete_history_rows',
type: 'POST',
data: { row_id: row },
data: { row_ids: history_to_delete.join(',') },
async: true,
success: function (data) {
var msg = "History deleted";
showMsg(msg, false, true, 2000);
history_table.draw();
}
});
});
history_table.draw();
});
}
$('.delete-control').each(function () {

View File

@@ -1,9 +1,9 @@
% if notifier:
<%!
<%
import json
from plexpy import notifiers, users
from plexpy.helpers import checked
available_notification_actions = notifiers.available_notification_actions()
available_notification_actions = notifiers.available_notification_actions(agent_id=notifier['agent_id'])
user_emails = [{'user': u['friendly_name'] or u['username'], 'email': u['email']} for u in users.Users().get_users() if u['email']]
sorted(user_emails, key=lambda u: u['user'])
@@ -25,7 +25,7 @@
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Arguments</a></li>
% elif notifier['agent_name'] == 'webhook':
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Data</a></li>
% else:
% elif notifier['agent_name'] != 'plexmobileapp':
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Text</a></li>
% endif
<li role="presentation"><a href="#tabs-test_notifications" aria-controls="tabs-test_notifications" role="tab" data-toggle="tab">Test Notifications</a></li>
@@ -684,6 +684,15 @@
pushoverPriority();
});
% elif notifier['agent_name'] == 'plexmobileapp':
var $plexmobileapp_user_ids = $('#plexmobileapp_user_ids').selectize({
plugins: ['remove_button'],
maxItems: null,
create: true
});
var plexmobileapp_user_ids = $plexmobileapp_user_ids[0].selectize;
plexmobileapp_user_ids.setValue(${json.dumps(next((c['value'] for c in notifier['config_options'] if c['name'] == 'plexmobileapp_user_ids'), [])) | n});
% endif
function validateLogic() {

View File

@@ -850,6 +850,28 @@
<span id="remoteAccessCheck" class="settings-warning"></span>
<p class="help-block">Enable to have Tautulli check if remote access to the Plex Media Server goes down.</p>
</div>
<div id="monitor_remote_access_options">
<div class="form-group advanced-setting">
<label for="remote_access_ping_interval">Remote Access Ping Interval</label>
<div class="row">
<div class="col-md-2">
<input type="text" class="form-control" data-parsley-type="integer" id="remote_access_ping_interval" name="remote_access_ping_interval" value="${config['remote_access_ping_interval']}" size="5" data-parsley-min="60" data-parsley-trigger="change" data-parsley-errors-container="#remote_access_ping_interval_error" required>
</div>
<div id="remote_access_ping_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
</div>
<p class="help-block">The interval (in seconds) Tautulli will ping the Plex Media Server for the remote access status. Minimum 60.</p>
</div>
<div class="form-group advanced-setting">
<label for="remote_access_ping_threshold">Remote Access Ping Threshold</label>
<div class="row">
<div class="col-md-2">
<input type="text" class="form-control" data-parsley-type="integer" id="remote_access_ping_threshold" name="remote_access_ping_threshold" value="${config['remote_access_ping_threshold']}" size="5" data-parsley-min="1" data-parsley-trigger="change" data-parsley-errors-container="#remote_access_ping_threshold_error" required>
</div>
<div id="remote_access_ping_threshold_error" class="alert alert-danger settings-alert" role="alert"></div>
</div>
<p class="help-block">The number of consecutive remote access status failures to consider remote access as down. Minimum 1.</p>
</div>
</div>
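The two advanced settings added above drive the remote access monitor: how often to ping the Plex Media Server (minimum 60 seconds) and how many consecutive failures count as down (minimum 1). A minimal sketch of that loop, assuming hypothetical is_remote_access_up() and on_remote_access_down() callables that are not part of the code shown here:

import time

def monitor_remote_access(is_remote_access_up, on_remote_access_down,
                          ping_interval=60, ping_threshold=3):
    failures = 0
    while True:
        if is_remote_access_up():
            failures = 0                    # any success resets the count
        else:
            failures += 1
            if failures == ping_threshold:  # notify once, not on every failed ping
                on_remote_access_down()
        time.sleep(ping_interval)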
<div class="form-group advanced-setting">
<label for="refresh_users_interval">Users List Refresh Interval</label>
@@ -941,7 +963,7 @@
</div>
<div id="buffer_wait_error" class="alert alert-danger settings-alert" role="alert"></div>
</div>
<p class="help-block">The value (in seconds) Tautulli should wait before triggering the next buffer warning. 0 to always trigger.</p>
<p class="help-block">The value (in seconds) Tautulli should wait before triggering the next buffer warning. Set to 0 to always trigger.</p>
</div>
<div class="checkbox advanced-setting">
<label>
@@ -965,6 +987,20 @@
</div>
<p class="help-block">The number of concurrent streams by a single user for Tautulli to trigger a notification. Minimum 2.</p>
</div>
<div class="form-group advanced-setting">
<label for="notify_concurrent_threshold">Continued Session Threshold</label>
<div class="row">
<div class="col-md-2">
<input type="text" class="form-control" data-parsley-type="integer" id="notify_continued_session_threshold" name="notify_continued_session_threshold" value="${config['notify_continued_session_threshold']}" data-parsley-min="0" data-parsley-trigger="change" data-parsley-errors-container="#notify_continued_session_threshold_error" required>
</div>
<div id="notify_continued_session_threshold_error" class="alert alert-danger settings-alert" role="alert"></div>
</div>
<p class="help-block">
The maximum number of seconds between one stream stopping and a new stream starting for the new stream to be considered a continued session. Set to 0 to consider all streams as new sessions.
<br>
Note: The threshold is only used by the "Initial Stream" notification parameter to determine if a stream is the first stream of a continuous streaming session.
</p>
</div>
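This threshold feeds the "Initial Stream" notification parameter: a stream counts as a continuation when it starts within the configured number of seconds of the previous stream stopping. A small sketch of that comparison, assuming Unix timestamps (the function name and signature are illustrative, not Tautulli's):

def is_continued_session(prev_stopped_at, new_started_at, threshold):
    if threshold <= 0:
        return False                 # 0 disables grouping: every stream is new
    return (new_started_at - prev_stopped_at) <= threshold

assert is_continued_session(1000, 1050, 60) is True    # resumed within a minute
assert is_continued_session(1000, 1500, 60) is False   # gap too long: new session
assert is_continued_session(1000, 1010, 0) is False    # feature disabled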
<div class="padded-header">
<h3>Recently Added Notifications</h3>
@@ -1254,7 +1290,7 @@
<p class="help-block">Enable to lookup links to MusicBrainz for music when available.</p>
</div>
<div class="form-group">
<label for="maxmind_license_key">Delete Lookup Info</label>
<label for="delete_lookup_info">Delete Lookup Info</label>
<p class="help-block">Delete all cached metadata lookup info in Tautulli.</p>
<div class="row">
<div class="col-md-9">
@@ -1267,54 +1303,6 @@
</div>
</div>
<div class="padded-header">
<h3>Geolocation Database</h3>
</div>
<p class="help-block">The GeoLite2 database is used to geolocate IP addresses.</p>
<p class="help-block">
Please see the <a target='_blank' href='${anon_url('https://github.com/%s/%s-Wiki/wiki/3rd-Party-APIs-Guide' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO))}'>3rd Party APIs Guide</a> for instructions on setting up MaxMind.<br>
</p>
<div class="form-group">
<label for="maxmind_license_key">MaxMind License Key</label>
<div class="row">
<div class="col-md-6">
<input type="text" class="form-control" id="maxmind_license_key" name="maxmind_license_key" value="${config['maxmind_license_key']}" data-parsley-trigger="change">
</div>
</div>
<p class="help-block">
Enter your MaxMind License Key to install the GeoLite2 database.
</p>
</div>
<div class="form-group">
<label for="geoip_db">GeoLite2 Database File</label> ${docker_msg | n}
<div class="row">
<div class="col-md-9">
<div class="input-group">
<input type="text" class="form-control" id="geoip_db" name="geoip_db" value="${config['geoip_db']}" ${docker_setting} data-parsley-trigger="change" data-parsley-pattern=".+\.mmdb$" data-parsley-errors-container="#geoip_db_error" data-parsley-error-message="Must end with '.mmdb'">
<span class="input-group-btn">
<button class="btn btn-form" type="button" id="install_geoip_db">${'Update' if config["geoip_db_installed"] else 'Install'}</button>
<button class="btn btn-form" type="button" id="uninstall_geoip_db" ${'disabled' if not config['geoip_db_installed'] else ''}>Uninstall</button>
</span>
</div>
</div>
<div id="geoip_db_error" class="alert alert-danger settings-alert" role="alert"></div>
</div>
<p class="help-block">
Leave blank to install in the default location. GeoLite2 database last updated <strong><span id="geoip_db_updated">never</span></strong>.
</p>
</div>
<div class="form-group advanced-setting">
<label for="geoip_db_update_days">GeoLite2 Database Update Interval</label>
<div class="row">
<div class="col-md-2">
<input type="text" class="form-control" data-parsley-type="integer" id="geoip_db_update_days" name="geoip_db_update_days" value="${config['geoip_db_update_days']}" size="5" data-parsley-range="[7, 30]" data-parsley-trigger="change" data-parsley-errors-container="#geoip_db_update_days_error" required>
</div>
<div id="geoip_db_update_days_error" class="alert alert-danger settings-alert" role="alert"></div>
</div>
<p class="help-block">The interval (in days) Tautulli will automatically update the GeoLite2 database. Minimum 7, maximum 30, default 30.</p>
</div>
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
</div>
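The whole block above is removed because geolocation lookups now go through Plex.tv rather than a locally installed GeoLite2 database, so there is no license key, database file, or update interval left to configure. A rough sketch of that style of lookup; the endpoint shape and parameter names here are assumptions for illustration, not verified Plex.tv API documentation:

import requests

def plextv_geoip_lookup(ip_address, plex_token):
    # Assumed endpoint and parameter names -- illustrative only.
    r = requests.get('https://plex.tv/api/v2/geoip',
                     params={'ip_address': ip_address},
                     headers={'X-Plex-Token': plex_token,
                              'Accept': 'application/json'},
                     timeout=10)
    r.raise_for_status()
    return r.json()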
@@ -2077,6 +2065,7 @@ $(document).ready(function() {
initConfigCheckbox('#https_create_cert');
initConfigCheckbox('#check_github');
initConfigCheckbox('#monitor_pms_updates');
initConfigCheckbox('#monitor_remote_access');
initConfigCheckbox('#newsletter_self_hosted');
$('#menu_link_shutdown').click(function() {
@@ -2928,56 +2917,6 @@ $(document).ready(function() {
$('#resources-xml').on('tripleclick', function () {
openPlexXML('/api/resources', true, {includeHttps: 1});
});
if ("${kwargs.get('install_geoip')}" === 'true') {
gotoSetting('3rd_party_apis', 'geoip_db')
}
if ("${config['geoip_db_installed']}" > "0") {
$("#geoip_db_updated").text(moment("${config['geoip_db_installed']}", "X").fromNow());
}
$("#install_geoip_db").click(function () {
var maxmind_license_key = $("#maxmind_license_key");
maxmind_license_key.val($.trim(maxmind_license_key.val()));
if (maxmind_license_key.val() === "") {
maxmind_license_key.focus();
showMsg('<i class="fa fa-exclamation-circle"></i> Maxmind License Key is required.', false, true, 5000, true);
return false;
} else if (!(saveSettings())) {
return false;
}
var msg = 'Are you sure you want to install the GeoLite2 database?<br /><br />' +
'The database is used to lookup IP address geolocation info.<br />' +
'The database will be downloaded from <a href="${anon_url("https://dev.maxmind.com/geoip/geoip2/geolite2/")}" target="_blank">MaxMind</a>, <br />' +
'and requires <strong>100MB</strong> of free space to install.<br />';
var url = 'install_geoip_db';
if ($(this).text() === 'Update') {
url += '?update=true';
}
confirmAjaxCall(url, msg, null, 'Installing GeoLite2 database.', function (result) {
if (result.result === "success") {
$('#install_geoip_db').text('Update');
$('#uninstall_geoip_db').prop('disabled', false);
$('#geoip_db_updated').text(moment(result.updated, "X").fromNow());
}
getSchedulerTable();
});
});
$("#uninstall_geoip_db").click(function () {
var msg = 'Are you sure you want to uninstall the GeoLite2 database?<br /><br />' +
'You will not be able to lookup IP address geolocation info.';
var url = 'uninstall_geoip_db';
confirmAjaxCall(url, msg, null, 'Uninstalling GeoLite2 database.', function (result) {
if (result.result === "success") {
$('#install_geoip_db').text('Install');
$('#uninstall_geoip_db').prop('disabled', true);
$('#geoip_db_updated').text('never');
}
getSchedulerTable();
});
});
});
</script>
</%def>

View File

@@ -51,7 +51,13 @@ DOCUMENTATION :: END
<div class="col-md-12">
<div class="table-card-back">
<div class="user-info-wrapper">
<div class="user-info-poster-face" style="background-image: url(${data['user_thumb']});"></div>
<div class="user-info-poster-face" style="background-image: url(${data['user_thumb']});">
% if not data['is_active']:
<span class="inactive-user-tooltip" data-toggle="tooltip" title="User not on Plex server">
<i class="fa fa-2x fa-exclamation-triangle"></i>
</span>
% endif
</div>
<div class="user-info-username">
<span class="set-username">${data['friendly_name']}</span>
% if _session['user_group'] == 'admin':
@@ -540,6 +546,8 @@ DOCUMENTATION :: END
login_log_table.draw();
});
$(".inactive-user-tooltip").tooltip();
% if _session['user_group'] == 'admin':
$("#edit-user-tooltip").tooltip();
@@ -566,20 +574,18 @@ DOCUMENTATION :: END
$('#deleteType').text('history');
$('#confirm-modal-delete').modal();
$('#confirm-modal-delete').one('click', '#confirm-delete', function () {
-history_to_delete.forEach(function(row, idx) {
$.ajax({
url: 'delete_history_rows',
type: 'POST',
-data: { row_id: row },
+data: { row_ids: history_to_delete.join(',') },
async: true,
success: function (data) {
var msg = "History deleted";
showMsg(msg, false, true, 2000);
+history_table.draw();
}
});
-});
-history_table.draw();
});
}
$('.history_table .delete-control').each(function () {

View File

@@ -119,14 +119,14 @@
});
if (users_to_delete.length > 0) {
-$('#users-to-delete').prepend('<p>Are you REALLY sure you want to delete and purge all history for the following users:</p>')
+$('#users-to-delete').prepend('<p>Are you REALLY sure you want to delete and purge all history for the following users:</p>');
for (var i = 0; i < users_to_delete.length; i++) {
$('#users-to-delete').append('<li>' + $('div[data-id=' + users_to_delete[i] + '] > input').val() + '</li>');
}
}
if (users_to_purge.length > 0) {
-$('#users-to-purge').prepend('<p>Are you REALLY sure you want to purge all history for the following users:</p>')
+$('#users-to-purge').prepend('<p>Are you REALLY sure you want to purge all history for the following users:</p>');
for (var i = 0; i < users_to_purge.length; i++) {
$('#users-to-purge').append('<li>' + $('div[data-id=' + users_to_purge[i] + '] > input').val() + '</li>');
}
@@ -134,33 +134,30 @@
$('#confirm-modal-delete').modal();
$('#confirm-modal-delete').one('click', '#confirm-delete', function () {
-users_to_delete.forEach(function(row, idx) {
-$.ajax({
-url: 'delete_user',
-type: 'POST',
-data: { user_id: row },
-cache: false,
-async: true,
-success: function (data) {
-var msg = "User deleted";
-showMsg(msg, false, true, 2000);
-}
-});
-});
-users_to_purge.forEach(function(row, idx) {
$.ajax({
url: 'delete_all_user_history',
type: 'POST',
-data: { user_id: row },
+data: { row_ids: users_to_purge.join(',') },
cache: false,
async: true,
success: function (data) {
var msg = "User history purged";
showMsg(msg, false, true, 2000);
+users_list_table.draw();
}
});
-});
+$.ajax({
+url: 'delete_user',
+type: 'POST',
+data: { row_ids: users_to_delete.join(',') },
+cache: false,
+async: true,
+success: function (data) {
+var msg = "User deleted";
+showMsg(msg, false, true, 2000);
+users_list_table.draw();
+}
+});
});
}
@@ -192,7 +189,7 @@
complete: function (xhr, status) {
var result = $.parseJSON(xhr.responseText);
var msg = result.message;
-if (result.result == 'success') {
+if (result.result === 'success') {
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 2000, false);
users_list_table.draw();
} else {

View File

@@ -224,6 +224,8 @@
<input type="checkbox" name="check_github" id="check_github" value="1" checked>
<input type="checkbox" name="log_blacklist" id="log_blacklist" value="1" checked>
<input type="checkbox" name="cache_images" id="cache_images" value="1" checked>
<input type="checkbox" name="notify_group_recently_added_grandparent" id="notify_group_recently_added_grandparent" value="1" checked>
<input type="checkbox" name="notify_group_recently_added_parent" id="notify_group_recently_added_parent" value="1" checked>
<input type="checkbox" name="server_changed" id="server_changed" value="1" checked>
<input type="checkbox" name="first_run_complete" id="first_run_complete" value="1" checked>
<input type="text" name="home_stats_cards" id="home_stats_cards" value="first_run_wizard">

View File

@@ -1,7 +0,0 @@
# pylint:disable=C0111
__title__ = 'geoip2'
__version__ = '2.4.0'
__author__ = 'Gregory Oschwald'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright (c) 2013-2016 Maxmind, Inc.'

View File

@@ -1,17 +0,0 @@
"""Intended for internal use only."""
import sys
import ipaddress
# pylint: skip-file
if sys.version_info[0] == 2:
def compat_ip_address(address):
"""Intended for internal use only."""
if isinstance(address, bytes):
address = address.decode()
return ipaddress.ip_address(address)
else:
def compat_ip_address(address):
"""Intended for internal use only."""
return ipaddress.ip_address(address)

View File

@@ -1,199 +0,0 @@
"""
======================
GeoIP2 Database Reader
======================
"""
import inspect
import maxminddb
# pylint: disable=unused-import
from maxminddb import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
MODE_MEMORY)
import geoip2
import geoip2.models
import geoip2.errors
class Reader(object):
"""GeoIP2 database Reader object.
Instances of this class provide a reader for the GeoIP2 database format.
IP addresses can be looked up using the ``country`` and ``city`` methods.
The basic API for this class is the same for every database. First, you
create a reader object, specifying a file name. You then call the method
corresponding to the specific database, passing it the IP address you want
to look up.
If the request succeeds, the method call will return a model class for the
method you called. This model in turn contains multiple record classes,
each of which represents part of the data returned by the database. If the
database does not contain the requested information, the attributes on the
record class will have a ``None`` value.
If the address is not in the database, an
``geoip2.errors.AddressNotFoundError`` exception will be thrown. If the
database is corrupt or invalid, a ``maxminddb.InvalidDatabaseError`` will
be thrown.
"""
def __init__(self, filename, locales=None, mode=MODE_AUTO):
"""Create GeoIP2 Reader.
:param filename: The path to the GeoIP2 database.
:param locales: This is list of locale codes. This argument will be
passed on to record classes to use when their name properties are
called. The default value is ['en'].
The order of the locales is significant. When a record class has
multiple names (country, city, etc.), its name property will return
the name in the first locale that has one.
Note that the only locale which is always present in the GeoIP2
data is "en". If you do not include this locale, the name property
may end up returning None even when the record has an English name.
Currently, the valid locale codes are:
* de -- German
* en -- English names may still include accented characters if that
is the accepted spelling in English. In other words, English does
not mean ASCII.
* es -- Spanish
* fr -- French
* ja -- Japanese
* pt-BR -- Brazilian Portuguese
* ru -- Russian
* zh-CN -- Simplified Chinese.
:param mode: The mode to open the database with. Valid modes are:
* MODE_MMAP_EXT - use the C extension with memory map.
* MODE_MMAP - read from memory map. Pure Python.
* MODE_FILE - read database as standard file. Pure Python.
* MODE_MEMORY - load database into memory. Pure Python.
* MODE_AUTO - try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
Default.
"""
if locales is None:
locales = ['en']
self._db_reader = maxminddb.open_database(filename, mode)
self._locales = locales
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def country(self, ip_address):
"""Get the Country object for the IP address.
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.Country` object
"""
return self._model_for(geoip2.models.Country, 'Country', ip_address)
def city(self, ip_address):
"""Get the City object for the IP address.
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.City` object
"""
return self._model_for(geoip2.models.City, 'City', ip_address)
def anonymous_ip(self, ip_address):
"""Get the AnonymousIP object for the IP address.
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.AnonymousIP` object
"""
return self._flat_model_for(geoip2.models.AnonymousIP,
'GeoIP2-Anonymous-IP', ip_address)
def connection_type(self, ip_address):
"""Get the ConnectionType object for the IP address.
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.ConnectionType` object
"""
return self._flat_model_for(geoip2.models.ConnectionType,
'GeoIP2-Connection-Type', ip_address)
def domain(self, ip_address):
"""Get the Domain object for the IP address.
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.Domain` object
"""
return self._flat_model_for(geoip2.models.Domain, 'GeoIP2-Domain',
ip_address)
def enterprise(self, ip_address):
"""Get the Enterprise object for the IP address.
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.Enterprise` object
"""
return self._model_for(geoip2.models.Enterprise, 'Enterprise',
ip_address)
def isp(self, ip_address):
"""Get the ISP object for the IP address.
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.ISP` object
"""
return self._flat_model_for(geoip2.models.ISP, 'GeoIP2-ISP',
ip_address)
def _get(self, database_type, ip_address):
if database_type not in self.metadata().database_type:
caller = inspect.stack()[2][3]
raise TypeError("The %s method cannot be used with the "
"%s database" %
(caller, self.metadata().database_type))
record = self._db_reader.get(ip_address)
if record is None:
raise geoip2.errors.AddressNotFoundError(
"The address %s is not in the database." % ip_address)
return record
def _model_for(self, model_class, types, ip_address):
record = self._get(types, ip_address)
record.setdefault('traits', {})['ip_address'] = ip_address
return model_class(record, locales=self._locales)
def _flat_model_for(self, model_class, types, ip_address):
record = self._get(types, ip_address)
record['ip_address'] = ip_address
return model_class(record)
def metadata(self):
"""The metadata for the open database.
:returns: :py:class:`maxminddb.reader.Metadata` object
"""
return self._db_reader.metadata()
def close(self):
"""Closes the GeoIP2 database."""
self._db_reader.close()
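With the bundled reader deleted, a short reminder of the API it exposed may help anyone migrating scripts off it; this follows the docstrings above, with an illustrative database path and IP address:

import geoip2.database   # the library removed in this changeset

reader = geoip2.database.Reader('/path/to/GeoLite2-City.mmdb')
try:
    response = reader.city('203.0.113.5')   # raises AddressNotFoundError for unknown IPs
    print(response.country.iso_code, response.city.name)
finally:
    reader.close()   # Reader also works as a context manager, per __enter__/__exit__ above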

View File

@@ -1,51 +0,0 @@
"""
Errors
======
"""
class GeoIP2Error(RuntimeError):
"""There was a generic error in GeoIP2.
This class represents a generic error. It extends :py:exc:`RuntimeError`
and does not add any additional attributes.
"""
class AddressNotFoundError(GeoIP2Error):
"""The address you were looking up was not found."""
class AuthenticationError(GeoIP2Error):
"""There was a problem authenticating the request."""
class HTTPError(GeoIP2Error):
"""There was an error when making your HTTP request.
This class represents an HTTP transport error. It extends
:py:exc:`GeoIP2Error` and adds attributes of its own.
:ivar http_status: The HTTP status code returned
:ivar uri: The URI queried
"""
def __init__(self, message, http_status=None, uri=None):
super(HTTPError, self).__init__(message)
self.http_status = http_status
self.uri = uri
class InvalidRequestError(GeoIP2Error):
"""The request was invalid."""
class OutOfQueriesError(GeoIP2Error):
"""Your account is out of funds for the service queried."""
class PermissionRequiredError(GeoIP2Error):
"""Your account does not have permission to access this service."""

View File

@@ -1,16 +0,0 @@
"""This package contains utility mixins"""
# pylint: disable=too-few-public-methods
from abc import ABCMeta
class SimpleEquality(object):
"""Naive __dict__ equality mixin"""
__metaclass__ = ABCMeta
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)

View File

@@ -1,472 +0,0 @@
"""
Models
======
These classes provide models for the data returned by the GeoIP2
web service and databases.
The only difference between the City and Insights model classes is which
fields in each record may be populated. See
http://dev.maxmind.com/geoip/geoip2/web-services for more details.
"""
# pylint: disable=too-many-instance-attributes,too-few-public-methods
from abc import ABCMeta
import geoip2.records
from geoip2.mixins import SimpleEquality
class Country(SimpleEquality):
"""Model for the GeoIP2 Precision: Country and the GeoIP2 Country database.
This class provides the following attributes:
.. attribute:: continent
Continent object for the requested IP address.
:type: :py:class:`geoip2.records.Continent`
.. attribute:: country
Country object for the requested IP address. This record represents the
country where MaxMind believes the IP is located.
:type: :py:class:`geoip2.records.Country`
.. attribute:: maxmind
Information related to your MaxMind account.
:type: :py:class:`geoip2.records.MaxMind`
.. attribute:: registered_country
The registered country object for the requested IP address. This record
represents the country where the ISP has registered a given IP block in
and may differ from the user's country.
:type: :py:class:`geoip2.records.Country`
.. attribute:: represented_country
Object for the country represented by the users of the IP address
when that country is different than the country in ``country``. For
instance, the country represented by an overseas military base.
:type: :py:class:`geoip2.records.RepresentedCountry`
.. attribute:: traits
Object with the traits of the requested IP address.
:type: :py:class:`geoip2.records.Traits`
"""
def __init__(self, raw_response, locales=None):
if locales is None:
locales = ['en']
self._locales = locales
self.continent = \
geoip2.records.Continent(locales,
**raw_response.get('continent', {}))
self.country = \
geoip2.records.Country(locales,
**raw_response.get('country', {}))
self.registered_country = \
geoip2.records.Country(locales,
**raw_response.get('registered_country',
{}))
self.represented_country \
= geoip2.records.RepresentedCountry(locales,
**raw_response.get(
'represented_country', {}))
self.maxmind = \
geoip2.records.MaxMind(**raw_response.get('maxmind', {}))
self.traits = geoip2.records.Traits(**raw_response.get('traits', {}))
self.raw = raw_response
def __repr__(self):
return '{module}.{class_name}({data}, {locales})'.format(
module=self.__module__,
class_name=self.__class__.__name__,
data=self.raw,
locales=self._locales)
class City(Country):
"""Model for the GeoIP2 Precision: City and the GeoIP2 City database.
.. attribute:: city
City object for the requested IP address.
:type: :py:class:`geoip2.records.City`
.. attribute:: continent
Continent object for the requested IP address.
:type: :py:class:`geoip2.records.Continent`
.. attribute:: country
Country object for the requested IP address. This record represents the
country where MaxMind believes the IP is located.
:type: :py:class:`geoip2.records.Country`
.. attribute:: location
Location object for the requested IP address.
.. attribute:: maxmind
Information related to your MaxMind account.
:type: :py:class:`geoip2.records.MaxMind`
.. attribute:: registered_country
The registered country object for the requested IP address. This record
represents the country where the ISP has registered a given IP block in
and may differ from the user's country.
:type: :py:class:`geoip2.records.Country`
.. attribute:: represented_country
Object for the country represented by the users of the IP address
when that country is different than the country in ``country``. For
instance, the country represented by an overseas military base.
:type: :py:class:`geoip2.records.RepresentedCountry`
.. attribute:: subdivisions
Object (tuple) representing the subdivisions of the country to which
the location of the requested IP address belongs.
:type: :py:class:`geoip2.records.Subdivisions`
.. attribute:: traits
Object with the traits of the requested IP address.
:type: :py:class:`geoip2.records.Traits`
"""
def __init__(self, raw_response, locales=None):
super(City, self).__init__(raw_response, locales)
self.city = \
geoip2.records.City(locales, **raw_response.get('city', {}))
self.location = \
geoip2.records.Location(**raw_response.get('location', {}))
self.postal = \
geoip2.records.Postal(**raw_response.get('postal', {}))
self.subdivisions = \
geoip2.records.Subdivisions(locales,
*raw_response.get('subdivisions', []))
class Insights(City):
"""Model for the GeoIP2 Precision: Insights web service endpoint.
.. attribute:: city
City object for the requested IP address.
:type: :py:class:`geoip2.records.City`
.. attribute:: continent
Continent object for the requested IP address.
:type: :py:class:`geoip2.records.Continent`
.. attribute:: country
Country object for the requested IP address. This record represents the
country where MaxMind believes the IP is located.
:type: :py:class:`geoip2.records.Country`
.. attribute:: location
Location object for the requested IP address.
.. attribute:: maxmind
Information related to your MaxMind account.
:type: :py:class:`geoip2.records.MaxMind`
.. attribute:: registered_country
The registered country object for the requested IP address. This record
represents the country where the ISP has registered a given IP block in
and may differ from the user's country.
:type: :py:class:`geoip2.records.Country`
.. attribute:: represented_country
Object for the country represented by the users of the IP address
when that country is different than the country in ``country``. For
instance, the country represented by an overseas military base.
:type: :py:class:`geoip2.records.RepresentedCountry`
.. attribute:: subdivisions
Object (tuple) representing the subdivisions of the country to which
the location of the requested IP address belongs.
:type: :py:class:`geoip2.records.Subdivisions`
.. attribute:: traits
Object with the traits of the requested IP address.
:type: :py:class:`geoip2.records.Traits`
"""
class Enterprise(City):
"""Model for the GeoIP2 Enterprise database.
.. attribute:: city
City object for the requested IP address.
:type: :py:class:`geoip2.records.City`
.. attribute:: continent
Continent object for the requested IP address.
:type: :py:class:`geoip2.records.Continent`
.. attribute:: country
Country object for the requested IP address. This record represents the
country where MaxMind believes the IP is located.
:type: :py:class:`geoip2.records.Country`
.. attribute:: location
Location object for the requested IP address.
.. attribute:: maxmind
Information related to your MaxMind account.
:type: :py:class:`geoip2.records.MaxMind`
.. attribute:: registered_country
The registered country object for the requested IP address. This record
represents the country where the ISP has registered a given IP block in
and may differ from the user's country.
:type: :py:class:`geoip2.records.Country`
.. attribute:: represented_country
Object for the country represented by the users of the IP address
when that country is different than the country in ``country``. For
instance, the country represented by an overseas military base.
:type: :py:class:`geoip2.records.RepresentedCountry`
.. attribute:: subdivisions
Object (tuple) representing the subdivisions of the country to which
the location of the requested IP address belongs.
:type: :py:class:`geoip2.records.Subdivisions`
.. attribute:: traits
Object with the traits of the requested IP address.
:type: :py:class:`geoip2.records.Traits`
"""
class SimpleModel(SimpleEquality):
"""Provides basic methods for non-location models"""
__metaclass__ = ABCMeta
def __repr__(self):
# pylint: disable=no-member
return '{module}.{class_name}({data})'.format(
module=self.__module__,
class_name=self.__class__.__name__,
data=str(self.raw))
class AnonymousIP(SimpleModel):
"""Model class for the GeoIP2 Anonymous IP.
This class provides the following attribute:
.. attribute:: is_anonymous
This is true if the IP address belongs to any sort of anonymous network.
:type: bool
.. attribute:: is_anonymous_vpn
This is true if the IP address belongs to an anonymous VPN system.
:type: bool
.. attribute:: is_hosting_provider
This is true if the IP address belongs to a hosting provider.
:type: bool
.. attribute:: is_public_proxy
This is true if the IP address belongs to a public proxy.
:type: bool
.. attribute:: is_tor_exit_node
This is true if the IP address is a Tor exit node.
:type: bool
.. attribute:: ip_address
The IP address used in the lookup.
:type: unicode
"""
def __init__(self, raw):
self.is_anonymous = raw.get('is_anonymous', False)
self.is_anonymous_vpn = raw.get('is_anonymous_vpn', False)
self.is_hosting_provider = raw.get('is_hosting_provider', False)
self.is_public_proxy = raw.get('is_public_proxy', False)
self.is_tor_exit_node = raw.get('is_tor_exit_node', False)
self.ip_address = raw.get('ip_address')
self.raw = raw
class ConnectionType(SimpleModel):
"""Model class for the GeoIP2 Connection-Type.
This class provides the following attribute:
.. attribute:: connection_type
The connection type may take the following values:
- Dialup
- Cable/DSL
- Corporate
- Cellular
Additional values may be added in the future.
:type: unicode
.. attribute:: ip_address
The IP address used in the lookup.
:type: unicode
"""
def __init__(self, raw):
self.connection_type = raw.get('connection_type')
self.ip_address = raw.get('ip_address')
self.raw = raw
class Domain(SimpleModel):
"""Model class for the GeoIP2 Domain.
This class provides the following attribute:
.. attribute:: domain
The domain associated with the IP address.
:type: unicode
.. attribute:: ip_address
The IP address used in the lookup.
:type: unicode
"""
def __init__(self, raw):
self.domain = raw.get('domain')
self.ip_address = raw.get('ip_address')
self.raw = raw
class ISP(SimpleModel):
"""Model class for the GeoIP2 ISP.
This class provides the following attribute:
.. attribute:: autonomous_system_number
The autonomous system number associated with the IP address.
:type: int
.. attribute:: autonomous_system_organization
The organization associated with the registered autonomous system number
for the IP address.
:type: unicode
.. attribute:: isp
The name of the ISP associated with the IP address.
:type: unicode
.. attribute:: organization
The name of the organization associated with the IP address.
:type: unicode
.. attribute:: ip_address
The IP address used in the lookup.
:type: unicode
"""
# pylint:disable=too-many-arguments
def __init__(self, raw):
self.autonomous_system_number = raw.get('autonomous_system_number')
self.autonomous_system_organization = raw.get(
'autonomous_system_organization')
self.isp = raw.get('isp')
self.organization = raw.get('organization')
self.ip_address = raw.get('ip_address')
self.raw = raw

View File

@@ -1,605 +0,0 @@
"""
Records
=======
"""
# pylint:disable=R0903
from abc import ABCMeta
from geoip2.mixins import SimpleEquality
class Record(SimpleEquality):
"""All records are subclasses of the abstract class ``Record``."""
__metaclass__ = ABCMeta
_valid_attributes = set()
def __init__(self, **kwargs):
valid_args = dict((k, kwargs.get(k)) for k in self._valid_attributes)
self.__dict__.update(valid_args)
def __setattr__(self, name, value):
raise AttributeError("can't set attribute")
def __repr__(self):
args = ', '.join('%s=%r' % x for x in self.__dict__.items())
return '{module}.{class_name}({data})'.format(
module=self.__module__,
class_name=self.__class__.__name__,
data=args)
class PlaceRecord(Record):
"""All records with :py:attr:`names` subclass :py:class:`PlaceRecord`."""
__metaclass__ = ABCMeta
def __init__(self, locales=None, **kwargs):
if locales is None:
locales = ['en']
if kwargs.get('names') is None:
kwargs['names'] = {}
object.__setattr__(self, '_locales', locales)
super(PlaceRecord, self).__init__(**kwargs)
@property
def name(self):
"""Dict with locale codes as keys and localized name as value."""
# pylint:disable=E1101
return next(
(self.names.get(x) for x in self._locales
if x in self.names), None)
class City(PlaceRecord):
"""Contains data for the city record associated with an IP address.
This class contains the city-level data associated with an IP address.
This record is returned by ``city``, ``enterprise``, and ``insights``.
Attributes:
.. attribute:: confidence
A value from 0-100 indicating MaxMind's
confidence that the city is correct. This attribute is only available
from the Insights end point and the GeoIP2 Enterprise database.
:type: int
.. attribute:: geoname_id
The GeoName ID for the city.
:type: int
.. attribute:: name
The name of the city based on the locales list passed to the
constructor.
:type: unicode
.. attribute:: names
A dictionary where the keys are locale codes
and the values are names.
:type: dict
"""
_valid_attributes = set(['confidence', 'geoname_id', 'names'])
class Continent(PlaceRecord):
"""Contains data for the continent record associated with an IP address.
This class contains the continent-level data associated with an IP
address.
Attributes:
.. attribute:: code
A two character continent code like "NA" (North America)
or "OC" (Oceania).
:type: unicode
.. attribute:: geoname_id
The GeoName ID for the continent.
:type: int
.. attribute:: name
Returns the name of the continent based on the locales list passed to
the constructor.
:type: unicode
.. attribute:: names
A dictionary where the keys are locale codes
and the values are names.
:type: dict
"""
_valid_attributes = set(['code', 'geoname_id', 'names'])
class Country(PlaceRecord):
"""Contains data for the country record associated with an IP address.
This class contains the country-level data associated with an IP address.
Attributes:
.. attribute:: confidence
A value from 0-100 indicating MaxMind's confidence that
the country is correct. This attribute is only available from the
Insights end point and the GeoIP2 Enterprise database.
:type: int
.. attribute:: geoname_id
The GeoName ID for the country.
:type: int
.. attribute:: iso_code
The two-character `ISO 3166-1
<http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the
country.
:type: unicode
.. attribute:: name
The name of the country based on the locales list passed to the
constructor.
:type: unicode
.. attribute:: names
A dictionary where the keys are locale codes and the values
are names.
:type: dict
"""
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
class RepresentedCountry(Country):
"""Contains data for the represented country associated with an IP address.
This class contains the country-level data associated with an IP address
for the IP's represented country. The represented country is the country
represented by something like a military base.
Attributes:
.. attribute:: confidence
A value from 0-100 indicating MaxMind's confidence that
the country is correct. This attribute is only available from the
Insights end point and the GeoIP2 Enterprise database.
:type: int
.. attribute:: geoname_id
The GeoName ID for the country.
:type: int
.. attribute:: iso_code
The two-character `ISO 3166-1
<http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the country.
:type: unicode
.. attribute:: name
The name of the country based on the locales list passed to the
constructor.
:type: unicode
.. attribute:: names
A dictionary where the keys are locale codes and the values
are names.
:type: dict
.. attribute:: type
A string indicating the type of entity that is representing the
country. Currently we only return ``military`` but this could expand to
include other types in the future.
:type: unicode
"""
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names',
'type'])
class Location(Record):
"""Contains data for the location record associated with an IP address.
This class contains the location data associated with an IP address.
This record is returned by ``city``, ``enterprise``, and ``insights``.
Attributes:
.. attribute:: average_income
The average income in US dollars associated with the requested IP
address. This attribute is only available from the Insights end point.
:type: int
.. attribute:: accuracy_radius
The radius in kilometers around the specified location where the IP
address is likely to be.
:type: int
.. attribute:: latitude
The approximate latitude of the location associated with the IP
address. This value is not precise and should not be used to identify a
particular address or household.
:type: float
.. attribute:: longitude
The approximate longitude of the location associated with the IP
address. This value is not precise and should not be used to identify a
particular address or household.
:type: float
.. attribute:: metro_code
The metro code of the location if the
location is in the US. MaxMind returns the same metro codes as the
`Google AdWords API
<https://developers.google.com/adwords/api/docs/appendix/cities-DMAregions>`_.
:type: int
.. attribute:: population_density
The estimated population per square kilometer associated with the IP
address. This attribute is only available from the Insights end point.
:type: int
.. attribute:: time_zone
The time zone associated with location, as specified by the `IANA Time
Zone Database <http://www.iana.org/time-zones>`_, e.g.,
"America/New_York".
:type: unicode
"""
_valid_attributes = set(['average_income', 'accuracy_radius', 'latitude',
'longitude', 'metro_code', 'population_density',
'postal_code', 'postal_confidence', 'time_zone'])
class MaxMind(Record):
"""Contains data related to your MaxMind account.
Attributes:
.. attribute:: queries_remaining
The number of remaining queries you have
for the end point you are calling.
:type: int
"""
_valid_attributes = set(['queries_remaining'])
class Postal(Record):
"""Contains data for the postal record associated with an IP address.
This class contains the postal data associated with an IP address.
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
Attributes:
.. attribute:: code
The postal code of the location. Postal
codes are not available for all countries. In some countries, this will
only contain part of the postal code.
:type: unicode
.. attribute:: confidence
A value from 0-100 indicating
MaxMind's confidence that the postal code is correct. This attribute is
only available from the Insights end point and the GeoIP2 Enterprise
database.
:type: int
"""
_valid_attributes = set(['code', 'confidence'])
class Subdivision(PlaceRecord):
"""Contains data for the subdivisions associated with an IP address.
This class contains the subdivision data associated with an IP address.
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
Attributes:
.. attribute:: confidence
This is a value from 0-100 indicating MaxMind's
confidence that the subdivision is correct. This attribute is only
available from the Insights end point and the GeoIP2 Enterprise
database.
:type: int
.. attribute:: geoname_id
This is a GeoName ID for the subdivision.
:type: int
.. attribute:: iso_code
This is a string up to three characters long
containing the subdivision portion of the `ISO 3166-2 code
<http://en.wikipedia.org/wiki/ISO_3166-2>`_.
:type: unicode
.. attribute:: name
The name of the subdivision based on the locales list passed to the
constructor.
:type: unicode
.. attribute:: names
A dictionary where the keys are locale codes and the
values are names
:type: dict
"""
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
class Subdivisions(tuple):
"""A tuple-like collection of subdivisions associated with an IP address.
This class contains the subdivisions of the country associated with the
IP address from largest to smallest.
For instance, the response for Oxford in the United Kingdom would have
England as the first element and Oxfordshire as the second element.
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
"""
def __new__(cls, locales, *subdivisions):
subdivisions = [Subdivision(locales, **x) for x in subdivisions]
obj = super(cls, Subdivisions).__new__(cls, subdivisions)
return obj
def __init__(self, locales, *subdivisions): # pylint:disable=W0613
self._locales = locales
super(Subdivisions, self).__init__()
@property
def most_specific(self):
"""The most specific (smallest) subdivision available.
If there are no :py:class:`Subdivision` objects for the response,
this returns an empty :py:class:`Subdivision`.
:type: :py:class:`Subdivision`
"""
try:
return self[-1]
except IndexError:
return Subdivision(self._locales)
class Traits(Record):
"""Contains data for the traits record associated with an IP address.
This class contains the traits data associated with an IP address.
This class has the following attributes:
.. attribute:: autonomous_system_number
The `autonomous system
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_
associated with the IP address. This attribute is only available from
the City and Insights web service end points and the GeoIP2 Enterprise
database.
:type: int
.. attribute:: autonomous_system_organization
The organization associated with the registered `autonomous system
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_ for
the IP address. This attribute is only available from the City and
Insights web service end points and the GeoIP2 Enterprise database.
:type: unicode
.. attribute:: connection_type
The connection type may take the following values:
- Dialup
- Cable/DSL
- Corporate
- Cellular
Additional values may be added in the future.
This attribute is only available in the GeoIP2 Enterprise database.
:type: unicode
.. attribute:: domain
The second level domain associated with the
IP address. This will be something like "example.com" or
"example.co.uk", not "foo.example.com". This attribute is only available
from the City and Insights web service end points and the GeoIP2
Enterprise database.
:type: unicode
.. attribute:: ip_address
The IP address that the data in the model
is for. If you performed a "me" lookup against the web service, this
will be the externally routable IP address for the system the code is
running on. If the system is behind a NAT, this may differ from the IP
address locally assigned to it.
:type: unicode
.. attribute:: is_anonymous_proxy
This is true if the IP is an anonymous
proxy. See http://dev.maxmind.com/faq/geoip#anonproxy for further
details.
:type: bool
.. deprecated:: 2.2.0
Use our `GeoIP2 Anonymous IP database
<https://www.maxmind.com/en/geoip2-anonymous-ip-database GeoIP2>`_
instead.
.. attribute:: is_legitimate_proxy
This attribute is true if MaxMind believes this IP address to be a
legitimate proxy, such as an internal VPN used by a corporation. This
attribute is only available in the GeoIP2 Enterprise database.
:type: bool
.. attribute:: is_satellite_provider
This is true if the IP address is from a satellite provider that
provides service to multiple countries.
:type: bool
.. deprecated:: 2.2.0
Due to the increased coverage by mobile carriers, very few
satellite providers now serve multiple countries. As a result, the
output does not provide sufficiently relevant data for us to maintain
it.
.. attribute:: isp
The name of the ISP associated with the IP address. This attribute is
only available from the City and Insights web service end points and the
GeoIP2 Enterprise database.
:type: unicode
.. attribute:: organization
The name of the organization associated with the IP address. This
attribute is only available from the City and Insights web service end
points and the GeoIP2 Enterprise database.
:type: unicode
.. attribute:: user_type
The user type associated with the IP
address. This can be one of the following values:
* business
* cafe
* cellular
* college
* content_delivery_network
* dialup
* government
* hosting
* library
* military
* residential
* router
* school
* search_engine_spider
* traveler
This attribute is only available from the Insights end point and the
GeoIP2 Enterprise database.
:type: unicode
"""
_valid_attributes = set(
['autonomous_system_number', 'autonomous_system_organization',
'connection_type', 'domain', 'is_anonymous_proxy',
'is_legitimate_proxy', 'is_satellite_provider', 'isp', 'ip_address',
'organization', 'user_type'])
def __init__(self, **kwargs):
for k in ['is_anonymous_proxy', 'is_legitimate_proxy',
'is_satellite_provider']:
kwargs[k] = bool(kwargs.get(k, False))
super(Traits, self).__init__(**kwargs)

View File

@@ -1,219 +0,0 @@
"""
============================
WebServices Client API
============================
This class provides a client API for all the GeoIP2 Precision web service end
points. The end points are Country, City, and Insights. Each end point returns
a different set of data about an IP address, with Country returning the least
data and Insights the most.
Each web service end point is represented by a different model class, and
these model classes in turn contain multiple record classes. The record
classes have attributes which contain data about the IP address.
If the web service does not return a particular piece of data for an IP
address, the associated attribute is not populated.
The web service may not return any information for an entire record, in which
case all of the attributes for that record class will be empty.
SSL
---
Requests to the GeoIP2 Precision web service are always made with SSL.
"""
import requests
from requests.utils import default_user_agent
import geoip2
import geoip2.models
from .compat import compat_ip_address
from .errors import (AddressNotFoundError, AuthenticationError, GeoIP2Error,
HTTPError, InvalidRequestError, OutOfQueriesError,
PermissionRequiredError)
class Client(object):
"""Creates a new client object.
It accepts the following required arguments:
:param user_id: Your MaxMind User ID.
:param license_key: Your MaxMind license key.
Go to https://www.maxmind.com/en/my_license_key to see your MaxMind
User ID and license key.
The following keyword arguments are also accepted:
:param host: The hostname to make a request against. This defaults to
"geoip.maxmind.com". In most cases, you should not need to set this
explicitly.
:param locales: This is list of locale codes. This argument will be
passed on to record classes to use when their name properties are
called. The default value is ['en'].
The order of the locales is significant. When a record class has
multiple names (country, city, etc.), its name property will return
the name in the first locale that has one.
Note that the only locale which is always present in the GeoIP2
data is "en". If you do not include this locale, the name property
may end up returning None even when the record has an English name.
Currently, the valid locale codes are:
* de -- German
* en -- English names may still include accented characters if that is
the accepted spelling in English. In other words, English does not
mean ASCII.
* es -- Spanish
* fr -- French
* ja -- Japanese
* pt-BR -- Brazilian Portuguese
* ru -- Russian
* zh-CN -- Simplified Chinese.
"""
def __init__(self,
user_id,
license_key,
host='geoip.maxmind.com',
locales=None,
timeout=None):
"""Construct a Client."""
# pylint: disable=too-many-arguments
if locales is None:
locales = ['en']
self._locales = locales
self._user_id = user_id
self._license_key = license_key
self._base_uri = 'https://%s/geoip/v2.1' % host
self._timeout = timeout
def city(self, ip_address='me'):
"""Call GeoIP2 Precision City endpoint with the specified IP.
:param ip_address: IPv4 or IPv6 address as a string. If no
address is provided, the address that the web service is
called from will be used.
:returns: :py:class:`geoip2.models.City` object
"""
return self._response_for('city', geoip2.models.City, ip_address)
def country(self, ip_address='me'):
"""Call the GeoIP2 Country endpoint with the specified IP.
:param ip_address: IPv4 or IPv6 address as a string. If no address
is provided, the address that the web service is called from will
be used.
:returns: :py:class:`geoip2.models.Country` object
"""
return self._response_for('country', geoip2.models.Country, ip_address)
def insights(self, ip_address='me'):
"""Call the GeoIP2 Precision: Insights endpoint with the specified IP.
:param ip_address: IPv4 or IPv6 address as a string. If no address
is provided, the address that the web service is called from will
be used.
:returns: :py:class:`geoip2.models.Insights` object
"""
return self._response_for('insights', geoip2.models.Insights,
ip_address)
def _response_for(self, path, model_class, ip_address):
if ip_address != 'me':
ip_address = str(compat_ip_address(ip_address))
uri = '/'.join([self._base_uri, path, ip_address])
response = requests.get(uri,
auth=(self._user_id, self._license_key),
headers={'Accept': 'application/json',
'User-Agent': self._user_agent()},
timeout=self._timeout)
if response.status_code == 200:
body = self._handle_success(response, uri)
return model_class(body, locales=self._locales)
else:
self._handle_error(response, uri)
def _user_agent(self):
return 'GeoIP2 Python Client v%s (%s)' % (geoip2.__version__,
default_user_agent())
def _handle_success(self, response, uri):
try:
return response.json()
except ValueError as ex:
raise GeoIP2Error('Received a 200 response for %(uri)s'
' but could not decode the response as '
'JSON: ' % locals() + ', '.join(ex.args), 200,
uri)
def _handle_error(self, response, uri):
status = response.status_code
if 400 <= status < 500:
self._handle_4xx_status(response, status, uri)
elif 500 <= status < 600:
self._handle_5xx_status(status, uri)
else:
self._handle_non_200_status(status, uri)
def _handle_4xx_status(self, response, status, uri):
if not response.content:
raise HTTPError('Received a %(status)i error for %(uri)s '
'with no body.' % locals(), status, uri)
elif response.headers['Content-Type'].find('json') == -1:
raise HTTPError('Received a %i for %s with the following '
'body: %s' % (status, uri, response.content),
status, uri)
try:
body = response.json()
except ValueError as ex:
raise HTTPError(
'Received a %(status)i error for %(uri)s but it did'
' not include the expected JSON body: ' % locals() +
', '.join(ex.args), status, uri)
else:
if 'code' in body and 'error' in body:
self._handle_web_service_error(
body.get('error'), body.get('code'), status, uri)
else:
raise HTTPError(
'Response contains JSON but it does not specify '
'code or error keys', status, uri)
def _handle_web_service_error(self, message, code, status, uri):
if code in ('IP_ADDRESS_NOT_FOUND', 'IP_ADDRESS_RESERVED'):
raise AddressNotFoundError(message)
elif code in ('AUTHORIZATION_INVALID', 'LICENSE_KEY_REQUIRED',
'USER_ID_REQUIRED', 'USER_ID_UNKNOWN'):
raise AuthenticationError(message)
elif code in ('INSUFFICIENT_FUNDS', 'OUT_OF_QUERIES'):
raise OutOfQueriesError(message)
elif code == 'PERMISSION_REQUIRED':
raise PermissionRequiredError(message)
raise InvalidRequestError(message, code, status, uri)
def _handle_5xx_status(self, status, uri):
raise HTTPError('Received a server error (%(status)i) for '
'%(uri)s' % locals(), status, uri)
def _handle_non_200_status(self, status, uri):
raise HTTPError('Received a very surprising HTTP status '
'(%(status)i) for %(uri)s' % locals(), status, uri)
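For reference, the web service client deleted above was driven the same way as the database reader; the user ID, license key, and IP below are placeholders:

import geoip2.webservice

client = geoip2.webservice.Client(42, 'license_key')
response = client.insights('203.0.113.5')   # or client.city() / client.country()
print(response.traits.user_type, response.location.time_zone)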

View File

@@ -1,46 +0,0 @@
# pylint:disable=C0111
import os
import maxminddb.reader
try:
import maxminddb.extension
except ImportError:
maxminddb.extension = None
from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
MODE_MEMORY)
from maxminddb.decoder import InvalidDatabaseError
def open_database(database, mode=MODE_AUTO):
"""Open a Maxmind DB database
Arguments:
database -- A path to a valid MaxMind DB file such as a GeoIP2
database file.
mode -- mode to open the database with. Valid modes are:
* MODE_MMAP_EXT - use the C extension with memory map.
* MODE_MMAP - read from memory map. Pure Python.
* MODE_FILE - read database as standard file. Pure Python.
* MODE_MEMORY - load database into memory. Pure Python.
* MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that
order. Default mode.
"""
if (mode == MODE_AUTO and maxminddb.extension and
hasattr(maxminddb.extension, 'Reader')) or mode == MODE_MMAP_EXT:
return maxminddb.extension.Reader(database)
elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY):
return maxminddb.reader.Reader(database, mode)
raise ValueError('Unsupported open mode: {0}'.format(mode))
def Reader(database): # pylint: disable=invalid-name
"""This exists for backwards compatibility. Use open_database instead"""
return open_database(database)
__title__ = 'maxminddb'
__version__ = '1.2.1'
__author__ = 'Gregory Oschwald'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2014 Maxmind, Inc.'
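The maxminddb entry point removed above is the low-level counterpart to the geoip2 reader: it returns plain dicts instead of model objects. Its basic usage, with an illustrative path and IP:

import maxminddb

reader = maxminddb.open_database('/path/to/GeoLite2-City.mmdb')
record = reader.get('203.0.113.5')   # raw dict of database data, or None if the IP is absent
reader.close()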

View File

@@ -1,33 +0,0 @@
import sys
import ipaddress
# pylint: skip-file
if sys.version_info[0] == 2:
def compat_ip_address(address):
if isinstance(address, bytes):
address = address.decode()
return ipaddress.ip_address(address)
int_from_byte = ord
FileNotFoundError = IOError
def int_from_bytes(b):
if b:
return int(b.encode("hex"), 16)
return 0
byte_from_int = chr
else:
def compat_ip_address(address):
return ipaddress.ip_address(address)
int_from_byte = lambda x: x
FileNotFoundError = FileNotFoundError
int_from_bytes = lambda x: int.from_bytes(x, 'big')
byte_from_int = lambda x: bytes([x])

View File

@@ -1,7 +0,0 @@
"""Constants used in the API"""
MODE_AUTO = 0
MODE_MMAP_EXT = 1
MODE_MMAP = 2
MODE_FILE = 4
MODE_MEMORY = 8

View File

@@ -1,173 +0,0 @@
"""
maxminddb.decoder
~~~~~~~~~~~~~~~~~
This package contains code for decoding the MaxMind DB data section.
"""
from __future__ import unicode_literals
import struct
from maxminddb.compat import byte_from_int, int_from_bytes
from maxminddb.errors import InvalidDatabaseError
class Decoder(object): # pylint: disable=too-few-public-methods
"""Decoder for the data section of the MaxMind DB"""
def __init__(self, database_buffer, pointer_base=0, pointer_test=False):
"""Created a Decoder for a MaxMind DB
Arguments:
database_buffer -- an mmap'd MaxMind DB file.
pointer_base -- the base number to use when decoding a pointer
pointer_test -- used for internal unit testing of pointer code
"""
self._pointer_test = pointer_test
self._buffer = database_buffer
self._pointer_base = pointer_base
def _decode_array(self, size, offset):
array = []
for _ in range(size):
(value, offset) = self.decode(offset)
array.append(value)
return array, offset
def _decode_boolean(self, size, offset):
return size != 0, offset
def _decode_bytes(self, size, offset):
new_offset = offset + size
return self._buffer[offset:new_offset], new_offset
# pylint: disable=no-self-argument
# |-> I am open to better ways of doing this as long as it doesn't involve
# lots of code duplication.
def _decode_packed_type(type_code, type_size, pad=False):
# pylint: disable=protected-access, missing-docstring
def unpack_type(self, size, offset):
if not pad:
self._verify_size(size, type_size)
new_offset = offset + type_size
packed_bytes = self._buffer[offset:new_offset]
if pad:
packed_bytes = packed_bytes.rjust(type_size, b'\x00')
(value,) = struct.unpack(type_code, packed_bytes)
return value, new_offset
return unpack_type
def _decode_map(self, size, offset):
container = {}
for _ in range(size):
(key, offset) = self.decode(offset)
(value, offset) = self.decode(offset)
container[key] = value
return container, offset
_pointer_value_offset = {
1: 0,
2: 2048,
3: 526336,
4: 0,
}
def _decode_pointer(self, size, offset):
pointer_size = ((size >> 3) & 0x3) + 1
new_offset = offset + pointer_size
pointer_bytes = self._buffer[offset:new_offset]
packed = pointer_bytes if pointer_size == 4 else struct.pack(
b'!c', byte_from_int(size & 0x7)) + pointer_bytes
unpacked = int_from_bytes(packed)
pointer = unpacked + self._pointer_base + \
self._pointer_value_offset[pointer_size]
if self._pointer_test:
return pointer, new_offset
(value, _) = self.decode(pointer)
return value, new_offset
def _decode_uint(self, size, offset):
new_offset = offset + size
uint_bytes = self._buffer[offset:new_offset]
return int_from_bytes(uint_bytes), new_offset
def _decode_utf8_string(self, size, offset):
new_offset = offset + size
return self._buffer[offset:new_offset].decode('utf-8'), new_offset
_type_decoder = {
1: _decode_pointer,
2: _decode_utf8_string,
3: _decode_packed_type(b'!d', 8), # double,
4: _decode_bytes,
5: _decode_uint, # uint16
6: _decode_uint, # uint32
7: _decode_map,
8: _decode_packed_type(b'!i', 4, pad=True), # int32
9: _decode_uint, # uint64
10: _decode_uint, # uint128
11: _decode_array,
14: _decode_boolean,
15: _decode_packed_type(b'!f', 4), # float,
}
def decode(self, offset):
"""Decode a section of the data section starting at offset
Arguments:
offset -- the location of the data structure to decode
"""
new_offset = offset + 1
(ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset])
type_num = ctrl_byte >> 5
# Extended type
if not type_num:
(type_num, new_offset) = self._read_extended(new_offset)
if type_num not in self._type_decoder:
raise InvalidDatabaseError('Unexpected type number ({type}) '
'encountered'.format(type=type_num))
(size, new_offset) = self._size_from_ctrl_byte(
ctrl_byte, new_offset, type_num)
return self._type_decoder[type_num](self, size, new_offset)
def _read_extended(self, offset):
(next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1])
type_num = next_byte + 7
if type_num < 7:
raise InvalidDatabaseError(
'Something went horribly wrong in the decoder. An '
'extended type resolved to a type number < 7 '
'({type})'.format(type=type_num))
return type_num, offset + 1
def _verify_size(self, expected, actual):
if expected != actual:
raise InvalidDatabaseError(
'The MaxMind DB file\'s data section contains bad data '
'(unknown data type or corrupt data)'
)
def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num):
size = ctrl_byte & 0x1f
if type_num == 1:
return size, offset
bytes_to_read = 0 if size < 29 else size - 28
new_offset = offset + bytes_to_read
size_bytes = self._buffer[offset:new_offset]
# Using unpack rather than int_from_bytes as it is about 200 lookups
# per second faster here.
if size == 29:
size = 29 + struct.unpack(b'!B', size_bytes)[0]
elif size == 30:
size = 285 + struct.unpack(b'!H', size_bytes)[0]
elif size > 30:
size = struct.unpack(
b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821
return size, new_offset
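
A minimal standalone sketch of the control-byte layout the decoder above relies on (the bytes here are made up, not from a real database): the top three bits select the type and, for sizes below 29, the bottom five bits carry the payload size inline.

import struct

buf = b'\x43abc'                         # 0x43 = 0b01000011: type 2 (UTF-8 string), size 3
(ctrl_byte,) = struct.unpack(b'!B', buf[0:1])
type_num = ctrl_byte >> 5                # top 3 bits -> 2, dispatched via _type_decoder
size = ctrl_byte & 0x1f                  # bottom 5 bits -> 3 (sizes < 29 need no extra bytes)
value = buf[1:1 + size].decode('utf-8')
assert (type_num, size, value) == (2, 3, 'abc')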

View File

@@ -1,11 +0,0 @@
"""
maxminddb.errors
~~~~~~~~~~~~~~~~
This module contains custom errors for the MaxMind DB reader
"""
class InvalidDatabaseError(RuntimeError):
"""This error is thrown when unexpected data is found in the database."""

View File

@@ -1,570 +0,0 @@
#include <Python.h>
#include <maxminddb.h>
#include "structmember.h"
#define __STDC_FORMAT_MACROS
#include <inttypes.h>
static PyTypeObject Reader_Type;
static PyTypeObject Metadata_Type;
static PyObject *MaxMindDB_error;
typedef struct {
PyObject_HEAD /* no semicolon */
MMDB_s *mmdb;
} Reader_obj;
typedef struct {
PyObject_HEAD /* no semicolon */
PyObject *binary_format_major_version;
PyObject *binary_format_minor_version;
PyObject *build_epoch;
PyObject *database_type;
PyObject *description;
PyObject *ip_version;
PyObject *languages;
PyObject *node_count;
PyObject *record_size;
} Metadata_obj;
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list);
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list);
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list);
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list);
#if PY_MAJOR_VERSION >= 3
#define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void)
#define RETURN_MOD_INIT(m) return (m)
#define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError
#else
#define MOD_INIT(name) PyMODINIT_FUNC init ## name(void)
#define RETURN_MOD_INIT(m) return
#define PyInt_FromLong PyLong_FromLong
#define FILE_NOT_FOUND_ERROR PyExc_IOError
#endif
#ifdef __GNUC__
# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
#else
# define UNUSED(x) UNUSED_ ## x
#endif
static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds)
{
char *filename;
int mode = 0;
static char *kwlist[] = {"database", "mode", NULL};
if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) {
return -1;
}
if (mode != 0 && mode != 1) {
PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). Only "
"MODE_AUTO and MODE_MMAP_EXT are supported by this extension.",
mode);
return -1;
}
if (0 != access(filename, R_OK)) {
PyErr_Format(FILE_NOT_FOUND_ERROR,
"No such file or directory: '%s'",
filename);
return -1;
}
MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s));
if (NULL == mmdb) {
PyErr_NoMemory();
return -1;
}
Reader_obj *mmdb_obj = (Reader_obj *)self;
if (!mmdb_obj) {
free(mmdb);
PyErr_NoMemory();
return -1;
}
uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb);
if (MMDB_SUCCESS != status) {
free(mmdb);
PyErr_Format(
MaxMindDB_error,
"Error opening database file (%s). Is this a valid MaxMind DB file?",
filename
);
return -1;
}
mmdb_obj->mmdb = mmdb;
return 0;
}
static PyObject *Reader_get(PyObject *self, PyObject *args)
{
char *ip_address = NULL;
Reader_obj *mmdb_obj = (Reader_obj *)self;
if (!PyArg_ParseTuple(args, "s", &ip_address)) {
return NULL;
}
MMDB_s *mmdb = mmdb_obj->mmdb;
if (NULL == mmdb) {
PyErr_SetString(PyExc_ValueError,
"Attempt to read from a closed MaxMind DB.");
return NULL;
}
int gai_error = 0;
int mmdb_error = MMDB_SUCCESS;
MMDB_lookup_result_s result =
MMDB_lookup_string(mmdb, ip_address, &gai_error,
&mmdb_error);
if (0 != gai_error) {
PyErr_Format(PyExc_ValueError,
"'%s' does not appear to be an IPv4 or IPv6 address.",
ip_address);
return NULL;
}
if (MMDB_SUCCESS != mmdb_error) {
PyObject *exception;
if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) {
exception = PyExc_ValueError;
} else {
exception = MaxMindDB_error;
}
PyErr_Format(exception, "Error looking up %s. %s",
ip_address, MMDB_strerror(mmdb_error));
return NULL;
}
if (!result.found_entry) {
Py_RETURN_NONE;
}
MMDB_entry_data_list_s *entry_data_list = NULL;
int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list);
if (MMDB_SUCCESS != status) {
PyErr_Format(MaxMindDB_error,
"Error while looking up data for %s. %s",
ip_address, MMDB_strerror(status));
MMDB_free_entry_data_list(entry_data_list);
return NULL;
}
MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
PyObject *py_obj = from_entry_data_list(&entry_data_list);
MMDB_free_entry_data_list(original_entry_data_list);
return py_obj;
}
static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args))
{
Reader_obj *mmdb_obj = (Reader_obj *)self;
if (NULL == mmdb_obj->mmdb) {
PyErr_SetString(PyExc_IOError,
"Attempt to read from a closed MaxMind DB.");
return NULL;
}
MMDB_entry_data_list_s *entry_data_list;
MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list);
MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
PyObject *metadata_dict = from_entry_data_list(&entry_data_list);
MMDB_free_entry_data_list(original_entry_data_list);
if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) {
PyErr_SetString(MaxMindDB_error,
"Error decoding metadata.");
return NULL;
}
PyObject *args = PyTuple_New(0);
if (NULL == args) {
Py_DECREF(metadata_dict);
return NULL;
}
PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args,
metadata_dict);
Py_DECREF(metadata_dict);
return metadata;
}
static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args))
{
Reader_obj *mmdb_obj = (Reader_obj *)self;
if (NULL != mmdb_obj->mmdb) {
MMDB_close(mmdb_obj->mmdb);
free(mmdb_obj->mmdb);
mmdb_obj->mmdb = NULL;
}
Py_RETURN_NONE;
}
static void Reader_dealloc(PyObject *self)
{
Reader_obj *obj = (Reader_obj *)self;
if (NULL != obj->mmdb) {
Reader_close(self, NULL);
}
PyObject_Del(self);
}
static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds)
{
PyObject
*binary_format_major_version,
*binary_format_minor_version,
*build_epoch,
*database_type,
*description,
*ip_version,
*languages,
*node_count,
*record_size;
static char *kwlist[] = {
"binary_format_major_version",
"binary_format_minor_version",
"build_epoch",
"database_type",
"description",
"ip_version",
"languages",
"node_count",
"record_size",
NULL
};
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist,
&binary_format_major_version,
&binary_format_minor_version,
&build_epoch,
&database_type,
&description,
&ip_version,
&languages,
&node_count,
&record_size)) {
return -1;
}
Metadata_obj *obj = (Metadata_obj *)self;
obj->binary_format_major_version = binary_format_major_version;
obj->binary_format_minor_version = binary_format_minor_version;
obj->build_epoch = build_epoch;
obj->database_type = database_type;
obj->description = description;
obj->ip_version = ip_version;
obj->languages = languages;
obj->node_count = node_count;
obj->record_size = record_size;
Py_INCREF(obj->binary_format_major_version);
Py_INCREF(obj->binary_format_minor_version);
Py_INCREF(obj->build_epoch);
Py_INCREF(obj->database_type);
Py_INCREF(obj->description);
Py_INCREF(obj->ip_version);
Py_INCREF(obj->languages);
Py_INCREF(obj->node_count);
Py_INCREF(obj->record_size);
return 0;
}
static void Metadata_dealloc(PyObject *self)
{
Metadata_obj *obj = (Metadata_obj *)self;
Py_DECREF(obj->binary_format_major_version);
Py_DECREF(obj->binary_format_minor_version);
Py_DECREF(obj->build_epoch);
Py_DECREF(obj->database_type);
Py_DECREF(obj->description);
Py_DECREF(obj->ip_version);
Py_DECREF(obj->languages);
Py_DECREF(obj->node_count);
Py_DECREF(obj->record_size);
PyObject_Del(self);
}
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list)
{
if (NULL == entry_data_list || NULL == *entry_data_list) {
PyErr_SetString(
MaxMindDB_error,
"Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb."
);
return NULL;
}
switch ((*entry_data_list)->entry_data.type) {
case MMDB_DATA_TYPE_MAP:
return from_map(entry_data_list);
case MMDB_DATA_TYPE_ARRAY:
return from_array(entry_data_list);
case MMDB_DATA_TYPE_UTF8_STRING:
return PyUnicode_FromStringAndSize(
(*entry_data_list)->entry_data.utf8_string,
(*entry_data_list)->entry_data.data_size
);
case MMDB_DATA_TYPE_BYTES:
return PyByteArray_FromStringAndSize(
(const char *)(*entry_data_list)->entry_data.bytes,
(Py_ssize_t)(*entry_data_list)->entry_data.data_size);
case MMDB_DATA_TYPE_DOUBLE:
return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value);
case MMDB_DATA_TYPE_FLOAT:
return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value);
case MMDB_DATA_TYPE_UINT16:
return PyLong_FromLong((*entry_data_list)->entry_data.uint16);
case MMDB_DATA_TYPE_UINT32:
return PyLong_FromLong((*entry_data_list)->entry_data.uint32);
case MMDB_DATA_TYPE_BOOLEAN:
return PyBool_FromLong((*entry_data_list)->entry_data.boolean);
case MMDB_DATA_TYPE_UINT64:
return PyLong_FromUnsignedLongLong(
(*entry_data_list)->entry_data.uint64);
case MMDB_DATA_TYPE_UINT128:
return from_uint128(*entry_data_list);
case MMDB_DATA_TYPE_INT32:
return PyLong_FromLong((*entry_data_list)->entry_data.int32);
default:
PyErr_Format(MaxMindDB_error,
"Invalid data type arguments: %d",
(*entry_data_list)->entry_data.type);
return NULL;
}
return NULL;
}
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list)
{
PyObject *py_obj = PyDict_New();
if (NULL == py_obj) {
PyErr_NoMemory();
return NULL;
}
const uint32_t map_size = (*entry_data_list)->entry_data.data_size;
uint i;
// entry_data_list cannot start out NULL (see from_entry_data_list). We
// check it in the loop because it may become NULL.
// coverity[check_after_deref]
for (i = 0; i < map_size && entry_data_list; i++) {
*entry_data_list = (*entry_data_list)->next;
PyObject *key = PyUnicode_FromStringAndSize(
(char *)(*entry_data_list)->entry_data.utf8_string,
(*entry_data_list)->entry_data.data_size
);
*entry_data_list = (*entry_data_list)->next;
PyObject *value = from_entry_data_list(entry_data_list);
if (NULL == value) {
Py_DECREF(key);
Py_DECREF(py_obj);
return NULL;
}
PyDict_SetItem(py_obj, key, value);
Py_DECREF(value);
Py_DECREF(key);
}
return py_obj;
}
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list)
{
const uint32_t size = (*entry_data_list)->entry_data.data_size;
PyObject *py_obj = PyList_New(size);
if (NULL == py_obj) {
PyErr_NoMemory();
return NULL;
}
uint i;
// entry_data_list cannot start out NULL (see from_entry_data_list). We
// check it in the loop because it may become NULL.
// coverity[check_after_deref]
for (i = 0; i < size && entry_data_list; i++) {
*entry_data_list = (*entry_data_list)->next;
PyObject *value = from_entry_data_list(entry_data_list);
if (NULL == value) {
Py_DECREF(py_obj);
return NULL;
}
// PyList_SetItem 'steals' the reference
PyList_SetItem(py_obj, i, value);
}
return py_obj;
}
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list)
{
uint64_t high = 0;
uint64_t low = 0;
#if MMDB_UINT128_IS_BYTE_ARRAY
int i;
for (i = 0; i < 8; i++) {
high = (high << 8) | entry_data_list->entry_data.uint128[i];
}
for (i = 8; i < 16; i++) {
low = (low << 8) | entry_data_list->entry_data.uint128[i];
}
#else
high = entry_data_list->entry_data.uint128 >> 64;
low = (uint64_t)entry_data_list->entry_data.uint128;
#endif
char *num_str = malloc(33);
if (NULL == num_str) {
PyErr_NoMemory();
return NULL;
}
snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low);
PyObject *py_obj = PyLong_FromString(num_str, NULL, 16);
free(num_str);
return py_obj;
}
static PyMethodDef Reader_methods[] = {
{ "get", Reader_get, METH_VARARGS,
"Get record for IP address" },
{ "metadata", Reader_metadata, METH_NOARGS,
"Returns metadata object for database" },
{ "close", Reader_close, METH_NOARGS, "Closes database"},
{ NULL, NULL, 0, NULL }
};
static PyTypeObject Reader_Type = {
PyVarObject_HEAD_INIT(NULL, 0)
.tp_basicsize = sizeof(Reader_obj),
.tp_dealloc = Reader_dealloc,
.tp_doc = "Reader object",
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_methods = Reader_methods,
.tp_name = "Reader",
.tp_init = Reader_init,
};
static PyMethodDef Metadata_methods[] = {
{ NULL, NULL, 0, NULL }
};
/* *INDENT-OFF* */
static PyMemberDef Metadata_members[] = {
{ "binary_format_major_version", T_OBJECT, offsetof(
Metadata_obj, binary_format_major_version), READONLY, NULL },
{ "binary_format_minor_version", T_OBJECT, offsetof(
Metadata_obj, binary_format_minor_version), READONLY, NULL },
{ "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch),
READONLY, NULL },
{ "database_type", T_OBJECT, offsetof(Metadata_obj, database_type),
READONLY, NULL },
{ "description", T_OBJECT, offsetof(Metadata_obj, description),
READONLY, NULL },
{ "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version),
READONLY, NULL },
{ "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY,
NULL },
{ "node_count", T_OBJECT, offsetof(Metadata_obj, node_count),
READONLY, NULL },
{ "record_size", T_OBJECT, offsetof(Metadata_obj, record_size),
READONLY, NULL },
{ NULL, 0, 0, 0, NULL }
};
/* *INDENT-ON* */
static PyTypeObject Metadata_Type = {
PyVarObject_HEAD_INIT(NULL, 0)
.tp_basicsize = sizeof(Metadata_obj),
.tp_dealloc = Metadata_dealloc,
.tp_doc = "Metadata object",
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_members = Metadata_members,
.tp_methods = Metadata_methods,
.tp_name = "Metadata",
.tp_init = Metadata_init
};
static PyMethodDef MaxMindDB_methods[] = {
{ NULL, NULL, 0, NULL }
};
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef MaxMindDB_module = {
PyModuleDef_HEAD_INIT,
.m_name = "extension",
.m_doc = "This is a C extension to read MaxMind DB file format",
.m_methods = MaxMindDB_methods,
};
#endif
MOD_INIT(extension){
PyObject *m;
#if PY_MAJOR_VERSION >= 3
m = PyModule_Create(&MaxMindDB_module);
#else
m = Py_InitModule("extension", MaxMindDB_methods);
#endif
if (!m) {
RETURN_MOD_INIT(NULL);
}
Reader_Type.tp_new = PyType_GenericNew;
if (PyType_Ready(&Reader_Type)) {
RETURN_MOD_INIT(NULL);
}
Py_INCREF(&Reader_Type);
PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type);
Metadata_Type.tp_new = PyType_GenericNew;
if (PyType_Ready(&Metadata_Type)) {
RETURN_MOD_INIT(NULL);
}
PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type);
PyObject* error_mod = PyImport_ImportModule("maxminddb.errors");
if (error_mod == NULL) {
RETURN_MOD_INIT(NULL);
}
MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError");
Py_DECREF(error_mod);
if (MaxMindDB_error == NULL) {
RETURN_MOD_INIT(NULL);
}
Py_INCREF(MaxMindDB_error);
/* We primarily add it to the module for backwards compatibility */
PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error);
RETURN_MOD_INIT(m);
}
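
For context, this is roughly how the extension was consumed before its removal (a sketch only; it assumes a compiled maxminddb.extension module and a hypothetical GeoLite2-City.mmdb path):

import maxminddb

reader = maxminddb.open_database('GeoLite2-City.mmdb', maxminddb.MODE_MMAP_EXT)
record = reader.get('8.8.8.8')           # dict built by from_entry_data_list(), or None
print(reader.metadata().database_type)   # Metadata_obj fields are exposed read-only
reader.close()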

View File

@@ -1,66 +0,0 @@
"""For internal use only. It provides a slice-like file reader."""
import os
try:
# pylint: disable=no-name-in-module
from multiprocessing import Lock
except ImportError:
from threading import Lock
class FileBuffer(object):
"""A slice-able file reader"""
def __init__(self, database):
self._handle = open(database, 'rb')
self._size = os.fstat(self._handle.fileno()).st_size
if not hasattr(os, 'pread'):
self._lock = Lock()
def __getitem__(self, key):
if isinstance(key, slice):
return self._read(key.stop - key.start, key.start)
elif isinstance(key, int):
return self._read(1, key)
else:
raise TypeError("Invalid argument type.")
def rfind(self, needle, start):
"""Reverse find needle from start"""
pos = self._read(self._size - start - 1, start).rfind(needle)
if pos == -1:
return pos
return start + pos
def size(self):
"""Size of file"""
return self._size
def close(self):
"""Close file"""
self._handle.close()
if hasattr(os, 'pread'):
def _read(self, buffersize, offset):
"""read that uses pread"""
# pylint: disable=no-member
return os.pread(self._handle.fileno(), buffersize, offset)
else:
def _read(self, buffersize, offset):
"""read with a lock
This lock is necessary as after a fork, the different processes
will share the same file table entry, even if we dup the fd, and
as such the same offsets. There does not appear to be a way to
duplicate the file table entry and we cannot re-open based on the
original path as that file may have been replaced with another or
unlinked.
"""
with self._lock:
self._handle.seek(offset)
return self._handle.read(buffersize)
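
A short usage sketch (hypothetical database path): FileBuffer exposes the file through slicing, so the pure Python Reader can treat MODE_FILE exactly like an mmap'd or in-memory buffer.

from maxminddb.file import FileBuffer

buf = FileBuffer('GeoLite2-City.mmdb')
header = buf[0:4]                        # pread (or locked seek+read) of 4 bytes at offset 0
marker = buf.rfind(b'\xab\xcd\xefMaxMind.com', max(0, buf.size() - 128 * 1024))
buf.close()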

View File

@@ -1,223 +0,0 @@
"""
maxminddb.reader
~~~~~~~~~~~~~~~~
This module contains the pure Python database reader and related classes.
"""
from __future__ import unicode_literals
try:
import mmap
except ImportError:
# pylint: disable=invalid-name
mmap = None
import struct
from maxminddb.compat import byte_from_int, int_from_byte, compat_ip_address
from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY
from maxminddb.decoder import Decoder
from maxminddb.errors import InvalidDatabaseError
from maxminddb.file import FileBuffer
class Reader(object):
"""
Instances of this class provide a reader for the MaxMind DB format. IP
addresses can be looked up using the ``get`` method.
"""
_DATA_SECTION_SEPARATOR_SIZE = 16
_METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com"
_ipv4_start = None
def __init__(self, database, mode=MODE_AUTO):
"""Reader for the MaxMind DB file format
Arguments:
database -- A path to a valid MaxMind DB file such as a GeoIP2
database file.
mode -- mode to open the database with. Valid mode are:
* MODE_MMAP - read from memory map.
* MODE_FILE - read database as standard file.
* MODE_MEMORY - load database into memory.
* MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default.
"""
# pylint: disable=redefined-variable-type
if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP:
with open(database, 'rb') as db_file:
self._buffer = mmap.mmap(
db_file.fileno(), 0, access=mmap.ACCESS_READ)
self._buffer_size = self._buffer.size()
elif mode in (MODE_AUTO, MODE_FILE):
self._buffer = FileBuffer(database)
self._buffer_size = self._buffer.size()
elif mode == MODE_MEMORY:
with open(database, 'rb') as db_file:
self._buffer = db_file.read()
self._buffer_size = len(self._buffer)
else:
raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, '
'MODE_FILE, and MODE_MEMORY are supported by the pure Python '
'Reader'.format(mode))
metadata_start = self._buffer.rfind(self._METADATA_START_MARKER,
max(0, self._buffer_size
- 128 * 1024))
if metadata_start == -1:
self.close()
raise InvalidDatabaseError('Error opening database file ({0}). '
'Is this a valid MaxMind DB file?'
''.format(database))
metadata_start += len(self._METADATA_START_MARKER)
metadata_decoder = Decoder(self._buffer, metadata_start)
(metadata, _) = metadata_decoder.decode(metadata_start)
self._metadata = Metadata(
**metadata) # pylint: disable=bad-option-value
self._decoder = Decoder(self._buffer, self._metadata.search_tree_size
+ self._DATA_SECTION_SEPARATOR_SIZE)
def metadata(self):
"""Return the metadata associated with the MaxMind DB file"""
return self._metadata
def get(self, ip_address):
"""Return the record for the ip_address in the MaxMind DB
Arguments:
ip_address -- an IP address in the standard string notation
"""
address = compat_ip_address(ip_address)
if address.version == 6 and self._metadata.ip_version == 4:
raise ValueError('Error looking up {0}. You attempted to look up '
'an IPv6 address in an IPv4-only database.'.format(
ip_address))
pointer = self._find_address_in_tree(address)
return self._resolve_data_pointer(pointer) if pointer else None
def _find_address_in_tree(self, ip_address):
packed = ip_address.packed
bit_count = len(packed) * 8
node = self._start_node(bit_count)
for i in range(bit_count):
if node >= self._metadata.node_count:
break
bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8))
node = self._read_node(node, bit)
if node == self._metadata.node_count:
# Record is empty
return 0
elif node > self._metadata.node_count:
return node
raise InvalidDatabaseError('Invalid node in search tree')
def _start_node(self, length):
if self._metadata.ip_version != 6 or length == 128:
return 0
# We are looking up an IPv4 address in an IPv6 tree. Skip over the
# first 96 nodes.
if self._ipv4_start:
return self._ipv4_start
node = 0
for _ in range(96):
if node >= self._metadata.node_count:
break
node = self._read_node(node, 0)
self._ipv4_start = node
return node
def _read_node(self, node_number, index):
base_offset = node_number * self._metadata.node_byte_size
record_size = self._metadata.record_size
if record_size == 24:
offset = base_offset + index * 3
node_bytes = b'\x00' + self._buffer[offset:offset + 3]
elif record_size == 28:
(middle,) = struct.unpack(
b'!B', self._buffer[base_offset + 3:base_offset + 4])
if index:
middle &= 0x0F
else:
middle = (0xF0 & middle) >> 4
offset = base_offset + index * 4
node_bytes = byte_from_int(
middle) + self._buffer[offset:offset + 3]
elif record_size == 32:
offset = base_offset + index * 4
node_bytes = self._buffer[offset:offset + 4]
else:
raise InvalidDatabaseError(
'Unknown record size: {0}'.format(record_size))
return struct.unpack(b'!I', node_bytes)[0]
def _resolve_data_pointer(self, pointer):
resolved = pointer - self._metadata.node_count + \
self._metadata.search_tree_size
if resolved > self._buffer_size:
raise InvalidDatabaseError(
"The MaxMind DB file's search tree is corrupt")
(data, _) = self._decoder.decode(resolved)
return data
def close(self):
"""Closes the MaxMind DB file and returns the resources to the system"""
# pylint: disable=unidiomatic-typecheck
if type(self._buffer) not in (str, bytes):
self._buffer.close()
class Metadata(object):
"""Metadata for the MaxMind DB reader"""
# pylint: disable=too-many-instance-attributes
def __init__(self, **kwargs):
"""Creates new Metadata object. kwargs are key/value pairs from spec"""
# Although I could just update __dict__, that is less obvious and it
# doesn't work well with static analysis tools and some IDEs
self.node_count = kwargs['node_count']
self.record_size = kwargs['record_size']
self.ip_version = kwargs['ip_version']
self.database_type = kwargs['database_type']
self.languages = kwargs['languages']
self.binary_format_major_version = kwargs[
'binary_format_major_version']
self.binary_format_minor_version = kwargs[
'binary_format_minor_version']
self.build_epoch = kwargs['build_epoch']
self.description = kwargs['description']
@property
def node_byte_size(self):
"""The size of a node in bytes"""
return self.record_size // 4
@property
def search_tree_size(self):
"""The size of the search tree"""
return self.node_count * self.node_byte_size
def __repr__(self):
args = ', '.join('%s=%r' % x for x in self.__dict__.items())
return '{module}.{class_name}({data})'.format(
module=self.__module__,
class_name=self.__class__.__name__,
data=args)
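
A sketch of the pure Python reader's API before removal (hypothetical database path):

from maxminddb.const import MODE_MEMORY
from maxminddb.reader import Reader

reader = Reader('GeoLite2-City.mmdb', MODE_MEMORY)
meta = reader.metadata()
print(meta.node_count, meta.record_size, meta.node_byte_size)  # node_byte_size == record_size // 4
record = reader.get('8.8.8.8')           # None when the address is not in the search tree
reader.close()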

View File

@@ -34,6 +34,8 @@ from apscheduler.triggers.interval import IntervalTrigger
from UniversalAnalytics import Tracker
import pytz
PYTHON_VERSION = sys.version_info[:3]
import activity_handler
import activity_pinger
import common
@@ -440,7 +442,7 @@ def initialize_scheduler():
pms_update_check_hours = CONFIG.PMS_UPDATE_CHECK_INTERVAL if 1 <= CONFIG.PMS_UPDATE_CHECK_INTERVAL else 24
schedule_job(versioncheck.check_update, 'Check GitHub for updates',
hours=0, minutes=github_minutes, seconds=0, args=(bool(CONFIG.PLEXPY_AUTO_UPDATE), True))
hours=0, minutes=github_minutes, seconds=0, args=(True, True))
backup_hours = CONFIG.BACKUP_INTERVAL if 1 <= CONFIG.BACKUP_INTERVAL <= 24 else 6
@@ -448,15 +450,15 @@ def initialize_scheduler():
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
schedule_job(config.make_backup, 'Backup Tautulli config',
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
schedule_job(helpers.update_geoip_db, 'Update GeoLite2 database',
hours=12 * bool(CONFIG.GEOIP_DB_INSTALLED), minutes=0, seconds=0)
if WS_CONNECTED and CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
hours=12 * (not bool(CONFIG.PMS_URL_MANUAL)), minutes=0, seconds=0)
pms_remote_access_seconds = CONFIG.REMOTE_ACCESS_PING_INTERVAL if 60 <= CONFIG.REMOTE_ACCESS_PING_INTERVAL else 60
schedule_job(activity_pinger.check_server_access, 'Check for Plex remote access',
hours=0, minutes=0, seconds=60 * bool(CONFIG.MONITOR_REMOTE_ACCESS))
hours=0, minutes=0, seconds=pms_remote_access_seconds * bool(CONFIG.MONITOR_REMOTE_ACCESS))
schedule_job(activity_pinger.check_server_updates, 'Check for Plex updates',
hours=pms_update_check_hours * bool(CONFIG.MONITOR_PMS_UPDATES), minutes=0, seconds=0)
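
A sketch of the interval guards above: fall back to a sane default when a configured value is out of range, and multiply by bool(...) so a disabled feature passes an interval of 0 (the job is effectively not scheduled). The helper name is illustrative, not from the source.

def clamp(value, minimum, fallback):
    # Mirrors e.g. `CONFIG.PMS_UPDATE_CHECK_INTERVAL if 1 <= ... else 24` above.
    return value if minimum <= value else fallback

assert clamp(0, 1, 24) == 24             # bad update-check interval -> default 24 hours
assert clamp(45, 60, 60) == 60           # remote access ping interval floor is 60 seconds
assert clamp(90, 60, 60) * bool(0) == 0  # feature disabled -> interval 0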
@@ -580,8 +582,8 @@ def dbcheck():
'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, session_key INTEGER, session_id TEXT, '
'transcode_key TEXT, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, '
'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, '
'ip_address TEXT, machine_id TEXT, player TEXT, product TEXT, platform TEXT, title TEXT, parent_title TEXT, '
'grandparent_title TEXT, original_title TEXT, full_title TEXT, '
'ip_address TEXT, machine_id TEXT, bandwidth INTEGER, location TEXT, player TEXT, product TEXT, platform TEXT, '
'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, '
'media_index INTEGER, parent_media_index INTEGER, '
'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, year INTEGER, '
'parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
@@ -608,7 +610,13 @@ def dbcheck():
'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
'secure INTEGER, relayed INTEGER, '
'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, '
'write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT)'
'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT)'
)
# sessions_continued table :: This is a temp table that keeps track of continued streaming sessions
c_db.execute(
'CREATE TABLE IF NOT EXISTS sessions_continued (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'user_id INTEGER, machine_id TEXT, media_type TEXT, stopped INTEGER)'
)
# session_history table :: This is a history table which logs essential stream details
@@ -661,11 +669,11 @@ def dbcheck():
c_db.execute(
'CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, '
'thumb TEXT, custom_avatar_url TEXT, email TEXT, is_admin INTEGER DEFAULT 0, is_home_user INTEGER DEFAULT NULL, '
'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, '
'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0, allow_guest INTEGER DEFAULT 0, '
'user_token TEXT, server_token TEXT, shared_libraries TEXT, filter_all TEXT, filter_movies TEXT, filter_tv TEXT, '
'filter_music TEXT, filter_photos TEXT)'
'thumb TEXT, custom_avatar_url TEXT, email TEXT, is_active INTEGER DEFAULT 1, is_admin INTEGER DEFAULT 0, '
'is_home_user INTEGER DEFAULT NULL, is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, '
'do_notify INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0, '
'allow_guest INTEGER DEFAULT 0, user_token TEXT, server_token TEXT, shared_libraries TEXT, '
'filter_all TEXT, filter_movies TEXT, filter_tv TEXT, filter_music TEXT, filter_photos TEXT)'
)
# library_sections table :: This table keeps record of the servers library sections
@@ -673,7 +681,7 @@ def dbcheck():
'CREATE TABLE IF NOT EXISTS library_sections (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, agent TEXT, '
'thumb TEXT, custom_thumb_url TEXT, art TEXT, custom_art_url TEXT, '
'count INTEGER, parent_count INTEGER, child_count INTEGER, '
'count INTEGER, parent_count INTEGER, child_count INTEGER, is_active INTEGER DEFAULT 1, '
'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, '
'deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))'
)
@@ -694,16 +702,19 @@ def dbcheck():
'on_created INTEGER DEFAULT 0, on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, '
'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, '
'on_concurrent INTEGER DEFAULT 0, on_newdevice INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, '
'on_plexpydbcorrupt INTEGER DEFAULT 0, '
'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, '
'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_watched_subject TEXT, '
'on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, '
'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, '
'on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, '
'on_plexpydbcorrupt_subject TEXT, '
'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, '
'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_watched_body TEXT, '
'on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, '
'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, '
'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, '
'on_plexpydbcorrupt_body TEXT, '
'custom_conditions TEXT, custom_conditions_logic TEXT)'
)
@@ -1259,6 +1270,27 @@ def dbcheck():
'ALTER TABLE sessions ADD COLUMN guid TEXT'
)
# Upgrade sessions table from earlier versions
try:
c_db.execute('SELECT bandwidth FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN bandwidth INTEGER'
)
c_db.execute(
'ALTER TABLE sessions ADD COLUMN location TEXT'
)
# Upgrade sessions table from earlier versions
try:
c_db.execute('SELECT initial_stream FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN initial_stream INTEGER DEFAULT 1'
)
# Upgrade session_history table from earlier versions
try:
c_db.execute('SELECT reference_id FROM session_history')
@@ -1731,6 +1763,15 @@ def dbcheck():
'ALTER TABLE users ADD COLUMN is_admin INTEGER DEFAULT 0'
)
# Upgrade users table from earlier versions
try:
c_db.execute('SELECT is_active FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN is_active INTEGER DEFAULT 1'
)
# Upgrade notify_log table from earlier versions
try:
c_db.execute('SELECT poster_url FROM notify_log')
@@ -1903,6 +1944,15 @@ def dbcheck():
'ALTER TABLE library_sections ADD COLUMN custom_art_url TEXT'
)
# Upgrade library_sections table from earlier versions
try:
c_db.execute('SELECT is_active FROM library_sections')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table library_sections.")
c_db.execute(
'ALTER TABLE library_sections ADD COLUMN is_active INTEGER DEFAULT 1'
)
# Upgrade users table from earlier versions (remove UNIQUE constraint on username)
try:
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="users"').fetchone()
@@ -1989,6 +2039,21 @@ def dbcheck():
'ALTER TABLE notifiers ADD COLUMN on_change_body TEXT'
)
# Upgrade notifiers table from earlier versions
try:
c_db.execute('SELECT on_plexpydbcorrupt FROM notifiers')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notifiers.")
c_db.execute(
'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt INTEGER DEFAULT 0'
)
c_db.execute(
'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_subject TEXT'
)
c_db.execute(
'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_body TEXT'
)
# Upgrade tvmaze_lookup table from earlier versions
try:
c_db.execute('SELECT rating_key FROM tvmaze_lookup')

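The repeated try/except blocks above all follow one idempotent migration pattern: probe for the column, and add it only when SQLite reports it missing. A self-contained sketch (placeholder table and column names; the helper is not in the source):

import sqlite3

def ensure_column(conn, table, column, ddl):
    # SQLite raises OperationalError when selecting a missing column,
    # which signals that the ALTER TABLE upgrade is still needed.
    try:
        conn.execute('SELECT %s FROM %s' % (column, table))
    except sqlite3.OperationalError:
        conn.execute('ALTER TABLE %s ADD COLUMN %s %s' % (table, column, ddl))

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE sessions (id INTEGER PRIMARY KEY)')
ensure_column(conn, 'sessions', 'initial_stream', 'INTEGER DEFAULT 1')  # adds the column
ensure_column(conn, 'sessions', 'initial_stream', 'INTEGER DEFAULT 1')  # no-op on re-run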
View File

@@ -84,14 +84,14 @@ class ActivityHandler(object):
return None
def update_db_session(self, session=None):
def update_db_session(self, session=None, notify=False):
if session is None:
session = self.get_live_session()
if session:
# Update our session temp table values
ap = activity_processor.ActivityProcessor()
ap.write_session(session=session, notify=False)
ap.write_session(session=session, notify=notify)
self.set_session_state()
@@ -121,10 +121,11 @@ class ActivityHandler(object):
% (str(session['session_key']), str(session['user_id']), session['username'],
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
# Send notification after updating db
#plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
# Write the new session to our temp session table
self.update_db_session(session=session)
self.update_db_session(session=session, notify=True)
# Schedule a callback to force stop a stale stream 5 minutes later
schedule_callback('session_key-{}'.format(self.get_session_key()),

View File

@@ -31,6 +31,7 @@ import web_socket
monitor_lock = threading.Lock()
ext_ping_count = 0
ext_ping_error = None
int_ping_count = 0
@@ -309,34 +310,39 @@ def check_server_access():
server_response = pms_connect.get_server_response()
global ext_ping_count
global ext_ping_error
# Check for remote access
if server_response:
log = (server_response['mapping_error'] != ext_ping_error)
mapping_state = server_response['mapping_state']
mapping_error = server_response['mapping_error']
if server_response['reason']:
ext_ping_count += 1
ext_ping_error = server_response['mapping_error']
if log:
logger.warn(u"Tautulli Monitor :: Remote access failed: %s, ping attempt %s."
% (server_response['reason'], str(ext_ping_count)))
# Check if the port is mapped
if not mapping_state == 'mapped':
ext_ping_count += 1
logger.warn(u"Tautulli Monitor :: Plex remote access port not mapped, ping attempt %s." \
% str(ext_ping_count))
# Check if the port is open
elif mapping_error == 'unreachable':
ext_ping_count += 1
logger.warn(u"Tautulli Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
# Waiting for port mapping
elif server_response['mapping_state'] == 'waiting':
ext_ping_error = server_response['mapping_error']
if log:
logger.warn(u"Tautulli Monitor :: Remote access waiting for port mapping, ping attempt %s."
% str(ext_ping_count))
# Reset external ping counter
else:
if ext_ping_count >= plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
logger.info(u"Tautulli Monitor :: Plex remote access is back up.")
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup'})
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
ext_ping_count = 0
ext_ping_error = None
if ext_ping_count == plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown'})
logger.info(u"Tautulli Monitor: Plex remote access is down.")
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
def check_server_updates():

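A sketch of the notification debounce above: on_extdown fires exactly once per outage, when the failure count reaches REMOTE_ACCESS_PING_THRESHOLD, and on_extup fires once on recovery only if the down notification was actually sent.

REMOTE_ACCESS_PING_THRESHOLD = 3
ext_ping_count = 0

def ping(failed):
    global ext_ping_count
    if failed:
        ext_ping_count += 1
        if ext_ping_count == REMOTE_ACCESS_PING_THRESHOLD:
            print('on_extdown')          # fired exactly once per outage
    else:
        if ext_ping_count >= REMOTE_ACCESS_PING_THRESHOLD:
            print('on_extup')
        ext_ping_count = 0               # any success resets the counter

for failed in (True, True, True, True, False):   # -> on_extdown, then on_extup
    ping(failed)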
View File

@@ -56,6 +56,8 @@ class ActivityProcessor(object):
'year': session.get('year', ''),
'friendly_name': session.get('friendly_name', ''),
'ip_address': session.get('ip_address', ''),
'bandwidth': session.get('bandwidth', 0),
'location': session.get('location', ''),
'player': session.get('player', ''),
'product': session.get('product', ''),
'platform': session.get('platform', ''),
@@ -140,15 +142,20 @@ class ActivityProcessor(object):
result = self.db.upsert('sessions', values, keys)
if result == 'insert':
# Check if any notification agents have notifications enabled
if notify:
plexpy.NOTIFY_QUEUE.put({'stream_data': values.copy(), 'notify_action': 'on_play'})
# If it's our first write then time stamp it.
started = int(time.time())
timestamp = {'started': started}
initial_stream = self.is_initial_stream(user_id=values['user_id'],
machine_id=values['machine_id'],
media_type=values['media_type'],
started=started)
timestamp = {'started': started, 'initial_stream': initial_stream}
self.db.upsert('sessions', timestamp, keys)
# Check if any notification agents have notifications enabled
if notify:
session.update(timestamp)
plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
# Add Live TV library if it hasn't been added
if values['live']:
libraries.add_live_tv_library()
@@ -197,6 +204,12 @@ class ActivityProcessor(object):
state='stopped',
stopped=stopped)
if not is_import:
self.write_continued_session(user_id=session['user_id'],
machine_id=session['machine_id'],
media_type=session['media_type'],
stopped=stopped)
if str(session['rating_key']).isdigit() and session['media_type'] in ('movie', 'episode', 'track'):
logging_enabled = True
else:
@@ -628,3 +641,16 @@ class ActivityProcessor(object):
self.db.action('UPDATE sessions SET watched = ?'
'WHERE session_key = ?',
[1, session_key])
def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None):
keys = {'user_id': user_id, 'machine_id': machine_id, 'media_type': media_type}
values = {'stopped': stopped}
self.db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values)
def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None):
last_session = self.db.select_single('SELECT stopped '
'FROM sessions_continued '
'WHERE user_id = ? AND machine_id = ? AND media_type = ? '
'ORDER BY stopped DESC',
[user_id, machine_id, media_type])
return int(started - last_session.get('stopped', 0) >= plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD)
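
A worked example of the is_initial_stream() check above (epoch seconds; 15 is the NOTIFY_CONTINUED_SESSION_THRESHOLD default, and the timestamps are made up):

threshold = 15
last_stopped = 1588300000        # last sessions_continued row for this user/device/media type
started = 1588300010             # new stream starts 10 seconds later
assert int(started - last_stopped >= threshold) == 0   # continued session, not initial
started = 1588300100             # 100 seconds later instead
assert int(started - last_stopped >= threshold) == 1   # counts as an initial stream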

View File

@@ -120,7 +120,7 @@ class API2:
self._api_app = True
if plexpy.CONFIG.API_ENABLED and not self._api_msg or self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
if self._api_apikey == plexpy.CONFIG.API_KEY or (self._api_app and self._api_apikey == mobile_app.TEMP_DEVICE_TOKEN):
if self._api_apikey == plexpy.CONFIG.API_KEY or (self._api_app and self._api_apikey == mobile_app.get_temp_device_token()):
self._api_authenticated = True
elif self._api_app and mobile_app.get_mobile_device_by_token(self._api_apikey):
@@ -292,7 +292,7 @@ class API2:
def sql(self, query=''):
""" Query the Tautulli database with raw SQL. Automatically makes a backup of
the database if the latest backup is older than 24h. `api_sql` must be
manually enabled in the config file.
manually enabled in the config file while Tautulli is shut down.
```
Required parameters:
@@ -404,7 +404,7 @@ class API2:
if result:
self._api_msg = 'Device registration successful.'
self._api_result_type = 'success'
mobile_app.TEMP_DEVICE_TOKEN = None
mobile_app.set_temp_device_token(None)
else:
self._api_msg = 'Device registration failed: database error.'
self._api_result_type = 'error'
@@ -615,13 +615,19 @@ General optional parameters:
cherrypy.response.headers['Content-Type'] = 'image/jpeg'
return out['response']['data']
elif self._api_cmd == 'get_geoip_lookup':
# Remove nested data and put error message inside data for backwards compatibility
out['response']['data'] = out['response']['data'].get('data')
if not out['response']['data']:
out['response']['data'] = {'error': out['response']['message']}
if self._api_out_type == 'json':
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
try:
if self._api_debug:
out = json.dumps(out, indent=4, sort_keys=True, ensure_ascii=False).encode('utf-8')
out = json.dumps(out, indent=4, sort_keys=True, ensure_ascii=False)
else:
out = json.dumps(out, ensure_ascii=False).encode('utf-8')
out = json.dumps(out, ensure_ascii=False)
if self._api_callback is not None:
cherrypy.response.headers['Content-Type'] = 'application/javascript'
# wrap with JSONP call if requested
@@ -634,7 +640,7 @@ General optional parameters:
out['result'] = 'error'
elif self._api_out_type == 'xml':
cherrypy.response.headers['Content-Type'] = 'application/xml'
cherrypy.response.headers['Content-Type'] = 'application/xml;charset=UTF-8'
try:
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
@@ -655,7 +661,7 @@ General optional parameters:
</response>
''' % e
return out
return out.encode('utf-8')
def _api_run(self, *args, **kwargs):
""" handles the stuff from the handler """

View File

@@ -214,8 +214,7 @@ SCHEDULER_LIST = [
'Refresh libraries list',
'Refresh Plex server URLs',
'Backup Tautulli database',
'Backup Tautulli config',
'Update GeoLite2 database'
'Backup Tautulli config'
]
DATE_TIME_FORMATS = [
@@ -340,10 +339,13 @@ NOTIFICATION_PARAMETERS = [
{
'category': 'Stream Details',
'parameters': [
{'name': 'Streams', 'type': 'int', 'value': 'streams', 'description': 'The number of concurrent streams.'},
{'name': 'Direct Plays', 'type': 'int', 'value': 'direct_plays', 'description': 'The number of concurrent direct plays.'},
{'name': 'Direct Streams', 'type': 'int', 'value': 'direct_streams', 'description': 'The number of concurrent direct streams.'},
{'name': 'Transcodes', 'type': 'int', 'value': 'transcodes', 'description': 'The number of concurrent transcodes.'},
{'name': 'Streams', 'type': 'int', 'value': 'streams', 'description': 'The total number of concurrent streams.'},
{'name': 'Direct Plays', 'type': 'int', 'value': 'direct_plays', 'description': 'The total number of concurrent direct plays.'},
{'name': 'Direct Streams', 'type': 'int', 'value': 'direct_streams', 'description': 'The total number of concurrent direct streams.'},
{'name': 'Transcodes', 'type': 'int', 'value': 'transcodes', 'description': 'The total number of concurrent transcodes.'},
{'name': 'Total Bandwidth', 'type': 'int', 'value': 'total_bandwidth', 'description': 'The total Plex Streaming Brain reserved bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
{'name': 'LAN Bandwidth', 'type': 'int', 'value': 'lan_bandwidth', 'description': 'The total Plex Streaming Brain reserved LAN bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
{'name': 'WAN Bandwidth', 'type': 'int', 'value': 'wan_bandwidth', 'description': 'The total Plex Streaming Brain reserved WAN bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
{'name': 'User Streams', 'type': 'int', 'value': 'user_streams', 'description': 'The number of concurrent streams by the user streaming.'},
{'name': 'User Direct Plays', 'type': 'int', 'value': 'user_direct_plays', 'description': 'The number of concurrent direct plays by the user streaming.'},
{'name': 'User Direct Streams', 'type': 'int', 'value': 'user_direct_streams', 'description': 'The number of concurrent direct streams by the user streaming.'},
@@ -351,10 +353,12 @@ NOTIFICATION_PARAMETERS = [
{'name': 'User', 'type': 'str', 'value': 'user', 'description': 'The friendly name of the user streaming.'},
{'name': 'Username', 'type': 'str', 'value': 'username', 'description': 'The username of the user streaming.'},
{'name': 'User Email', 'type': 'str', 'value': 'user_email', 'description': 'The email address of the user streaming.'},
{'name': 'User Thumb', 'type': 'str', 'value': 'user_thumb', 'description': 'The profile picture URL of the user streaming.'},
{'name': 'Device', 'type': 'str', 'value': 'device', 'description': 'The type of client device being used for playback.'},
{'name': 'Platform', 'type': 'str', 'value': 'platform', 'description': 'The type of client platform being used for playback.'},
{'name': 'Product', 'type': 'str', 'value': 'product', 'description': 'The type of client product being used for playback.'},
{'name': 'Player', 'type': 'str', 'value': 'player', 'description': 'The name of the player being used for playback.'},
{'name': 'Initial Stream', 'type': 'int', 'value': 'initial_stream', 'description': 'If the stream is the initial stream of a continuous streaming session.', 'example': '0 or 1'},
{'name': 'IP Address', 'type': 'str', 'value': 'ip_address', 'description': 'The IP address of the device being used for playback.'},
{'name': 'Stream Duration', 'type': 'int', 'value': 'stream_duration', 'description': 'The duration (in minutes) for the stream.'},
{'name': 'Stream Time', 'type': 'str', 'value': 'stream_time', 'description': 'The duration (in time format) of the stream.'},
@@ -379,7 +383,7 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Relayed', 'type': 'int', 'value': 'relayed', 'description': 'If the stream is using Plex Relay.', 'example': '0 or 1'},
{'name': 'Stream Local', 'type': 'int', 'value': 'stream_local', 'description': 'If the stream is local.', 'example': '0 or 1'},
{'name': 'Stream Location', 'type': 'str', 'value': 'stream_location', 'description': 'The network location of the stream.', 'example': 'lan or wan'},
{'name': 'Stream Bandwidth', 'type': 'int', 'value': 'stream_bandwidth', 'description': 'The required bandwidth (in kbps) of the stream.', 'help_text': 'not the used bandwidth'},
{'name': 'Stream Bandwidth', 'type': 'int', 'value': 'stream_bandwidth', 'description': 'The Plex Streaming Brain reserved bandwidth (in kbps) of the stream.', 'help_text': 'not the used bandwidth'},
{'name': 'Stream Container', 'type': 'str', 'value': 'stream_container', 'description': 'The media container of the stream.'},
{'name': 'Stream Bitrate', 'type': 'int', 'value': 'stream_bitrate', 'description': 'The bitrate (in kbps) of the stream.'},
{'name': 'Stream Aspect Ratio', 'type': 'float', 'value': 'stream_aspect_ratio', 'description': 'The aspect ratio of the stream.'},
@@ -546,6 +550,18 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Indexes', 'type': 'int', 'value': 'indexes', 'description': 'If the media has video preview thumbnails.', 'example': '0 or 1'},
]
},
{
'category': 'Plex Remote Access',
'parameters': [
{'name': 'Remote Access Mapping State', 'type': 'str', 'value': 'remote_access_mapping_state', 'description': 'The mapping state of the Plex remote access port.'},
{'name': 'Remote Access Mapping Error', 'type': 'str', 'value': 'remote_access_mapping_error', 'description': 'The mapping error of the Plex remote access port.'},
{'name': 'Remote Access Public IP Address', 'type': 'str', 'value': 'remote_access_public_address', 'description': 'The Plex remote access public IP address.'},
{'name': 'Remote Access Public Port', 'type': 'str', 'value': 'remote_access_public_port', 'description': 'The Plex remote access public port.'},
{'name': 'Remote Access Private IP Address', 'type': 'str', 'value': 'remote_access_private_address', 'description': 'The Plex remote access private IP address.'},
{'name': 'Remote Access Private Port', 'type': 'str', 'value': 'remote_access_private_port', 'description': 'The Plex remote access private port.'},
{'name': 'Remote Access Failure Reason', 'type': 'str', 'value': 'remote_access_reason', 'description': 'The failure reason for Plex remote access going down.'},
]
},
{
'category': 'Plex Update Available',
'parameters': [

View File

@@ -175,9 +175,6 @@ _CONFIG_DEFINITIONS = {
'FACEBOOK_ON_NEWDEVICE': (int, 'Facebook', 0),
'FIRST_RUN_COMPLETE': (int, 'General', 0),
'FREEZE_DB': (int, 'General', 0),
'GEOIP_DB': (str, 'General', ''),
'GEOIP_DB_INSTALLED': (int, 'General', 0),
'GEOIP_DB_UPDATE_DAYS': (int, 'General', 30),
'GET_FILE_SIZES': (int, 'General', 0),
'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}),
'GIT_BRANCH': (str, 'General', 'master'),
@@ -292,7 +289,6 @@ _CONFIG_DEFINITIONS = {
'LOG_BLACKLIST': (int, 'General', 1),
'LOG_DIR': (str, 'General', ''),
'LOGGING_IGNORE_INTERVAL': (int, 'Monitoring', 120),
'MAXMIND_LICENSE_KEY': (str, 'General', ''),
'METADATA_CACHE_SECONDS': (int, 'Advanced', 1800),
'MOVIE_LOGGING_ENABLE': (int, 'Monitoring', 1),
'MOVIE_NOTIFY_ENABLE': (int, 'Monitoring', 0),
@@ -338,7 +334,8 @@ _CONFIG_DEFINITIONS = {
'NMA_ON_NEWDEVICE': (int, 'NMA', 0),
'NOTIFICATION_THREADS': (int, 'Advanced', 2),
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
'NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 0),
'NOTIFY_CONTINUED_SESSION_THRESHOLD': (int, 'Monitoring', 15),
'NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 1),
'NOTIFY_GROUP_RECENTLY_ADDED_PARENT': (int, 'Monitoring', 1),
'NOTIFY_GROUP_RECENTLY_ADDED': (int, 'Monitoring', 1),
'NOTIFY_UPLOAD_POSTERS': (int, 'Monitoring', 0),
@@ -490,6 +487,7 @@ _CONFIG_DEFINITIONS = {
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1),
'REMOTE_ACCESS_PING_INTERVAL': (int, 'Advanced', 60),
'REMOTE_ACCESS_PING_THRESHOLD': (int, 'Advanced', 3),
'SESSION_DB_WRITE_ATTEMPTS': (int, 'Advanced', 5),
'SHOW_ADVANCED_SETTINGS': (int, 'General', 0),
@@ -930,8 +928,6 @@ class Config(object):
self.CONFIG_VERSION = 13
if self.CONFIG_VERSION == 13:
if not self.GEOIP_DB:
self.GEOIP_DB = os.path.join(plexpy.DATA_DIR, 'GeoLite2-City.mmdb')
self.CONFIG_VERSION = 14

View File

@@ -21,6 +21,7 @@ import threading
import time
import plexpy
import helpers
import logger
FILENAME = "tautulli.db"
@@ -57,6 +58,60 @@ def delete_recently_added():
return clear_table('recently_added')
def delete_rows_from_table(table, row_ids):
if row_ids and isinstance(row_ids, basestring):
row_ids = map(helpers.cast_to_int, row_ids.split(','))
if row_ids:
logger.info(u"Tautulli Database :: Deleting row ids %s from %s database table", row_ids, table)
query = "DELETE FROM " + table + " WHERE id IN (%s) " % ','.join(['?'] * len(row_ids))
monitor_db = MonitorDatabase()
try:
monitor_db.action(query, row_ids)
return True
except Exception as e:
logger.error(u"Tautulli Database :: Failed to delete rows from %s database table: %s" % (table, row_ids))
return False
return True
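
How the parameterized IN (...) clause above expands, sketched for three ids:

row_ids = [10, 11, 12]
query = "DELETE FROM session_history WHERE id IN (%s) " % ','.join(['?'] * len(row_ids))
assert query == "DELETE FROM session_history WHERE id IN (?,?,?) "
# monitor_db.action(query, row_ids) then binds one id per placeholder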
def delete_session_history_rows(row_ids=None):
success = []
for table in ('session_history', 'session_history_media_info', 'session_history_metadata'):
success.append(delete_rows_from_table(table=table, row_ids=row_ids))
return all(success)
def delete_user_history(user_id=None):
if str(user_id).isdigit():
monitor_db = MonitorDatabase()
# Get all history associated with the user_id
result = monitor_db.select('SELECT id FROM session_history WHERE user_id = ?',
[user_id])
row_ids = [row['id'] for row in result]
logger.info(u"Tautulli Database :: Deleting all history for user_id %s from database." % user_id)
return delete_session_history_rows(row_ids=row_ids)
def delete_library_history(section_id=None):
if str(section_id).isdigit():
monitor_db = MonitorDatabase()
# Get all history associated with the section_id
result = monitor_db.select('SELECT session_history.id FROM session_history '
'JOIN session_history_metadata ON session_history.id = session_history_metadata.id '
'WHERE session_history_metadata.section_id = ?',
[section_id])
row_ids = [row['id'] for row in result]
logger.info(u"Tautulli Database :: Deleting all history for library section_id %s from database." % section_id)
return delete_session_history_rows(row_ids=row_ids)
def db_filename(filename=FILENAME):
""" Returns the filepath to the db """
@@ -72,6 +127,7 @@ def make_backup(cleanup=False, scheduler=False):
corrupt = ''
if not integrity:
corrupt = '.corrupt'
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})
if scheduler:
backup_file = 'tautulli.backup-{}{}.sched.db'.format(arrow.now().format('YYYYMMDDHHmmss'), corrupt)
@@ -186,7 +242,7 @@ class MonitorDatabase(object):
sql_results = self.action(query, args).fetchone()
if sql_results is None or sql_results == "":
return ""
return {}
return sql_results

View File

@@ -64,7 +64,7 @@ class DataFactory(object):
columns = [
'session_history.reference_id',
'session_history.id',
'session_history.id AS row_id',
'MAX(started) AS date',
'MIN(started) AS started',
'MAX(stopped) AS stopped',
@@ -116,7 +116,7 @@ class DataFactory(object):
columns_union = [
'NULL AS reference_id',
'NULL AS id',
'NULL AS row_id',
'started AS date',
'started',
'stopped',
@@ -228,7 +228,8 @@ class DataFactory(object):
platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
row = {'reference_id': item['reference_id'],
'id': item['id'],
'row_id': item['row_id'],
'id': item['row_id'],
'date': item['date'],
'started': item['started'],
'stopped': item['stopped'],
@@ -336,7 +337,6 @@ class DataFactory(object):
'user': '',
'friendly_name': '',
'platform': '',
'platform': '',
'live': item['live'],
'guid': item['guid'],
'row_id': item['id']
@@ -1415,7 +1415,7 @@ class DataFactory(object):
if rating_key:
logger.info(u"Tautulli DataFactory :: Deleting lookup info for rating_key %s from the database."
% (title, rating_key))
% rating_key)
result_themoviedb = monitor_db.action('DELETE FROM themoviedb_lookup WHERE rating_key = ?', [rating_key])
result_tvmaze = monitor_db.action('DELETE FROM tvmaze_lookup WHERE rating_key = ?', [rating_key])
result_musicbrainz = monitor_db.action('DELETE FROM musicbrainz_lookup WHERE rating_key = ?', [rating_key])
@@ -1562,22 +1562,6 @@ class DataFactory(object):
return key_list
def delete_session_history_rows(self, row_id=None):
monitor_db = database.MonitorDatabase()
if row_id.isdigit():
logger.info(u"Tautulli DataFactory :: Deleting row id %s from the session history database." % row_id)
session_history_del = \
monitor_db.action('DELETE FROM session_history WHERE id = ?', [row_id])
session_history_media_info_del = \
monitor_db.action('DELETE FROM session_history_media_info WHERE id = ?', [row_id])
session_history_metadata_del = \
monitor_db.action('DELETE FROM session_history_metadata WHERE id = ?', [row_id])
return 'Deleted rows %s.' % row_id
else:
return 'Unable to delete rows. Input row not valid.'
def update_metadata(self, old_key_list='', new_key_list='', media_type=''):
pms_connect = pmsconnect.PmsConnect()
monitor_db = database.MonitorDatabase()

View File

@@ -15,14 +15,12 @@
import arrow
import base64
import certifi
import cloudinary
from cloudinary.api import delete_resources_by_tag
from cloudinary.uploader import upload
from cloudinary.utils import cloudinary_url
import datetime
from functools import wraps
import geoip2.database, geoip2.errors
import hashlib
import imghdr
from itertools import izip_longest
@@ -30,19 +28,15 @@ import ipwhois, ipwhois.exceptions, ipwhois.utils
from IPy import IP
import json
import math
import maxminddb
from operator import itemgetter
import os
import re
import shlex
import shutil
import socket
import sys
import tarfile
import time
import unicodedata
import urllib
import urllib3
from xml.dom import minidom
import xmltodict
@@ -440,7 +434,11 @@ def create_https_certificates(ssl_cert, ssl_key):
This code is stolen from SickBeard (http://github.com/midgetspy/Sick-Beard).
"""
try:
from OpenSSL import crypto
except ImportError:
logger.error("Unable to generate self-signed certificates: Missing OpenSSL module.")
return False
from certgen import createKeyPair, createSelfSignedCertificate, TYPE_RSA
serial = int(time.time())
@@ -603,164 +601,6 @@ def is_valid_ip(address):
return False
def update_geoip_db():
if plexpy.CONFIG.GEOIP_DB_INSTALLED:
logger.info(u"Tautulli Helpers :: Checking for GeoLite2 database updates.")
now = int(time.time())
if now - plexpy.CONFIG.GEOIP_DB_INSTALLED >= plexpy.CONFIG.GEOIP_DB_UPDATE_DAYS * 24 * 60 * 60:
return install_geoip_db(update=True)
logger.info(u"Tautulli Helpers :: GeoLite2 database already updated within the last %s days."
% plexpy.CONFIG.GEOIP_DB_UPDATE_DAYS)
def install_geoip_db(update=False):
if not plexpy.CONFIG.MAXMIND_LICENSE_KEY:
logger.error(u"Tautulli Helpers :: Failed to download GeoLite2 database file from MaxMind: Missing MaxMindLicense Key")
return False
maxmind_db = 'GeoLite2-City'
maxmind_url = 'https://download.maxmind.com/app/geoip_download?edition_id={db}&suffix={{suffix}}&license_key={key}'.format(
db=maxmind_db, key=plexpy.CONFIG.MAXMIND_LICENSE_KEY)
geolite2_db_url = maxmind_url.format(suffix='tar.gz')
geolite2_md5_url = maxmind_url.format(suffix='tar.gz.md5')
geolite2_gz = maxmind_db + '.tar.gz'
geolite2_md5 = geolite2_gz + '.md5'
geolite2_db = maxmind_db + '.mmdb'
geolite2_db_path = plexpy.CONFIG.GEOIP_DB or os.path.join(plexpy.DATA_DIR, geolite2_db)
# Check path ends with .mmdb
if os.path.splitext(geolite2_db_path)[1] != os.path.splitext(geolite2_db)[1]:
geolite2_db_path = os.path.join(geolite2_db_path, geolite2_db)
temp_gz = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_gz)
temp_md5 = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_md5)
# Retrieve the GeoLite2 gzip file
logger.debug(u"Tautulli Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
try:
maxmind = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
with maxmind.request('GET', geolite2_db_url, preload_content=False) as r_db, open(temp_gz, 'wb') as f_db:
shutil.copyfileobj(r_db, f_db)
with maxmind.request('GET', geolite2_md5_url, preload_content=False) as r_md5, open(temp_md5, 'wb') as f_md5:
shutil.copyfileobj(r_md5, f_md5)
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s" % e)
return False
# Check MD5 hash for GeoLite2 tar.gz file
logger.debug(u"Tautulli Helpers :: Checking MD5 checksum for GeoLite2 gzip file...")
try:
hash_md5 = hashlib.md5()
with open(temp_gz, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
md5_hash = hash_md5.hexdigest()
with open(temp_md5, 'r') as f:
md5_checksum = f.read()
if md5_hash != md5_checksum:
logger.error(u"Tautulli Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
"Checksum: %s, file hash: %s" % (md5_checksum, md5_hash))
return False
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to generate MD5 checksum for GeoLite2 gzip file: %s" % e)
return False
# Extract the GeoLite2 database file
logger.debug(u"Tautulli Helpers :: Extracting GeoLite2 database...")
try:
mmdb = None
with tarfile.open(temp_gz, 'r:gz') as tar:
for member in tar.getmembers():
if geolite2_db in member.name:
member.name = os.path.basename(member.name)
tar.extractall(path=os.path.dirname(geolite2_db_path), members=[member])
mmdb = True
break
if not mmdb:
raise Exception("{} not found in gzip file.".format(geolite2_db))
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to extract the GeoLite2 database: %s" % e)
return False
# Delete temporary GeoLite2 gzip file
logger.debug(u"Tautulli Helpers :: Deleting temporary GeoLite2 gzip file...")
try:
os.remove(temp_gz)
os.remove(temp_md5)
except Exception as e:
logger.warn(u"Tautulli Helpers :: Failed to remove temporary GeoLite2 gzip file: %s" % e)
plexpy.CONFIG.__setattr__('GEOIP_DB', geolite2_db_path)
plexpy.CONFIG.__setattr__('GEOIP_DB_INSTALLED', int(time.time()))
plexpy.CONFIG.write()
logger.debug(u"Tautulli Helpers :: GeoLite2 database installed successfully.")
if not update:
plexpy.schedule_job(update_geoip_db, 'Update GeoLite2 database', hours=12, minutes=0, seconds=0)
return plexpy.CONFIG.GEOIP_DB_INSTALLED
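The checksum step above follows the usual streaming-MD5 pattern: hash the archive in small chunks so the whole file never has to sit in memory, then compare against the published digest. A minimal standalone sketch of that pattern (the function name and the trailing strip() are illustrative additions):
```
import hashlib

def md5_matches(file_path, expected_hex):
    # Hash the file in 4 KB chunks; iter() with a b"" sentinel stops
    # cleanly at EOF, so arbitrarily large files are fine.
    hash_md5 = hashlib.md5()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest() == expected_hex.strip()
```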
def uninstall_geoip_db():
logger.debug(u"Tautulli Helpers :: Uninstalling the GeoLite2 database...")
try:
os.remove(plexpy.CONFIG.GEOIP_DB)
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to uninstall the GeoLite2 database: %s" % e)
return False
plexpy.CONFIG.__setattr__('GEOIP_DB_INSTALLED', 0)
plexpy.CONFIG.write()
logger.debug(u"Tautulli Helpers :: GeoLite2 database uninstalled successfully.")
plexpy.schedule_job(update_geoip_db, 'Update GeoLite2 database', hours=0, minutes=0, seconds=0)
return True
def geoip_lookup(ip_address):
if not plexpy.CONFIG.GEOIP_DB_INSTALLED:
return 'GeoLite2 database not installed. Please install from the ' \
'<a href="settings?install_geoip=true">Settings</a> page.'
if not ip_address:
return 'No IP address provided.'
try:
reader = geoip2.database.Reader(plexpy.CONFIG.GEOIP_DB)
geo = reader.city(ip_address)
reader.close()
except ValueError as e:
return 'Invalid IP address provided: %s.' % ip_address
except IOError as e:
return 'Missing GeoLite2 database. Please reinstall from the ' \
'<a href="settings?install_geoip=true">Settings</a> page.'
except maxminddb.InvalidDatabaseError as e:
return 'Invalid GeoLite2 database. Please reinstall from the ' \
'<a href="settings?install_geoip=true">Settings</a> page.'
except geoip2.errors.AddressNotFoundError as e:
return '%s' % e
except Exception as e:
return 'Error: %s' % e
geo_info = {'continent': geo.continent.name,
'country': geo.country.name,
'region': geo.subdivisions.most_specific.name,
'city': geo.city.name,
'postal_code': geo.postal.code,
'timezone': geo.location.time_zone,
'latitude': geo.location.latitude,
'longitude': geo.location.longitude,
'accuracy': geo.location.accuracy_radius
}
return geo_info
def whois_lookup(ip_address):
nets = []
@@ -1024,13 +864,14 @@ def build_datatables_json(kwargs, dt_columns, default_sort_col=None):
return json.dumps(json_data)
def humanFileSize(bytes, si=False):
def humanFileSize(bytes, si=True):
if str(bytes).isdigit():
bytes = cast_to_float(bytes)
else:
return bytes
thresh = 1000 if si else 1024
#thresh = 1000 if si else 1024
thresh = 1024 # Always divide by 2^10 but display SI units
if bytes < thresh:
return str(bytes) + ' B'
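The change above keeps binary division (2^10 per step) while labelling the result with SI-style units. A self-contained sketch of that hybrid behaviour, with illustrative names and output formatting:
```
def human_file_size(num_bytes):
    # Always divide by 1024 per step, but display SI unit labels.
    units = ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
    num_bytes = float(num_bytes)
    thresh = 1024
    if num_bytes < thresh:
        return '{0:.0f} B'.format(num_bytes)
    for unit in units:
        num_bytes /= thresh
        if num_bytes < thresh:
            break
    return '{0:.1f} {1}'.format(num_bytes, unit)

print(human_file_size(123456789))  # 117.7 MB
```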
@@ -1260,8 +1101,10 @@ def mask_config_passwords(config):
return config
def bool_true(value):
if value is True or value == 1:
def bool_true(value, return_none=False):
if value is None and return_none:
return None
elif value is True or value == 1:
return True
elif isinstance(value, basestring) and value.lower() in ('1', 'true', 't', 'yes', 'y', 'on'):
return True
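bool_true now supports a tri-state result: with return_none=True, a missing value stays None instead of collapsing to False, which lets API callers distinguish "parameter not provided" from "explicitly disabled". A sketch of the full function, using str in place of the Python 2 basestring above:
```
def bool_true(value, return_none=False):
    # Tri-state: None passes through when requested, so callers can
    # tell "not provided" apart from "explicitly false".
    if value is None and return_none:
        return None
    elif value is True or value == 1:
        return True
    elif isinstance(value, str) and value.lower() in ('1', 'true', 't', 'yes', 'y', 'on'):
        return True
    return False

assert bool_true('on') is True
assert bool_true(None) is False
assert bool_true(None, return_none=True) is None
```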

View File

@@ -43,7 +43,12 @@ def refresh_libraries():
library_keys = []
new_keys = []
# Keep track of section_id to update is_active status
section_ids = [common.LIVE_TV_SECTION_ID] # Live TV library always considered active
for section in library_sections:
section_ids.append(helpers.cast_to_int(section['section_id']))
section_keys = {'server_id': server_id,
'section_id': section['section_id']}
section_values = {'server_id': server_id,
@@ -65,6 +70,10 @@ def refresh_libraries():
if result == 'insert':
new_keys.append(section['section_id'])
query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR ' \
'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids)))
monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids)
if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
plexpy.CONFIG.write()
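The is_active bookkeeping works by collecting every section id seen in the current refresh and then flipping is_active off for everything else in one parameterized UPDATE. A sketch of the same pattern against the sqlite3 standard library rather than Tautulli's MonitorDatabase wrapper:
```
import sqlite3

def flag_inactive_sections(conn, server_id, active_section_ids):
    # One '?' placeholder per id keeps the query parameterized even
    # though the id list length varies between refreshes.
    placeholders = ', '.join(['?'] * len(active_section_ids))
    query = ('UPDATE library_sections SET is_active = 0 '
             'WHERE server_id != ? OR section_id NOT IN ({0})').format(placeholders)
    conn.execute(query, [server_id] + list(active_section_ids))
    conn.commit()
```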
@@ -289,7 +298,9 @@ class Libraries(object):
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
columns = ['library_sections.section_id',
columns = ['library_sections.id AS row_id',
'library_sections.server_id',
'library_sections.section_id',
'library_sections.section_name',
'library_sections.section_type',
'library_sections.count',
@@ -303,7 +314,7 @@ class Libraries(object):
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
session_history.paused_counter END) AS duration',
'MAX(session_history.started) AS last_accessed',
'MAX(session_history.id) AS id',
'MAX(session_history.id) AS history_row_id',
'session_history_metadata.full_title AS last_played',
'session_history.rating_key',
'session_history_metadata.media_type',
@@ -322,7 +333,8 @@ class Libraries(object):
'session_history_metadata.guid',
'library_sections.do_notify',
'library_sections.do_notify_created',
'library_sections.keep_history'
'library_sections.keep_history',
'library_sections.is_active'
]
try:
query = data_tables.ssp_query(table_name='library_sections',
@@ -361,7 +373,9 @@ class Libraries(object):
else:
library_thumb = common.DEFAULT_COVER_THUMB
row = {'section_id': item['section_id'],
row = {'row_id': item['row_id'],
'server_id': item['server_id'],
'section_id': item['section_id'],
'section_name': item['section_name'],
'section_type': item['section_type'],
'count': item['count'],
@@ -372,7 +386,7 @@ class Libraries(object):
'plays': item['plays'],
'duration': item['duration'],
'last_accessed': item['last_accessed'],
'id': item['id'],
'history_row_id': item['history_row_id'],
'last_played': item['last_played'],
'rating_key': item['rating_key'],
'media_type': item['media_type'],
@@ -388,7 +402,8 @@ class Libraries(object):
'guid': item['guid'],
'do_notify': helpers.checked(item['do_notify']),
'do_notify_created': helpers.checked(item['do_notify_created']),
'keep_history': helpers.checked(item['keep_history'])
'keep_history': helpers.checked(item['keep_history']),
'is_active': item['is_active']
}
rows.append(row)
@@ -724,8 +739,10 @@ class Libraries(object):
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for set_config: %s." % e)
def get_details(self, section_id=None):
default_return = {'section_id': 0,
def get_details(self, section_id=None, server_id=None):
default_return = {'row_id': 0,
'server_id': '',
'section_id': 0,
'section_name': 'Local',
'section_type': '',
'library_thumb': common.DEFAULT_COVER_THUMB,
@@ -733,6 +750,7 @@ class Libraries(object):
'count': 0,
'parent_count': 0,
'child_count': 0,
'is_active': 1,
'do_notify': 0,
'do_notify_created': 0,
'keep_history': 1,
@@ -742,18 +760,22 @@ class Libraries(object):
if not section_id:
return default_return
def get_library_details(section_id=section_id):
if server_id is None:
server_id = plexpy.CONFIG.PMS_IDENTIFIER
def get_library_details(section_id=section_id, server_id=server_id):
monitor_db = database.MonitorDatabase()
try:
if str(section_id).isdigit():
query = 'SELECT section_id, section_name, section_type, count, parent_count, child_count, ' \
query = 'SELECT id AS row_id, server_id, section_id, section_name, section_type, ' \
'count, parent_count, child_count, ' \
'thumb AS library_thumb, custom_thumb_url AS custom_thumb, art AS library_art, ' \
'custom_art_url AS custom_art, ' \
'custom_art_url AS custom_art, is_active, ' \
'do_notify, do_notify_created, keep_history, deleted_section ' \
'FROM library_sections ' \
'WHERE section_id = ? '
result = monitor_db.select(query, args=[section_id])
'WHERE section_id = ? AND server_id = ? '
result = monitor_db.select(query, args=[section_id, server_id])
else:
result = []
except Exception as e:
@@ -775,7 +797,9 @@ class Libraries(object):
else:
library_art = item['library_art']
library_details = {'section_id': item['section_id'],
library_details = {'row_id': item['row_id'],
'server_id': item['server_id'],
'section_id': item['section_id'],
'section_name': item['section_name'],
'section_type': item['section_type'],
'library_thumb': library_thumb,
@@ -783,6 +807,7 @@ class Libraries(object):
'count': item['count'],
'parent_count': item['parent_count'],
'child_count': item['child_count'],
'is_active': item['is_active'],
'do_notify': item['do_notify'],
'do_notify_created': item['do_notify_created'],
'keep_history': item['keep_history'],
@@ -790,7 +815,7 @@ class Libraries(object):
}
return library_details
library_details = get_library_details(section_id=section_id)
library_details = get_library_details(section_id=section_id, server_id=server_id)
if library_details:
return library_details
@@ -801,7 +826,7 @@ class Libraries(object):
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
refresh_libraries()
library_details = get_library_details(section_id=section_id)
library_details = get_library_details(section_id=section_id, server_id=server_id)
if library_details:
return library_details
@@ -812,21 +837,25 @@ class Libraries(object):
# If there is no library data we must return something
return default_return
def get_watch_time_stats(self, section_id=None, grouping=None):
def get_watch_time_stats(self, section_id=None, grouping=None, query_days=None):
if not session.allow_session_library(section_id):
return []
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
if query_days and query_days is not None:
query_days = map(helpers.cast_to_int, query_days.split(','))
else:
query_days = [1, 7, 30, 0]
monitor_db = database.MonitorDatabase()
time_queries = [1, 7, 30, 0]
library_watch_time_stats = []
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
for days in time_queries:
for days in query_days:
try:
if days > 0:
if str(section_id).isdigit():
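The new query_days parameter accepts a comma-separated string and falls back to the usual 1/7/30/all-time windows; 0 means unbounded. A hypothetical standalone parser showing the intended behaviour (the real code goes through helpers.cast_to_int):
```
def parse_query_days(query_days=None):
    # "1,14" -> [1, 14]; 0 means all time; default windows otherwise.
    if query_days:
        return [int(d) for d in str(query_days).split(',') if d.strip().isdigit()]
    return [1, 7, 30, 0]

print(parse_query_days('1,14'))  # [1, 14]
print(parse_query_days())        # [1, 7, 30, 0]
```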
@@ -998,61 +1027,48 @@ class Libraries(object):
return libraries
def delete_all_history(self, section_id=None):
def delete(self, server_id=None, section_id=None, row_ids=None, purge_only=False):
monitor_db = database.MonitorDatabase()
try:
if section_id.isdigit():
logger.info(u"Tautulli Libraries :: Deleting all history for library id %s from database." % section_id)
session_history_media_info_del = \
monitor_db.action('DELETE FROM '
'session_history_media_info '
'WHERE session_history_media_info.id IN (SELECT session_history_media_info.id '
'FROM session_history_media_info '
'JOIN session_history_metadata ON session_history_media_info.id = session_history_metadata.id '
'WHERE session_history_metadata.section_id = ?)', [section_id])
session_history_del = \
monitor_db.action('DELETE FROM '
'session_history '
'WHERE session_history.id IN (SELECT session_history.id '
'FROM session_history '
'JOIN session_history_metadata ON session_history.id = session_history_metadata.id '
'WHERE session_history_metadata.section_id = ?)', [section_id])
session_history_metadata_del = \
monitor_db.action('DELETE FROM '
'session_history_metadata '
'WHERE session_history_metadata.section_id = ?', [section_id])
if row_ids and row_ids is not None:
row_ids = map(helpers.cast_to_int, row_ids.split(','))
return 'Deleted all items for section_id %s.' % section_id
# Get the server_ids and section_ids corresponding to the row_ids
result = monitor_db.select('SELECT server_id, section_id FROM library_sections '
'WHERE id IN ({})'.format(','.join(['?'] * len(row_ids))), row_ids)
success = []
for library in result:
success.append(self.delete(server_id=library['server_id'], section_id=library['section_id'],
purge_only=purge_only))
return all(success)
elif str(section_id).isdigit():
server_id = server_id or plexpy.CONFIG.PMS_IDENTIFIER
if server_id == plexpy.CONFIG.PMS_IDENTIFIER:
delete_success = database.delete_library_history(section_id=section_id)
else:
return 'Unable to delete items, section_id not valid.'
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for delete_all_history: %s." % e)
logger.warn(u"Tautulli Libraries :: Library history not deleted for library section_id %s "
u"because library server_id %s does not match Plex server identifier %s."
% (section_id, server_id, plexpy.CONFIG.PMS_IDENTIFIER))
delete_success = True
def delete(self, section_id=None):
monitor_db = database.MonitorDatabase()
try:
if section_id.isdigit():
self.delete_all_history(section_id)
logger.info(u"Tautulli Libraries :: Deleting library with id %s from database." % section_id)
monitor_db.action('UPDATE library_sections SET deleted_section = 1 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET keep_history = 0 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET do_notify = 0 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET do_notify_created = 0 WHERE section_id = ?', [section_id])
library_cards = plexpy.CONFIG.HOME_LIBRARY_CARDS
if section_id in library_cards:
library_cards.remove(section_id)
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_cards)
plexpy.CONFIG.write()
return 'Deleted library with id %s.' % section_id
if purge_only:
return delete_success
else:
return 'Unable to delete library, section_id not valid.'
logger.info(u"Tautulli Libraries :: Deleting library with server_id %s and section_id %s from database."
% (server_id, section_id))
try:
monitor_db.action('UPDATE library_sections '
'SET deleted_section = 1, keep_history = 0, do_notify = 0, do_notify_created = 0 '
'WHERE server_id = ? AND section_id = ?', [server_id, section_id])
return delete_success
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for delete: %s." % e)
else:
return False
def undelete(self, section_id=None, section_name=None):
monitor_db = database.MonitorDatabase()
@@ -1062,10 +1078,10 @@ class Libraries(object):
result = monitor_db.select(query=query, args=[section_id])
if result:
logger.info(u"Tautulli Libraries :: Re-adding library with id %s to database." % section_id)
monitor_db.action('UPDATE library_sections SET deleted_section = 0 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET keep_history = 1 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET do_notify = 1 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET do_notify_created = 1 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections '
'SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 '
'WHERE section_id = ?',
[section_id])
return True
else:
return False
@@ -1075,10 +1091,10 @@ class Libraries(object):
result = monitor_db.select(query=query, args=[section_name])
if result:
logger.info(u"Tautulli Libraries :: Re-adding library with name %s to database." % section_name)
monitor_db.action('UPDATE library_sections SET deleted_section = 0 WHERE section_name = ?', [section_name])
monitor_db.action('UPDATE library_sections SET keep_history = 1 WHERE section_name = ?', [section_name])
monitor_db.action('UPDATE library_sections SET do_notify = 1 WHERE section_name = ?', [section_name])
monitor_db.action('UPDATE library_sections SET do_notify_created = 1 WHERE section_name = ?', [section_name])
monitor_db.action('UPDATE library_sections '
'SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 '
'WHERE section_name = ?',
[section_name])
return True
else:
return False

View File

@@ -14,6 +14,7 @@
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
import time
import threading
import plexpy
import database
@@ -22,6 +23,24 @@ import logger
TEMP_DEVICE_TOKEN = None
INVALIDATE_TIMER = None
def set_temp_device_token(token=None):
global TEMP_DEVICE_TOKEN
TEMP_DEVICE_TOKEN = token
if TEMP_DEVICE_TOKEN is not None:
global INVALIDATE_TIMER
if INVALIDATE_TIMER:
INVALIDATE_TIMER.cancel()
invalidate_time = 5 * 60 # 5 minutes
INVALIDATE_TIMER = threading.Timer(invalidate_time, set_temp_device_token, args=[None])
INVALIDATE_TIMER.start()
def get_temp_device_token():
return TEMP_DEVICE_TOKEN
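The temporary token is invalidated by a one-shot threading.Timer that calls the setter again with None; re-arming cancels any previous timer so the newest token always gets the full window. The same pattern in isolation (names and the daemon flag are illustrative):
```
import threading

_TOKEN = None
_TIMER = None

def set_token(token=None, ttl=5 * 60):
    global _TOKEN, _TIMER
    _TOKEN = token
    if token is not None:
        # Re-arm: cancel any pending expiry so the newest token
        # always gets the full ttl window.
        if _TIMER:
            _TIMER.cancel()
        _TIMER = threading.Timer(ttl, set_token, args=[None])
        _TIMER.daemon = True
        _TIMER.start()
```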
def get_mobile_devices(device_id=None, device_token=None):

View File

@@ -284,7 +284,7 @@ def send_newsletter(newsletter_id=None, subject=None, body=None, message=None, n
email_config=newsletter_config['email_config'],
subject=subject,
body=body,
messsage=message)
message=message)
return agent.send()
else:
logger.debug(u"Tautulli Newsletters :: Notification requested but no newsletter_id received.")

View File

@@ -547,6 +547,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
stream_count = len(sessions)
user_stream_count = len(user_sessions)
lan_bandwidth = sum(helpers.cast_to_int(s['bandwidth']) for s in sessions if s['location'] == 'lan')
wan_bandwidth = sum(helpers.cast_to_int(s['bandwidth']) for s in sessions if s['location'] != 'lan')
total_bandwidth = lan_bandwidth + wan_bandwidth
# Generate a combined transcode decision value
if session.get('stream_video_decision', '') == 'transcode' or session.get('stream_audio_decision', '') == 'transcode':
transcode_decision = 'Transcode'
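The bandwidth parameters are simple aggregations over the active session list, split on each session's location field. A sketch with sample data (the field names mirror what the sessions expose; units are whatever Plex reports for the stream, presumably kbps):
```
def bandwidth_totals(sessions):
    # LAN vs WAN is decided purely by the session's 'location' field.
    lan = sum(int(s.get('bandwidth') or 0) for s in sessions if s.get('location') == 'lan')
    wan = sum(int(s.get('bandwidth') or 0) for s in sessions if s.get('location') != 'lan')
    return lan, wan, lan + wan

sessions = [{'location': 'lan', 'bandwidth': '4000'},
            {'location': 'wan', 'bandwidth': '12000'}]
print(bandwidth_totals(sessions))  # (4000, 12000, 16000)
```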
@@ -632,6 +636,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
themoviedb_info = lookup_themoviedb_by_id(rating_key=lookup_key,
thetvdb_id=notify_params.get('thetvdb_id'),
imdb_id=notify_params.get('imdb_id'))
themoviedb_info.pop('rating_key', None)
notify_params.update(themoviedb_info)
# Get TVmaze info (for tv shows only)
@@ -647,6 +652,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
tvmaze_info = lookup_tvmaze_by_id(rating_key=lookup_key,
thetvdb_id=notify_params.get('thetvdb_id'),
imdb_id=notify_params.get('imdb_id'))
tvmaze_info.pop('rating_key', None)
notify_params.update(tvmaze_info)
if tvmaze_info.get('thetvdb_id'):
@@ -667,7 +673,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
tracks = notify_params['children_count']
else:
musicbrainz_type = 'recording'
artist = notify_params['original_title']
artist = notify_params['original_title'] or notify_params['grandparent_title']
release = notify_params['parent_title']
recording = notify_params['title']
tracks = notify_params['children_count']
@@ -676,6 +682,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
musicbrainz_info = lookup_musicbrainz_info(musicbrainz_type=musicbrainz_type, rating_key=rating_key,
artist=artist, release=release, recording=recording, tracks=tracks,
tnum=tnum)
musicbrainz_info.pop('rating_key', None)
notify_params.update(musicbrainz_info)
if notify_params['media_type'] in ('movie', 'show', 'artist'):
@@ -813,6 +820,9 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'direct_plays': transcode_decision_count['direct play'],
'direct_streams': transcode_decision_count['copy'],
'transcodes': transcode_decision_count['transcode'],
'total_bandwidth': total_bandwidth,
'lan_bandwidth': lan_bandwidth,
'wan_bandwidth': wan_bandwidth,
'user_streams': user_stream_count,
'user_direct_plays': user_transcode_decision_count['direct play'],
'user_direct_streams': user_transcode_decision_count['copy'],
@@ -820,6 +830,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'user': notify_params['friendly_name'],
'username': notify_params['user'],
'user_email': notify_params['email'],
'user_thumb': notify_params['user_thumb'],
'device': notify_params['device'],
'platform': notify_params['platform'],
'product': notify_params['product'],
@@ -832,6 +843,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'progress_duration': view_offset,
'progress_time': arrow.get(view_offset * 60).format(duration_format),
'progress_percent': helpers.get_percent(view_offset, duration),
'initial_stream': notify_params['initial_stream'],
'transcode_decision': transcode_decision,
'video_decision': notify_params['video_decision'],
'audio_decision': notify_params['audio_decision'],
@@ -1029,6 +1041,7 @@ def build_server_notify_params(notify_action=None, **kwargs):
pms_download_info = defaultdict(str, kwargs.pop('pms_download_info', {}))
plexpy_download_info = defaultdict(str, kwargs.pop('plexpy_download_info', {}))
remote_access_info = defaultdict(str, kwargs.pop('remote_access_info', {}))
now = arrow.now()
now_iso = now.isocalendar()
@@ -1060,6 +1073,14 @@ def build_server_notify_params(notify_action=None, **kwargs):
'timestamp': now.format(time_format),
'unixtime': int(time.time()),
'utctime': helpers.utc_now_iso(),
# Plex remote access parameters
'remote_access_mapping_state': remote_access_info['mapping_state'],
'remote_access_mapping_error': remote_access_info['mapping_error'],
'remote_access_public_address': remote_access_info['public_address'],
'remote_access_public_port': remote_access_info['public_port'],
'remote_access_private_address': remote_access_info['private_address'],
'remote_access_private_port': remote_access_info['private_port'],
'remote_access_reason': remote_access_info['reason'],
# Plex Media Server update parameters
'update_version': pms_download_info['version'],
'update_url': pms_download_info['download_url'],
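Wrapping the incoming kwargs in defaultdict(str) is what makes these parameter blocks safe: a notification fired without, say, remote_access_info still renders every remote_access_* parameter as an empty string instead of raising KeyError. The idiom in isolation:
```
from collections import defaultdict

remote_access_info = defaultdict(str, {'mapping_state': 'waiting'})

params = {
    'remote_access_mapping_state': remote_access_info['mapping_state'],
    'remote_access_reason': remote_access_info['reason'],  # missing -> ''
}
print(params)  # {'remote_access_mapping_state': 'waiting', 'remote_access_reason': ''}
```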

View File

@@ -66,7 +66,6 @@ import users
BROWSER_NOTIFIERS = {}
AGENT_IDS = {'growl': 0,
'prowl': 1,
'xbmc': 2,
@@ -92,7 +91,8 @@ AGENT_IDS = {'growl': 0,
'groupme': 22,
'mqtt': 23,
'zapier': 24,
'webhook': 25
'webhook': 25,
'plexmobileapp': 26
}
DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': ''}]
@@ -101,103 +101,159 @@ DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': ''}]
def available_notification_agents():
agents = [{'label': 'Tautulli Remote Android App',
'name': 'androidapp',
'id': AGENT_IDS['androidapp']
'id': AGENT_IDS['androidapp'],
'class': ANDROIDAPP,
'action_types': ('all',)
},
{'label': 'Boxcar',
'name': 'boxcar',
'id': AGENT_IDS['boxcar']
'id': AGENT_IDS['boxcar'],
'class': BOXCAR,
'action_types': ('all',)
},
{'label': 'Browser',
'name': 'browser',
'id': AGENT_IDS['browser']
'id': AGENT_IDS['browser'],
'class': BROWSER,
'action_types': ('all',)
},
{'label': 'Discord',
'name': 'discord',
'id': AGENT_IDS['discord'],
'class': DISCORD,
'action_types': ('all',)
},
{'label': 'Email',
'name': 'email',
'id': AGENT_IDS['email']
'id': AGENT_IDS['email'],
'class': EMAIL,
'action_types': ('all',)
},
{'label': 'Facebook',
'name': 'facebook',
'id': AGENT_IDS['facebook']
'id': AGENT_IDS['facebook'],
'class': FACEBOOK,
'action_types': ('all',)
},
{'label': 'GroupMe',
'name': 'groupme',
'id': AGENT_IDS['groupme']
'id': AGENT_IDS['groupme'],
'class': GROUPME,
'action_types': ('all',)
},
{'label': 'Growl',
'name': 'growl',
'id': AGENT_IDS['growl']
'id': AGENT_IDS['growl'],
'class': GROWL,
'action_types': ('all',)
},
{'label': 'Hipchat',
'name': 'hipchat',
'id': AGENT_IDS['hipchat']
'id': AGENT_IDS['hipchat'],
'class': HIPCHAT,
'action_types': ('all',)
},
{'label': 'IFTTT',
'name': 'ifttt',
'id': AGENT_IDS['ifttt']
'id': AGENT_IDS['ifttt'],
'class': IFTTT,
'action_types': ('all',)
},
{'label': 'Join',
'name': 'join',
'id': AGENT_IDS['join']
'id': AGENT_IDS['join'],
'class': JOIN,
'action_types': ('all',)
},
{'label': 'Kodi',
'name': 'xbmc',
'id': AGENT_IDS['xbmc']
'id': AGENT_IDS['xbmc'],
'class': XBMC,
'action_types': ('all',)
},
# {'label': 'Notify My Android',
# 'name': 'nma',
# 'id': AGENT_IDS['nma']
# 'id': AGENT_IDS['nma'],
# 'class': NMA,
# 'action_types': ('all',)
# },
{'label': 'MQTT',
'name': 'mqtt',
'id': AGENT_IDS['mqtt']
'id': AGENT_IDS['mqtt'],
'class': MQTT,
'action_types': ('all',)
},
{'label': 'Plex Home Theater',
'name': 'plex',
'id': AGENT_IDS['plex']
'id': AGENT_IDS['plex'],
'class': PLEX,
'action_types': ('all',)
},
{'label': 'Plex Android / iOS App',
'name': 'plexmobileapp',
'id': AGENT_IDS['plexmobileapp'],
'class': PLEXMOBILEAPP,
'action_types': ('on_play', 'on_created', 'on_newdevice')
},
{'label': 'Prowl',
'name': 'prowl',
'id': AGENT_IDS['prowl']
'id': AGENT_IDS['prowl'],
'class': PROWL,
'action_types': ('all',)
},
# {'label': 'Pushalot',
# 'name': 'pushalot',
# 'id': AGENT_IDS['pushalot']
# 'id': AGENT_IDS['pushalot'],
# 'class': PUSHALOT,
# 'action_types': ('all',)
# },
{'label': 'Pushbullet',
'name': 'pushbullet',
'id': AGENT_IDS['pushbullet']
'id': AGENT_IDS['pushbullet'],
'class': PUSHBULLET,
'action_types': ('all',)
},
{'label': 'Pushover',
'name': 'pushover',
'id': AGENT_IDS['pushover']
'id': AGENT_IDS['pushover'],
'class': PUSHOVER,
'action_types': ('all',)
},
{'label': 'Script',
'name': 'scripts',
'id': AGENT_IDS['scripts']
'id': AGENT_IDS['scripts'],
'class': SCRIPTS,
'action_types': ('all',)
},
{'label': 'Slack',
'name': 'slack',
'id': AGENT_IDS['slack']
'id': AGENT_IDS['slack'],
'class': SLACK,
'action_types': ('all',)
},
{'label': 'Telegram',
'name': 'telegram',
'id': AGENT_IDS['telegram']
'id': AGENT_IDS['telegram'],
'class': TELEGRAM,
'action_types': ('all',)
},
{'label': 'Twitter',
'name': 'twitter',
'id': AGENT_IDS['twitter']
'id': AGENT_IDS['twitter'],
'class': TWITTER,
'action_types': ('all',)
},
{'label': 'Webhook',
'name': 'webhook',
'id': AGENT_IDS['webhook']
'id': AGENT_IDS['webhook'],
'class': WEBHOOK,
'action_types': ('all',)
},
{'label': 'Zapier',
'name': 'zapier',
'id': AGENT_IDS['zapier']
'id': AGENT_IDS['zapier'],
'class': ZAPIER,
'action_types': ('all',)
}
]
@@ -205,13 +261,15 @@ def available_notification_agents():
if OSX().validate():
agents.append({'label': 'macOS Notification Center',
'name': 'osx',
'id': AGENT_IDS['osx']
'id': AGENT_IDS['osx'],
'class': OSX,
'action_types': ('all',)
})
return agents
def available_notification_actions():
def available_notification_actions(agent_id=None):
actions = [{'label': 'Playback Start',
'name': 'on_play',
'description': 'Trigger a notification when a stream is started.',
@@ -312,7 +370,7 @@ def available_notification_actions():
'name': 'on_extdown',
'description': 'Trigger a notification when the Plex Media Server cannot be reached externally.',
'subject': 'Tautulli ({server_name})',
'body': 'The Plex Media Server remote access is down.',
'body': 'The Plex Media Server remote access is down. ({remote_access_reason})',
'icon': 'fa-server',
'media_types': ('server',)
},
@@ -339,81 +397,42 @@ def available_notification_actions():
'body': 'An update is available for Tautulli (version {tautulli_update_version}).',
'icon': 'fa-refresh',
'media_types': ('server',)
},
{'label': 'Tautulli Database Corruption',
'name': 'on_plexpydbcorrupt',
'description': 'Trigger a notification if Tautulli database corruption is detected when backing up the database.',
'subject': 'Tautulli ({server_name})',
'body': 'Tautulli database corruption detected. Automatic cleanup of database backups is suspended.',
'icon': 'fa-database',
'media_types': ('server',)
}
]
if str(agent_id).isdigit():
action_types = get_notify_agents(return_dict=True).get(int(agent_id), {}).get('action_types', [])
if 'all' not in action_types:
actions = [a for a in actions if a['name'] in action_types]
return actions
def get_agent_class(agent_id=None, config=None):
if str(agent_id).isdigit():
agent_id = int(agent_id)
if agent_id == 0:
return GROWL(config=config)
elif agent_id == 1:
return PROWL(config=config)
elif agent_id == 2:
return XBMC(config=config)
elif agent_id == 3:
return PLEX(config=config)
elif agent_id == 4:
return NMA(config=config)
elif agent_id == 5:
return PUSHALOT(config=config)
elif agent_id == 6:
return PUSHBULLET(config=config)
elif agent_id == 7:
return PUSHOVER(config=config)
elif agent_id == 8:
return OSX(config=config)
elif agent_id == 9:
return BOXCAR(config=config)
elif agent_id == 10:
return EMAIL(config=config)
elif agent_id == 11:
return TWITTER(config=config)
elif agent_id == 12:
return IFTTT(config=config)
elif agent_id == 13:
return TELEGRAM(config=config)
elif agent_id == 14:
return SLACK(config=config)
elif agent_id == 15:
return SCRIPTS(config=config)
elif agent_id == 16:
return FACEBOOK(config=config)
elif agent_id == 17:
return BROWSER(config=config)
elif agent_id == 18:
return JOIN(config=config)
elif agent_id == 19:
return HIPCHAT(config=config)
elif agent_id == 20:
return DISCORD(config=config)
elif agent_id == 21:
return ANDROIDAPP(config=config)
elif agent_id == 22:
return GROUPME(config=config)
elif agent_id == 23:
return MQTT(config=config)
elif agent_id == 24:
return ZAPIER(config=config)
elif agent_id == 25:
return WEBHOOK(config=config)
else:
return Notifier(config=config)
agent = get_notify_agents(return_dict=True).get(int(agent_id), {}).get('class', Notifier)
return agent(config=config)
else:
return None
def get_notify_agents():
def get_notify_agents(return_dict=False):
if return_dict:
return {a['id']: a for a in available_notification_agents()}
return tuple(a['name'] for a in sorted(available_notification_agents(), key=lambda k: k['label']))
def get_notify_actions(return_dict=False):
if return_dict:
return {a.pop('name'): a for a in available_notification_actions()}
return {a['name']: a for a in available_notification_actions()}
return tuple(a['name'] for a in available_notification_actions())
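With class and action_types carried in the agent definitions, the long if/elif chain collapses into a dictionary lookup with a fallback, and trigger filtering becomes a list comprehension. The lookup pattern in miniature (stub classes stand in for the real notifiers):
```
class Notifier(object):
    def __init__(self, config=None):
        self.config = config

class Growl(Notifier):
    pass

AGENTS = [{'id': 0, 'name': 'growl', 'class': Growl, 'action_types': ('all',)}]

def get_agent_class(agent_id, config=None):
    # Unknown ids fall back to the base Notifier instead of an
    # ever-growing elif ladder.
    registry = {a['id']: a for a in AGENTS}
    cls = registry.get(int(agent_id), {}).get('class', Notifier)
    return cls(config=config)

print(type(get_agent_class(0)).__name__)   # Growl
print(type(get_agent_class(99)).__name__)  # Notifier
```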
@@ -521,7 +540,7 @@ def add_notifier_config(agent_id=None, **kwargs):
% agent_id)
return False
agent = next((a for a in available_notification_agents() if a['id'] == agent_id), None)
agent = get_notify_agents(return_dict=True).get(agent_id, None)
if not agent:
logger.error(u"Tautulli Notifiers :: Unable to retrieve new notification agent: invalid agent_id %s."
@@ -570,7 +589,7 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):
% agent_id)
return False
agent = next((a for a in available_notification_agents() if a['id'] == agent_id), None)
agent = get_notify_agents(return_dict=True).get(agent_id, None)
if not agent:
logger.error(u"Tautulli Notifiers :: Unable to retrieve existing notification agent: invalid agent_id %s."
@@ -2587,6 +2606,190 @@ class PLEX(Notifier):
return config_option
class PLEXMOBILEAPP(Notifier):
"""
Plex Mobile App Notifications
"""
NAME = 'Plex Android / iOS App'
NOTIFICATION_URL = 'https://notifications.plex.tv/api/v1/notifications'
_DEFAULT_CONFIG = {'user_ids': [],
'tap_action': 'preplay',
}
def __init__(self, config=None):
super(PLEXMOBILEAPP, self).__init__(config=config)
self.configurations = {
'created': {'group': 'media', 'identifier': 'tv.plex.notification.library.new'},
'play': {'group': 'media', 'identifier': 'tv.plex.notification.playback.started'},
'newdevice': {'group': 'admin', 'identifier': 'tv.plex.notification.device.new'}
}
def agent_notify(self, subject='', body='', action='', **kwargs):
if action not in self.configurations and not action.startswith('test'):
logger.error(u"Tautulli Notifiers :: Notification action %s not allowed for %s." % (action, self.NAME))
return
if action == 'test':
tests = []
for configuration in self.configurations:
tests.append(self.agent_notify(subject=subject, body=body, action='test_'+configuration))
return all(tests)
configuration_action = action.split('test_')[-1]
# No subject is sent so the notification always shows up regardless of the client's selected filters
# icon can be info, warning, or error
# play = true to start playing when tapping the notification
# Send the minimal amount of data necessary through Plex servers
data = {
'group': self.configurations[configuration_action]['group'],
'identifier': self.configurations[configuration_action]['identifier'],
'to': self.config['user_ids'],
'data': {
'provider': {
'identifier': plexpy.CONFIG.PMS_IDENTIFIER,
'title': plexpy.CONFIG.PMS_NAME
}
}
}
pretty_metadata = PrettyMetadata(kwargs.get('parameters'))
if action.startswith('test'):
data['data']['player'] = {
'title': 'Device',
'platform': 'Platform',
'machineIdentifier': 'Tautulli'
}
data['data']['user'] = {
'title': 'User',
'id': 0
}
data['metadata'] = {
'type': 'movie',
'title': subject,
'year': body
}
elif action in ('play', 'newdevice'):
data['data']['player'] = {
'title': pretty_metadata.parameters['player'],
'platform': pretty_metadata.parameters['platform'],
'machineIdentifier': pretty_metadata.parameters['machine_id']
}
data['data']['user'] = {
'title': pretty_metadata.parameters['user'],
'id': pretty_metadata.parameters['user_id'],
'thumb': pretty_metadata.parameters['user_thumb'],
}
elif action == 'created':
# No additional data required for recently added
pass
else:
logger.error(u"Tautulli Notifiers :: Notification action %s not supported for %s." % (action, self.NAME))
return
if data['group'] == 'media' and not action.startswith('test'):
media_type = pretty_metadata.media_type
uri_rating_key = None
if media_type == 'movie':
metadata = {
'type': media_type,
'title': pretty_metadata.parameters['title'],
'year': pretty_metadata.parameters['year'],
'thumb': pretty_metadata.parameters['thumb']
}
elif media_type == 'show':
metadata = {
'type': media_type,
'title': pretty_metadata.parameters['show_name'],
'thumb': pretty_metadata.parameters['thumb']
}
elif media_type == 'season':
metadata = {
'type': 'show',
'title': pretty_metadata.parameters['show_name'],
'thumb': pretty_metadata.parameters['thumb'],
}
data['data']['count'] = pretty_metadata.parameters['episode_count']
elif media_type == 'episode':
metadata = {
'type': media_type,
'title': pretty_metadata.parameters['episode_name'],
'grandparentTitle': pretty_metadata.parameters['show_name'],
'index': pretty_metadata.parameters['episode_num'],
'parentIndex': pretty_metadata.parameters['season_num'],
'grandparentThumb': pretty_metadata.parameters['grandparent_thumb']
}
elif media_type == 'artist':
metadata = {
'type': media_type,
'title': pretty_metadata.parameters['artist_name'],
'thumb': pretty_metadata.parameters['thumb']
}
elif media_type == 'album':
metadata = {
'type': media_type,
'title': pretty_metadata.parameters['album_name'],
'year': pretty_metadata.parameters['year'],
'parentTitle': pretty_metadata.parameters['artist_name'],
'thumb': pretty_metadata.parameters['thumb'],
}
elif media_type == 'track':
metadata = {
'type': 'album',
'title': pretty_metadata.parameters['album_name'],
'year': pretty_metadata.parameters['year'],
'parentTitle': pretty_metadata.parameters['artist_name'],
'thumb': pretty_metadata.parameters['parent_thumb']
}
uri_rating_key = pretty_metadata.parameters['parent_rating_key']
else:
logger.error(u"Tautulli Notifiers :: Media type %s not supported for %s." % (media_type, self.NAME))
return
data['metadata'] = metadata
data['uri'] = 'server://{}/com.plexapp.plugins.library/library/metadata/{}'.format(
plexpy.CONFIG.PMS_IDENTIFIER, uri_rating_key or pretty_metadata.parameters['rating_key']
)
data['play'] = self.config['tap_action'] == 'play'
headers = {'X-Plex-Token': plexpy.CONFIG.PMS_TOKEN}
return self.make_request(self.NOTIFICATION_URL, headers=headers, json=data)
def get_users(self):
user_ids = {u['user_id']: u['friendly_name'] for u in users.Users().get_users() if u['user_id']}
user_ids[''] = ''
return user_ids
def _return_config_options(self):
config_option = [{'label': 'Plex User(s)',
'value': self.config['user_ids'],
'name': 'plexmobileapp_user_ids',
'description': 'Select which Plex User(s) to receive notifications.<br>'
'Note: The user(s) must have notifications enabled '
'for the matching Tautulli triggers in their Plex mobile app.',
'input_type': 'select',
'select_options': self.get_users()
},
{'label': 'Notification Tap Action',
'value': self.config['tap_action'],
'name': 'plexmobileapp_tap_action',
'description': 'Set the action when tapping on the notification.',
'input_type': 'select',
'select_options': {'preplay': 'Go to media pre-play screen',
'play': 'Start playing the media'}
},
]
return config_option
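Put together, the request body agent_notify builds for an on_play event looks roughly like the following (every value here is a placeholder); it is POSTed as JSON to NOTIFICATION_URL with the server's X-Plex-Token header:
```
data = {
    'group': 'media',
    'identifier': 'tv.plex.notification.playback.started',
    'to': ['12345'],  # Plex user ids selected in the agent config
    'data': {
        'provider': {'identifier': '<pms_identifier>', 'title': '<pms_name>'},
        'player': {'title': 'Living Room TV', 'platform': 'Roku',
                   'machineIdentifier': '<machine_id>'},
        'user': {'title': 'someuser', 'id': 12345, 'thumb': '<user_thumb>'},
    },
    'metadata': {'type': 'movie', 'title': 'Example Movie', 'year': 2020,
                 'thumb': '<thumb>'},
    'uri': 'server://<pms_identifier>/com.plexapp.plugins.library/library/metadata/<rating_key>',
    'play': False,  # True when tap_action is set to 'play'
}
```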
class PROWL(Notifier):
"""
Prowl notifications.
@@ -3060,7 +3263,8 @@ class SCRIPTS(Notifier):
'TAUTULLI_URL': helpers.get_plexpy_url(hostname='localhost'),
'TAUTULLI_PUBLIC_URL': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT,
'TAUTULLI_APIKEY': plexpy.CONFIG.API_KEY,
'TAUTULLI_ENCODING': plexpy.SYS_ENCODING
'TAUTULLI_ENCODING': plexpy.SYS_ENCODING,
'TAUTULLI_PYTHON_VERSION': '.'.join(map(str, plexpy.PYTHON_VERSION))
})
if user_id:

View File

@@ -377,6 +377,14 @@ class PlexTV(object):
return request
def get_plextv_geoip(self, ip_address='', output_format=''):
uri = '/api/v2/geoip?ip_address=%s' % ip_address
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_full_users_list(self):
own_account = self.get_plextv_user_details(output_format='xml')
friends_list = self.get_plextv_friends(output_format='xml')
@@ -396,6 +404,7 @@ class PlexTV(object):
"username": helpers.get_xml_attr(a, 'username'),
"thumb": helpers.get_xml_attr(a, 'thumb'),
"email": helpers.get_xml_attr(a, 'email'),
"is_active": 1,
"is_admin": 1,
"is_home_user": helpers.get_xml_attr(a, 'home'),
"is_allow_sync": 1,
@@ -423,6 +432,7 @@ class PlexTV(object):
"username": helpers.get_xml_attr(a, 'title'),
"thumb": helpers.get_xml_attr(a, 'thumb'),
"email": helpers.get_xml_attr(a, 'email'),
"is_active": 1,
"is_admin": 0,
"is_home_user": helpers.get_xml_attr(a, 'home'),
"is_allow_sync": helpers.get_xml_attr(a, 'allowSync'),
@@ -921,3 +931,35 @@ class PlexTV(object):
"user_token": helpers.get_xml_attr(a, 'authToken')
}
return account_details
def get_geoip_lookup(self, ip_address=''):
if not ip_address or not helpers.is_valid_ip(ip_address):
return
geoip_data = self.get_plextv_geoip(ip_address=ip_address, output_format='xml')
try:
xml_head = geoip_data.getElementsByTagName('location')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_geoip_lookup: %s." % e)
return None
for a in xml_head:
coordinates = helpers.get_xml_attr(a, 'coordinates').split(',')
latitude = longitude = None
if len(coordinates) == 2:
latitude, longitude = [helpers.cast_to_float(c) for c in coordinates]
geo_info = {"code": helpers.get_xml_attr(a, 'code') or None,
"country": helpers.get_xml_attr(a, 'country') or None,
"region": helpers.get_xml_attr(a, 'subdivisions') or None,
"city": helpers.get_xml_attr(a, 'city') or None,
"postal_code": helpers.get_xml_attr(a, 'postal_code') or None,
"timezone": helpers.get_xml_attr(a, 'time_zone') or None,
"latitude": latitude,
"longitude": longitude,
"continent": None, # keep for backwards compatibility with GeoLite2
"accuracy": None # keep for backwards compatibility with GeoLite2
}
return geo_info
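The Plex.tv response is a small XML document with a single location element; the only parsing subtlety is splitting the combined coordinates attribute into latitude and longitude. A sketch against a made-up sample response, using minidom directly instead of the helpers wrapper:
```
from xml.dom import minidom

sample = ('<location code="US" country="United States" city="Los Angeles" '
          'time_zone="America/Los_Angeles" coordinates="34.0522,-118.2437"/>')

doc = minidom.parseString(sample)
for a in doc.getElementsByTagName('location'):
    coordinates = (a.getAttribute('coordinates') or '').split(',')
    latitude = longitude = None
    if len(coordinates) == 2:
        latitude, longitude = [float(c) for c in coordinates]
    print(a.getAttribute('city'), latitude, longitude)
    # Los Angeles 34.0522 -118.2437
```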

View File

@@ -2643,7 +2643,8 @@ class PmsConnect(object):
'agent': library['agent'],
'thumb': library['thumb'],
'art': library['art'],
'count': children_list['library_count']
'count': children_list['library_count'],
'is_active': 1
}
if section_type == 'show':
@@ -2962,10 +2963,26 @@ class PmsConnect(object):
for a in xml_head:
server_response = {'mapping_state': helpers.get_xml_attr(a, 'mappingState'),
'mapping_error': helpers.get_xml_attr(a, 'mappingError'),
'sign_in_state': helpers.get_xml_attr(a, 'signInState'),
'public_address': helpers.get_xml_attr(a, 'publicAddress'),
'public_port': helpers.get_xml_attr(a, 'publicPort')
'public_port': helpers.get_xml_attr(a, 'publicPort'),
'private_address': helpers.get_xml_attr(a, 'privateAddress'),
'private_port': helpers.get_xml_attr(a, 'privatePort')
}
if server_response['mapping_state'] == 'unknown':
server_response['reason'] = 'Plex remote access port mapping unknown'
elif server_response['mapping_state'] not in ('mapped', 'waiting'):
server_response['reason'] = 'Plex remote access port not mapped'
elif server_response['mapping_error'] == 'unreachable':
server_response['reason'] = 'Plex remote access port mapped, ' \
'but the port is unreachable from Plex.tv'
elif server_response['mapping_error'] == 'publisherror':
server_response['reason'] = 'Plex remote access port mapped, ' \
'but failed to publish the port to Plex.tv'
else:
server_response['reason'] = ''
return server_response
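The reason string is a pure function of mapping_state and mapping_error, checked in that order. Extracted into a hypothetical helper, the precedence is easy to test:
```
def remote_access_reason(mapping_state, mapping_error):
    # State problems win over mapping errors; '' means healthy.
    if mapping_state == 'unknown':
        return 'Plex remote access port mapping unknown'
    elif mapping_state not in ('mapped', 'waiting'):
        return 'Plex remote access port not mapped'
    elif mapping_error == 'unreachable':
        return 'Plex remote access port mapped, but the port is unreachable from Plex.tv'
    elif mapping_error == 'publisherror':
        return 'Plex remote access port mapped, but failed to publish the port to Plex.tv'
    return ''

print(repr(remote_access_reason('mapped', '')))        # ''
print(remote_access_reason('mapped', 'unreachable'))   # ...unreachable from Plex.tv
```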
def get_update_staus(self):

View File

@@ -34,7 +34,11 @@ def refresh_users():
if result:
monitor_db = database.MonitorDatabase()
# Keep track of user_id to update is_active status
user_ids = [0] # Local user always considered active
for item in result:
user_ids.append(helpers.cast_to_int(item['user_id']))
if item.get('shared_libraries'):
item['shared_libraries'] = ';'.join(item['shared_libraries'])
@@ -58,6 +62,9 @@ def refresh_users():
monitor_db.upsert('users', item, keys_dict)
query = 'UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})'.format(', '.join(['?'] * len(user_ids)))
monitor_db.action(query=query, args=user_ids)
logger.info(u"Tautulli Users :: Users list refreshed.")
return True
else:
@@ -92,7 +99,8 @@ class Users(object):
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
columns = ['users.user_id',
columns = ['users.id AS row_id',
'users.user_id',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name',
'users.thumb AS user_thumb',
@@ -102,7 +110,7 @@ class Users(object):
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
session_history.paused_counter END) AS duration',
'MAX(session_history.started) AS last_seen',
'MAX(session_history.id) AS id',
'MAX(session_history.id) AS history_row_id',
'session_history_metadata.full_title AS last_played',
'session_history.ip_address',
'session_history.platform',
@@ -121,9 +129,10 @@ class Users(object):
'session_history_metadata.originally_available_at',
'session_history_metadata.guid',
'session_history_media_info.transcode_decision',
'users.do_notify as do_notify',
'users.keep_history as keep_history',
'users.allow_guest as allow_guest'
'users.do_notify AS do_notify',
'users.keep_history AS keep_history',
'users.allow_guest AS allow_guest',
'users.is_active AS is_active'
]
try:
query = data_tables.ssp_query(table_name='users',
@@ -165,14 +174,15 @@ class Users(object):
# Rename Mystery platform names
platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
row = {'user_id': item['user_id'],
row = {'row_id': item['row_id'],
'user_id': item['user_id'],
'friendly_name': item['friendly_name'],
'user_thumb': user_thumb,
'plays': item['plays'],
'duration': item['duration'],
'last_seen': item['last_seen'],
'last_played': item['last_played'],
'id': item['id'],
'history_row_id': item['history_row_id'],
'ip_address': item['ip_address'],
'platform': platform,
'player': item['player'],
@@ -189,7 +199,8 @@ class Users(object):
'transcode_decision': item['transcode_decision'],
'do_notify': helpers.checked(item['do_notify']),
'keep_history': helpers.checked(item['keep_history']),
'allow_guest': helpers.checked(item['allow_guest'])
'allow_guest': helpers.checked(item['allow_guest']),
'is_active': item['is_active']
}
rows.append(row)
@@ -216,7 +227,7 @@ class Users(object):
custom_where = ['users.user_id', user_id]
columns = ['session_history.id',
columns = ['session_history.id AS history_row_id',
'MAX(session_history.started) AS last_seen',
'session_history.ip_address',
'COUNT(session_history.id) AS play_count',
@@ -276,7 +287,7 @@ class Users(object):
# Rename Mystery platform names
platform = common.PLATFORM_NAME_OVERRIDES.get(item["platform"], item["platform"])
row = {'id': item['id'],
row = {'history_row_id': item['history_row_id'],
'last_seen': item['last_seen'],
'ip_address': item['ip_address'],
'play_count': item['play_count'],
@@ -325,11 +336,13 @@ class Users(object):
logger.warn(u"Tautulli Users :: Unable to execute database query for set_config: %s." % e)
def get_details(self, user_id=None, user=None, email=None):
default_return = {'user_id': 0,
default_return = {'row_id': 0,
'user_id': 0,
'username': 'Local',
'friendly_name': 'Local',
'user_thumb': common.DEFAULT_USER_THUMB,
'email': '',
'is_active': 1,
'is_admin': '',
'is_home_user': 0,
'is_allow_sync': 0,
@@ -349,22 +362,28 @@ class Users(object):
try:
if str(user_id).isdigit():
query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_admin, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user, ' \
query = 'SELECT id AS row_id, user_id, username, friendly_name, ' \
'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, deleted_user, ' \
'allow_guest, shared_libraries ' \
'FROM users ' \
'WHERE user_id = ? '
result = monitor_db.select(query, args=[user_id])
elif user:
query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_admin, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user, ' \
query = 'SELECT id AS row_id, user_id, username, friendly_name, ' \
'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, deleted_user, ' \
'allow_guest, shared_libraries ' \
'FROM users ' \
'WHERE username = ? COLLATE NOCASE '
result = monitor_db.select(query, args=[user])
elif email:
query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_admin, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user, ' \
query = 'SELECT id AS row_id, user_id, username, friendly_name, ' \
'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, deleted_user, ' \
'allow_guest, shared_libraries ' \
'FROM users ' \
'WHERE email = ? COLLATE NOCASE '
@@ -394,11 +413,13 @@ class Users(object):
shared_libraries = tuple(item['shared_libraries'].split(';')) if item['shared_libraries'] else ()
user_details = {'user_id': item['user_id'],
user_details = {'row_id': item['row_id'],
'user_id': item['user_id'],
'username': item['username'],
'friendly_name': friendly_name,
'user_thumb': user_thumb,
'email': item['email'],
'is_active': item['is_active'],
'is_admin': item['is_admin'],
'is_home_user': item['is_home_user'],
'is_allow_sync': item['is_allow_sync'],
@@ -434,21 +455,25 @@ class Users(object):
# Use "Local" user to retain compatibility with PlexWatch database value
return default_return
def get_watch_time_stats(self, user_id=None, grouping=None):
def get_watch_time_stats(self, user_id=None, grouping=None, query_days=None):
if not session.allow_session_user(user_id):
return []
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
if query_days and query_days is not None:
query_days = map(helpers.cast_to_int, query_days.split(','))
else:
query_days = [1, 7, 30, 0]
monitor_db = database.MonitorDatabase()
time_queries = [1, 7, 30, 0]
user_watch_time_stats = []
group_by = 'reference_id' if grouping else 'id'
for days in time_queries:
for days in query_days:
try:
if days > 0:
if str(user_id).isdigit():
@@ -601,8 +626,8 @@ class Users(object):
monitor_db = database.MonitorDatabase()
try:
query = 'SELECT user_id, username, friendly_name, thumb, custom_avatar_url, email, ' \
'is_admin, is_home_user, is_allow_sync, is_restricted, ' \
query = 'SELECT id AS row_id, user_id, username, friendly_name, thumb, custom_avatar_url, email, ' \
'is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, allow_guest, server_token, shared_libraries, ' \
'filter_all, filter_movies, filter_tv, filter_music, filter_photos ' \
'FROM users WHERE deleted_user = 0'
@@ -613,11 +638,13 @@ class Users(object):
users = []
for item in result:
user = {'user_id': item['user_id'],
user = {'row_id': item['row_id'],
'user_id': item['user_id'],
'username': item['username'],
'friendly_name': item['friendly_name'] or item['username'],
'thumb': item['custom_avatar_url'] or item['thumb'],
'email': item['email'],
'is_active': item['is_active'],
'is_admin': item['is_admin'],
'is_home_user': item['is_home_user'],
'is_allow_sync': item['is_allow_sync'],
@@ -637,54 +664,41 @@ class Users(object):
return users
def delete_all_history(self, user_id=None):
def delete(self, user_id=None, row_ids=None, purge_only=False):
monitor_db = database.MonitorDatabase()
try:
if str(user_id).isdigit():
logger.info(u"Tautulli Users :: Deleting all history for user id %s from database." % user_id)
session_history_media_info_del = \
monitor_db.action('DELETE FROM '
'session_history_media_info '
'WHERE session_history_media_info.id IN (SELECT session_history_media_info.id '
'FROM session_history_media_info '
'JOIN session_history ON session_history_media_info.id = session_history.id '
'WHERE session_history.user_id = ?)', [user_id])
session_history_metadata_del = \
monitor_db.action('DELETE FROM '
'session_history_metadata '
'WHERE session_history_metadata.id IN (SELECT session_history_metadata.id '
'FROM session_history_metadata '
'JOIN session_history ON session_history_metadata.id = session_history.id '
'WHERE session_history.user_id = ?)', [user_id])
session_history_del = \
monitor_db.action('DELETE FROM '
'session_history '
'WHERE session_history.user_id = ?', [user_id])
if row_ids and row_ids is not None:
row_ids = map(helpers.cast_to_int, row_ids.split(','))
return 'Deleted all items for user_id %s.' % user_id
# Get the user_ids corresponding to the row_ids
result = monitor_db.select('SELECT user_id FROM users '
'WHERE id IN ({})'.format(','.join(['?'] * len(row_ids))), row_ids)
success = []
for user in result:
success.append(self.delete(user_id=user['user_id'],
purge_only=purge_only))
return all(success)
elif str(user_id).isdigit():
delete_success = database.delete_user_history(user_id=user_id)
if purge_only:
return delete_success
else:
return 'Unable to delete items. Input user_id not valid.'
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for delete_all_history: %s." % e)
def delete(self, user_id=None):
monitor_db = database.MonitorDatabase()
logger.info(u"Tautulli Users :: Deleting user with user_id %s from database."
% user_id)
try:
if str(user_id).isdigit():
self.delete_all_history(user_id)
logger.info(u"Tautulli Users :: Deleting user with id %s from database." % user_id)
monitor_db.action('UPDATE users SET deleted_user = 1 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET keep_history = 0 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET do_notify = 0 WHERE user_id = ?', [user_id])
return 'Deleted user with id %s.' % user_id
else:
return 'Unable to delete user, user_id not valid.'
monitor_db.action('UPDATE users '
'SET deleted_user = 1, keep_history = 0, do_notify = 0 '
'WHERE user_id = ?', [user_id])
return delete_success
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for delete: %s." % e)
else:
return False
def undelete(self, user_id=None, username=None):
monitor_db = database.MonitorDatabase()
@@ -694,9 +708,9 @@ class Users(object):
result = monitor_db.select(query=query, args=[user_id])
if result:
logger.info(u"Tautulli Users :: Re-adding user with id %s to database." % user_id)
monitor_db.action('UPDATE users SET deleted_user = 0 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET keep_history = 1 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET do_notify = 1 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users '
'SET deleted_user = 0, keep_history = 1, do_notify = 1 '
'WHERE user_id = ?', [user_id])
return True
else:
return False
@@ -706,9 +720,9 @@ class Users(object):
result = monitor_db.select(query=query, args=[username])
if result:
logger.info(u"Tautulli Users :: Re-adding user with username %s to database." % username)
monitor_db.action('UPDATE users SET deleted_user = 0 WHERE username = ?', [username])
monitor_db.action('UPDATE users SET keep_history = 1 WHERE username = ?', [username])
monitor_db.action('UPDATE users SET do_notify = 1 WHERE username = ?', [username])
monitor_db.action('UPDATE users '
'SET deleted_user = 0, keep_history = 1, do_notify = 1 '
'WHERE username = ?', [username])
return True
else:
return False

View File

@@ -1,2 +1,2 @@
PLEXPY_BRANCH = "master"
PLEXPY_RELEASE_VERSION = "v2.2.1"
PLEXPY_RELEASE_VERSION = "v2.2.3"

View File

@@ -135,8 +135,8 @@ def getVersion():
return current_version, 'origin', current_branch
def check_update(auto_update=False, notify=False):
check_github(auto_update=auto_update, notify=notify)
def check_update(scheduler=False, notify=False):
check_github(scheduler=scheduler, notify=notify)
if not plexpy.CURRENT_VERSION:
plexpy.UPDATE_AVAILABLE = None
@@ -159,7 +159,7 @@ def check_update(auto_update=False, notify=False):
plexpy.WIN_SYS_TRAY_ICON.update(icon=icon, hover_text=hover_text)
def check_github(auto_update=False, notify=False):
def check_github(scheduler=False, notify=False):
plexpy.COMMITS_BEHIND = 0
if plexpy.CONFIG.GIT_TOKEN:
@@ -236,7 +236,7 @@ def check_github(auto_update=False, notify=False):
'plexpy_update_commit': plexpy.LATEST_VERSION,
'plexpy_update_behind': plexpy.COMMITS_BEHIND})
if auto_update and not plexpy.DOCKER:
if scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and not plexpy.DOCKER:
logger.info('Running automatic update.')
plexpy.shutdown(restart=True, update=True)
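The scheduler flag replaces the baked-in auto_update argument: the scheduled job always passes scheduler=True, and the live PLEXPY_AUTO_UPDATE config is consulted at check time, so toggling the setting takes effect without restarting. A toy model of that gating, with stand-in globals for the real config:
```
CONFIG = {'PLEXPY_AUTO_UPDATE': True}
DOCKER = False

def check_github(commits_behind, scheduler=False):
    # Read the auto-update setting when the check runs, not when the
    # job was scheduled, so enabling it applies immediately.
    if commits_behind > 0 and scheduler and CONFIG['PLEXPY_AUTO_UPDATE'] and not DOCKER:
        return 'auto-update'
    return 'notify-only'

print(check_github(3, scheduler=True))   # auto-update
print(check_github(3, scheduler=False))  # notify-only (manual check)
```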
@@ -251,7 +251,7 @@ def update():
logger.info('Windows .exe updating not supported yet.')
elif plexpy.INSTALL_TYPE == 'git':
output, err = runGit('pull {} {} --ff-only'.format(plexpy.CONFIG.GIT_REMOTE,
output, err = runGit('pull --ff-only {} {}'.format(plexpy.CONFIG.GIT_REMOTE,
plexpy.CONFIG.GIT_BRANCH))
if not output:

View File

@@ -397,7 +397,8 @@ class WebInterface(object):
"do_notify_created": "Checked",
"duration": 1578037,
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"id": 1128,
"histroy_row_id": 1128,
"is_active": 1,
"keep_history": "Checked",
"labels": [],
"last_accessed": 1462693216,
@@ -413,9 +414,11 @@ class WebInterface(object):
"parent_title": "",
"plays": 772,
"rating_key": 153037,
"row_id": 1,
"section_id": 2,
"section_name": "TV Shows",
"section_type": "Show",
"server_id": "ds48g4r354a8v9byrrtr697g3g79w",
"thumb": "/library/metadata/153036/thumb/1462175062",
"year": 2016
},
@@ -441,6 +444,8 @@ class WebInterface(object):
("duration", True, False)]
kwargs['json_data'] = build_datatables_json(kwargs, dt_columns, "section_name")
grouping = helpers.bool_true(grouping, return_none=True)
library_data = libraries.Libraries()
library_list = library_data.get_datatables_list(kwargs=kwargs, grouping=grouping)
@@ -786,13 +791,16 @@ class WebInterface(object):
"deleted_section": 0,
"do_notify": 1,
"do_notify_created": 1,
"is_active": 1,
"keep_history": 1,
"library_art": "/:/resources/movie-fanart.jpg",
"library_thumb": "/:/resources/movie.png",
"parent_count": null,
"row_id": 1,
"section_id": 1,
"section_name": "Movies",
"section_type": "movie"
"section_type": "movie",
"server_id": "ds48g4r354a8v9byrrtr697g3g79w"
}
```
"""
@@ -810,7 +818,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def get_library_watch_time_stats(self, section_id=None, grouping=None, **kwargs):
def get_library_watch_time_stats(self, section_id=None, grouping=None, query_days=None, **kwargs):
""" Get a library's watch time statistics.
```
@@ -819,6 +827,7 @@ class WebInterface(object):
Optional parameters:
grouping (int): 0 or 1
query_days (str): Comma separated days, e.g. "1,7,30,0"
Returns:
json:
@@ -841,11 +850,12 @@ class WebInterface(object):
]
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
if section_id:
library_data = libraries.Libraries()
result = library_data.get_watch_time_stats(section_id=section_id, grouping=grouping)
result = library_data.get_watch_time_stats(section_id=section_id, grouping=grouping,
query_days=query_days)
if result:
return result
else:
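Note: with `query_days` exposed, API callers can pick their own day buckets instead of the default 1/7/30/all-time set. A hedged example against the v2 HTTP API (host, port, and key are placeholders; the `cmd` name follows the `addtoapi()` registration above):

```python
import requests

resp = requests.get('http://localhost:8181/api/v2', params={
    'apikey': 'YOUR_API_KEY',
    'cmd': 'get_library_watch_time_stats',
    'section_id': 1,
    'grouping': 1,
    'query_days': '1,7,30,0',  # 0 is the all-time bucket
}, timeout=10)
print(resp.json())
```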
@@ -884,7 +894,7 @@ class WebInterface(object):
]
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
if section_id:
library_data = libraries.Libraries()
@@ -900,7 +910,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def delete_all_library_history(self, section_id, **kwargs):
def delete_all_library_history(self, server_id=None, section_id=None, row_ids=None, **kwargs):
""" Delete all Tautulli history for a specific library.
```
@@ -908,27 +918,28 @@ class WebInterface(object):
section_id (str): The id of the Plex library section
Optional parameters:
None
server_id (str): The Plex server identifier of the library section
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
```
"""
if (server_id and section_id) or row_ids:
library_data = libraries.Libraries()
if section_id:
delete_row = library_data.delete_all_history(section_id=section_id)
if delete_row:
return {'message': delete_row}
success = library_data.delete(server_id=server_id, section_id=section_id, row_ids=row_ids, purge_only=True)
if success:
return {'result': 'success', 'message': 'Deleted library history.'}
else:
return {'message': 'no data received'}
return {'result': 'error', 'message': 'Failed to delete library(s) history.'}
else:
return {'result': 'error', 'message': 'No server id and section id or row ids received.'}
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def delete_library(self, section_id, **kwargs):
def delete_library(self, server_id=None, section_id=None, row_ids=None, **kwargs):
""" Delete a library section from Tautulli. Also erases all history for the library.
```
@@ -936,21 +947,22 @@ class WebInterface(object):
section_id (str): The id of the Plex library section
Optional parameters:
None
server_id (str): The Plex server identifier of the library section
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
```
"""
if (server_id and section_id) or row_ids:
library_data = libraries.Libraries()
if section_id:
delete_row = library_data.delete(section_id=section_id)
if delete_row:
return {'message': delete_row}
success = library_data.delete(server_id=server_id, section_id=section_id, row_ids=row_ids)
if success:
return {'result': 'success', 'message': 'Deleted library.'}
else:
return {'message': 'no data received'}
return {'result': 'error', 'message': 'Failed to delete library(s).'}
else:
return {'result': 'error', 'message': 'No server id and section id or row ids received.'}
@cherrypy.expose
@cherrypy.tools.json_out()
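Note: both library delete endpoints above now accept either a (`server_id`, `section_id`) pair or a comma-separated `row_ids` string, and return a structured `result`/`message` payload instead of the old bare `message`. Hedged usage sketch (placeholder host and key; the example `server_id` is the one from the docs above):

```python
import requests

BASE_URL = 'http://localhost:8181/api/v2'
APIKEY = 'YOUR_API_KEY'

# Purge a section's history but keep the library row (purge_only path).
requests.get(BASE_URL, params={
    'apikey': APIKEY,
    'cmd': 'delete_all_library_history',
    'server_id': 'ds48g4r354a8v9byrrtr697g3g79w',
    'section_id': 1,
})

# Remove stale library rows entirely, addressed by Tautulli row id.
requests.get(BASE_URL, params={
    'apikey': APIKEY,
    'cmd': 'delete_library',
    'row_ids': '2,3,8',
})
```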
@@ -1066,8 +1078,9 @@ class WebInterface(object):
"duration": 2998290,
"friendly_name": "Jon Snow",
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"id": 1121,
"history_row_id": 1121,
"ip_address": "xxx.xxx.xxx.xxx",
"is_active": 1,
"keep_history": "Checked",
"last_played": "Game of Thrones - The Red Woman",
"last_seen": 1462591869,
@@ -1081,6 +1094,7 @@ class WebInterface(object):
"player": "Plex Web (Chrome)",
"plays": 487,
"rating_key": 153037,
"row_id": 1,
"thumb": "/library/metadata/153036/thumb/1462175062",
"transcode_decision": "transcode",
"user_id": 133788,
@@ -1108,6 +1122,8 @@ class WebInterface(object):
("duration", True, False)]
kwargs['json_data'] = build_datatables_json(kwargs, dt_columns, "friendly_name")
grouping = helpers.bool_true(grouping, return_none=True)
user_data = users.Users()
user_list = user_data.get_datatables_list(kwargs=kwargs, grouping=grouping)
@@ -1402,10 +1418,13 @@ class WebInterface(object):
"do_notify": 1,
"email": "Jon.Snow.1337@CastleBlack.com",
"friendly_name": "Jon Snow",
"is_active": 1,
"is_admin": 0,
"is_allow_sync": 1,
"is_home_user": 1,
"is_restricted": 0,
"keep_history": 1,
"row_id": 1,
"shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
"user_id": 133788,
"user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
@@ -1427,7 +1446,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def get_user_watch_time_stats(self, user_id=None, grouping=None, **kwargs):
def get_user_watch_time_stats(self, user_id=None, grouping=None, query_days=None, **kwargs):
""" Get a user's watch time statistics.
```
@@ -1436,6 +1455,7 @@ class WebInterface(object):
Optional parameters:
grouping (int): 0 or 1
query_days (str): Comma separated days, e.g. "1,7,30,0"
Returns:
json:
@@ -1458,11 +1478,11 @@ class WebInterface(object):
]
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
if user_id:
user_data = users.Users()
result = user_data.get_watch_time_stats(user_id=user_id, grouping=grouping)
result = user_data.get_watch_time_stats(user_id=user_id, grouping=grouping, query_days=query_days)
if result:
return result
else:
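Note: the user endpoint gains the same `query_days` parameter as the library one. For example (placeholder host and key):

```python
import requests

resp = requests.get('http://localhost:8181/api/v2', params={
    'apikey': 'YOUR_API_KEY',
    'cmd': 'get_user_watch_time_stats',
    'user_id': 133788,      # example id from the docs above
    'query_days': '7,30',   # only the 7- and 30-day buckets
}, timeout=10)
print(resp.json())
```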
@@ -1501,7 +1521,7 @@ class WebInterface(object):
]
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
if user_id:
user_data = users.Users()
@@ -1517,7 +1537,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def delete_all_user_history(self, user_id, **kwargs):
def delete_all_user_history(self, user_id=None, row_ids=None, **kwargs):
""" Delete all Tautulli history for a specific user.
```
@@ -1525,25 +1545,27 @@ class WebInterface(object):
user_id (str): The id of the Plex user
Optional parameters:
None
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
```
"""
if user_id:
if user_id or row_ids:
user_data = users.Users()
delete_row = user_data.delete_all_history(user_id=user_id)
if delete_row:
return {'message': delete_row}
success = user_data.delete(user_id=user_id, row_ids=row_ids, purge_only=True)
if success:
return {'result': 'success', 'message': 'Deleted user history.'}
else:
return {'message': 'no data received'}
return {'result': 'error', 'message': 'Failed to delete user(s) history.'}
else:
return {'result': 'error', 'message': 'No user id or row ids received.'}
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def delete_user(self, user_id, **kwargs):
def delete_user(self, user_id=None, row_ids=None, **kwargs):
""" Delete a user from Tautulli. Also erases all history for the user.
```
@@ -1551,19 +1573,21 @@ class WebInterface(object):
user_id (str): The id of the Plex user
Optional parameters:
None
row_ids (str): Comma separated row ids to delete, e.g. "2,3,8"
Returns:
None
```
"""
if user_id:
if user_id or row_ids:
user_data = users.Users()
delete_row = user_data.delete(user_id=user_id)
if delete_row:
return {'message': delete_row}
success = user_data.delete(user_id=user_id, row_ids=row_ids)
if success:
return {'result': 'success', 'message': 'Deleted user.'}
else:
return {'message': 'no data received'}
return {'result': 'error', 'message': 'Failed to delete user(s).'}
else:
return {'result': 'error', 'message': 'No user id or row ids received.'}
@cherrypy.expose
@cherrypy.tools.json_out()
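Note: the user delete endpoints mirror the library ones: `user_id` or `row_ids`, with `purge_only=True` distinguishing "erase the history" from "remove the user as well". Hedged sketch (placeholder host and key):

```python
import requests

BASE_URL = 'http://localhost:8181/api/v2'
APIKEY = 'YOUR_API_KEY'

# Erase a user's history but keep the user (purge-only path).
requests.get(BASE_URL, params={'apikey': APIKEY,
                               'cmd': 'delete_all_user_history',
                               'user_id': 133788})

# Remove users' rows entirely by row id.
requests.get(BASE_URL, params={'apikey': APIKEY,
                               'cmd': 'delete_user',
                               'row_ids': '2,3,8'})
```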
@@ -1651,7 +1675,6 @@ class WebInterface(object):
"group_count": 1,
"group_ids": "1124",
"guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en",
"id": 1124,
"ip_address": "xxx.xxx.xxx.xxx",
"live": 0,
"media_index": 17,
@@ -1667,6 +1690,7 @@ class WebInterface(object):
"player": "Castle-PC",
"rating_key": 4348,
"reference_id": 1123,
"row_id": 1124,
"session_key": null,
"started": 1462688107,
"state": null,
@@ -1703,10 +1727,7 @@ class WebInterface(object):
("watched_status", False, False)]
kwargs['json_data'] = build_datatables_json(kwargs, dt_columns, "date")
if grouping and str(grouping).isdigit():
grouping = int(grouping)
elif grouping == 'false':
grouping = 0
grouping = helpers.bool_true(grouping, return_none=True)
custom_where = []
if user_id:
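Note: this hunk and the many like it replace ad-hoc parsing (`int(grouping) if str(grouping).isdigit() ...`, plus the `'false'` special case above) with a single `helpers.bool_true(grouping, return_none=True)` call. A hedged sketch of what such a helper plausibly does; the real `helpers.bool_true` may accept a different set of values:

```python
_TRUTHY = ('1', 'true', 'on', 'yes')

def bool_true(value, return_none=False):
    """Normalize query-parameter truthiness ('1', 'true', 1, True, ...)."""
    if value is True or str(value).strip().lower() in _TRUTHY:
        return True
    # With return_none=True an unset/unrecognized value yields None, so
    # callers can fall back to a per-user or global grouping default.
    return None if return_none else False

print(bool_true('1'))                     # True
print(bool_true(None, return_none=True))  # None
```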
@@ -1828,6 +1849,10 @@ class WebInterface(object):
}
```
"""
# For backwards compatibility
if 'id' in kwargs:
row_id = kwargs['id']
data_factory = datafactory.DataFactory()
stream_data = data_factory.get_stream_details(row_id, session_key)
@@ -1844,16 +1869,32 @@ class WebInterface(object):
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
def delete_history_rows(self, row_id, **kwargs):
@addtoapi("delete_history")
def delete_history_rows(self, row_ids=None, **kwargs):
""" Delete history rows from Tautulli.
```
Required parameters:
row_ids (str): Comma separated row ids to delete, e.g. "65,110,2,3645"
Optional parameters:
None
Returns:
None
```
"""
data_factory = datafactory.DataFactory()
if row_id:
delete_row = data_factory.delete_session_history_rows(row_id=row_id)
if row_ids:
success = database.delete_session_history_rows(row_ids=row_ids)
if delete_row:
return {'message': delete_row}
if success:
return {'result': 'success', 'message': 'Deleted history.'}
else:
return {'message': 'no data received'}
return {'result': 'error', 'message': 'Failed to delete history.'}
else:
return {'result': 'error', 'message': 'No row ids received.'}
##### Graphs #####
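Note: `delete_history_rows` is now registered on the v2 API as `delete_history` and takes a comma-separated `row_ids` string instead of a single `row_id`. Hedged example (placeholder host and key; the ids match the docstring above):

```python
import requests

resp = requests.get('http://localhost:8181/api/v2', params={
    'apikey': 'YOUR_API_KEY',
    'cmd': 'delete_history',
    'row_ids': '65,110,2,3645',
}, timeout=10)
print(resp.json())  # {'result': 'success', 'message': 'Deleted history.'} on success
```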
@@ -1922,10 +1963,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_per_day(time_range=time_range, user_id=user_id, y_axis=y_axis, grouping=grouping)
result = graph.get_total_plays_per_day(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
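Note: each graph endpoint in this hunk and the ones that follow now both normalizes `grouping` and forwards it into the `graphs.Graphs()` call; previously several endpoints parsed the parameter and then dropped it. Hedged example (the `get_plays_by_date` cmd name is assumed here, since the method name sits outside this hunk):

```python
import requests

resp = requests.get('http://localhost:8181/api/v2', params={
    'apikey': 'YOUR_API_KEY',
    'cmd': 'get_plays_by_date',   # assumed endpoint wrapping get_total_plays_per_day
    'time_range': 30,
    'y_axis': 'duration',
    'grouping': 1,                # now actually reaches the graph query
}, timeout=10)
print(resp.json())
```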
@@ -1962,10 +2006,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_per_dayofweek(time_range=time_range, user_id=user_id, y_axis=y_axis)
result = graph.get_total_plays_per_dayofweek(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2002,10 +2049,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_per_hourofday(time_range=time_range, user_id=user_id, y_axis=y_axis)
result = graph.get_total_plays_per_hourofday(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2042,10 +2092,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_per_month(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_total_plays_per_month(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2056,7 +2109,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', grouping=None, user_id=None, **kwargs):
def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', user_id=None, grouping=None, **kwargs):
""" Get graph data by top 10 platforms.
```
@@ -2082,10 +2135,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_by_top_10_platforms(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_total_plays_by_top_10_platforms(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2096,7 +2152,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_plays_by_top_10_users(self, time_range='30', y_axis='plays', grouping=None, user_id=None, **kwargs):
def get_plays_by_top_10_users(self, time_range='30', y_axis='plays', user_id=None, grouping=None, **kwargs):
""" Get graph data by top 10 users.
```
@@ -2122,10 +2178,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_by_top_10_users(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_total_plays_by_top_10_users(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2136,7 +2195,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_plays_by_stream_type(self, time_range='30', y_axis='plays', grouping=None, user_id=None, **kwargs):
def get_plays_by_stream_type(self, time_range='30', y_axis='plays', user_id=None, grouping=None, **kwargs):
""" Get graph data by stream type by date.
```
@@ -2161,10 +2220,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_per_stream_type(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_total_plays_per_stream_type(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2175,7 +2237,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', grouping=None, user_id=None, **kwargs):
def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', user_id=None, grouping=None, **kwargs):
""" Get graph data by source resolution.
```
@@ -2200,10 +2262,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_by_source_resolution(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_total_plays_by_source_resolution(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2214,7 +2279,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', grouping=None, user_id=None, **kwargs):
def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', user_id=None, grouping=None, **kwargs):
""" Get graph data by stream resolution.
```
@@ -2239,10 +2304,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_total_plays_by_stream_resolution(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_total_plays_by_stream_resolution(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2253,7 +2321,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', grouping=None, user_id=None, **kwargs):
def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', user_id=None, grouping=None, **kwargs):
""" Get graph data by stream type by top 10 users.
```
@@ -2278,10 +2346,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_stream_type_by_top_10_users(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_stream_type_by_top_10_users(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2292,7 +2363,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', grouping=None, user_id=None, **kwargs):
def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', user_id=None, grouping=None, **kwargs):
""" Get graph data by stream type by top 10 platforms.
```
@@ -2317,10 +2388,13 @@ class WebInterface(object):
}
```
"""
grouping = int(grouping) if str(grouping).isdigit() else grouping
grouping = helpers.bool_true(grouping, return_none=True)
graph = graphs.Graphs()
result = graph.get_stream_type_by_top_10_platforms(time_range=time_range, y_axis=y_axis, user_id=user_id)
result = graph.get_stream_type_by_top_10_platforms(time_range=time_range,
y_axis=y_axis,
user_id=user_id,
grouping=grouping)
if result:
return result
@@ -2823,6 +2897,8 @@ class WebInterface(object):
"grouping_charts": checked(plexpy.CONFIG.GROUPING_CHARTS),
"monitor_pms_updates": checked(plexpy.CONFIG.MONITOR_PMS_UPDATES),
"monitor_remote_access": checked(plexpy.CONFIG.MONITOR_REMOTE_ACCESS),
"remote_access_ping_interval": plexpy.CONFIG.REMOTE_ACCESS_PING_INTERVAL,
"remote_access_ping_threshold": plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD,
"refresh_libraries_interval": plexpy.CONFIG.REFRESH_LIBRARIES_INTERVAL,
"refresh_libraries_on_startup": checked(plexpy.CONFIG.REFRESH_LIBRARIES_ON_STARTUP),
"refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
@@ -2836,6 +2912,7 @@ class WebInterface(object):
"notify_recently_added_delay": plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY,
"notify_concurrent_by_ip": checked(plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP),
"notify_concurrent_threshold": plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD,
"notify_continued_session_threshold": plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD,
"home_sections": json.dumps(plexpy.CONFIG.HOME_SECTIONS),
"home_stats_cards": json.dumps(plexpy.CONFIG.HOME_STATS_CARDS),
"home_library_cards": json.dumps(plexpy.CONFIG.HOME_LIBRARY_CARDS),
@@ -2867,11 +2944,7 @@ class WebInterface(object):
"newsletter_password": plexpy.CONFIG.NEWSLETTER_PASSWORD,
"newsletter_inline_styles": checked(plexpy.CONFIG.NEWSLETTER_INLINE_STYLES),
"newsletter_custom_dir": plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR,
"win_sys_tray": checked(plexpy.CONFIG.WIN_SYS_TRAY),
"maxmind_license_key": plexpy.CONFIG.MAXMIND_LICENSE_KEY,
"geoip_db": plexpy.CONFIG.GEOIP_DB,
"geoip_db_installed": plexpy.CONFIG.GEOIP_DB_INSTALLED,
"geoip_db_update_days": plexpy.CONFIG.GEOIP_DB_UPDATE_DAYS
"win_sys_tray": checked(plexpy.CONFIG.WIN_SYS_TRAY)
}
return serve_template(templatename="settings.html", title="Settings", config=config, kwargs=kwargs)
@@ -2951,6 +3024,7 @@ class WebInterface(object):
kwargs.get('pms_update_check_interval') != str(plexpy.CONFIG.PMS_UPDATE_CHECK_INTERVAL) or \
kwargs.get('monitor_pms_updates') != plexpy.CONFIG.MONITOR_PMS_UPDATES or \
kwargs.get('monitor_remote_access') != plexpy.CONFIG.MONITOR_REMOTE_ACCESS or \
kwargs.get('remote_access_ping_interval') != plexpy.CONFIG.REMOTE_ACCESS_PING_INTERVAL or \
kwargs.get('pms_url_manual') != plexpy.CONFIG.PMS_URL_MANUAL:
reschedule = True
@@ -3103,36 +3177,6 @@ class WebInterface(object):
else:
return {'result': 'error', 'message': 'Database backup failed.'}
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def install_geoip_db(self, update=False, **kwargs):
""" Downloads and installs the GeoLite2 database """
update = helpers.bool_true(update)
result = helpers.install_geoip_db(update=update)
if result:
return {'result': 'success', 'message': 'GeoLite2 database installed successfully.', 'updated': result}
else:
return {'result': 'error', 'message': 'GeoLite2 database install failed.', 'updated': 0}
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def uninstall_geoip_db(self, **kwargs):
""" Uninstalls the GeoLite2 database """
result = helpers.uninstall_geoip_db()
if result:
return {'result': 'success', 'message': 'GeoLite2 database uninstalled successfully.'}
else:
return {'result': 'error', 'message': 'GeoLite2 database uninstall failed.'}
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@@ -3517,12 +3561,12 @@ class WebInterface(object):
@requireAuth(member_of("admin"))
def verify_mobile_device(self, device_token='', cancel=False, **kwargs):
if helpers.bool_true(cancel):
mobile_app.TEMP_DEVICE_TOKEN = None
mobile_app.set_temp_device_token(None)
return {'result': 'error', 'message': 'Device registration cancelled.'}
result = mobile_app.get_mobile_device_by_token(device_token)
if result:
mobile_app.TEMP_DEVICE_TOKEN = None
mobile_app.set_temp_device_token(None)
return {'result': 'success', 'message': 'Device registered successfully.', 'data': result}
else:
return {'result': 'error', 'message': 'Device not registered.'}
@@ -3809,7 +3853,7 @@ class WebInterface(object):
logger._BLACKLIST_WORDS.add(apikey)
if helpers.bool_true(device):
mobile_app.TEMP_DEVICE_TOKEN = apikey
mobile_app.set_temp_device_token(apikey)
return apikey
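Note: direct writes to the module-level `mobile_app.TEMP_DEVICE_TOKEN` are replaced with a `set_temp_device_token()` setter, which is what lets the temporary token be invalidated after 5 minutes (see the changelog entry). A hedged sketch of such a setter; the real `mobile_app` module may schedule the expiry differently:

```python
import threading

TEMP_DEVICE_TOKEN = None
_EXPIRY_TIMER = None

def set_temp_device_token(token):
    """Store the one-time registration token and invalidate it after 5 minutes."""
    global TEMP_DEVICE_TOKEN, _EXPIRY_TIMER
    TEMP_DEVICE_TOKEN = token

    if _EXPIRY_TIMER is not None:
        _EXPIRY_TIMER.cancel()  # restart the clock on every set
    if token is not None:
        _EXPIRY_TIMER = threading.Timer(5 * 60, set_temp_device_token, args=[None])
        _EXPIRY_TIMER.daemon = True
        _EXPIRY_TIMER.start()
```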
@@ -5319,6 +5363,7 @@ class WebInterface(object):
[{"art": "/:/resources/show-fanart.jpg",
"child_count": "3745",
"count": "62",
"is_active": 1,
"parent_count": "240",
"section_id": "2",
"section_name": "TV Shows",
@@ -5362,11 +5407,13 @@ class WebInterface(object):
"filter_music": "",
"filter_photos": "",
"filter_tv": "",
"is_active": 1,
"is_admin": 0,
"is_allow_sync": 1,
"is_home_user": 1,
"is_restricted": 0,
"keep_history": 1,
"row_id": 1,
"server_token": "PU9cMuQZxJKFBtGqHk68",
"shared_libraries": "1;2;3",
"thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
@@ -5457,7 +5504,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def get_home_stats(self, grouping=0, time_range=30, stats_type='plays', stats_count=10, **kwargs):
def get_home_stats(self, time_range=30, stats_type='plays', stats_count=10, grouping=None, **kwargs):
""" Get the homepage watch statistics.
```
@@ -5539,6 +5586,8 @@ class WebInterface(object):
elif stats_type in (1, '1'):
stats_type = 'duration'
grouping = helpers.bool_true(grouping, return_none=True)
data_factory = datafactory.DataFactory()
result = data_factory.get_home_stats(grouping=grouping,
time_range=time_range,
@@ -5645,7 +5694,7 @@ class WebInterface(object):
@requireAuth()
@addtoapi()
def get_geoip_lookup(self, ip_address='', **kwargs):
""" Get the geolocation info for an IP address. The GeoLite2 database must be installed.
""" Get the geolocation info for an IP address.
```
Required parameters:
@@ -5656,7 +5705,7 @@ class WebInterface(object):
Returns:
json:
{"continent": "North America",
{"code": 'US",
"country": "United States",
"region": "California",
"city": "Mountain View",
@@ -5666,15 +5715,22 @@ class WebInterface(object):
"longitude": -122.0838,
"accuracy": 1000
}
json:
{"error": "The address 127.0.0.1 is not in the database."
}
```
"""
geo_info = helpers.geoip_lookup(ip_address)
if isinstance(geo_info, basestring):
return {'error': geo_info}
return geo_info
message = ''
if not ip_address:
message = 'No IP address provided.'
elif not helpers.is_valid_ip(ip_address):
message = 'Invalid IP address provided: %s' % ip_address
if message:
return {'result': 'error', 'message': message}
plex_tv = plextv.PlexTV()
geo_info = plex_tv.get_geoip_lookup(ip_address)
if geo_info:
return {'result': 'success', 'data': geo_info}
return {'result': 'error', 'message': 'Failed to lookup GeoIP info for address: %s' % ip_address}
@cherrypy.expose
@cherrypy.tools.json_out()
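Note: GeoIP lookups now validate the address and query Plex.tv instead of a local GeoLite2 database, matching the removal of the `install_geoip_db`/`uninstall_geoip_db` endpoints above; errors come back in the standard `result`/`message` envelope. Hedged example (placeholder host and key):

```python
import requests

resp = requests.get('http://localhost:8181/api/v2', params={
    'apikey': 'YOUR_API_KEY',
    'cmd': 'get_geoip_lookup',
    'ip_address': '8.8.8.8',
}, timeout=10)
# New shape: {'result': 'success', 'data': {...}} or
#            {'result': 'error', 'message': '...'} for invalid/unknown addresses.
print(resp.json())
```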