Compare commits
122 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
26ac539bc4 | ||
![]() |
75ae6b16a4 | ||
![]() |
2835b1d28f | ||
![]() |
748aad16d7 | ||
![]() |
2c2fbb8583 | ||
![]() |
20edcbf7fa | ||
![]() |
db81dc39ba | ||
![]() |
c3b0aef1ef | ||
![]() |
50e29efdfe | ||
![]() |
285e41bc88 | ||
![]() |
ea9d0fc449 | ||
![]() |
9cdd2eef81 | ||
![]() |
2f8833236a | ||
![]() |
2b680eeb6d | ||
![]() |
809f120db0 | ||
![]() |
6d9ef8bbc3 | ||
![]() |
a26d6ec6bb | ||
![]() |
2d26ced3fc | ||
![]() |
d74cd4bf24 | ||
![]() |
f040d897a7 | ||
![]() |
ed2f87f57b | ||
![]() |
9b9e31f54c | ||
![]() |
b3cfcf660e | ||
![]() |
e5bcd1f94e | ||
![]() |
2b6fa769f7 | ||
![]() |
3ccc82f343 | ||
![]() |
f4273cafb6 | ||
![]() |
59d63f61d9 | ||
![]() |
9d9103a83b | ||
![]() |
0b085b6d03 | ||
![]() |
f77538f179 | ||
![]() |
f7810f7f95 | ||
![]() |
4d28e4603f | ||
![]() |
8b787e4ae0 | ||
![]() |
f5ba168172 | ||
![]() |
1df6dadbdd | ||
![]() |
3dc29144a3 | ||
![]() |
951167ce17 | ||
![]() |
906e4055d8 | ||
![]() |
2f5526388a | ||
![]() |
82341642f4 | ||
![]() |
c96b1eb09d | ||
![]() |
f5bfa67c69 | ||
![]() |
47797ffcd4 | ||
![]() |
a73053e380 | ||
![]() |
bc042fead7 | ||
![]() |
ed6779e937 | ||
![]() |
ee7ca68f87 | ||
![]() |
32693b6378 | ||
![]() |
984e5588c8 | ||
![]() |
a42a1af867 | ||
![]() |
03de680915 | ||
![]() |
8c6e142314 | ||
![]() |
b12bde4f79 | ||
![]() |
1120aa3841 | ||
![]() |
652ca73126 | ||
![]() |
8706e72f6a | ||
![]() |
319d521773 | ||
![]() |
d9474cdcc5 | ||
![]() |
e49a34177a | ||
![]() |
67d203e011 | ||
![]() |
0d38b3de16 | ||
![]() |
38116a14f3 | ||
![]() |
b28f0b65f0 | ||
![]() |
13ab4a9363 | ||
![]() |
7cb7783a34 | ||
![]() |
d1a13dad38 | ||
![]() |
b4e06dea99 | ||
![]() |
0f92dc0fdf | ||
![]() |
6a58895d37 | ||
![]() |
1709a2b7df | ||
![]() |
febb3da0c1 | ||
![]() |
552a428985 | ||
![]() |
38e04bd42a | ||
![]() |
8f0ba5ba4f | ||
![]() |
c67aedceb1 | ||
![]() |
b3a7fbd9b5 | ||
![]() |
29522428de | ||
![]() |
77bd52b2ae | ||
![]() |
d8112e7628 | ||
![]() |
ffa208e73f | ||
![]() |
61ead15c38 | ||
![]() |
407e2ae481 | ||
![]() |
5fb16edf43 | ||
![]() |
8eb5c475bb | ||
![]() |
84090310f7 | ||
![]() |
fc98e2f052 | ||
![]() |
bedcfa9520 | ||
![]() |
bb152b590b | ||
![]() |
3623732cf7 | ||
![]() |
05ba89f164 | ||
![]() |
cb5053476d | ||
![]() |
cee656a053 | ||
![]() |
cfc7d529e1 | ||
![]() |
a93dc68e6c | ||
![]() |
2d91cfd3db | ||
![]() |
36e81f44cb | ||
![]() |
cb0e65337f | ||
![]() |
1c627f4649 | ||
![]() |
16cbfed20b | ||
![]() |
f6a3bc57e2 | ||
![]() |
594443d1dc | ||
![]() |
c3378e1653 | ||
![]() |
bc57dd650c | ||
![]() |
311a8c6fa3 | ||
![]() |
bdb43c0e9e | ||
![]() |
8033b47596 | ||
![]() |
9d5052cc68 | ||
![]() |
f4c9dc8a5f | ||
![]() |
a3f0a78df0 | ||
![]() |
b70363e005 | ||
![]() |
65eab801e8 | ||
![]() |
9e764248d3 | ||
![]() |
a660a1c44b | ||
![]() |
33458c1bdb | ||
![]() |
e5530182cd | ||
![]() |
9ecabc3faf | ||
![]() |
8b58f6b861 | ||
![]() |
9e41bf529d | ||
![]() |
36398fe958 | ||
![]() |
69cfbea5f3 | ||
![]() |
1e1e3beca6 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -15,6 +15,7 @@
|
||||
version.lock
|
||||
logs/*
|
||||
cache/*
|
||||
*.mmdb
|
||||
|
||||
# HTTPS Cert/Key #
|
||||
##################
|
||||
|
256
API.md
256
API.md
@@ -169,6 +169,10 @@ Return the api docs formatted with markdown.
|
||||
Download the PlexPy log file.
|
||||
|
||||
|
||||
### download_plex_log
|
||||
Download the Plex log file.
|
||||
|
||||
|
||||
### edit_library
|
||||
Update a library section on PlexPy.
|
||||
|
||||
@@ -318,6 +322,34 @@ Returns:
|
||||
```
|
||||
|
||||
|
||||
### get_geoip_lookup
|
||||
Get the geolocation info for an IP address. The GeoLite2 database must be installed.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
ip_address
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
{"continent": "North America",
|
||||
"country": "United States",
|
||||
"region": "California",
|
||||
"city": "Mountain View",
|
||||
"postal_code": "94035",
|
||||
"timezone": "America/Los_Angeles",
|
||||
"latitude": 37.386,
|
||||
"longitude": -122.0838,
|
||||
"accuracy": 1000
|
||||
}
|
||||
json:
|
||||
{"error": "The address 127.0.0.1 is not in the database."
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### get_history
|
||||
Get the PlexPy history.
|
||||
|
||||
@@ -543,6 +575,33 @@ Returns:
|
||||
```
|
||||
|
||||
|
||||
### get_library
|
||||
Get a library's details.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
{"child_count": null,
|
||||
"count": 887,
|
||||
"do_notify": 1,
|
||||
"do_notify_created": 1,
|
||||
"keep_history": 1,
|
||||
"library_art": "/:/resources/movie-fanart.jpg",
|
||||
"library_thumb": "/:/resources/movie.png",
|
||||
"parent_count": null,
|
||||
"section_id": 1,
|
||||
"section_name": "Movies",
|
||||
"section_type": "movie"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### get_library_media_info
|
||||
Get the data on the PlexPy media info tables.
|
||||
|
||||
@@ -619,6 +678,66 @@ Returns:
|
||||
```
|
||||
|
||||
|
||||
### get_library_user_stats
|
||||
Get a library's user statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"friendly_name": "Jon Snow",
|
||||
"total_plays": 170,
|
||||
"user_id": 133788,
|
||||
"user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar"
|
||||
},
|
||||
{"platform_type": "DanyKhaleesi69",
|
||||
"total_plays": 42,
|
||||
"user_id": 8008135,
|
||||
"user_thumb": "https://plex.tv/users/568gwwoib5t98a3a/avatar"
|
||||
},
|
||||
{...},
|
||||
{...}
|
||||
]
|
||||
```
|
||||
|
||||
|
||||
### get_library_watch_time_stats
|
||||
Get a library's watch time statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"query_days": 1,
|
||||
"total_plays": 0,
|
||||
"total_time": 0
|
||||
},
|
||||
{"query_days": 7,
|
||||
"total_plays": 3,
|
||||
"total_time": 15694
|
||||
},
|
||||
{"query_days": 30,
|
||||
"total_plays": 35,
|
||||
"total_time": 63054
|
||||
},
|
||||
{"query_days": 0,
|
||||
"total_plays": 508,
|
||||
"total_time": 1183080
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
|
||||
### get_logs
|
||||
Get the PlexPy logs.
|
||||
|
||||
@@ -1061,6 +1180,7 @@ Required parameters:
|
||||
count (str): Number of items to return
|
||||
|
||||
Optional parameters:
|
||||
start (str): The item number to start at
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Returns:
|
||||
@@ -1310,6 +1430,35 @@ Returns:
|
||||
```
|
||||
|
||||
|
||||
### get_user
|
||||
Get a user's details.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
user_id (str): The id of the Plex user
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
{"allow_guest": 1,
|
||||
"deleted_user": 0,
|
||||
"do_notify": 1,
|
||||
"email": "Jon.Snow.1337@CastleBlack.com",
|
||||
"friendly_name": "Jon Snow",
|
||||
"is_allow_sync": 1,
|
||||
"is_home_user": 1,
|
||||
"is_restricted": 0,
|
||||
"keep_history": 1,
|
||||
"shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
|
||||
"user_id": 133788,
|
||||
"user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
|
||||
"username": "LordCommanderSnow"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### get_user_ips
|
||||
Get the data on PlexPy users IP table.
|
||||
|
||||
@@ -1357,7 +1506,7 @@ Returns:
|
||||
|
||||
|
||||
### get_user_logins
|
||||
Get the data on PlexPy user login table.
|
||||
Get the data on PlexPy user login table.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
@@ -1376,15 +1525,15 @@ Returns:
|
||||
"recordsTotal": 2344,
|
||||
"recordsFiltered": 10,
|
||||
"data":
|
||||
[{"browser": "Safari 7.0.3",
|
||||
"friendly_name": "Jon Snow",
|
||||
"host": "http://plexpy.castleblack.com",
|
||||
"ip_address": "xxx.xxx.xxx.xxx",
|
||||
"os": "Mac OS X",
|
||||
"timestamp": 1462591869,
|
||||
"user": "LordCommanderSnow",
|
||||
"user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A",
|
||||
"user_group": "guest",
|
||||
[{"browser": "Safari 7.0.3",
|
||||
"friendly_name": "Jon Snow",
|
||||
"host": "http://plexpy.castleblack.com",
|
||||
"ip_address": "xxx.xxx.xxx.xxx",
|
||||
"os": "Mac OS X",
|
||||
"timestamp": 1462591869,
|
||||
"user": "LordCommanderSnow",
|
||||
"user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A",
|
||||
"user_group": "guest",
|
||||
"user_id": 133788
|
||||
},
|
||||
{...},
|
||||
@@ -1414,6 +1563,66 @@ Returns:
|
||||
```
|
||||
|
||||
|
||||
### get_user_player_stats
|
||||
Get a user's player statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
user_id (str): The id of the Plex user
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"platform_type": "Chrome",
|
||||
"player_name": "Plex Web (Chrome)",
|
||||
"result_id": 1,
|
||||
"total_plays": 170
|
||||
},
|
||||
{"platform_type": "Chromecast",
|
||||
"player_name": "Chromecast",
|
||||
"result_id": 2,
|
||||
"total_plays": 42
|
||||
},
|
||||
{...},
|
||||
{...}
|
||||
]
|
||||
```
|
||||
|
||||
|
||||
### get_user_watch_time_stats
|
||||
Get a user's watch time statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
user_id (str): The id of the Plex user
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"query_days": 1,
|
||||
"total_plays": 0,
|
||||
"total_time": 0
|
||||
},
|
||||
{"query_days": 7,
|
||||
"total_plays": 3,
|
||||
"total_time": 15694
|
||||
},
|
||||
{"query_days": 30,
|
||||
"total_plays": 35,
|
||||
"total_time": 63054
|
||||
},
|
||||
{"query_days": 0,
|
||||
"total_plays": 508,
|
||||
"total_time": 1183080
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
|
||||
### get_users
|
||||
Get a list of all users that have access to your server.
|
||||
|
||||
@@ -1513,12 +1722,35 @@ Returns:
|
||||
```
|
||||
|
||||
|
||||
### install_geoip_db
|
||||
Downloads and installs the GeoLite2 database
|
||||
|
||||
|
||||
### notify
|
||||
Send a notification using PlexPy.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
agent_id(str): The id of the notification agent to use
|
||||
9 # Boxcar2
|
||||
17 # Browser
|
||||
10 # Email
|
||||
16 # Facebook
|
||||
0 # Growl
|
||||
19 # Hipchat
|
||||
12 # IFTTT
|
||||
18 # Join
|
||||
4 # NotifyMyAndroid
|
||||
3 # Plex Home Theater
|
||||
1 # Prowl
|
||||
5 # Pushalot
|
||||
6 # Pushbullet
|
||||
7 # Pushover
|
||||
15 # Scripts
|
||||
14 # Slack
|
||||
13 # Telegram
|
||||
11 # Twitter
|
||||
2 # XBMC
|
||||
subject(str): The subject of the message
|
||||
body(str): The body of the message
|
||||
|
||||
@@ -1617,6 +1849,10 @@ Returns:
|
||||
```
|
||||
|
||||
|
||||
### uninstall_geoip_db
|
||||
Uninstalls the GeoLite2 database
|
||||
|
||||
|
||||
### update
|
||||
Check for PlexPy updates on Github.
|
||||
|
||||
|
86
CHANGELOG.md
86
CHANGELOG.md
@@ -1,5 +1,91 @@
|
||||
# Changelog
|
||||
|
||||
## v1.4.7 (2016-07-14)
|
||||
|
||||
* New: Use MaxMind GeoLite2 for IP address lookup.
|
||||
* Note: The GeoLite2 database must be installed from the settings page.
|
||||
* New: Check for Plex updates using plex.tv downloads instead of the server API.
|
||||
* Note: Check for Plex updates has been disabled and must be re-enabled in the settings.
|
||||
* New: More notification options for Plex updates.
|
||||
* New: Notifications for concurrent streams by a single user.
|
||||
* New: Notifications for user streaming from a new device.
|
||||
* New: HipChat notification agent. (Thanks @aboron)
|
||||
* Fix: Username showing as blank when friendly name is blank.
|
||||
* Fix: Direct stream count wrong in the current activity header.
|
||||
* Fix: Current activity reporting direct stream when reducing the stream quality switches to transcoding.
|
||||
* Fix: Apostophe in an Arnold quote causing the shutdown/restart page to crash.
|
||||
* Fix: Disable refreshing posters in guest mode.
|
||||
* Fix: PlexWatch/Plexivity import unable to select the "grouped" database table.
|
||||
* Change: Updated Facebook notification instructions.
|
||||
* Change: Subject line optional for Join notifications.
|
||||
* Change: Line break between subject and body text instead of a colon for Facebook, Slack, Twitter, and Telegram.
|
||||
* Change: Allow Mattermost notifications using the Slack config.
|
||||
* Change: Better formatting for Slack poster notifications.
|
||||
* Change: Telegram only notifies once instead of twice when posters are enabled.
|
||||
* Change: Host Open Sans font locally instead of querying Google Fonts.
|
||||
|
||||
|
||||
## v1.4.6 (2016-06-11)
|
||||
|
||||
* New: Added User and Library statistics to the API.
|
||||
* New: Ability to refresh individual poster images without clearing the entire cache. (Thanks @Hellowlol)
|
||||
* New: Added {added_date}, {updated_date}, and {last_viewed_date} to metadata notification options.
|
||||
* New: Log level filter for Plex logs. (Thanks @sanderploegsma)
|
||||
* New: Log level filter for PlexPy logs.
|
||||
* New: Button to download Plex logs directly from the web interface.
|
||||
* New: Advanced setting in the config file to change the number of Plex log lines retrieved.
|
||||
* Fix: FreeBSD and FreeNAS init scripts to reflect the path in the installation guide. (Thanks @nortron)
|
||||
* Fix: Monitoring crashing when failed to retrieve current activity.
|
||||
|
||||
|
||||
## v1.4.5 (2016-05-25)
|
||||
|
||||
* Fix: PlexPy unable to start if failed to get shared libraries for a user.
|
||||
* Fix: Matching port number when retrieving the PMS url.
|
||||
* Fix: Extract mapped IPv4 address in Plexivity import.
|
||||
* Change: Revert back to internal url when retrieving PMS images.
|
||||
|
||||
|
||||
## v1.4.4 (2016-05-24)
|
||||
|
||||
* Fix: Image queries crashing the PMS when playing clips from channels.
|
||||
* Fix: Plexivity import if IP address is missing.
|
||||
* Fix: Tooltips shown behind the datatable headers.
|
||||
* Fix: Current activity instances rendered in a random order causing them to jump around.
|
||||
|
||||
|
||||
## v1.4.3 (2016-05-22)
|
||||
|
||||
* Fix: PlexPy not starting without any authentication method.
|
||||
|
||||
|
||||
## v1.4.2 (2016-05-22)
|
||||
|
||||
* New: Option to use HTTP basic authentication instead of the HTML login form.
|
||||
* Fix: Unable to save settings when enabling the HTTP proxy setting.
|
||||
* Change: Match the PMS port when retrieving the PMS url.
|
||||
|
||||
|
||||
## v1.4.1 (2016-05-20)
|
||||
|
||||
* New: HTTP Proxy checkbox in the settings. Enable this if using an SSL enabled reverse proxy in front of PlexPy.
|
||||
* Fix: Check for blank username/password on login.
|
||||
* Fix: Persist current activity artwork blur across refreshes when transcoding details are visible.
|
||||
* Fix: Send notifications to multiple XBMC/Plex Home Theater devices.
|
||||
* Fix: Reset PMS identifier when clicking verify server button in settings.
|
||||
* Fix: Crash when trying to group current activity session in database.
|
||||
* Fix: Check current activity returns sessions when refreshing.
|
||||
* Fix: Logs sorted out of order.
|
||||
* Fix: Resolution reported incorrectly in the stream info modal.
|
||||
* Fix: PlexPy crashing when hashing password in the config file.
|
||||
* Fix: CherryPy doubling the port number when accessing PlexPy locally with http_proxy enabled.
|
||||
* Change: Sort by most recent for ties in watch statistics.
|
||||
* Change: Refresh Join devices when changing the API key.
|
||||
* Change: Format the Join device IDs.
|
||||
* Change: Join notifications now sent with Python Requests module.
|
||||
* Change: Add paging for recently added in the API.
|
||||
|
||||
|
||||
## v1.4.0 (2016-05-15)
|
||||
|
||||
* New: An HTML form login page with sessions support.
|
||||
|
@@ -1,10 +1,10 @@
|
||||
<!---
|
||||
Reporting Issues:
|
||||
* To ensure that a develpoer has enough information to work with please include all of the information below.
|
||||
* To ensure that a developer has enough information to work with please include all of the information below.
|
||||
Please provide as much detail as possible. Screenshots can be very useful to see the problem.
|
||||
* Use proper markdown syntax to structure your post (i.e. code/log in code blocks).
|
||||
See: https://help.github.com/articles/basic-writing-and-formatting-syntax/
|
||||
* Iclude a link to your **FULL** log file that has the error(not just a few lines!).
|
||||
* Include a link to your **FULL** log file that has the error(not just a few lines!).
|
||||
Please use [Gist](http://gist.github.com) or [Pastebin](http://pastebin.com/).
|
||||
|
||||
Feature Requests:
|
||||
|
@@ -214,6 +214,7 @@ def main():
|
||||
'https_key': plexpy.CONFIG.HTTPS_KEY,
|
||||
'http_username': plexpy.CONFIG.HTTP_USERNAME,
|
||||
'http_password': plexpy.CONFIG.HTTP_PASSWORD,
|
||||
'http_basic_auth': plexpy.CONFIG.HTTP_BASIC_AUTH
|
||||
}
|
||||
webstart.initialize(web_config)
|
||||
|
||||
|
@@ -30,7 +30,7 @@
|
||||
<div class="col-xs-4">
|
||||
<select id="table_name" class="form-control" name="table_name">
|
||||
<option value="processed">processed</option>
|
||||
<option value="processed">grouped</option>
|
||||
<option value="grouped">grouped</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -15,7 +15,7 @@
|
||||
<link href="${http_root}css/bootstrap3/bootstrap.css" rel="stylesheet">
|
||||
<link href="${http_root}css/pnotify.custom.min.css" rel="stylesheet" />
|
||||
<link href="${http_root}css/plexpy.css" rel="stylesheet">
|
||||
<link href="https://fonts.googleapis.com/css?family=Open+Sans:400,600" rel="stylesheet" type="text/css">
|
||||
<link href="${http_root}css/opensans.min.css" rel="stylesheet">
|
||||
<link href="${http_root}css/font-awesome.min.css" rel="stylesheet">
|
||||
${next.headIncludes()}
|
||||
|
||||
@@ -170,7 +170,7 @@
|
||||
<form action="search" method="post" class="form" id="search_form">
|
||||
<div class="input-group">
|
||||
<span class="input-textbox">
|
||||
<input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search..."/>
|
||||
<input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search Plex library..."/>
|
||||
</span>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-dark btn-inactive" type="submit" id="search_button"><i class="fa fa-search"></i></button>
|
||||
|
129
data/interfaces/default/configuration_table.html
Normal file
129
data/interfaces/default/configuration_table.html
Normal file
@@ -0,0 +1,129 @@
|
||||
<%doc>
|
||||
USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE
|
||||
|
||||
For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/
|
||||
|
||||
Filename: configuration_table.html
|
||||
Version: 0.1
|
||||
|
||||
DOCUMENTATION :: END
|
||||
</%doc>
|
||||
|
||||
<%!
|
||||
import os
|
||||
import sys
|
||||
import plexpy
|
||||
from plexpy import common, logger
|
||||
from plexpy.helpers import anon_url
|
||||
%>
|
||||
|
||||
<table class="config-info-table small-muted">
|
||||
<tbody>
|
||||
% if plexpy.CURRENT_VERSION:
|
||||
<tr>
|
||||
<td>Git Branch:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/tree/%s' % plexpy.CONFIG.GIT_BRANCH)}">${plexpy.CONFIG.GIT_BRANCH}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Git Commit Hash:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/commit/%s' % plexpy.CURRENT_VERSION)}">${plexpy.CURRENT_VERSION}</a></td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Configuration File:</td>
|
||||
<td>${plexpy.CONFIG_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Database File:</td>
|
||||
<td>${plexpy.DB_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Log File:</td>
|
||||
<td><a class="no-highlight" href="logFile" target="_blank">${os.path.join(plexpy.CONFIG.LOG_DIR, logger.FILENAME)}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Backup Directory:</td>
|
||||
<td>${plexpy.CONFIG.BACKUP_DIR}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Cache Directory:</td>
|
||||
<td>${plexpy.CONFIG.CACHE_DIR}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>GeoLite2 Database:</td>
|
||||
% if plexpy.CONFIG.GEOIP_DB:
|
||||
<td>${plexpy.CONFIG.GEOIP_DB} | <a class="no-highlight" href="#" id="reinstall_geoip_db">Reinstall / Update</a> | <a class="no-highlight" href="#" id="uninstall_geoip_db">Uninstall</a></td>
|
||||
% else:
|
||||
<td><a class="no-highlight" href="#" id="install_geoip_db">Click here to install the GeoLite2 database.</a></td>
|
||||
% endif
|
||||
</tr>
|
||||
% if plexpy.ARGS:
|
||||
<tr>
|
||||
<td>Arguments:</td>
|
||||
<td>${plexpy.ARGS}</td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Platform:</td>
|
||||
<td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Python Version:</td>
|
||||
<td>${sys.version}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="top-line">Plex Forums:</td>
|
||||
<td class="top-line"><a class="no-highlight" href="${anon_url('https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program')}" target="_blank">https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Source:</td>
|
||||
<td><a id="source-link" class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy')}" target="_blank">https://github.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Wiki:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/wiki')}" target="_blank">https://github.com/drzoidberg33/plexpy/wiki</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Issues:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/drzoidberg33/plexpy/issues')}" data-id="issue">https://github.com/drzoidberg33/plexpy/issues</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Feature Requests:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('http://feathub.com/drzoidberg33/plexpy')}" data-id="feature request">http://feathub.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Gitter Chat:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://gitter.im/drzoidberg33/plexpy')}" target="_blank">https://gitter.im/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$("#install_geoip_db, #reinstall_geoip_db").click(function () {
|
||||
var msg = 'Are you sure you want to install the GeoLite2 database?<br /><br />' +
|
||||
'The database is used to lookup IP address geolocation info.<br />' +
|
||||
'The database will be downloaded from <a href="${anon_url("https://dev.maxmind.com/geoip/geoip2/geolite2/")}" target="_blank">MaxMind</a>, <br />' +
|
||||
'and requires <strong>100MB</strong> of free space to install in your PlexPy directory.<br />'
|
||||
var url = 'install_geoip_db';
|
||||
confirmAjaxCall(url, msg, 'Installing GeoLite2 database.', getConfigurationTable);
|
||||
});
|
||||
|
||||
$("#uninstall_geoip_db").click(function () {
|
||||
var msg = 'Are you sure you want to uninstall the GeoLite2 database?<br /><br />' +
|
||||
'You will not be able to lookup IP address geolocation info.';
|
||||
var url = 'uninstall_geoip_db';
|
||||
confirmAjaxCall(url, msg, 'Uninstalling GeoLite2 database.', getConfigurationTable);
|
||||
});
|
||||
|
||||
$('.guidelines-modal-link').on('click', function (e) {
|
||||
e.preventDefault();
|
||||
$('#guidelines-link').attr('href', $('#source-link').attr('href'));
|
||||
$('#guidelines-type').text($(this).data('id'))
|
||||
$('#guidelines-modal').modal();
|
||||
$('#guidelines-continue').attr('href', $(this).attr('href')).on('click', function () {
|
||||
$('#guidelines-modal').modal('hide');
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
1
data/interfaces/default/css/opensans.min.css
vendored
Normal file
1
data/interfaces/default/css/opensans.min.css
vendored
Normal file
@@ -0,0 +1 @@
|
||||
@font-face{font-family:'Open Sans';font-weight:400;font-style:normal;src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot);src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot?#iefix) format('embedded-opentype'),local('Open Sans'),local('Open-Sans-regular'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff2) format('woff2'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff) format('woff'),url(../fonts/Open-Sans-regular/Open-Sans-regular.ttf) format('truetype'),url(../fonts/Open-Sans-regular/Open-Sans-regular.svg#OpenSans) format('svg')}@font-face{font-family:'Open Sans';font-weight:600;font-style:normal;src:url(../fonts/Open-Sans-600/Open-Sans-600.eot);src:url(../fonts/Open-Sans-600/Open-Sans-600.eot?#iefix) format('embedded-opentype'),local('Open Sans Semibold'),local('Open-Sans-600'),url(../fonts/Open-Sans-600/Open-Sans-600.woff2) format('woff2'),url(../fonts/Open-Sans-600/Open-Sans-600.woff) format('woff'),url(../fonts/Open-Sans-600/Open-Sans-600.ttf) format('truetype'),url(../fonts/Open-Sans-600/Open-Sans-600.svg#OpenSans) format('svg')}
|
@@ -1184,6 +1184,7 @@ a:hover .dashboard-recent-media-cover {
|
||||
margin: 0 40px 0 25px;
|
||||
height: 100px;
|
||||
overflow: visible;
|
||||
position: relative;
|
||||
}
|
||||
.summary-poster-face {
|
||||
background-position: center;
|
||||
@@ -1922,6 +1923,7 @@ a .library-user-instance-box:hover {
|
||||
.home-platforms-instance-poster {
|
||||
margin-left: 0px;
|
||||
position: absolute;
|
||||
overflow: hidden;
|
||||
}
|
||||
.home-platforms-instance-poster .home-platforms-poster-face {
|
||||
background-position: center;
|
||||
@@ -2079,6 +2081,7 @@ a .library-user-instance-box:hover {
|
||||
.home-platforms-instance-list-poster {
|
||||
position: absolute;
|
||||
left: 20px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.home-platforms-instance-list-poster .home-platforms-list-poster-face {
|
||||
background-position: center;
|
||||
@@ -2954,4 +2957,56 @@ a.no-highlight:hover {
|
||||
.datatable-wrap {
|
||||
min-width: 150px;
|
||||
max-width: 250px;
|
||||
}
|
||||
.inline-pre {
|
||||
font-family: monospace;
|
||||
margin: 0 2px;
|
||||
padding: 2px 5px;
|
||||
font-size: 13px;
|
||||
color: #fff;
|
||||
background-color: #555;
|
||||
border: 0px solid #444;
|
||||
border-radius: 3px;
|
||||
}
|
||||
.overlay-refresh-image {
|
||||
opacity: 0;
|
||||
color: #000;
|
||||
font-size: 16px;
|
||||
float: left;
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
right: 10px;
|
||||
z-index: 1;
|
||||
transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
-webkit-transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
-moz-transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
-o-transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
text-shadow: -1px -1px 0 #fff, 1px -1px 0 #fff, -1px 1px 0 #fff, 1px 1px 0 #fff;
|
||||
}
|
||||
.overlay-refresh-image.left {
|
||||
left: 10px;
|
||||
}
|
||||
.overlay-refresh-image.info-art {
|
||||
color: #999;
|
||||
top: 15px;
|
||||
right: 25px;
|
||||
opacity: 1;
|
||||
text-shadow: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
.overlay-refresh-image.info-art:hover {
|
||||
color: #fff;
|
||||
text-shadow: none;
|
||||
}
|
||||
a:hover .overlay-refresh-image {
|
||||
opacity: .25;
|
||||
top: 8px;
|
||||
}
|
||||
a:hover .overlay-refresh-image:hover {
|
||||
opacity: .9;
|
||||
}
|
||||
#ip_error {
|
||||
color: #aaa;
|
||||
display: none;
|
||||
text-align: center;
|
||||
}
|
@@ -106,6 +106,9 @@ DOCUMENTATION :: END
|
||||
% else:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(${a['art']});"></div>
|
||||
% endif
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="dashboard-activity-button-info">
|
||||
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${a['session_key']}">
|
||||
<i class="fa fa-info-circle"></i>
|
||||
|
@@ -61,6 +61,8 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data is not None:
|
||||
<%
|
||||
from urllib import quote
|
||||
|
||||
from plexpy import helpers
|
||||
data['indexes'] = helpers.cast_to_int(data['indexes'])
|
||||
%>
|
||||
@@ -71,7 +73,7 @@ DOCUMENTATION :: END
|
||||
% else:
|
||||
<a href="#">
|
||||
% endif
|
||||
<div class="dashboard-activity-poster">
|
||||
<div class="dashboard-activity-poster" id="poster-${data['session_key']}">
|
||||
% if not data['art'].startswith('interfaces') or not data['thumb'].startswith('interfaces'):
|
||||
% if (data['media_type'] == 'movie' and not data['indexes']) or (data['indexes'] and not data['view_offset']):
|
||||
<div id="bif-${data['session_key']}" class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${data['art']}&width=500&height=280&fallback=art);"></div>
|
||||
@@ -90,9 +92,11 @@ DOCUMENTATION :: END
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(${data['thumb']});"></div>
|
||||
% else:
|
||||
% if data['art']:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${data['art']}&width=500&height=280&fallback=art);"></div>
|
||||
<!--Hacky solution to escape the image url until I come up with something better-->
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${quote(data['art'])}&width=500&height=280&fallback=art);"></div>
|
||||
% else:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=500&height=280&fallback=art);"></div>
|
||||
<!--Hacky solution to escape the image url until I come up with something better-->
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${quote(data['thumb'])}&width=500&height=280&fallback=art);"></div>
|
||||
% endif
|
||||
% endif
|
||||
% elif data['media_type'] == 'photo':
|
||||
@@ -104,8 +108,11 @@ DOCUMENTATION :: END
|
||||
% else:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(${data['art']});"></div>
|
||||
% endif
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image left" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="dashboard-activity-button-info">
|
||||
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${data['session_key']}">
|
||||
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${data['session_key']}" data-id="${data['session_key']}">
|
||||
<i class="fa fa-info-circle"></i>
|
||||
</button>
|
||||
</div>
|
||||
@@ -128,79 +135,39 @@ DOCUMENTATION :: END
|
||||
% endif
|
||||
</span>
|
||||
</div>
|
||||
% if data['media_type'] == 'track':
|
||||
% if data['audio_decision'] == 'direct play':
|
||||
Stream <strong>Direct Play</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>
|
||||
Transcoding
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
(Speed: ${data['transcode_speed']})
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
% if data['video_decision'] == 'transcode' or data['audio_decision'] == 'transcode':
|
||||
Stream <strong>Transcode (Speed: ${data['transcode_speed']})
|
||||
% if data['throttled'] == '1':
|
||||
(Throttled)
|
||||
% endif
|
||||
</span>
|
||||
</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['audio_decision'] == 'direct play':
|
||||
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'transcode':
|
||||
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% endif
|
||||
% elif data['media_type'] == 'episode' or data['media_type'] == 'movie' or data['media_type'] == 'clip':
|
||||
% if data['video_decision'] == 'direct play' and data['audio_decision'] == 'direct play':
|
||||
Stream <strong>Direct Play</strong>
|
||||
% elif data['video_decision'] == 'copy' and data['audio_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>
|
||||
Transcoding
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
(Speed: ${data['transcode_speed']})
|
||||
% if data['throttled'] == '1':
|
||||
(Throttled)
|
||||
</strong>
|
||||
% elif data['video_decision'] == 'copy' or data['audio_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>Direct Play</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['video_decision'] and data['media_type'] != 'photo':
|
||||
% if data['video_decision'] == 'transcode':
|
||||
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
|
||||
% elif data['video_decision'] == 'copy':
|
||||
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% else:
|
||||
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% endif
|
||||
</span>
|
||||
</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['video_decision'] == 'direct play':
|
||||
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% elif data['video_decision'] == 'copy':
|
||||
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% elif data['video_decision'] == 'transcode':
|
||||
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['audio_decision'] == 'direct play':
|
||||
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'transcode':
|
||||
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% endif
|
||||
% elif data['media_type'] == 'photo':
|
||||
% if data['video_decision'] == 'direct play':
|
||||
Stream <strong>Direct Play</strong>
|
||||
% elif data['video_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
(Speed: ${data['transcode_speed']})
|
||||
% if data['throttled'] == '1':
|
||||
(Throttled)
|
||||
<br />
|
||||
% endif
|
||||
% if data['audio_decision']:
|
||||
% if data['audio_decision'] == 'transcode':
|
||||
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% else:
|
||||
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
|
||||
% endif
|
||||
</span>
|
||||
</strong>
|
||||
% endif
|
||||
% endif
|
||||
<br>
|
||||
% endif
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
% if data['media_type'] != 'photo':
|
||||
|
202
data/interfaces/default/fonts/Open-Sans-600/LICENSE.txt
Normal file
202
data/interfaces/default/fonts/Open-Sans-600/LICENSE.txt
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.eot
Normal file
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.eot
Normal file
Binary file not shown.
1637
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.svg
Normal file
1637
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.svg
Normal file
File diff suppressed because it is too large
Load Diff
After Width: | Height: | Size: 104 KiB |
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.ttf
Normal file
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.ttf
Normal file
Binary file not shown.
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff
Normal file
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff
Normal file
Binary file not shown.
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff2
Normal file
BIN
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff2
Normal file
Binary file not shown.
202
data/interfaces/default/fonts/Open-Sans-regular/LICENSE.txt
Normal file
202
data/interfaces/default/fonts/Open-Sans-regular/LICENSE.txt
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Binary file not shown.
File diff suppressed because it is too large
After Width: | Height: | Size: 105 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -103,6 +103,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -149,6 +152,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -199,6 +205,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['grandparent_thumb'] != '':
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -241,6 +250,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -295,6 +307,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -341,6 +356,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -391,6 +409,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -433,6 +454,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -487,6 +511,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -533,6 +560,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -583,6 +613,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['grandparent_thumb'] != '':
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -625,6 +658,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -847,6 +883,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -903,6 +942,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
|
@@ -103,10 +103,25 @@
|
||||
type: 'GET',
|
||||
cache: false,
|
||||
async: true,
|
||||
error: function (xhr, status, error) {
|
||||
console.log(status + ': ' + error);
|
||||
},
|
||||
complete: function (xhr, status) {
|
||||
$('#dashboard-checking-activity').remove();
|
||||
|
||||
var current_activity = $.parseJSON(xhr.responseText);
|
||||
var current_activity;
|
||||
try {
|
||||
current_activity = $.parseJSON(xhr.responseText);
|
||||
} catch (e) {
|
||||
console.log(status + ': ' + e);
|
||||
current_activity = null;
|
||||
}
|
||||
|
||||
if (!(current_activity)) {
|
||||
$('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.</div>');
|
||||
return
|
||||
}
|
||||
|
||||
var stream_count = parseInt(current_activity.stream_count);
|
||||
var sessions = current_activity.sessions;
|
||||
|
||||
@@ -150,13 +165,38 @@
|
||||
bif_poster.animate({ opacity: 0 }, { duration: 1000, queue: false });
|
||||
bif_poster.after($('<div id="bif-' + key + '"class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img='
|
||||
+ s.bif_thumb + '&width=500&height=280&fallback=art);"></div>').fadeIn(1000, function () { bif_poster.remove() }));
|
||||
blurArtwork(key);
|
||||
}
|
||||
|
||||
// if transcoding, update the transcode state
|
||||
var ts = '';
|
||||
if (s.video_decision == 'transcode' || s.audio_decision == 'transcode') {
|
||||
var throttled = (s.throttled == '1') ? ' (Throttled)' : '';
|
||||
$('#transcode-state-' + key).html('(Speed: ' + s.transcode_speed + ')' + throttled);
|
||||
ts += 'Stream <strong>Transcode (Speed: ' + s.transcode_speed + ')' + throttled + '</strong><br>';
|
||||
} else if (s.video_decision == 'copy' || s.audio_decision == 'copy') {
|
||||
ts += 'Stream <strong>Direct Stream</strong><br>';
|
||||
} else {
|
||||
ts += 'Stream <strong>Direct Play</strong><br>';
|
||||
}
|
||||
if (s.video_decision != '' && s.media_type != 'photo') {
|
||||
if (s.video_decision == 'transcode') {
|
||||
ts += 'Video <strong>Transcode (' + s.transcode_video_codec + ') (' + s.transcode_width + 'x' + s.transcode_height + ')</strong><br>';
|
||||
} else if (s.video_decision == 'copy') {
|
||||
ts += 'Video <strong>Direct Stream (' + s.transcode_video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
|
||||
} else {
|
||||
ts += 'Video <strong>Direct Play (' + s.video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
|
||||
}
|
||||
}
|
||||
if (s.audio_decision != '') {
|
||||
if (s.audio_decision == 'transcode') {
|
||||
ts += 'Audio <strong>Transcode (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
|
||||
} else if (s.audio_decision == 'copy') {
|
||||
ts += 'Audio <strong>Direct Stream (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
|
||||
} else {
|
||||
ts += 'Audio <strong>Direct Play (' + s.audio_codec + ') (' + s.audio_channels + 'ch)</strong>';
|
||||
}
|
||||
}
|
||||
$('#transcode-state-' + key).html(ts);
|
||||
|
||||
// update the stream progress times
|
||||
$('#stream-eta-' + key).html(moment().add(parseInt(s.duration) - parseInt(s.view_offset), 'milliseconds').format(time_format));
|
||||
@@ -210,14 +250,18 @@
|
||||
getCurrentActivity();
|
||||
}, 15000);
|
||||
|
||||
function blurArtwork(session_key) {
|
||||
var filterVal = $('#stream-' + session_key).is(':visible') ? 'blur(5px)' : '';
|
||||
$($('#poster-' + session_key).find('.dashboard-activity-poster-face, .dashboard-activity-cover-face'))
|
||||
.css('filter', filterVal).css('webkitFilter', filterVal).css('mozFilter', filterVal).css('oFilter', filterVal).css('msFilter', filterVal);
|
||||
}
|
||||
|
||||
// Show/Hide activity info
|
||||
$('#currentActivity').on('click', '.btn-activity-info', function (e) {
|
||||
e.preventDefault();
|
||||
$($(this).attr('data-target')).toggle();
|
||||
var id = $(this).closest('.dashboard-instance').data('id');
|
||||
var filterVal = $('#stream-' + id).is(':visible') ? 'blur(5px)' : '';
|
||||
$($(this).closest('.dashboard-activity-poster').find('.dashboard-activity-poster-face, .dashboard-activity-cover-face'))
|
||||
.css('filter',filterVal).css('webkitFilter',filterVal).css('mozFilter',filterVal).css('oFilter',filterVal).css('msFilter',filterVal);
|
||||
var key = $(this).data('id');
|
||||
blurArtwork(key);
|
||||
});
|
||||
|
||||
// Add hover class to dashboard-instance
|
||||
|
@@ -68,6 +68,9 @@ DOCUMENTATION :: END
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['art']}&width=1920&height=1080)"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="summary-container">
|
||||
<div class="summary-navbar">
|
||||
<div class="col-md-12">
|
||||
@@ -119,18 +122,27 @@ DOCUMENTATION :: END
|
||||
<span></span>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% elif data['media_type'] == 'artist' or data['media_type'] == 'album' or data['media_type'] == 'track':
|
||||
<div class="summary-poster-face-track" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=500&height=500&fallback=cover);">
|
||||
<div class="summary-poster-face-overlay">
|
||||
<span></span>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% else:
|
||||
<div class="summary-poster-face" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=300&height=450&fallback=poster);">
|
||||
<div class="summary-poster-face-overlay">
|
||||
<span></span>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% endif
|
||||
</a>
|
||||
</div>
|
||||
|
@@ -51,6 +51,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
% elif data['children_type'] == 'episode':
|
||||
@@ -63,6 +66,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="item-children-instance-text-wrapper episode-item">
|
||||
@@ -74,6 +80,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" title="${child['title']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
|
@@ -65,6 +65,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
@@ -87,6 +90,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
@@ -109,6 +115,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
|
||||
@@ -131,6 +140,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face episode-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=500&height=250&fallback=art);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper episode-item">
|
||||
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
|
||||
@@ -154,6 +166,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
@@ -175,6 +190,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
|
||||
@@ -204,6 +222,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
|
@@ -3,37 +3,41 @@
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
<h4 class="modal-title" id="myModalLabel">
% if data:
<strong><span id="modal_header_ip_address">
<i class="fa fa-spin fa-refresh"></i> Loading Details...
% if data:
<i class="fa fa-map-marker"></i> IP Address: ${data}
% else:
<i class="fa fa-exclamation-circle"></i> Invalid IP Address
% endif
</span></strong>
% else:
<i class="fa fa-exclamation-circle"></i> Invalid IP Address</span></strong>
% endif
</h4>
</div>
<div class="modal-body" id="modal-text">
<div class="col-sm-6">
<div id="ip_error" class="text-muted"></div>
<div class="col-sm-12">
<h4><strong>Location Details</strong></h4>
</div>
<div class="col-sm-6">
<ul class="list-unstyled">
<li>Continent: <strong><span id="continent"></span></strong></li>
<li>Country: <strong><span id="country"></span></strong></li>
<li>Region: <strong><span id="region"></span></strong></li>
<li>City: <strong><span id="city"></span></strong></li>
<li>Timezone: <strong><span id="timezone"></span></strong></li>
<li>Latitude: <strong><span id="lat"></span></strong></li>
<li>Longitude: <strong><span id="lon"></span></strong></li>
<li>Postal Code: <strong><span id="postal_code"></span></strong></li>
</ul>
</div>
<div class="col-sm-6">
<h4><strong>Connection Details</strong></h4>
<ul class="list-unstyled">
<li>Organization: <strong><span id="organization"></span></strong></li>
<li>Timezone: <strong><span id="timezone"></span></strong></li>
<li>Latitude: <strong><span id="latitude"></span></strong></li>
<li>Longitude: <strong><span id="longitude"></span></strong></li>
<li>Accuracy Radius: <strong><span id="accuracy"></span></strong></li>
</ul>
</div>
</div>
<div class="modal-footer">
<% from plexpy.helpers import anon_url %>
<span class="text-muted">Telize service written by <a href="${anon_url('https://github.com/fcambus/telize')}" target="_blank">Frederic Cambus</a>.</span>
<span class="text-muted">GeoLite2 data created by <a href="${anon_url('http://www.maxmind.com')}" target="_blank">MaxMind</a>.</span>
</div>
</div>
</div>
@@ -42,25 +46,29 @@
<script>
function getUserLocation(ip_address) {
$.ajax({
url: 'https://telize.myhtpc.co.za/geoip/' + ip_address,
cache: true,
async: true,
url: 'get_geoip_lookup',
type: 'GET',
dataType: 'json',
error: function(){
$('#modal_header_ip_address').html("Request failed. Server may be too busy.");
data: { ip_address: ip_address },
cache: true,
async: true,
error: function () {
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> Request failed.<br /><br />').show();
},
success: function(data) {
$('#modal_header_ip_address').html('<i class="fa fa-map-marker"></i> IP Address: ' + ip_address);
$('#country').html(data.country);
$('#city').html(data.city);
$('#region').html(data.region);
$('#timezone').html(data.timezone);
$('#lat').html(data.latitude);
$('#lon').html(data.longitude);
$('#organization').html(data.organization);
},
timeout: 5000
success: function (data) {
if ('error' in data) {
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error + '<br /><br />').show();
} else {
$('#continent').html(data.continent);
$('#country').html(data.country);
$('#region').html(data.region);
$('#city').html(data.city);
$('#postal_code').html(data.postal_code);
$('#timezone').html(data.timezone);
$('#latitude').html(data.latitude);
$('#longitude').html(data.longitude);
$('#accuracy').html(data.accuracy + ' km');
}
}
});
}
getUserLocation('${data}');

@@ -54,6 +54,32 @@ function showMsg(msg, loader, timeout, ms, error) {
}
}

function confirmAjaxCall(url, msg, loader_msg, callback) {
$("#confirm-message").html(msg);
$('#confirm-modal').modal();
$('#confirm-modal').one('click', '#confirm-button', function () {
if (loader_msg) {
showMsg(loader_msg, true, false)
}
$.ajax({
url: url,
type: 'POST',
complete: function (xhr, status) {
result = $.parseJSON(xhr.responseText);
msg = result.message;
if (result.result == 'success') {
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
} else {
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
}
if (typeof callback === "function") {
callback();
}
}
});
});
}

function doAjaxCall(url, elem, reload, form, callback) {
// Set Message
feedback = $("#ajaxMsg");
@@ -399,4 +425,27 @@ window.onerror = function (message, file, line) {
'line': line
};
$.post("log_js_errors", e, function (data) { });
};
};

$('*').on('click', '.refresh_pms_image', function (e) {
e.preventDefault();
e.stopPropagation();

var background_div = $(this).parent().siblings(['style*=pms_image_proxy']).first();
var pms_proxy_url = background_div.css('background-image');
pms_proxy_url = /^url\((['"]?)(.*)\1\)$/.exec(pms_proxy_url);
pms_proxy_url = pms_proxy_url ? pms_proxy_url[2] : ""; // If matched, retrieve url, otherwise ""

if (pms_proxy_url.indexOf('pms_image_proxy') == -1) {
console.log('PMS image proxy url not found.');
} else {
if (pms_proxy_url.indexOf('refresh=true') > -1) {
pms_proxy_url = pms_proxy_url.replace("&refresh=true", "");
console.log(pms_proxy_url)
background_div.css('background-image', 'url(' + pms_proxy_url + ')');
background_div.css('background-image', 'url(' + pms_proxy_url + '&refresh=true)');
} else {
background_div.css('background-image', 'url(' + pms_proxy_url + '&refresh=true)');
}
}
});
@@ -448,10 +448,10 @@ function childTableOptions(rowData) {

// Create the tooltips.
$('.expand-history-tooltip').tooltip({ container: 'body' });
$('.external-ip-tooltip').tooltip();
$('.transcode-tooltip').tooltip();
$('.media-type-tooltip').tooltip();
$('.watched-tooltip').tooltip();
$('.external-ip-tooltip').tooltip({ container: 'body' });
$('.transcode-tooltip').tooltip({ container: 'body' });
$('.media-type-tooltip').tooltip({ container: 'body' });
$('.watched-tooltip').tooltip({ container: 'body' });
$('.thumb-tooltip').popover({
html: true,
container: 'body',
@@ -132,8 +132,8 @@ history_table_modal_options = {
$('#ajaxMsg').fadeOut();

// Create the tooltips.
$('.transcode-tooltip').tooltip();
$('.media-type-tooltip').tooltip();
$('.transcode-tooltip').tooltip({ container: 'body' });
$('.media-type-tooltip').tooltip({ container: 'body' });
$('.thumb-tooltip').popover({
html: true,
container: '#history-modal',
@@ -217,10 +217,10 @@ libraries_list_table_options = {
$('#ajaxMsg').fadeOut();

// Create the tooltips.
$('.purge-tooltip').tooltip();
$('.edit-tooltip').tooltip();
$('.transcode-tooltip').tooltip();
$('.media-type-tooltip').tooltip();
$('.purge-tooltip').tooltip({ container: 'body' });
$('.edit-tooltip').tooltip({ container: 'body' });
$('.transcode-tooltip').tooltip({ container: 'body' });
$('.media-type-tooltip').tooltip({ container: 'body' });
$('.thumb-tooltip').popover({
html: true,
container: 'body',
@@ -220,13 +220,14 @@ users_list_table_options = {
$('#ajaxMsg').fadeOut();

// Create the tooltips.
$('.purge-tooltip').tooltip();
$('.edit-tooltip').tooltip();
$('.transcode-tooltip').tooltip();
$('.media-type-tooltip').tooltip();
$('.watched-tooltip').tooltip();
$('.purge-tooltip').tooltip({ container: 'body' });
$('.edit-tooltip').tooltip({ container: 'body' });
$('.transcode-tooltip').tooltip({ container: 'body' });
$('.media-type-tooltip').tooltip({ container: 'body' });
$('.watched-tooltip').tooltip({ container: 'body' });
$('.thumb-tooltip').popover({
html: true,
container: 'body',
trigger: 'hover',
placement: 'right',
template: '<div class="popover history-thumbnail-popover" role="tooltip"><div class="arrow" style="top: 50%;"></div><div class="popover-content"></div></div>',
@@ -39,6 +39,9 @@ DOCUMENTATION :: END
|
||||
<div class="row">
|
||||
% if data['library_art']:
|
||||
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['library_art']}&width=1920&height=1080)"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% endif
|
||||
<div class="summary-container">
|
||||
<div class="summary-navbar">
|
||||
@@ -362,7 +365,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate watch time stats
|
||||
$.ajax({
|
||||
url: 'get_library_watch_time_stats',
|
||||
url: 'library_watch_time_stats',
|
||||
async: true,
|
||||
data: { section_id: section_id },
|
||||
complete: function(xhr, status) {
|
||||
@@ -372,7 +375,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate user stats
|
||||
$.ajax({
|
||||
url: 'get_library_user_stats',
|
||||
url: 'library_user_stats',
|
||||
async: true,
|
||||
data: { section_id: section_id },
|
||||
complete: function(xhr, status) {
|
||||
@@ -498,7 +501,7 @@ DOCUMENTATION :: END
|
||||
function recentlyWatched() {
|
||||
// Populate recently watched
|
||||
$.ajax({
|
||||
url: 'get_library_recently_watched',
|
||||
url: 'library_recently_watched',
|
||||
async: true,
|
||||
data: {
|
||||
section_id: section_id,
|
||||
@@ -514,7 +517,7 @@ DOCUMENTATION :: END
|
||||
function recentlyAdded() {
|
||||
// Populate recently added
|
||||
$.ajax({
|
||||
url: 'get_library_recently_added',
|
||||
url: 'library_recently_added',
|
||||
async: true,
|
||||
data: {
|
||||
section_id: section_id,
|
||||
|
@@ -60,6 +60,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@@ -21,7 +21,33 @@
|
||||
<span><i class="fa fa-list-alt"></i> Logs</span>
|
||||
</div>
|
||||
<div class="button-bar">
|
||||
<button class="btn btn-dark" id="download-plexpylog"><i class="fa fa-download"></i> Download log</button>
|
||||
<div class="btn-group" id="plexpy-log-levels">
|
||||
<label>
|
||||
<select name="plexpy-log-level-filter" id="plexpy-log-level-filter" class="btn" style="color: inherit;">
|
||||
<option value="">All log levels</option>
|
||||
<option disabled>────────────</option>
|
||||
<option value="DEBUG">Debug</option>
|
||||
<option value="INFO">Info</option>
|
||||
<option value="WARN">Warning</option>
|
||||
<option value="ERROR">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<div class="btn-group" id="plex-log-levels" style="display: none;">
|
||||
<label>
|
||||
<select name="plex-log-level-filter" id="plex-log-level-filter" class="btn" style="color: inherit;">
|
||||
<option value="">All log levels</option>
|
||||
<option disabled>────────────</option>
|
||||
<option value="DEBUG">Debug</option>
|
||||
<option value="INFO">Info</option>
|
||||
<option value="WARN">Warning</option>
|
||||
<option value="ERROR">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<button class="btn btn-dark" id="download-plexpylog"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="download-plexserverlog" style="display: none;"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="download-plexscannerlog" style="display: none;"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="clear-logs"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-notify-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-login-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
@@ -40,27 +66,25 @@
|
||||
<div role="tabpanel" class="tab-pane active" id="tabs-1">
|
||||
<table class="display" id="log_table" width="100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="min-tablet" align="left" id="timestamp">Timestamp</th>
|
||||
<th class="desktop" align="left" id="level">Level</th>
|
||||
<th class="all" align="left" id="message">Message</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="min-tablet" align="left" id="timestamp">Timestamp</th>
|
||||
<th class="desktop" align="left" id="level">Level</th>
|
||||
<th class="all" align="left" id="message">Message</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-2">
|
||||
<table class="display" id="plex_log_table" width="100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th align="left" id="plex_timestamp">Timestamp</th>
|
||||
<th align="left" id="plex_level">Level</th>
|
||||
<th align="left" id="plex_message">Message</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th align="left" id="plex_timestamp">Timestamp</th>
|
||||
<th align="left" id="plex_level">Level</th>
|
||||
<th align="left" id="plex_message">Message</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-3">
|
||||
@@ -114,7 +138,8 @@
|
||||
</div>
|
||||
|
||||
<br>
|
||||
<div align="center">Refresh rate:
|
||||
<div align="center">
|
||||
Refresh rate:
|
||||
<select id="refreshrate" onchange="setRefresh()">
|
||||
<option value="0" selected="selected">No Refresh</option>
|
||||
<option value="5">5 Seconds</option>
|
||||
@@ -139,21 +164,62 @@
|
||||
<script>
|
||||
|
||||
$(document).ready(function() {
|
||||
loadPlexPyLogs();
|
||||
loadPlexPyLogs(selected_log_level);
|
||||
clearSearchButton('log_table', log_table);
|
||||
});
|
||||
|
||||
function loadPlexPyLogs() {
|
||||
var log_levels = ['DEBUG', 'INFO', 'WARN', 'ERROR'];
|
||||
|
||||
function bindLogLevelFilter() {
|
||||
clearLogLevelFilter();
|
||||
var log_level_column = this.api().column(1);
|
||||
var select = $('#plex-log-level-filter');
|
||||
select.on('change', function () {
|
||||
var val = $.fn.dataTable.util.escapeRegex(
|
||||
$(this).val()
|
||||
);
|
||||
var search_string = '';
|
||||
var levelIndex = log_levels.indexOf(val);
|
||||
if (levelIndex >= 0) {
|
||||
search_string = '^' + log_levels
|
||||
.slice(levelIndex)
|
||||
.join('|') + '$';
|
||||
}
|
||||
log_level_column
|
||||
.search(search_string, true, false)
|
||||
.draw();
|
||||
}).change();
|
||||
}
|
||||
|
||||
function clearLogLevelFilter() {
|
||||
$('#plex-log-level-filter').off('change');
|
||||
}
|
||||
|
||||
var selected_log_level = null;
|
||||
function loadPlexPyLogs(selected_log_level) {
|
||||
log_table_options.ajax = {
|
||||
url: "getLog"
|
||||
url: "getLog",
|
||||
type: 'post',
|
||||
data: function (d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d),
|
||||
log_level: selected_log_level
|
||||
};
|
||||
}
|
||||
}
|
||||
log_table = $('#log_table').DataTable(log_table_options);
|
||||
|
||||
$('#plexpy-log-level-filter').on('change', function () {
|
||||
selected_log_level = $(this).val() || null;
|
||||
log_table.draw();
|
||||
});
|
||||
}
|
||||
|
||||
function loadPlexLogs() {
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=server"
|
||||
}
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_log_table = $('#plex_log_table').DataTable(plex_log_table_options);
|
||||
}
|
||||
|
||||
@@ -161,6 +227,7 @@
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=scanner"
|
||||
}
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_scanner_log_table = $('#plex_scanner_log_table').DataTable(plex_log_table_options);
|
||||
}
|
||||
|
||||
@@ -190,17 +257,25 @@
|
||||
}
|
||||
|
||||
$("#plexpy-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").show();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").show();
|
||||
$("#download-plexpylog").show()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexPyLogs();
|
||||
loadPlexPyLogs(selected_log_level);
|
||||
clearSearchButton('log_table', log_table);
|
||||
});
|
||||
|
||||
$("#plex-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").show();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").show()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexLogs();
|
||||
@@ -208,8 +283,12 @@
|
||||
});
|
||||
|
||||
$("#plex-scanner-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").show();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").show()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexScannerLogs();
|
||||
@@ -217,8 +296,12 @@
|
||||
});
|
||||
|
||||
$("#notification-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").show();
|
||||
$("#clear-login-logs").hide();
|
||||
loadNotificationLogs();
|
||||
@@ -226,8 +309,12 @@
|
||||
});
|
||||
|
||||
$("#login-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").show();
|
||||
loadLoginLogs();
|
||||
@@ -263,6 +350,13 @@
|
||||
window.location.href = "download_log";
|
||||
});
|
||||
|
||||
$("#download-plexserverlog").click(function () {
|
||||
window.location.href = "download_plex_log?log_type=server";
|
||||
});
|
||||
|
||||
$("#download-plexscannerlog").click(function () {
|
||||
window.location.href = "download_plex_log?log_type=scanner";
|
||||
});
|
||||
|
||||
$("#clear-notify-logs").click(function () {
|
||||
$("#confirm-message").text("Are you sure you want to clear the PlexPy notification logs?");
|
||||
|
@@ -217,7 +217,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
$('#pushbullet_apikey, #pushover_apitoken, #scripts_folder').on('change', function () {
|
||||
$('#pushbullet_apikey, #pushover_apitoken, #scripts_folder, #join_apikey').on('change', function () {
|
||||
// Reload modal to update certain fields
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, reloadModal);
|
||||
return false;
|
||||
|
@@ -57,6 +57,20 @@
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a media item triggers the defined buffer threshold.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_concurrent" ${helpers.checked(data['on_concurrent'])} class="toggle-switches">
|
||||
Notify on user concurrent streams
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user has concurrent streams.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_newdevice" ${helpers.checked(data['on_newdevice'])} class="toggle-switches">
|
||||
Notify on user new device
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user streams from a new device.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_created" ${helpers.checked(data['on_created'])} class="toggle-switches">
|
||||
|
@@ -49,6 +49,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -69,6 +72,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -91,6 +97,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -109,6 +118,5 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
% else:
|
||||
<div class="text-muted">There was an error communicating with your Plex Server. Please check your <a href="settings">settings</a>.
|
||||
</div><br>
|
||||
<div class="text-muted">There was an error communicating with your Plex Server.</div><br>
|
||||
% endif
|
@@ -4,7 +4,7 @@
|
||||
import sys
|
||||
|
||||
import plexpy
|
||||
from plexpy import common, logger, notifiers, versioncheck
|
||||
from plexpy import common, notifiers, versioncheck
|
||||
from plexpy.helpers import anon_url
|
||||
|
||||
available_notification_agents = sorted(notifiers.available_notification_agents(), key=lambda k: k['name'])
|
||||
@@ -62,78 +62,10 @@
|
||||
<div class="padded-header">
|
||||
<h3>PlexPy Configuration</h3>
|
||||
</div>
|
||||
<table class="config-info-table small-muted">
|
||||
<tbody>
|
||||
% if plexpy.CURRENT_VERSION:
|
||||
<tr>
|
||||
<td>Git Branch:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/tree/%s' % plexpy.CONFIG.GIT_BRANCH)}">${plexpy.CONFIG.GIT_BRANCH}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Git Commit Hash:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/commit/%s' % plexpy.CURRENT_VERSION)}">${plexpy.CURRENT_VERSION}</a></td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Configuration File:</td>
|
||||
<td>${plexpy.CONFIG_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Database File:</td>
|
||||
<td>${plexpy.DB_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Log File:</td>
|
||||
<td><a class="no-highlight" href="logFile" target="_blank">${os.path.join(config['log_dir'], logger.FILENAME)}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Backup Directory:</td>
|
||||
<td>${config['backup_dir']}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Cache Directory:</td>
|
||||
<td>${config['cache_dir']}</td>
|
||||
</tr>
|
||||
% if plexpy.ARGS:
|
||||
<tr>
|
||||
<td>Arguments:</td>
|
||||
<td>${plexpy.ARGS}</td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Platform:</td>
|
||||
<td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Python Version:</td>
|
||||
<td>${sys.version}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="top-line">Plex Forums:</td>
|
||||
<td class="top-line"><a class="no-highlight" href="${anon_url('https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program')}" target="_blank">https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Source:</td>
|
||||
<td><a id="source-link" class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy')}" target="_blank">https://github.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Wiki:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/wiki')}" target="_blank">https://github.com/drzoidberg33/plexpy/wiki</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Issues:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/drzoidberg33/plexpy/issues')}" data-id="issue">https://github.com/drzoidberg33/plexpy/issues</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Feature Requests:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('http://feathub.com/drzoidberg33/plexpy')}" data-id="feature request">http://feathub.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Gitter Chat:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://gitter.im/drzoidberg33/plexpy')}" target="_blank">https://gitter.im/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<div id="plexpy-configuration-table">
|
||||
<div class='text-muted'><i class="fa fa-refresh fa-spin"></i> Loading configuration table...</div>
|
||||
<br>
|
||||
</div>
|
||||
<div class="padded-header">
|
||||
<h3>PlexPy Scheduled Tasks</h3>
|
||||
</div>
|
||||
@@ -370,7 +302,7 @@
|
||||
</div>
|
||||
<div id="home_stats_count_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">Specify the number of items to show in the top lists for the watch statistics on the home page. Max is 10 items, default is 5 items, 0 to disable.</p>
|
||||
<p class="help-block">Specify the number of items to show in the top lists for the watch statistics on the home page. Maximum 10 items, default 5 items, 0 to disable.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -432,6 +364,13 @@
|
||||
</div>
|
||||
<p class="help-block">The base URL of the web server. Used for reverse proxies.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" class="http-settings" name="http_proxy" id="http_proxy" value="1" ${config['http_proxy']}> Enable HTTP Proxy
|
||||
</label>
|
||||
<p class="help-block">Respect the X-Forwarded-Proto header. Used for reverse proxies with SSL.</p>
|
||||
</div>
|
||||
<br />
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="launch_browser" id="launch_browser" value="1" ${config['launch_browser']}> Launch Browser on Startup
|
||||
@@ -521,10 +460,17 @@
|
||||
<label>
|
||||
<input type="checkbox" name="http_hash_password" id="http_hash_password" value="1" ${config['http_hash_password']} data-parsley-trigger="change"> Hash Password in the Config File
|
||||
</label>
|
||||
<span id="hashPasswordCheck" style="color: #eb8600; padding-left: 10px;"></span>
|
||||
<p class="help-block">Store a hashed password in the config file.<br />Warning: Your password cannot be recovered if forgotten!</p>
|
||||
</div>
|
||||
<input type="text" id="http_hashed_password" name="http_hashed_password" value="${config['http_hashed_password']}" style="display: none;" data-parsley-trigger="change" data-parsley-type="integer" data-parsley-range="[0, 1]"
|
||||
data-parsley-errors-container="#http_hash_password_error" data-parsley-error-message="Cannot un-hash password, please set a new password." data-parsley-no-focus required>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" class="auth-settings" name="http_basic_auth" id="http_basic_auth" value="1" ${config['http_basic_auth']} data-parsley-trigger="change"> Use Basic Authentication
|
||||
</label>
|
||||
<p class="help-block">Use basic HTTP authentication instead of the HTML login form.</p>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="padded-header">
|
||||
@@ -559,7 +505,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Current API key: <strong><br/>${config['api_key']}</strong></p>
|
||||
<p class="help-block">Current API key: <strong> ${config['api_key']}</strong></p>
|
||||
</div>
|
||||
|
||||
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
|
||||
@@ -567,15 +513,31 @@
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-5">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Plex Media Server <small style="color: #fff;">Version <span id="pms_version">unknown</span></small></h3>
|
||||
<h3>Plex Media Server <small style="color: #fff;">Version <span id="pms_version">${config['pms_version']}</span></small></h3>
|
||||
</div>
|
||||
<p class="help-block">If you're using websocket monitoring, any server changes require a restart of PlexPy.</p>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="monitor_pms_updates" name="monitor_pms_updates" value="1" ${config['monitor_pms_updates']}> Monitor Plex Updates
|
||||
</label>
|
||||
<p class="help-block">Enable to have PlexPy check if updates are available for the Plex Media Server.<br />
|
||||
Note: The Plex updater is broken on certain Plex Pass version of Plex Media Server. PlexPy will automatically disable checking for Plex updates if one of these versions is found.</p>
|
||||
<p class="help-block">Enable to have PlexPy check if updates are available for the Plex Media Server.</p>
|
||||
</div>
|
||||
<div id="pms_update_options">
|
||||
<div class="form-group">
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<label for="pms_update_channel">Update Channel</label>
|
||||
<select class="form-control" id="pms_update_channel" name="pms_update_channel">
|
||||
<option value="public">Public</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col-md-5">
|
||||
<label for="pms_update_distro_build">Release</label>
|
||||
<select class="form-control" id="pms_update_distro_build" name="pms_update_distro_build">
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -778,7 +740,7 @@
|
||||
</div>
|
||||
<div id="monitoring_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Min 30 seconds, recommended 60 seconds.</p>
|
||||
<p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Minimum 30 seconds, recommended 60 seconds.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -920,6 +882,22 @@
|
||||
</label>
|
||||
<p class="help-block">Disable to prevent consecutive notifications (i.e. both watched & stopped notifications).</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_concurrent_by_ip" id="notify_concurrent_by_ip" value="1" ${config['notify_concurrent_by_ip']}> User Concurrent Streams Notifications by IP Address
|
||||
</label>
|
||||
<p class="help-block">Enable to only get notified of concurrent streams by a single user from different IP addresses.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_concurrent_threshold">User Concurrent Stream Threshold</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="notify_concurrent_threshold" name="notify_concurrent_threshold" value="${config['notify_concurrent_threshold']}" data-parsley-min="2" data-parsley-trigger="change" data-parsley-errors-container="#notify_concurrent_threshold_error" required>
|
||||
</div>
|
||||
<div id="notify_concurrent_threshold_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The number of concurrent streams by a single user for PlexPy to trigger a notification. Minimum 2.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Recently Added Notifications</h3>
|
||||
@@ -1064,6 +1042,40 @@
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<div class="link"><i class="fa fa-arrow-circle-o-right fa-fw"></i> User Concurrent Streams<i class="fa fa-chevron-down"></i></div>
|
||||
<ul class="submenu">
|
||||
<li>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_concurrent_subject_text">Subject Line</label>
|
||||
<input class="form-control" type="text" id="notify_on_concurrent_subject_text" name="notify_on_concurrent_subject_text" value="${config['notify_on_concurrent_subject_text']}" data-parsley-trigger="change" required>
|
||||
<p class="help-block">Set a custom subject line.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_buffer_body_text">Message Body</label>
|
||||
<textarea class="form-control" id="notify_on_concurrent_body_text" name="notify_on_concurrent_body_text" data-parsley-trigger="change" data-autoresize required>${config['notify_on_concurrent_body_text']}</textarea>
|
||||
<p class="help-block">Set a custom body.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<div class="link"><i class="fa fa-desktop fa-fw"></i> User New Device<i class="fa fa-chevron-down"></i></div>
|
||||
<ul class="submenu">
|
||||
<li>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_newdevice_subject_text">Subject Line</label>
|
||||
<input class="form-control" type="text" id="notify_on_newdevice_subject_text" name="notify_on_newdevice_subject_text" value="${config['notify_on_newdevice_subject_text']}" data-parsley-trigger="change" required>
|
||||
<p class="help-block">Set a custom subject line.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_buffer_body_text">Message Body</label>
|
||||
<textarea class="form-control" id="notify_on_newdevice_body_text" name="notify_on_newdevice_body_text" data-parsley-trigger="change" data-autoresize required>${config['notify_on_newdevice_body_text']}</textarea>
|
||||
<p class="help-block">Set a custom body.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
<ul id="accordion-timeline" class="accordion list-unstyled">
|
||||
<li>
|
||||
@@ -1203,11 +1215,7 @@
|
||||
% else:
|
||||
<a href="javascript:void(0)" data-target="#notification-triggers-modal" data-id="${agent['id']}" class="toggle-notification-triggers-modal toggle-left" data-toggle="modal"><i class="fa fa-lg fa-bell"></i></a>
|
||||
% endif
|
||||
% if agent['id'] == 17:
|
||||
${agent['name']} <span style="color: #eb8600; padding-left: 10px;">[experimental]</span>
|
||||
% else:
|
||||
${agent['name']}
|
||||
% endif
|
||||
% if agent['has_config']:
|
||||
<a href="javascript:void(0)" rel="tooltip" data-target="#notification-config-modal" data-placement="top" title data-title="Open configuration" data-id="${agent['id']}" class="toggle-notification-config-modal toggle-right" data-toggle="modal"><i class="fa fa-lg fa-cog"></i></a>
|
||||
% endif
|
||||
@@ -1600,6 +1608,10 @@
|
||||
<td><strong>{streams}</strong></td>
|
||||
<td>The number of concurrent streams.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user_streams}</strong></td>
|
||||
<td>The number of concurrent streams by the person streaming.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user}</strong></td>
|
||||
<td>The friendly name of the person streaming.</td>
|
||||
@@ -1811,6 +1823,26 @@
|
||||
<td><strong>{year}</strong></td>
|
||||
<td>The release year for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{release_date}</strong></td>
|
||||
<td>The release date (in date format) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{air_date}</strong></td>
|
||||
<td>The air date (in date format) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{added_date}</strong></td>
|
||||
<td>The date (in date format) the item was added to Plex.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{updated_date}</strong></td>
|
||||
<td>The date (in date format) the item was updated on Plex.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{last_viewed_date}</strong></td>
|
||||
<td>The date (in date format) the item was last viewed on Plex.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{studio}</strong></td>
|
||||
<td>The studio for the item.</td>
|
||||
@@ -1934,8 +1966,40 @@
|
||||
<td>The available update download URL.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog}</strong></td>
|
||||
<td>The changelog for the available update.</td>
|
||||
<td><strong>{update_release_date}</strong></td>
|
||||
<td>The release date of the update version.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_channel}</strong></td>
|
||||
<td>The update channel. <span class="small-muted">(Public or Plex Pass)</span></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_platform}</strong></td>
|
||||
<td>The platform of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_distro}</strong></td>
|
||||
<td>The distro of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_distro_build}</strong></td>
|
||||
<td>The distro build of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_requirements}</strong></td>
|
||||
<td>The requirements for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_extra_info}</strong></td>
|
||||
<td>Any extra info for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog_added}</strong></td>
|
||||
<td>The added changelog for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog_fixed}</strong></td>
|
||||
<td>The fixed changelog for the available update.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
@@ -2044,6 +2108,33 @@
|
||||
<script src="${http_root}js/Sortable.min.js"></script>
|
||||
<script src="${http_root}js/moment-with-locale.js"></script>
|
||||
<script>
|
||||
function getConfigurationTable() {
|
||||
$.ajax({
|
||||
url: 'get_configuration_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-configuration-table").html(xhr.responseText);
|
||||
if ("${kwargs.get('install_geoip')}" == 'true') {
|
||||
$('#install_geoip_db').removeClass('no-highlight').css('color','#e9a049');
|
||||
} else if ("${kwargs.get('reinstall_geoip')}" == 'true') {
|
||||
$('#reinstall_geoip_db').removeClass('no-highlight').css('color','#e9a049');
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getSchedulerTable() {
|
||||
$.ajax({
|
||||
url: 'get_scheduler_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-scheduler-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
// Javascript to enable link to tab
|
||||
@@ -2080,7 +2171,9 @@ $(document).ready(function() {
|
||||
$('#restart-modal').modal('show');
|
||||
}
|
||||
$("#http_hashed_password").val($("#http_hash_password").is(":checked") ? 1 : 0)
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
loadUpdateDistros();
|
||||
settingsChanged = false;
|
||||
}
|
||||
|
||||
@@ -2111,7 +2204,8 @@ $(document).ready(function() {
|
||||
initConfigCheckbox('#https_create_cert');
|
||||
initConfigCheckbox('#check_github');
|
||||
initConfigCheckbox('#notify_upload_posters');
|
||||
|
||||
initConfigCheckbox('#monitor_pms_updates');
|
||||
|
||||
$("#menu_link_shutdown").click(function() {
|
||||
$("#confirm-message").text("Are you sure you want to shutdown PlexPy?");
|
||||
$('#confirm-modal').modal();
|
||||
@@ -2140,38 +2234,9 @@ $(document).ready(function() {
|
||||
window.location.href = "restart";
|
||||
});
|
||||
|
||||
function getSchedulerTable() {
|
||||
$.ajax({
|
||||
url: 'get_scheduler_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-scheduler-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
|
||||
function confirmAjaxCall (url, msg) {
|
||||
$("#confirm-message").text(msg);
|
||||
$('#confirm-modal').modal();
|
||||
$('#confirm-modal').one('click', '#confirm-button', function () {
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: 'POST',
|
||||
complete: function (xhr, status) {
|
||||
result = $.parseJSON(xhr.responseText);
|
||||
msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
$("#backup_config").click(function () {
|
||||
var msg = 'Are you sure you want to create a backup of the PlexPy config?';
|
||||
var url = 'backup_config';
|
||||
@@ -2202,7 +2267,6 @@ $(document).ready(function() {
|
||||
confirmAjaxCall(url, msg);
|
||||
});
|
||||
|
||||
|
||||
$('#api_key').click(function(){ $('#api_key').select() });
|
||||
$("#generate_api").click(function() {
|
||||
$.get('generateAPI',
|
||||
@@ -2234,7 +2298,6 @@ $(document).ready(function() {
|
||||
$( ".pms-settings" ).change(function() {
|
||||
serverChanged = true;
|
||||
$("#pms_identifier").val("");
|
||||
$("#pms-verify-status").html("");
|
||||
$("#server_changed").prop('checked', true);
|
||||
verifyServer();
|
||||
});
|
||||
@@ -2287,6 +2350,7 @@ $(document).ready(function() {
|
||||
}
|
||||
|
||||
$('#verify_server_button').on('click', function(){
|
||||
$("#pms_identifier").val("");
|
||||
verifyServer();
|
||||
});
|
||||
|
||||
@@ -2314,6 +2378,7 @@ $(document).ready(function() {
|
||||
} else {
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Invalid username or password.');
|
||||
}
|
||||
loadUpdateDistros();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
@@ -2372,33 +2437,26 @@ $(document).ready(function() {
|
||||
pms_logs = false;
|
||||
|
||||
// Checks to see if PMS server version is >= 0.9.14 with automatically logged IP addresses
|
||||
$.ajax({
|
||||
url: 'get_server_identity',
|
||||
async: true,
|
||||
success: function(data) {
|
||||
if (data.version){ $("#pms_version").text(data.version); }
|
||||
var version = (data.version ? data.version.split('.') : null);
|
||||
if (version && parseInt(version[0]) >= 0 && parseInt(version[1]) >= 9 && parseInt(version[2]) >= 14) {
|
||||
$("#debugLogCheck").html("IP address is automatically logged for PMS version 0.9.14 and above.");
|
||||
$("#ip_logging_enable").attr("disabled", true);
|
||||
$("#ip_logging_enable").attr("checked", true);
|
||||
pms_version = true;
|
||||
var version = "${config['pms_version']}".split('.');
|
||||
if (version && parseInt(version[0]) >= 0 && parseInt(version[1]) >= 9 && parseInt(version[2]) >= 14) {
|
||||
$("#debugLogCheck").html("IP address is automatically logged for PMS version 0.9.14 and above.");
|
||||
$("#ip_logging_enable").attr("disabled", true);
|
||||
$("#ip_logging_enable").attr("checked", true);
|
||||
pms_version = true;
|
||||
checkLogsPath();
|
||||
} else {
|
||||
// Check to see if debug logs are enabled on the PMS.
|
||||
$.ajax({
|
||||
url: 'get_server_pref',
|
||||
data: { pref: 'logDebug' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
pms_logs_debug = (data == 'true' ? true : false);
|
||||
// Check to see if our logs folder is set before allowing IP logging to be enabled.
|
||||
checkLogsPath();
|
||||
} else {
|
||||
// Check to see if debug logs are enabled on the PMS.
|
||||
$.ajax({
|
||||
url: 'get_server_pref',
|
||||
data: { pref: 'logDebug' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
pms_logs_debug = (data == 'true' ? true : false);
|
||||
// Check to see if our logs folder is set before allowing IP logging to be enabled.
|
||||
checkLogsPath();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
$("#pms_logs_folder").change(function() {
|
||||
checkLogsPath();
|
||||
@@ -2552,18 +2610,12 @@ $(document).ready(function() {
|
||||
$('#notify_recently_added_grandparent_note').css('color', c);
|
||||
});
|
||||
|
||||
$('.guidelines-modal-link').on('click', function (e) {
|
||||
e.preventDefault();
|
||||
$('#guidelines-link').attr('href', $('#source-link').attr('href'));
|
||||
$('#guidelines-type').text($(this).data('id'))
|
||||
$('#guidelines-modal').modal();
|
||||
$('#guidelines-continue').attr('href', $(this).attr('href')).on('click', function () {
|
||||
$('#guidelines-modal').modal('hide');
|
||||
});
|
||||
});
|
||||
|
||||
function allowGuestAccessCheck () {
|
||||
if ($('#http_username').val() == '' || $('#http_password').val() == '') {
|
||||
if ($("#http_basic_auth").is(":checked")) {
|
||||
$("#allow_guest_access").attr("disabled", true);
|
||||
$("#allow_guest_access").attr("checked", false);
|
||||
$("#allowGuestCheck").html("Guest access cannot be enabled with basic authentication.");
|
||||
} else if ($('#http_username').val() == '' || $('#http_password').val() == '') {
|
||||
$("#allow_guest_access").attr("disabled", true);
|
||||
$("#allow_guest_access").attr("checked", false);
|
||||
$("#allowGuestCheck").html("You must set an admin password above to allow guest access.");
|
||||
@@ -2574,24 +2626,77 @@ $(document).ready(function() {
|
||||
}
|
||||
allowGuestAccessCheck();
|
||||
|
||||
$('#http_username, #http_password').change(function () {
|
||||
$('#http_username, #http_password, #http_basic_auth').change(function () {
|
||||
allowGuestAccessCheck();
|
||||
});
|
||||
|
||||
|
||||
$("#http_hash_password").click(function(){
|
||||
function hashPasswordCheck () {
|
||||
if ($("#http_basic_auth").is(":checked")) {
|
||||
$("#http_hash_password").attr("checked", false);
|
||||
$("#http_hash_password").attr("disabled", true);
|
||||
$("#hashPasswordCheck").html("Password cannot be hashed with basic authentication.");
|
||||
} else {
|
||||
$("#http_hash_password").attr("disabled", false);
|
||||
$("#hashPasswordCheck").html("");
|
||||
}
|
||||
if (!($("#http_hash_password").is(":checked")) && $("#http_hashed_password").val() == "1" && $("#http_password").val() == " ") {
|
||||
$("#http_hashed_password").val(-1);
|
||||
} else if ($("#http_hash_password").is(":checked") && $("#http_hashed_password").val() == "-1" && $("#http_password").val() == " ") {
|
||||
$("#http_hashed_password").val(1);
|
||||
$("#http_hash_password_error").html("");
|
||||
}
|
||||
}
|
||||
hashPasswordCheck();
|
||||
|
||||
$('#http_password, #http_hash_password, #http_basic_auth').change(function () {
|
||||
hashPasswordCheck();
|
||||
});
|
||||
|
||||
$('#http_password').change(function () {
|
||||
$("#http_hashed_password").val($("#http_hash_password").is(":checked") ? 1 : 0);
|
||||
$("#http_hash_password_error").html("");
|
||||
});
|
||||
|
||||
// Load PMS downloads
|
||||
function loadUpdateDistros(distro_build) {
|
||||
var update_params_ajax = $.getJSON('get_server_update_params', function (data) { return data; });
|
||||
|
||||
$.when(update_params_ajax).done(function() {
|
||||
var update_params = update_params_ajax.responseJSON;
|
||||
|
||||
var plexpass = update_params.plexpass;
|
||||
var platform = update_params.pms_platform;
|
||||
var update_channel = update_params.pms_update_channel;
|
||||
var update_distro_build = update_params.pms_update_distro_build;
|
||||
|
||||
$("#pms_update_channel option[value='plexpass']").remove();
|
||||
if (plexpass) {
|
||||
var selected = (update_channel == 'plexpass') ? true : false;
|
||||
$('#pms_update_channel')
|
||||
.append($('<option></option>')
|
||||
.text('Plex Pass')
|
||||
.val('plexpass')
|
||||
.prop('selected', selected));
|
||||
}
|
||||
|
||||
$.getJSON('https://plex.tv/api/downloads/1.json?channel=' + update_channel, function (downloads) {
|
||||
platform_downloads = downloads.computer[platform] || downloads.nas[platform];
|
||||
if (platform_downloads) {
|
||||
$("#pms_update_distro_build option").remove();
|
||||
$.each(platform_downloads.releases, function (index, item) {
|
||||
var label = (platform_downloads.releases.length == 1) ? platform_downloads.name : platform_downloads.name + ' - ' + item.label;
|
||||
var selected = (item.build == update_distro_build) ? true : false;
|
||||
$('#pms_update_distro_build')
|
||||
.append($('<option></option>')
|
||||
.text(label)
|
||||
.val(item.build)
|
||||
.prop('selected', selected));
|
||||
})
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
loadUpdateDistros();
|
||||
});
|
||||
</script>
|
||||
</%def>
|
||||
</%def>
|
||||
|
@@ -53,13 +53,15 @@ DOCUMENTATION :: END
|
||||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
<li>Container: <strong>${data['transcode_container'] if data['transcode_container'] else data['container']}</strong></li>
|
||||
% if data['media_type'] != 'track':
|
||||
<li>Resolution: <strong>${data['video_resolution'] + 'p' if data['video_resolution'] != 'sd' else data['video_resolution']}</strong></li>
|
||||
<li>Resolution: <strong>${data['transcode_height'] if data['transcode_height'] else data['height']}p</strong></li>
|
||||
% endif
|
||||
</ul>
|
||||
</div>
|
||||
@@ -95,13 +97,15 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
<li>Container: <strong>${data['container']}</strong></li>
|
||||
% if data['media_type'] != 'track':
|
||||
<li>Resolution: <strong>${data['height']}p</strong></li>
|
||||
<li>Resolution: <strong>${data['video_resolution'] + 'p' if data['video_resolution'] != 'sd' else data['video_resolution']}</strong></li>
|
||||
% endif
|
||||
<li>Bitrate: <strong>${data['bitrate']} kbps</strong></li>
|
||||
</ul>
|
||||
|
@@ -383,7 +383,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate watch time stats
|
||||
$.ajax({
|
||||
url: 'get_user_watch_time_stats',
|
||||
url: 'user_watch_time_stats',
|
||||
async: true,
|
||||
data: { user_id: user_id, user: username },
|
||||
complete: function(xhr, status) {
|
||||
@@ -393,7 +393,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate platform stats
|
||||
$.ajax({
|
||||
url: 'get_user_player_stats',
|
||||
url: 'user_player_stats',
|
||||
async: true,
|
||||
data: { user_id: user_id, user: username },
|
||||
complete: function(xhr, status) {
|
||||
|
@@ -49,6 +49,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@@ -14,7 +14,7 @@
|
||||
# default. Do not set it as empty or it will run
|
||||
# as root.
|
||||
# plexpy_dir: Directory where PlexPy lives.
|
||||
# Default: /usr/local/plexpy
|
||||
# Default: /usr/local/share/plexpy
|
||||
# plexpy_chdir: Change to this directory before running PlexPy.
|
||||
# Default is same as plexpy_dir.
|
||||
# plexpy_pid: The name of the pidfile to create.
|
||||
@@ -30,7 +30,7 @@ load_rc_config ${name}
|
||||
|
||||
: ${plexpy_enable:="NO"}
|
||||
: ${plexpy_user:="_sabnzbd"}
|
||||
: ${plexpy_dir:="/usr/local/plexpy"}
|
||||
: ${plexpy_dir:="/usr/local/share/plexpy"}
|
||||
: ${plexpy_chdir:="${plexpy_dir}"}
|
||||
: ${plexpy_pid:="${plexpy_dir}/plexpy.pid"}
|
||||
: ${plexpy_flags:=""}
|
||||
|
@@ -14,7 +14,7 @@
|
||||
# default. Do not set it as empty or it will run
|
||||
# as root.
|
||||
# plexpy_dir: Directory where PlexPy lives.
|
||||
# Default: /usr/local/plexpy
|
||||
# Default: /usr/local/share/plexpy
|
||||
# plexpy_chdir: Change to this directory before running PlexPy.
|
||||
# Default is same as plexpy_dir.
|
||||
# plexpy_pid: The name of the pidfile to create.
|
||||
|
@@ -195,7 +195,7 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
|
||||
if not base:
|
||||
base = request.headers.get('Host', '127.0.0.1')
|
||||
port = request.local.port
|
||||
if port != 80:
|
||||
if port != 80 and not base.endswith(':%s' % port):
|
||||
base += ':%s' % port
|
||||
|
||||
if base.find("://") == -1:
|
||||
|
7
lib/geoip2/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
# pylint:disable=C0111
|
||||
|
||||
__title__ = 'geoip2'
|
||||
__version__ = '2.4.0'
|
||||
__author__ = 'Gregory Oschwald'
|
||||
__license__ = 'Apache License, Version 2.0'
|
||||
__copyright__ = 'Copyright (c) 2013-2016 Maxmind, Inc.'
|
17
lib/geoip2/compat.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Intended for internal use only."""
|
||||
import sys
|
||||
|
||||
import ipaddress
|
||||
|
||||
# pylint: skip-file
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
def compat_ip_address(address):
|
||||
"""Intended for internal use only."""
|
||||
if isinstance(address, bytes):
|
||||
address = address.decode()
|
||||
return ipaddress.ip_address(address)
|
||||
else:
|
||||
def compat_ip_address(address):
|
||||
"""Intended for internal use only."""
|
||||
return ipaddress.ip_address(address)
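
A quick sketch of how the shim above behaves (the addresses are arbitrary examples): on Python 2 it decodes bytes before handing the value to the `ipaddress` module, on Python 3 it passes the value straight through.

```python
# Minimal sketch of the compat shim above; addresses are arbitrary examples.
from geoip2.compat import compat_ip_address

print(compat_ip_address('81.2.69.160').version)     # 4
print(compat_ip_address(u'2001:db8::1').version)    # 6
```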
|
199
lib/geoip2/database.py
Normal file
@@ -0,0 +1,199 @@
|
||||
"""
|
||||
======================
|
||||
GeoIP2 Database Reader
|
||||
======================
|
||||
|
||||
"""
|
||||
import inspect
|
||||
|
||||
import maxminddb
|
||||
# pylint: disable=unused-import
|
||||
from maxminddb import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
|
||||
MODE_MEMORY)
|
||||
|
||||
import geoip2
|
||||
import geoip2.models
|
||||
import geoip2.errors
|
||||
|
||||
|
||||
class Reader(object):
|
||||
"""GeoIP2 database Reader object.
|
||||
|
||||
Instances of this class provide a reader for the GeoIP2 database format.
|
||||
IP addresses can be looked up using the ``country`` and ``city`` methods.
|
||||
|
||||
The basic API for this class is the same for every database. First, you
|
||||
create a reader object, specifying a file name. You then call the method
|
||||
corresponding to the specific database, passing it the IP address you want
|
||||
to look up.
|
||||
|
||||
If the request succeeds, the method call will return a model class for the
|
||||
method you called. This model in turn contains multiple record classes,
|
||||
each of which represents part of the data returned by the database. If the
|
||||
database does not contain the requested information, the attributes on the
|
||||
record class will have a ``None`` value.
|
||||
|
||||
If the address is not in the database, a
|
||||
``geoip2.errors.AddressNotFoundError`` exception will be thrown. If the
|
||||
database is corrupt or invalid, a ``maxminddb.InvalidDatabaseError`` will
|
||||
be thrown.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, filename, locales=None, mode=MODE_AUTO):
|
||||
"""Create GeoIP2 Reader.
|
||||
|
||||
:param filename: The path to the GeoIP2 database.
|
||||
:param locales: This is list of locale codes. This argument will be
|
||||
passed on to record classes to use when their name properties are
|
||||
called. The default value is ['en'].
|
||||
|
||||
The order of the locales is significant. When a record class has
|
||||
multiple names (country, city, etc.), its name property will return
|
||||
the name in the first locale that has one.
|
||||
|
||||
Note that the only locale which is always present in the GeoIP2
|
||||
data is "en". If you do not include this locale, the name property
|
||||
may end up returning None even when the record has an English name.
|
||||
|
||||
Currently, the valid locale codes are:
|
||||
|
||||
* de -- German
|
||||
* en -- English names may still include accented characters if that
|
||||
is the accepted spelling in English. In other words, English does
|
||||
not mean ASCII.
|
||||
* es -- Spanish
|
||||
* fr -- French
|
||||
* ja -- Japanese
|
||||
* pt-BR -- Brazilian Portuguese
|
||||
* ru -- Russian
|
||||
* zh-CN -- Simplified Chinese.
|
||||
:param mode: The mode to open the database with. Valid modes are:
|
||||
* MODE_MMAP_EXT - use the C extension with memory map.
|
||||
* MODE_MMAP - read from memory map. Pure Python.
|
||||
* MODE_FILE - read database as standard file. Pure Python.
|
||||
* MODE_MEMORY - load database into memory. Pure Python.
|
||||
* MODE_AUTO - try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
|
||||
Default.
|
||||
|
||||
"""
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
self._db_reader = maxminddb.open_database(filename, mode)
|
||||
self._locales = locales
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
def country(self, ip_address):
|
||||
"""Get the Country object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Country` object
|
||||
|
||||
"""
|
||||
|
||||
return self._model_for(geoip2.models.Country, 'Country', ip_address)
|
||||
|
||||
def city(self, ip_address):
|
||||
"""Get the City object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.City` object
|
||||
|
||||
"""
|
||||
return self._model_for(geoip2.models.City, 'City', ip_address)
|
||||
|
||||
def anonymous_ip(self, ip_address):
|
||||
"""Get the AnonymousIP object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.AnonymousIP` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.AnonymousIP,
|
||||
'GeoIP2-Anonymous-IP', ip_address)
|
||||
|
||||
def connection_type(self, ip_address):
|
||||
"""Get the ConnectionType object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.ConnectionType` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.ConnectionType,
|
||||
'GeoIP2-Connection-Type', ip_address)
|
||||
|
||||
def domain(self, ip_address):
|
||||
"""Get the Domain object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Domain` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.Domain, 'GeoIP2-Domain',
|
||||
ip_address)
|
||||
|
||||
def enterprise(self, ip_address):
|
||||
"""Get the Enterprise object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Enterprise` object
|
||||
|
||||
"""
|
||||
return self._model_for(geoip2.models.Enterprise, 'Enterprise',
|
||||
ip_address)
|
||||
|
||||
def isp(self, ip_address):
|
||||
"""Get the ISP object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.ISP` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.ISP, 'GeoIP2-ISP',
|
||||
ip_address)
|
||||
|
||||
def _get(self, database_type, ip_address):
|
||||
if database_type not in self.metadata().database_type:
|
||||
caller = inspect.stack()[2][3]
|
||||
raise TypeError("The %s method cannot be used with the "
|
||||
"%s database" %
|
||||
(caller, self.metadata().database_type))
|
||||
record = self._db_reader.get(ip_address)
|
||||
if record is None:
|
||||
raise geoip2.errors.AddressNotFoundError(
|
||||
"The address %s is not in the database." % ip_address)
|
||||
return record
|
||||
|
||||
def _model_for(self, model_class, types, ip_address):
|
||||
record = self._get(types, ip_address)
|
||||
record.setdefault('traits', {})['ip_address'] = ip_address
|
||||
return model_class(record, locales=self._locales)
|
||||
|
||||
def _flat_model_for(self, model_class, types, ip_address):
|
||||
record = self._get(types, ip_address)
|
||||
record['ip_address'] = ip_address
|
||||
return model_class(record)
|
||||
|
||||
def metadata(self):
|
||||
"""The metadata for the open database.
|
||||
|
||||
:returns: :py:class:`maxminddb.reader.Metadata` object
|
||||
"""
|
||||
return self._db_reader.metadata()
|
||||
|
||||
def close(self):
|
||||
"""Closes the GeoIP2 database."""
|
||||
|
||||
self._db_reader.close()
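
To make the Reader docstring above concrete, here is a minimal usage sketch. The database path and the looked-up IP address are placeholders for illustration; any GeoIP2/GeoLite2 City `.mmdb` file works the same way.

```python
# Minimal sketch of the Reader API described above. The .mmdb path and the
# looked-up IP are placeholders, not values shipped with this change.
import geoip2.database
import geoip2.errors

with geoip2.database.Reader('GeoLite2-City.mmdb') as reader:
    try:
        response = reader.city('8.8.8.8')
        print(response.country.iso_code)                 # e.g. 'US'
        print(response.city.name)                        # locale-aware name or None
        print(response.location.latitude, response.location.longitude)
        print(response.subdivisions.most_specific.name)  # smallest known region
    except geoip2.errors.AddressNotFoundError:
        print('Address not found in the database.')
```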
|
51
lib/geoip2/errors.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""
|
||||
Errors
|
||||
======
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class GeoIP2Error(RuntimeError):
|
||||
"""There was a generic error in GeoIP2.
|
||||
|
||||
This class represents a generic error. It extends :py:exc:`RuntimeError`
|
||||
and does not add any additional attributes.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class AddressNotFoundError(GeoIP2Error):
|
||||
"""The address you were looking up was not found."""
|
||||
|
||||
|
||||
class AuthenticationError(GeoIP2Error):
|
||||
"""There was a problem authenticating the request."""
|
||||
|
||||
|
||||
class HTTPError(GeoIP2Error):
|
||||
"""There was an error when making your HTTP request.
|
||||
|
||||
This class represents an HTTP transport error. It extends
|
||||
:py:exc:`GeoIP2Error` and adds attributes of its own.
|
||||
|
||||
:ivar http_status: The HTTP status code returned
|
||||
:ivar uri: The URI queried
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, message, http_status=None, uri=None):
|
||||
super(HTTPError, self).__init__(message)
|
||||
self.http_status = http_status
|
||||
self.uri = uri
|
||||
|
||||
|
||||
class InvalidRequestError(GeoIP2Error):
|
||||
"""The request was invalid."""
|
||||
|
||||
|
||||
class OutOfQueriesError(GeoIP2Error):
|
||||
"""Your account is out of funds for the service queried."""
|
||||
|
||||
|
||||
class PermissionRequiredError(GeoIP2Error):
|
||||
"""Your account does not have permission to access this service."""
|
16
lib/geoip2/mixins.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""This package contains utility mixins"""
|
||||
# pylint: disable=too-few-public-methods
|
||||
from abc import ABCMeta
|
||||
|
||||
|
||||
class SimpleEquality(object):
|
||||
"""Naive __dict__ equality mixin"""
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, self.__class__) and
|
||||
self.__dict__ == other.__dict__)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
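
The mixin gives every record and model value-style equality based on ``__dict__``, so two objects built from identical raw data compare equal. A small sketch with illustrative values:

```python
# Sketch: SimpleEquality makes records compare by value, not identity.
from geoip2.records import Country

a = Country(['en'], iso_code='GB', names={'en': 'United Kingdom'})
b = Country(['en'], iso_code='GB', names={'en': 'United Kingdom'})
assert a == b and not (a != b)   # equal __dict__ contents, different objects
```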
|
472
lib/geoip2/models.py
Normal file
@@ -0,0 +1,472 @@
|
||||
"""
|
||||
Models
|
||||
======
|
||||
|
||||
These classes provide models for the data returned by the GeoIP2
|
||||
web service and databases.
|
||||
|
||||
The only difference between the City and Insights model classes is which
|
||||
fields in each record may be populated. See
|
||||
http://dev.maxmind.com/geoip/geoip2/web-services for more details.
|
||||
|
||||
"""
|
||||
# pylint: disable=too-many-instance-attributes,too-few-public-methods
|
||||
from abc import ABCMeta
|
||||
|
||||
import geoip2.records
|
||||
from geoip2.mixins import SimpleEquality
|
||||
|
||||
|
||||
class Country(SimpleEquality):
|
||||
"""Model for the GeoIP2 Precision: Country and the GeoIP2 Country database.
|
||||
|
||||
This class provides the following attributes:
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, raw_response, locales=None):
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
self._locales = locales
|
||||
self.continent = \
|
||||
geoip2.records.Continent(locales,
|
||||
**raw_response.get('continent', {}))
|
||||
self.country = \
|
||||
geoip2.records.Country(locales,
|
||||
**raw_response.get('country', {}))
|
||||
self.registered_country = \
|
||||
geoip2.records.Country(locales,
|
||||
**raw_response.get('registered_country',
|
||||
{}))
|
||||
self.represented_country \
|
||||
= geoip2.records.RepresentedCountry(locales,
|
||||
**raw_response.get(
|
||||
'represented_country', {}))
|
||||
|
||||
self.maxmind = \
|
||||
geoip2.records.MaxMind(**raw_response.get('maxmind', {}))
|
||||
|
||||
self.traits = geoip2.records.Traits(**raw_response.get('traits', {}))
|
||||
self.raw = raw_response
|
||||
|
||||
def __repr__(self):
|
||||
return '{module}.{class_name}({data}, {locales})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=self.raw,
|
||||
locales=self._locales)
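
A short sketch of how the Country model consumes a raw lookup dict; sections missing from the response simply become empty records whose attributes read as ``None`` (the data below is illustrative only):

```python
# Sketch: building a Country model from an illustrative raw response dict.
import geoip2.models

raw = {
    'country':   {'iso_code': 'DE', 'names': {'en': 'Germany', 'de': 'Deutschland'}},
    'continent': {'code': 'EU', 'names': {'en': 'Europe'}},
}
model = geoip2.models.Country(raw, locales=['de', 'en'])
print(model.country.name)                 # 'Deutschland' -- first locale with a name
print(model.country.iso_code)             # 'DE'
print(model.registered_country.iso_code)  # None -- section absent from the response
```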
|
||||
|
||||
|
||||
class City(Country):
|
||||
"""Model for the GeoIP2 Precision: City and the GeoIP2 City database.
|
||||
|
||||
.. attribute:: city
|
||||
|
||||
City object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.City`
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: location
|
||||
|
||||
Location object for the requested IP address.
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: subdivisions
|
||||
|
||||
Object (tuple) representing the subdivisions of the country to which
|
||||
the location of the requested IP address belongs.
|
||||
|
||||
:type: :py:class:`geoip2.records.Subdivisions`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, raw_response, locales=None):
|
||||
super(City, self).__init__(raw_response, locales)
|
||||
self.city = \
|
||||
geoip2.records.City(locales, **raw_response.get('city', {}))
|
||||
self.location = \
|
||||
geoip2.records.Location(**raw_response.get('location', {}))
|
||||
self.postal = \
|
||||
geoip2.records.Postal(**raw_response.get('postal', {}))
|
||||
self.subdivisions = \
|
||||
geoip2.records.Subdivisions(locales,
|
||||
*raw_response.get('subdivisions', []))
|
||||
|
||||
|
||||
class Insights(City):
|
||||
"""Model for the GeoIP2 Precision: Insights web service endpoint.
|
||||
|
||||
.. attribute:: city
|
||||
|
||||
City object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.City`
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: location
|
||||
|
||||
Location object for the requested IP address.
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: subdivisions
|
||||
|
||||
Object (tuple) representing the subdivisions of the country to which
|
||||
the location of the requested IP address belongs.
|
||||
|
||||
:type: :py:class:`geoip2.records.Subdivisions`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class Enterprise(City):
|
||||
"""Model for the GeoIP2 Enterprise database.
|
||||
|
||||
.. attribute:: city
|
||||
|
||||
City object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.City`
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: location
|
||||
|
||||
Location object for the requested IP address.
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: subdivisions
|
||||
|
||||
Object (tuple) representing the subdivisions of the country to which
|
||||
the location of the requested IP address belongs.
|
||||
|
||||
:type: :py:class:`geoip2.records.Subdivisions`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class SimpleModel(SimpleEquality):
|
||||
"""Provides basic methods for non-location models"""
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
def __repr__(self):
|
||||
# pylint: disable=no-member
|
||||
return '{module}.{class_name}({data})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=str(self.raw))
|
||||
|
||||
|
||||
class AnonymousIP(SimpleModel):
|
||||
"""Model class for the GeoIP2 Anonymous IP.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: is_anonymous
|
||||
|
||||
This is true if the IP address belongs to any sort of anonymous network.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_anonymous_vpn
|
||||
|
||||
This is true if the IP address belongs to an anonymous VPN system.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_hosting_provider
|
||||
|
||||
This is true if the IP address belongs to a hosting provider.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_public_proxy
|
||||
|
||||
This is true if the IP address belongs to a public proxy.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_tor_exit_node
|
||||
|
||||
This is true if the IP address is a Tor exit node.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
"""
|
||||
|
||||
def __init__(self, raw):
|
||||
self.is_anonymous = raw.get('is_anonymous', False)
|
||||
self.is_anonymous_vpn = raw.get('is_anonymous_vpn', False)
|
||||
self.is_hosting_provider = raw.get('is_hosting_provider', False)
|
||||
self.is_public_proxy = raw.get('is_public_proxy', False)
|
||||
self.is_tor_exit_node = raw.get('is_tor_exit_node', False)
|
||||
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
|
||||
|
||||
|
||||
class ConnectionType(SimpleModel):
|
||||
"""Model class for the GeoIP2 Connection-Type.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: connection_type
|
||||
|
||||
The connection type may take the following values:
|
||||
|
||||
- Dialup
|
||||
- Cable/DSL
|
||||
- Corporate
|
||||
- Cellular
|
||||
|
||||
Additional values may be added in the future.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
"""
|
||||
|
||||
def __init__(self, raw):
|
||||
self.connection_type = raw.get('connection_type')
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
|
||||
|
||||
|
||||
class Domain(SimpleModel):
|
||||
"""Model class for the GeoIP2 Domain.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: domain
|
||||
|
||||
The domain associated with the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, raw):
|
||||
self.domain = raw.get('domain')
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
|
||||
|
||||
|
||||
class ISP(SimpleModel):
|
||||
"""Model class for the GeoIP2 ISP.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: autonomous_system_number
|
||||
|
||||
The autonomous system number associated with the IP address.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: autonomous_system_organization
|
||||
|
||||
The organization associated with the registered autonomous system number
|
||||
for the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: isp
|
||||
|
||||
The name of the ISP associated with the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: organization
|
||||
|
||||
The name of the organization associated with the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
"""
|
||||
|
||||
# pylint:disable=too-many-arguments
|
||||
def __init__(self, raw):
|
||||
self.autonomous_system_number = raw.get('autonomous_system_number')
|
||||
self.autonomous_system_organization = raw.get(
|
||||
'autonomous_system_organization')
|
||||
self.isp = raw.get('isp')
|
||||
self.organization = raw.get('organization')
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
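
The flat models above (AnonymousIP, ConnectionType, Domain, ISP) simply expose raw dictionary fields as attributes, with missing keys falling back to ``None`` (or ``False`` for the AnonymousIP flags). A sketch with illustrative values:

```python
# Sketch of the flat models; the AS number, ISP name, and IP are illustrative only.
from geoip2.models import ISP, AnonymousIP

isp = ISP({'autonomous_system_number': 64512, 'isp': 'Example Telecom'})
print(isp.autonomous_system_number, isp.isp, isp.organization)  # 64512 Example Telecom None

anon = AnonymousIP({'is_tor_exit_node': True, 'ip_address': '198.51.100.7'})
print(anon.is_tor_exit_node, anon.is_public_proxy)              # True False
```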
|
605
lib/geoip2/records.py
Normal file
@@ -0,0 +1,605 @@
|
||||
"""
|
||||
|
||||
Records
|
||||
=======
|
||||
|
||||
"""
|
||||
|
||||
# pylint:disable=R0903
|
||||
from abc import ABCMeta
|
||||
|
||||
from geoip2.mixins import SimpleEquality
|
||||
|
||||
|
||||
class Record(SimpleEquality):
|
||||
"""All records are subclasses of the abstract class ``Record``."""
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
_valid_attributes = set()
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
valid_args = dict((k, kwargs.get(k)) for k in self._valid_attributes)
|
||||
self.__dict__.update(valid_args)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
raise AttributeError("can't set attribute")
|
||||
|
||||
def __repr__(self):
|
||||
args = ', '.join('%s=%r' % x for x in self.__dict__.items())
|
||||
return '{module}.{class_name}({data})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=args)
|
||||
|
||||
|
||||
class PlaceRecord(Record):
|
||||
"""All records with :py:attr:`names` subclass :py:class:`PlaceRecord`."""
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
def __init__(self, locales=None, **kwargs):
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
if kwargs.get('names') is None:
|
||||
kwargs['names'] = {}
|
||||
object.__setattr__(self, '_locales', locales)
|
||||
super(PlaceRecord, self).__init__(**kwargs)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Dict with locale codes as keys and localized name as value."""
|
||||
# pylint:disable=E1101
|
||||
return next(
|
||||
(self.names.get(x) for x in self._locales
|
||||
if x in self.names), None)
|
||||
|
||||
|
||||
class City(PlaceRecord):
|
||||
"""Contains data for the city record associated with an IP address.
|
||||
|
||||
This class contains the city-level data associated with an IP address.
|
||||
|
||||
This record is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating MaxMind's
|
||||
confidence that the city is correct. This attribute is only available
|
||||
from the Insights end point and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the city.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the city based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes
|
||||
and the values are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'names'])
|
||||
|
||||
|
||||
class Continent(PlaceRecord):
|
||||
"""Contains data for the continent record associated with an IP address.
|
||||
|
||||
This class contains the continent-level data associated with an IP
|
||||
address.
|
||||
|
||||
Attributes:
|
||||
|
||||
|
||||
.. attribute:: code
|
||||
|
||||
A two character continent code like "NA" (North America)
|
||||
or "OC" (Oceania).
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the continent.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
Returns the name of the continent based on the locales list passed to
|
||||
the constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes
|
||||
and the values are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['code', 'geoname_id', 'names'])
|
||||
|
||||
|
||||
class Country(PlaceRecord):
|
||||
"""Contains data for the country record associated with an IP address.
|
||||
|
||||
This class contains the country-level data associated with an IP address.
|
||||
|
||||
Attributes:
|
||||
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating MaxMind's confidence that
|
||||
the country is correct. This attribute is only available from the
|
||||
Insights end point and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the country.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: iso_code
|
||||
|
||||
The two-character `ISO 3166-1
|
||||
<http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the
|
||||
country.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the country based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes and the values
|
||||
are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
|
||||
|
||||
|
||||
class RepresentedCountry(Country):
|
||||
"""Contains data for the represented country associated with an IP address.
|
||||
|
||||
This class contains the country-level data associated with an IP address
|
||||
for the IP's represented country. The represented country is the country
|
||||
represented by something like a military base.
|
||||
|
||||
Attributes:
|
||||
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating MaxMind's confidence that
|
||||
the country is correct. This attribute is only available from the
|
||||
Insights end point and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the country.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: iso_code
|
||||
|
||||
The two-character `ISO 3166-1
|
||||
<http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the country.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the country based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes and the values
|
||||
are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
|
||||
.. attribute:: type
|
||||
|
||||
A string indicating the type of entity that is representing the
|
||||
country. Currently we only return ``military`` but this could expand to
|
||||
include other types in the future.
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names',
|
||||
'type'])
|
||||
|
||||
|
||||
class Location(Record):
|
||||
"""Contains data for the location record associated with an IP address.
|
||||
|
||||
This class contains the location data associated with an IP address.
|
||||
|
||||
This record is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: average_income
|
||||
|
||||
The average income in US dollars associated with the requested IP
|
||||
address. This attribute is only available from the Insights end point.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: accuracy_radius
|
||||
|
||||
The radius in kilometers around the specified location where the IP
|
||||
address is likely to be.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: latitude
|
||||
|
||||
The approximate latitude of the location associated with the IP
|
||||
address. This value is not precise and should not be used to identify a
|
||||
particular address or household.
|
||||
|
||||
:type: float
|
||||
|
||||
.. attribute:: longitude
|
||||
|
||||
The approximate longitude of the location associated with the IP
|
||||
address. This value is not precise and should not be used to identify a
|
||||
particular address or household.
|
||||
|
||||
:type: float
|
||||
|
||||
.. attribute:: metro_code
|
||||
|
||||
The metro code of the location if the
|
||||
location is in the US. MaxMind returns the same metro codes as the
|
||||
`Google AdWords API
|
||||
<https://developers.google.com/adwords/api/docs/appendix/cities-DMAregions>`_.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: population_density
|
||||
|
||||
The estimated population per square kilometer associated with the IP
|
||||
address. This attribute is only available from the Insights end point.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: time_zone
|
||||
|
||||
The time zone associated with the location, as specified by the `IANA Time
|
||||
Zone Database <http://www.iana.org/time-zones>`_, e.g.,
|
||||
"America/New_York".
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['average_income', 'accuracy_radius', 'latitude',
|
||||
'longitude', 'metro_code', 'population_density',
|
||||
'postal_code', 'postal_confidence', 'time_zone'])
|
||||
|
||||
|
||||
class MaxMind(Record):
|
||||
"""Contains data related to your MaxMind account.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: queries_remaining
|
||||
|
||||
The number of remaining queries you have
|
||||
for the end point you are calling.
|
||||
|
||||
:type: int
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['queries_remaining'])
|
||||
|
||||
|
||||
class Postal(Record):
|
||||
"""Contains data for the postal record associated with an IP address.
|
||||
|
||||
This class contains the postal data associated with an IP address.
|
||||
|
||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: code
|
||||
|
||||
The postal code of the location. Postal
|
||||
codes are not available for all countries. In some countries, this will
|
||||
only contain part of the postal code.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating
|
||||
MaxMind's confidence that the postal code is correct. This attribute is
|
||||
only available from the Insights end point and the GeoIP2 Enterprise
|
||||
database.
|
||||
|
||||
:type: int
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['code', 'confidence'])
|
||||
|
||||
|
||||
class Subdivision(PlaceRecord):
|
||||
"""Contains data for the subdivisions associated with an IP address.
|
||||
|
||||
This class contains the subdivision data associated with an IP address.
|
||||
|
||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
This is a value from 0-100 indicating MaxMind's
|
||||
confidence that the subdivision is correct. This attribute is only
|
||||
available from the Insights end point and the GeoIP2 Enterprise
|
||||
database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
This is a GeoName ID for the subdivision.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: iso_code
|
||||
|
||||
This is a string up to three characters long
|
||||
containing the subdivision portion of the `ISO 3166-2 code
|
||||
<http://en.wikipedia.org/wiki/ISO_3166-2>`_.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the subdivision based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes and the
|
||||
values are names
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
|
||||
|
||||
|
||||
class Subdivisions(tuple):
|
||||
"""A tuple-like collection of subdivisions associated with an IP address.
|
||||
|
||||
This class contains the subdivisions of the country associated with the
|
||||
IP address from largest to smallest.
|
||||
|
||||
For instance, the response for Oxford in the United Kingdom would have
|
||||
England as the first element and Oxfordshire as the second element.
|
||||
|
||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
"""
|
||||
|
||||
def __new__(cls, locales, *subdivisions):
|
||||
subdivisions = [Subdivision(locales, **x) for x in subdivisions]
|
||||
obj = super(cls, Subdivisions).__new__(cls, subdivisions)
|
||||
return obj
|
||||
|
||||
def __init__(self, locales, *subdivisions): # pylint:disable=W0613
|
||||
self._locales = locales
|
||||
super(Subdivisions, self).__init__()
|
||||
|
||||
@property
|
||||
def most_specific(self):
|
||||
"""The most specific (smallest) subdivision available.
|
||||
|
||||
If there are no :py:class:`Subdivision` objects for the response,
|
||||
this returns an empty :py:class:`Subdivision`.
|
||||
|
||||
:type: :py:class:`Subdivision`
|
||||
"""
|
||||
try:
|
||||
return self[-1]
|
||||
except IndexError:
|
||||
return Subdivision(self._locales)
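A quick sketch of the two classes just defined. The data below is hypothetical, and the ``name`` lookups assume the ``PlaceRecord`` locale behaviour defined earlier in this module; real ``Subdivision`` data normally arrives inside a web service response.

```python
# Hypothetical data: two subdivision dicts ordered from largest to smallest.
subs = Subdivisions(['en'],
                    {'iso_code': 'ENG', 'names': {'en': 'England'}},
                    {'iso_code': 'OXF', 'names': {'en': 'Oxfordshire'}})

subs.most_specific.iso_code                # 'OXF' -- the smallest (last) subdivision
subs.most_specific.name                    # 'Oxfordshire', resolved via the 'en' locale
Subdivisions(['en']).most_specific.name    # None -- empty placeholder Subdivision
```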
|
||||
|
||||
|
||||
class Traits(Record):
|
||||
"""Contains data for the traits record associated with an IP address.
|
||||
|
||||
This class contains the traits data associated with an IP address.
|
||||
|
||||
This class has the following attributes:
|
||||
|
||||
|
||||
.. attribute:: autonomous_system_number
|
||||
|
||||
The `autonomous system
|
||||
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_
|
||||
associated with the IP address. This attribute is only available from
|
||||
the City and Insights web service end points and the GeoIP2 Enterprise
|
||||
database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: autonomous_system_organization
|
||||
|
||||
The organization associated with the registered `autonomous system
|
||||
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_ for
|
||||
the IP address. This attribute is only available from the City and
|
||||
Insights web service end points and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: connection_type
|
||||
|
||||
The connection type may take the following values:
|
||||
|
||||
- Dialup
|
||||
- Cable/DSL
|
||||
- Corporate
|
||||
- Cellular
|
||||
|
||||
Additional values may be added in the future.
|
||||
|
||||
This attribute is only available in the GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: domain
|
||||
|
||||
The second level domain associated with the
|
||||
IP address. This will be something like "example.com" or
|
||||
"example.co.uk", not "foo.example.com". This attribute is only available
|
||||
from the City and Insights web service end points and the GeoIP2
|
||||
Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address that the data in the model
|
||||
is for. If you performed a "me" lookup against the web service, this
|
||||
will be the externally routable IP address for the system the code is
|
||||
running on. If the system is behind a NAT, this may differ from the IP
|
||||
address locally assigned to it.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: is_anonymous_proxy
|
||||
|
||||
This is true if the IP is an anonymous
|
||||
proxy. See http://dev.maxmind.com/faq/geoip#anonproxy for further
|
||||
details.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. deprecated:: 2.2.0
|
||||
Use our `GeoIP2 Anonymous IP database
|
||||
<https://www.maxmind.com/en/geoip2-anonymous-ip-database>`_
|
||||
instead.
|
||||
|
||||
.. attribute:: is_legitimate_proxy
|
||||
|
||||
This attribute is true if MaxMind believes this IP address to be a
|
||||
legitimate proxy, such as an internal VPN used by a corporation. This
|
||||
attribute is only available in the GeoIP2 Enterprise database.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_satellite_provider
|
||||
|
||||
This is true if the IP address is from a satellite provider that
|
||||
provides service to multiple countries.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. deprecated:: 2.2.0
|
||||
Due to the increased coverage by mobile carriers, very few
|
||||
satellite providers now serve multiple countries. As a result, the
|
||||
output does not provide sufficiently relevant data for us to maintain
|
||||
it.
|
||||
|
||||
.. attribute:: isp
|
||||
|
||||
The name of the ISP associated with the IP address. This attribute is
|
||||
only available from the City and Insights web service end points and the
|
||||
GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: organization
|
||||
|
||||
The name of the organization associated with the IP address. This
|
||||
attribute is only available from the City and Insights web service end
|
||||
points and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: user_type
|
||||
|
||||
The user type associated with the IP
|
||||
address. This can be one of the following values:
|
||||
|
||||
* business
|
||||
* cafe
|
||||
* cellular
|
||||
* college
|
||||
* content_delivery_network
|
||||
* dialup
|
||||
* government
|
||||
* hosting
|
||||
* library
|
||||
* military
|
||||
* residential
|
||||
* router
|
||||
* school
|
||||
* search_engine_spider
|
||||
* traveler
|
||||
|
||||
This attribute is only available from the Insights end point and the
|
||||
GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(
|
||||
['autonomous_system_number', 'autonomous_system_organization',
|
||||
'connection_type', 'domain', 'is_anonymous_proxy',
|
||||
'is_legitimate_proxy', 'is_satellite_provider', 'isp', 'ip_address',
|
||||
'organization', 'user_type'])
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
for k in ['is_anonymous_proxy', 'is_legitimate_proxy',
|
||||
'is_satellite_provider']:
|
||||
kwargs[k] = bool(kwargs.get(k, False))
|
||||
super(Traits, self).__init__(**kwargs)
|
lib/geoip2/webservice.py (new file, 219 lines)
@@ -0,0 +1,219 @@
|
||||
"""
|
||||
============================
|
||||
WebServices Client API
|
||||
============================
|
||||
|
||||
This class provides a client API for all the GeoIP2 Precision web service end
|
||||
points. The end points are Country, City, and Insights. Each end point returns
|
||||
a different set of data about an IP address, with Country returning the least
|
||||
data and Insights the most.
|
||||
|
||||
Each web service end point is represented by a different model class, and
|
||||
these model classes in turn contain multiple record classes. The record
|
||||
classes have attributes which contain data about the IP address.
|
||||
|
||||
If the web service does not return a particular piece of data for an IP
|
||||
address, the associated attribute is not populated.
|
||||
|
||||
The web service may not return any information for an entire record, in which
|
||||
case all of the attributes for that record class will be empty.
|
||||
|
||||
SSL
|
||||
---
|
||||
|
||||
Requests to the GeoIP2 Precision web service are always made with SSL.
|
||||
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
from requests.utils import default_user_agent
|
||||
|
||||
import geoip2
|
||||
import geoip2.models
|
||||
|
||||
from .compat import compat_ip_address
|
||||
|
||||
from .errors import (AddressNotFoundError, AuthenticationError, GeoIP2Error,
|
||||
HTTPError, InvalidRequestError, OutOfQueriesError,
|
||||
PermissionRequiredError)
|
||||
|
||||
|
||||
class Client(object):
|
||||
"""Creates a new client object.
|
||||
|
||||
It accepts the following required arguments:
|
||||
|
||||
:param user_id: Your MaxMind User ID.
|
||||
:param license_key: Your MaxMind license key.
|
||||
|
||||
Go to https://www.maxmind.com/en/my_license_key to see your MaxMind
|
||||
User ID and license key.
|
||||
|
||||
The following keyword arguments are also accepted:
|
||||
|
||||
:param host: The hostname to make a request against. This defaults to
|
||||
"geoip.maxmind.com". In most cases, you should not need to set this
|
||||
explicitly.
|
||||
:param locales: This is list of locale codes. This argument will be
|
||||
passed on to record classes to use when their name properties are
|
||||
called. The default value is ['en'].
|
||||
|
||||
The order of the locales is significant. When a record class has
|
||||
multiple names (country, city, etc.), its name property will return
|
||||
the name in the first locale that has one.
|
||||
|
||||
Note that the only locale which is always present in the GeoIP2
|
||||
data is "en". If you do not include this locale, the name property
|
||||
may end up returning None even when the record has an English name.
|
||||
|
||||
Currently, the valid locale codes are:
|
||||
|
||||
* de -- German
|
||||
* en -- English names may still include accented characters if that is
|
||||
the accepted spelling in English. In other words, English does not
|
||||
mean ASCII.
|
||||
* es -- Spanish
|
||||
* fr -- French
|
||||
* ja -- Japanese
|
||||
* pt-BR -- Brazilian Portuguese
|
||||
* ru -- Russian
|
||||
* zh-CN -- Simplified Chinese.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
user_id,
|
||||
license_key,
|
||||
host='geoip.maxmind.com',
|
||||
locales=None,
|
||||
timeout=None):
|
||||
"""Construct a Client."""
|
||||
# pylint: disable=too-many-arguments
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
self._locales = locales
|
||||
self._user_id = user_id
|
||||
self._license_key = license_key
|
||||
self._base_uri = 'https://%s/geoip/v2.1' % host
|
||||
self._timeout = timeout
|
||||
|
||||
def city(self, ip_address='me'):
|
||||
"""Call GeoIP2 Precision City endpoint with the specified IP.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string. If no
|
||||
address is provided, the address that the web service is
|
||||
called from will be used.
|
||||
|
||||
:returns: :py:class:`geoip2.models.City` object
|
||||
|
||||
"""
|
||||
return self._response_for('city', geoip2.models.City, ip_address)
|
||||
|
||||
def country(self, ip_address='me'):
|
||||
"""Call the GeoIP2 Country endpoint with the specified IP.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string. If no address
|
||||
is provided, the address that the web service is called from will
|
||||
be used.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Country` object
|
||||
|
||||
"""
|
||||
return self._response_for('country', geoip2.models.Country, ip_address)
|
||||
|
||||
def insights(self, ip_address='me'):
|
||||
"""Call the GeoIP2 Precision: Insights endpoint with the specified IP.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string. If no address
|
||||
is provided, the address that the web service is called from will
|
||||
be used.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Insights` object
|
||||
|
||||
"""
|
||||
return self._response_for('insights', geoip2.models.Insights,
|
||||
ip_address)
|
||||
|
||||
def _response_for(self, path, model_class, ip_address):
|
||||
if ip_address != 'me':
|
||||
ip_address = str(compat_ip_address(ip_address))
|
||||
uri = '/'.join([self._base_uri, path, ip_address])
|
||||
response = requests.get(uri,
|
||||
auth=(self._user_id, self._license_key),
|
||||
headers={'Accept': 'application/json',
|
||||
'User-Agent': self._user_agent()},
|
||||
timeout=self._timeout)
|
||||
if response.status_code == 200:
|
||||
body = self._handle_success(response, uri)
|
||||
return model_class(body, locales=self._locales)
|
||||
else:
|
||||
self._handle_error(response, uri)
|
||||
|
||||
def _user_agent(self):
|
||||
return 'GeoIP2 Python Client v%s (%s)' % (geoip2.__version__,
|
||||
default_user_agent())
|
||||
|
||||
def _handle_success(self, response, uri):
|
||||
try:
|
||||
return response.json()
|
||||
except ValueError as ex:
|
||||
raise GeoIP2Error('Received a 200 response for %(uri)s'
|
||||
' but could not decode the response as '
|
||||
'JSON: ' % locals() + ', '.join(ex.args), 200,
|
||||
uri)
|
||||
|
||||
def _handle_error(self, response, uri):
|
||||
status = response.status_code
|
||||
|
||||
if 400 <= status < 500:
|
||||
self._handle_4xx_status(response, status, uri)
|
||||
elif 500 <= status < 600:
|
||||
self._handle_5xx_status(status, uri)
|
||||
else:
|
||||
self._handle_non_200_status(status, uri)
|
||||
|
||||
def _handle_4xx_status(self, response, status, uri):
|
||||
if not response.content:
|
||||
raise HTTPError('Received a %(status)i error for %(uri)s '
|
||||
'with no body.' % locals(), status, uri)
|
||||
elif response.headers['Content-Type'].find('json') == -1:
|
||||
raise HTTPError('Received a %i for %s with the following '
|
||||
'body: %s' % (status, uri, response.content),
|
||||
status, uri)
|
||||
try:
|
||||
body = response.json()
|
||||
except ValueError as ex:
|
||||
raise HTTPError(
|
||||
'Received a %(status)i error for %(uri)s but it did'
|
||||
' not include the expected JSON body: ' % locals() +
|
||||
', '.join(ex.args), status, uri)
|
||||
else:
|
||||
if 'code' in body and 'error' in body:
|
||||
self._handle_web_service_error(
|
||||
body.get('error'), body.get('code'), status, uri)
|
||||
else:
|
||||
raise HTTPError(
|
||||
'Response contains JSON but it does not specify '
|
||||
'code or error keys', status, uri)
|
||||
|
||||
def _handle_web_service_error(self, message, code, status, uri):
|
||||
if code in ('IP_ADDRESS_NOT_FOUND', 'IP_ADDRESS_RESERVED'):
|
||||
raise AddressNotFoundError(message)
|
||||
elif code in ('AUTHORIZATION_INVALID', 'LICENSE_KEY_REQUIRED',
|
||||
'USER_ID_REQUIRED', 'USER_ID_UNKNOWN'):
|
||||
raise AuthenticationError(message)
|
||||
elif code in ('INSUFFICIENT_FUNDS', 'OUT_OF_QUERIES'):
|
||||
raise OutOfQueriesError(message)
|
||||
elif code == 'PERMISSION_REQUIRED':
|
||||
raise PermissionRequiredError(message)
|
||||
|
||||
raise InvalidRequestError(message, code, status, uri)
|
||||
|
||||
def _handle_5xx_status(self, status, uri):
|
||||
raise HTTPError('Received a server error (%(status)i) for '
|
||||
'%(uri)s' % locals(), status, uri)
|
||||
|
||||
def _handle_non_200_status(self, status, uri):
|
||||
raise HTTPError('Received a very surprising HTTP status '
|
||||
'(%(status)i) for %(uri)s' % locals(), status, uri)
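A minimal usage sketch for the client defined above. The user ID and license key are placeholders, not real credentials, and ``AddressNotFoundError`` comes from ``geoip2.errors`` as imported at the top of this module.

```python
import geoip2.webservice
from geoip2.errors import AddressNotFoundError

client = geoip2.webservice.Client(42, 'license_key', locales=['en'])

try:
    response = client.city('128.101.101.101')
    print(response.country.iso_code)                  # e.g. 'US'
    print(response.city.name)                         # name in the first matching locale
    print(response.subdivisions.most_specific.name)   # smallest matching subdivision
    print(response.location.latitude, response.location.longitude)
except AddressNotFoundError:
    print('The web service has no data for that IP address.')
```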
|
@@ -32,7 +32,7 @@ HASH_FUNCTION = 'sha256' # Must be in hashlib.
|
||||
# Linear to the hashing time. Adjust to be high but take a reasonable
|
||||
# amount of time on your server. Measure with:
|
||||
# python -m timeit -s 'import passwords as p' 'p.make_hash("something")'
|
||||
COST_FACTOR = 29000
|
||||
COST_FACTOR = 10000
|
||||
|
||||
|
||||
def make_hash(password):
|
||||
|
lib/ipaddress.py (new file, 2417 lines)
File diff suppressed because it is too large.
lib/maxminddb/__init__.py (new file, 46 lines)
@@ -0,0 +1,46 @@
# pylint:disable=C0111
import os

import maxminddb.reader

try:
    import maxminddb.extension
except ImportError:
    maxminddb.extension = None

from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
                             MODE_MEMORY)
from maxminddb.decoder import InvalidDatabaseError


def open_database(database, mode=MODE_AUTO):
    """Open a MaxMind DB database

    Arguments:
        database -- A path to a valid MaxMind DB file such as a GeoIP2
                    database file.
        mode -- mode to open the database with. Valid modes are:
                * MODE_MMAP_EXT - use the C extension with memory map.
                * MODE_MMAP - read from memory map. Pure Python.
                * MODE_FILE - read database as standard file. Pure Python.
                * MODE_MEMORY - load database into memory. Pure Python.
                * MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that
                              order. Default mode.
    """
    if (mode == MODE_AUTO and maxminddb.extension and
            hasattr(maxminddb.extension, 'Reader')) or mode == MODE_MMAP_EXT:
        return maxminddb.extension.Reader(database)
    elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY):
        return maxminddb.reader.Reader(database, mode)
    raise ValueError('Unsupported open mode: {0}'.format(mode))


def Reader(database):  # pylint: disable=invalid-name
    """This exists for backwards compatibility. Use open_database instead"""
    return open_database(database)

__title__ = 'maxminddb'
__version__ = '1.2.1'
__author__ = 'Gregory Oschwald'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2014 Maxmind, Inc.'
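A short usage sketch for ``open_database`` above. The database path is a placeholder; any GeoIP2/GeoLite2 ``.mmdb`` file works, and the dict layout depends on the database type.

```python
import maxminddb

reader = maxminddb.open_database('/path/to/GeoLite2-City.mmdb')  # MODE_AUTO by default
record = reader.get('128.101.101.101')     # decoded dict, or None if not found
if record:
    print(record['country']['iso_code'])   # structure depends on the database type
reader.close()
```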
lib/maxminddb/compat.py (new file, 33 lines)
@@ -0,0 +1,33 @@
import sys

import ipaddress

# pylint: skip-file

if sys.version_info[0] == 2:
    def compat_ip_address(address):
        if isinstance(address, bytes):
            address = address.decode()
        return ipaddress.ip_address(address)

    int_from_byte = ord

    FileNotFoundError = IOError

    def int_from_bytes(b):
        if b:
            return int(b.encode("hex"), 16)
        return 0

    byte_from_int = chr
else:
    def compat_ip_address(address):
        return ipaddress.ip_address(address)

    int_from_byte = lambda x: x

    FileNotFoundError = FileNotFoundError

    int_from_bytes = lambda x: int.from_bytes(x, 'big')

    byte_from_int = lambda x: bytes([x])
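A few quick checks of the Python 3 branch above; the Python 2 branch reaches the same results through ``ord``, ``chr``, and the hex round trip.

```python
from maxminddb.compat import (byte_from_int, compat_ip_address,
                              int_from_byte, int_from_bytes)

int_from_bytes(b'\x01\x00')   # 256 -- big-endian unsigned integer
int_from_byte(b'\xff'[0])     # 255 -- indexing bytes already yields an int
byte_from_int(65)             # b'A'
compat_ip_address(u'::1')     # IPv6Address('::1')
```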
lib/maxminddb/const.py (new file, 7 lines)
@@ -0,0 +1,7 @@
"""Constants used in the API"""

MODE_AUTO = 0
MODE_MMAP_EXT = 1
MODE_MMAP = 2
MODE_FILE = 4
MODE_MEMORY = 8
lib/maxminddb/decoder.py (new file, 173 lines)
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
maxminddb.decoder
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
This package contains code for decoding the MaxMind DB data section.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import struct
|
||||
|
||||
from maxminddb.compat import byte_from_int, int_from_bytes
|
||||
from maxminddb.errors import InvalidDatabaseError
|
||||
|
||||
|
||||
class Decoder(object): # pylint: disable=too-few-public-methods
|
||||
|
||||
"""Decoder for the data section of the MaxMind DB"""
|
||||
|
||||
def __init__(self, database_buffer, pointer_base=0, pointer_test=False):
|
||||
"""Created a Decoder for a MaxMind DB
|
||||
|
||||
Arguments:
|
||||
database_buffer -- an mmap'd MaxMind DB file.
|
||||
pointer_base -- the base number to use when decoding a pointer
|
||||
pointer_test -- used for internal unit testing of pointer code
|
||||
"""
|
||||
self._pointer_test = pointer_test
|
||||
self._buffer = database_buffer
|
||||
self._pointer_base = pointer_base
|
||||
|
||||
def _decode_array(self, size, offset):
|
||||
array = []
|
||||
for _ in range(size):
|
||||
(value, offset) = self.decode(offset)
|
||||
array.append(value)
|
||||
return array, offset
|
||||
|
||||
def _decode_boolean(self, size, offset):
|
||||
return size != 0, offset
|
||||
|
||||
def _decode_bytes(self, size, offset):
|
||||
new_offset = offset + size
|
||||
return self._buffer[offset:new_offset], new_offset
|
||||
|
||||
# pylint: disable=no-self-argument
|
||||
# |-> I am open to better ways of doing this as long as it doesn't involve
|
||||
# lots of code duplication.
|
||||
def _decode_packed_type(type_code, type_size, pad=False):
|
||||
# pylint: disable=protected-access, missing-docstring
|
||||
def unpack_type(self, size, offset):
|
||||
if not pad:
|
||||
self._verify_size(size, type_size)
|
||||
new_offset = offset + type_size
|
||||
packed_bytes = self._buffer[offset:new_offset]
|
||||
if pad:
|
||||
packed_bytes = packed_bytes.rjust(type_size, b'\x00')
|
||||
(value,) = struct.unpack(type_code, packed_bytes)
|
||||
return value, new_offset
|
||||
return unpack_type
|
||||
|
||||
def _decode_map(self, size, offset):
|
||||
container = {}
|
||||
for _ in range(size):
|
||||
(key, offset) = self.decode(offset)
|
||||
(value, offset) = self.decode(offset)
|
||||
container[key] = value
|
||||
return container, offset
|
||||
|
||||
_pointer_value_offset = {
|
||||
1: 0,
|
||||
2: 2048,
|
||||
3: 526336,
|
||||
4: 0,
|
||||
}
|
||||
|
||||
def _decode_pointer(self, size, offset):
|
||||
pointer_size = ((size >> 3) & 0x3) + 1
|
||||
new_offset = offset + pointer_size
|
||||
pointer_bytes = self._buffer[offset:new_offset]
|
||||
packed = pointer_bytes if pointer_size == 4 else struct.pack(
|
||||
b'!c', byte_from_int(size & 0x7)) + pointer_bytes
|
||||
unpacked = int_from_bytes(packed)
|
||||
pointer = unpacked + self._pointer_base + \
|
||||
self._pointer_value_offset[pointer_size]
|
||||
if self._pointer_test:
|
||||
return pointer, new_offset
|
||||
(value, _) = self.decode(pointer)
|
||||
return value, new_offset
|
||||
|
||||
def _decode_uint(self, size, offset):
|
||||
new_offset = offset + size
|
||||
uint_bytes = self._buffer[offset:new_offset]
|
||||
return int_from_bytes(uint_bytes), new_offset
|
||||
|
||||
def _decode_utf8_string(self, size, offset):
|
||||
new_offset = offset + size
|
||||
return self._buffer[offset:new_offset].decode('utf-8'), new_offset
|
||||
|
||||
_type_decoder = {
|
||||
1: _decode_pointer,
|
||||
2: _decode_utf8_string,
|
||||
3: _decode_packed_type(b'!d', 8), # double,
|
||||
4: _decode_bytes,
|
||||
5: _decode_uint, # uint16
|
||||
6: _decode_uint, # uint32
|
||||
7: _decode_map,
|
||||
8: _decode_packed_type(b'!i', 4, pad=True), # int32
|
||||
9: _decode_uint, # uint64
|
||||
10: _decode_uint, # uint128
|
||||
11: _decode_array,
|
||||
14: _decode_boolean,
|
||||
15: _decode_packed_type(b'!f', 4), # float,
|
||||
}
|
||||
|
||||
def decode(self, offset):
|
||||
"""Decode a section of the data section starting at offset
|
||||
|
||||
Arguments:
|
||||
offset -- the location of the data structure to decode
|
||||
"""
|
||||
new_offset = offset + 1
|
||||
(ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset])
|
||||
type_num = ctrl_byte >> 5
|
||||
# Extended type
|
||||
if not type_num:
|
||||
(type_num, new_offset) = self._read_extended(new_offset)
|
||||
|
||||
if type_num not in self._type_decoder:
|
||||
raise InvalidDatabaseError('Unexpected type number ({type}) '
|
||||
'encountered'.format(type=type_num))
|
||||
|
||||
(size, new_offset) = self._size_from_ctrl_byte(
|
||||
ctrl_byte, new_offset, type_num)
|
||||
return self._type_decoder[type_num](self, size, new_offset)
|
||||
|
||||
def _read_extended(self, offset):
|
||||
(next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1])
|
||||
type_num = next_byte + 7
|
||||
if type_num < 7:
|
||||
raise InvalidDatabaseError(
|
||||
'Something went horribly wrong in the decoder. An '
|
||||
'extended type resolved to a type number < 8 '
|
||||
'({type})'.format(type=type_num))
|
||||
return type_num, offset + 1
|
||||
|
||||
def _verify_size(self, expected, actual):
|
||||
if expected != actual:
|
||||
raise InvalidDatabaseError(
|
||||
'The MaxMind DB file\'s data section contains bad data '
|
||||
'(unknown data type or corrupt data)'
|
||||
)
|
||||
|
||||
def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num):
|
||||
size = ctrl_byte & 0x1f
|
||||
if type_num == 1:
|
||||
return size, offset
|
||||
bytes_to_read = 0 if size < 29 else size - 28
|
||||
|
||||
new_offset = offset + bytes_to_read
|
||||
size_bytes = self._buffer[offset:new_offset]
|
||||
|
||||
# Using unpack rather than int_from_bytes as it is about 200 lookups
|
||||
# per second faster here.
|
||||
if size == 29:
|
||||
size = 29 + struct.unpack(b'!B', size_bytes)[0]
|
||||
elif size == 30:
|
||||
size = 285 + struct.unpack(b'!H', size_bytes)[0]
|
||||
elif size > 30:
|
||||
size = struct.unpack(
|
||||
b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821
|
||||
|
||||
return size, new_offset
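To make the control-byte handling in ``decode`` above concrete, here is how a single byte splits into a type number and a size (the value is chosen purely for illustration):

```python
ctrl_byte = 0x43              # 0b010_00011
type_num = ctrl_byte >> 5     # 2 -> UTF-8 string (see _type_decoder above)
size = ctrl_byte & 0x1f       # 3 -> the next three bytes are the string
print(type_num, size)         # 2 3
```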
|
lib/maxminddb/errors.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""
maxminddb.errors
~~~~~~~~~~~~~~~~

This module contains custom errors for the MaxMind DB reader
"""


class InvalidDatabaseError(RuntimeError):

    """This error is thrown when unexpected data is found in the database."""
lib/maxminddb/extension/maxminddb.c (new file, 570 lines)
@@ -0,0 +1,570 @@
|
||||
#include <Python.h>
|
||||
#include <maxminddb.h>
|
||||
#include "structmember.h"
|
||||
|
||||
#define __STDC_FORMAT_MACROS
|
||||
#include <inttypes.h>
|
||||
|
||||
static PyTypeObject Reader_Type;
|
||||
static PyTypeObject Metadata_Type;
|
||||
static PyObject *MaxMindDB_error;
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD /* no semicolon */
|
||||
MMDB_s *mmdb;
|
||||
} Reader_obj;
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD /* no semicolon */
|
||||
PyObject *binary_format_major_version;
|
||||
PyObject *binary_format_minor_version;
|
||||
PyObject *build_epoch;
|
||||
PyObject *database_type;
|
||||
PyObject *description;
|
||||
PyObject *ip_version;
|
||||
PyObject *languages;
|
||||
PyObject *node_count;
|
||||
PyObject *record_size;
|
||||
} Metadata_obj;
|
||||
|
||||
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list);
|
||||
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list);
|
||||
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list);
|
||||
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list);
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
#define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void)
|
||||
#define RETURN_MOD_INIT(m) return (m)
|
||||
#define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError
|
||||
#else
|
||||
#define MOD_INIT(name) PyMODINIT_FUNC init ## name(void)
|
||||
#define RETURN_MOD_INIT(m) return
|
||||
#define PyInt_FromLong PyLong_FromLong
|
||||
#define FILE_NOT_FOUND_ERROR PyExc_IOError
|
||||
#endif
|
||||
|
||||
#ifdef __GNUC__
|
||||
# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
|
||||
#else
|
||||
# define UNUSED(x) UNUSED_ ## x
|
||||
#endif
|
||||
|
||||
static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
char *filename;
|
||||
int mode = 0;
|
||||
|
||||
static char *kwlist[] = {"database", "mode", NULL};
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (mode != 0 && mode != 1) {
|
||||
PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). Only "
|
||||
"MODE_AUTO and MODE_MMAP_EXT are supported by this extension.",
|
||||
mode);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (0 != access(filename, R_OK)) {
|
||||
PyErr_Format(FILE_NOT_FOUND_ERROR,
|
||||
"No such file or directory: '%s'",
|
||||
filename);
|
||||
return -1;
|
||||
}
|
||||
|
||||
MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s));
|
||||
if (NULL == mmdb) {
|
||||
PyErr_NoMemory();
|
||||
return -1;
|
||||
}
|
||||
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
if (!mmdb_obj) {
|
||||
free(mmdb);
|
||||
PyErr_NoMemory();
|
||||
return -1;
|
||||
}
|
||||
|
||||
uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb);
|
||||
|
||||
if (MMDB_SUCCESS != status) {
|
||||
free(mmdb);
|
||||
PyErr_Format(
|
||||
MaxMindDB_error,
|
||||
"Error opening database file (%s). Is this a valid MaxMind DB file?",
|
||||
filename
|
||||
);
|
||||
return -1;
|
||||
}
|
||||
|
||||
mmdb_obj->mmdb = mmdb;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject *Reader_get(PyObject *self, PyObject *args)
|
||||
{
|
||||
char *ip_address = NULL;
|
||||
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
if (!PyArg_ParseTuple(args, "s", &ip_address)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
MMDB_s *mmdb = mmdb_obj->mmdb;
|
||||
|
||||
if (NULL == mmdb) {
|
||||
PyErr_SetString(PyExc_ValueError,
|
||||
"Attempt to read from a closed MaxMind DB.");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int gai_error = 0;
|
||||
int mmdb_error = MMDB_SUCCESS;
|
||||
MMDB_lookup_result_s result =
|
||||
MMDB_lookup_string(mmdb, ip_address, &gai_error,
|
||||
&mmdb_error);
|
||||
|
||||
if (0 != gai_error) {
|
||||
PyErr_Format(PyExc_ValueError,
|
||||
"'%s' does not appear to be an IPv4 or IPv6 address.",
|
||||
ip_address);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (MMDB_SUCCESS != mmdb_error) {
|
||||
PyObject *exception;
|
||||
if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) {
|
||||
exception = PyExc_ValueError;
|
||||
} else {
|
||||
exception = MaxMindDB_error;
|
||||
}
|
||||
PyErr_Format(exception, "Error looking up %s. %s",
|
||||
ip_address, MMDB_strerror(mmdb_error));
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (!result.found_entry) {
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
MMDB_entry_data_list_s *entry_data_list = NULL;
|
||||
int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list);
|
||||
if (MMDB_SUCCESS != status) {
|
||||
PyErr_Format(MaxMindDB_error,
|
||||
"Error while looking up data for %s. %s",
|
||||
ip_address, MMDB_strerror(status));
|
||||
MMDB_free_entry_data_list(entry_data_list);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
|
||||
PyObject *py_obj = from_entry_data_list(&entry_data_list);
|
||||
MMDB_free_entry_data_list(original_entry_data_list);
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args))
|
||||
{
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
|
||||
if (NULL == mmdb_obj->mmdb) {
|
||||
PyErr_SetString(PyExc_IOError,
|
||||
"Attempt to read from a closed MaxMind DB.");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
MMDB_entry_data_list_s *entry_data_list;
|
||||
MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list);
|
||||
MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
|
||||
|
||||
PyObject *metadata_dict = from_entry_data_list(&entry_data_list);
|
||||
MMDB_free_entry_data_list(original_entry_data_list);
|
||||
if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) {
|
||||
PyErr_SetString(MaxMindDB_error,
|
||||
"Error decoding metadata.");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
PyObject *args = PyTuple_New(0);
|
||||
if (NULL == args) {
|
||||
Py_DECREF(metadata_dict);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args,
|
||||
metadata_dict);
|
||||
|
||||
Py_DECREF(metadata_dict);
|
||||
return metadata;
|
||||
}
|
||||
|
||||
static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args))
|
||||
{
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
|
||||
if (NULL != mmdb_obj->mmdb) {
|
||||
MMDB_close(mmdb_obj->mmdb);
|
||||
free(mmdb_obj->mmdb);
|
||||
mmdb_obj->mmdb = NULL;
|
||||
}
|
||||
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static void Reader_dealloc(PyObject *self)
|
||||
{
|
||||
Reader_obj *obj = (Reader_obj *)self;
|
||||
if (NULL != obj->mmdb) {
|
||||
Reader_close(self, NULL);
|
||||
}
|
||||
|
||||
PyObject_Del(self);
|
||||
}
|
||||
|
||||
static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
|
||||
PyObject
|
||||
*binary_format_major_version,
|
||||
*binary_format_minor_version,
|
||||
*build_epoch,
|
||||
*database_type,
|
||||
*description,
|
||||
*ip_version,
|
||||
*languages,
|
||||
*node_count,
|
||||
*record_size;
|
||||
|
||||
static char *kwlist[] = {
|
||||
"binary_format_major_version",
|
||||
"binary_format_minor_version",
|
||||
"build_epoch",
|
||||
"database_type",
|
||||
"description",
|
||||
"ip_version",
|
||||
"languages",
|
||||
"node_count",
|
||||
"record_size",
|
||||
NULL
|
||||
};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist,
|
||||
&binary_format_major_version,
|
||||
&binary_format_minor_version,
|
||||
&build_epoch,
|
||||
&database_type,
|
||||
&description,
|
||||
&ip_version,
|
||||
&languages,
|
||||
&node_count,
|
||||
&record_size)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
Metadata_obj *obj = (Metadata_obj *)self;
|
||||
|
||||
obj->binary_format_major_version = binary_format_major_version;
|
||||
obj->binary_format_minor_version = binary_format_minor_version;
|
||||
obj->build_epoch = build_epoch;
|
||||
obj->database_type = database_type;
|
||||
obj->description = description;
|
||||
obj->ip_version = ip_version;
|
||||
obj->languages = languages;
|
||||
obj->node_count = node_count;
|
||||
obj->record_size = record_size;
|
||||
|
||||
Py_INCREF(obj->binary_format_major_version);
|
||||
Py_INCREF(obj->binary_format_minor_version);
|
||||
Py_INCREF(obj->build_epoch);
|
||||
Py_INCREF(obj->database_type);
|
||||
Py_INCREF(obj->description);
|
||||
Py_INCREF(obj->ip_version);
|
||||
Py_INCREF(obj->languages);
|
||||
Py_INCREF(obj->node_count);
|
||||
Py_INCREF(obj->record_size);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void Metadata_dealloc(PyObject *self)
|
||||
{
|
||||
Metadata_obj *obj = (Metadata_obj *)self;
|
||||
Py_DECREF(obj->binary_format_major_version);
|
||||
Py_DECREF(obj->binary_format_minor_version);
|
||||
Py_DECREF(obj->build_epoch);
|
||||
Py_DECREF(obj->database_type);
|
||||
Py_DECREF(obj->description);
|
||||
Py_DECREF(obj->ip_version);
|
||||
Py_DECREF(obj->languages);
|
||||
Py_DECREF(obj->node_count);
|
||||
Py_DECREF(obj->record_size);
|
||||
PyObject_Del(self);
|
||||
}
|
||||
|
||||
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list)
|
||||
{
|
||||
if (NULL == entry_data_list || NULL == *entry_data_list) {
|
||||
PyErr_SetString(
|
||||
MaxMindDB_error,
|
||||
"Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb."
|
||||
);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
switch ((*entry_data_list)->entry_data.type) {
|
||||
case MMDB_DATA_TYPE_MAP:
|
||||
return from_map(entry_data_list);
|
||||
case MMDB_DATA_TYPE_ARRAY:
|
||||
return from_array(entry_data_list);
|
||||
case MMDB_DATA_TYPE_UTF8_STRING:
|
||||
return PyUnicode_FromStringAndSize(
|
||||
(*entry_data_list)->entry_data.utf8_string,
|
||||
(*entry_data_list)->entry_data.data_size
|
||||
);
|
||||
case MMDB_DATA_TYPE_BYTES:
|
||||
return PyByteArray_FromStringAndSize(
|
||||
(const char *)(*entry_data_list)->entry_data.bytes,
|
||||
(Py_ssize_t)(*entry_data_list)->entry_data.data_size);
|
||||
case MMDB_DATA_TYPE_DOUBLE:
|
||||
return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value);
|
||||
case MMDB_DATA_TYPE_FLOAT:
|
||||
return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value);
|
||||
case MMDB_DATA_TYPE_UINT16:
|
||||
return PyLong_FromLong( (*entry_data_list)->entry_data.uint16);
|
||||
case MMDB_DATA_TYPE_UINT32:
|
||||
return PyLong_FromLong((*entry_data_list)->entry_data.uint32);
|
||||
case MMDB_DATA_TYPE_BOOLEAN:
|
||||
return PyBool_FromLong((*entry_data_list)->entry_data.boolean);
|
||||
case MMDB_DATA_TYPE_UINT64:
|
||||
return PyLong_FromUnsignedLongLong(
|
||||
(*entry_data_list)->entry_data.uint64);
|
||||
case MMDB_DATA_TYPE_UINT128:
|
||||
return from_uint128(*entry_data_list);
|
||||
case MMDB_DATA_TYPE_INT32:
|
||||
return PyLong_FromLong((*entry_data_list)->entry_data.int32);
|
||||
default:
|
||||
PyErr_Format(MaxMindDB_error,
|
||||
"Invalid data type arguments: %d",
|
||||
(*entry_data_list)->entry_data.type);
|
||||
return NULL;
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list)
|
||||
{
|
||||
PyObject *py_obj = PyDict_New();
|
||||
if (NULL == py_obj) {
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const uint32_t map_size = (*entry_data_list)->entry_data.data_size;
|
||||
|
||||
uint i;
|
||||
// entry_data_list cannot start out NULL (see from_entry_data_list). We
|
||||
// check it in the loop because it may become NULL.
|
||||
// coverity[check_after_deref]
|
||||
for (i = 0; i < map_size && entry_data_list; i++) {
|
||||
*entry_data_list = (*entry_data_list)->next;
|
||||
|
||||
PyObject *key = PyUnicode_FromStringAndSize(
|
||||
(char *)(*entry_data_list)->entry_data.utf8_string,
|
||||
(*entry_data_list)->entry_data.data_size
|
||||
);
|
||||
|
||||
*entry_data_list = (*entry_data_list)->next;
|
||||
|
||||
PyObject *value = from_entry_data_list(entry_data_list);
|
||||
if (NULL == value) {
|
||||
Py_DECREF(key);
|
||||
Py_DECREF(py_obj);
|
||||
return NULL;
|
||||
}
|
||||
PyDict_SetItem(py_obj, key, value);
|
||||
Py_DECREF(value);
|
||||
Py_DECREF(key);
|
||||
}
|
||||
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list)
|
||||
{
|
||||
const uint32_t size = (*entry_data_list)->entry_data.data_size;
|
||||
|
||||
PyObject *py_obj = PyList_New(size);
|
||||
if (NULL == py_obj) {
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
uint i;
|
||||
// entry_data_list cannot start out NULL (see from_entry_data_list). We
|
||||
// check it in the loop because it may become NULL.
|
||||
// coverity[check_after_deref]
|
||||
for (i = 0; i < size && entry_data_list; i++) {
|
||||
*entry_data_list = (*entry_data_list)->next;
|
||||
PyObject *value = from_entry_data_list(entry_data_list);
|
||||
if (NULL == value) {
|
||||
Py_DECREF(py_obj);
|
||||
return NULL;
|
||||
}
|
||||
// PyList_SetItem 'steals' the reference
|
||||
PyList_SetItem(py_obj, i, value);
|
||||
}
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list)
|
||||
{
|
||||
uint64_t high = 0;
|
||||
uint64_t low = 0;
|
||||
#if MMDB_UINT128_IS_BYTE_ARRAY
|
||||
int i;
|
||||
for (i = 0; i < 8; i++) {
|
||||
high = (high << 8) | entry_data_list->entry_data.uint128[i];
|
||||
}
|
||||
|
||||
for (i = 8; i < 16; i++) {
|
||||
low = (low << 8) | entry_data_list->entry_data.uint128[i];
|
||||
}
|
||||
#else
|
||||
high = entry_data_list->entry_data.uint128 >> 64;
|
||||
low = (uint64_t)entry_data_list->entry_data.uint128;
|
||||
#endif
|
||||
|
||||
char *num_str = malloc(33);
|
||||
if (NULL == num_str) {
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low);
|
||||
|
||||
PyObject *py_obj = PyLong_FromString(num_str, NULL, 16);
|
||||
|
||||
free(num_str);
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyMethodDef Reader_methods[] = {
|
||||
{ "get", Reader_get, METH_VARARGS,
|
||||
"Get record for IP address" },
|
||||
{ "metadata", Reader_metadata, METH_NOARGS,
|
||||
"Returns metadata object for database" },
|
||||
{ "close", Reader_close, METH_NOARGS, "Closes database"},
|
||||
{ NULL, NULL, 0, NULL }
|
||||
};
|
||||
|
||||
static PyTypeObject Reader_Type = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
.tp_basicsize = sizeof(Reader_obj),
|
||||
.tp_dealloc = Reader_dealloc,
|
||||
.tp_doc = "Reader object",
|
||||
.tp_flags = Py_TPFLAGS_DEFAULT,
|
||||
.tp_methods = Reader_methods,
|
||||
.tp_name = "Reader",
|
||||
.tp_init = Reader_init,
|
||||
};
|
||||
|
||||
static PyMethodDef Metadata_methods[] = {
|
||||
{ NULL, NULL, 0, NULL }
|
||||
};
|
||||
|
||||
/* *INDENT-OFF* */
|
||||
static PyMemberDef Metadata_members[] = {
|
||||
{ "binary_format_major_version", T_OBJECT, offsetof(
|
||||
Metadata_obj, binary_format_major_version), READONLY, NULL },
|
||||
{ "binary_format_minor_version", T_OBJECT, offsetof(
|
||||
Metadata_obj, binary_format_minor_version), READONLY, NULL },
|
||||
{ "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch),
|
||||
READONLY, NULL },
|
||||
{ "database_type", T_OBJECT, offsetof(Metadata_obj, database_type),
|
||||
READONLY, NULL },
|
||||
{ "description", T_OBJECT, offsetof(Metadata_obj, description),
|
||||
READONLY, NULL },
|
||||
{ "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version),
|
||||
READONLY, NULL },
|
||||
{ "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY,
|
||||
NULL },
|
||||
{ "node_count", T_OBJECT, offsetof(Metadata_obj, node_count),
|
||||
READONLY, NULL },
|
||||
{ "record_size", T_OBJECT, offsetof(Metadata_obj, record_size),
|
||||
READONLY, NULL },
|
||||
{ NULL, 0, 0, 0, NULL }
|
||||
};
|
||||
/* *INDENT-ON* */
|
||||
|
||||
static PyTypeObject Metadata_Type = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
.tp_basicsize = sizeof(Metadata_obj),
|
||||
.tp_dealloc = Metadata_dealloc,
|
||||
.tp_doc = "Metadata object",
|
||||
.tp_flags = Py_TPFLAGS_DEFAULT,
|
||||
.tp_members = Metadata_members,
|
||||
.tp_methods = Metadata_methods,
|
||||
.tp_name = "Metadata",
|
||||
.tp_init = Metadata_init
|
||||
};
|
||||
|
||||
static PyMethodDef MaxMindDB_methods[] = {
|
||||
{ NULL, NULL, 0, NULL }
|
||||
};
|
||||
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
static struct PyModuleDef MaxMindDB_module = {
|
||||
PyModuleDef_HEAD_INIT,
|
||||
.m_name = "extension",
|
||||
.m_doc = "This is a C extension to read MaxMind DB file format",
|
||||
.m_methods = MaxMindDB_methods,
|
||||
};
|
||||
#endif
|
||||
|
||||
MOD_INIT(extension){
|
||||
PyObject *m;
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
m = PyModule_Create(&MaxMindDB_module);
|
||||
#else
|
||||
m = Py_InitModule("extension", MaxMindDB_methods);
|
||||
#endif
|
||||
|
||||
if (!m) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
Reader_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&Reader_Type)) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
Py_INCREF(&Reader_Type);
|
||||
PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type);
|
||||
|
||||
Metadata_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&Metadata_Type)) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type);
|
||||
|
||||
PyObject* error_mod = PyImport_ImportModule("maxminddb.errors");
|
||||
if (error_mod == NULL) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError");
|
||||
Py_DECREF(error_mod);
|
||||
|
||||
if (MaxMindDB_error == NULL) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
Py_INCREF(MaxMindDB_error);
|
||||
|
||||
/* We primarily add it to the module for backwards compatibility */
|
||||
PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error);
|
||||
|
||||
RETURN_MOD_INIT(m);
|
||||
}
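A sketch of exercising the extension module defined above. It assumes libmaxminddb and a compiled ``maxminddb.extension`` are available, and the database path is a placeholder.

```python
import maxminddb
from maxminddb.const import MODE_MMAP_EXT

# Force the C extension rather than the pure Python reader.
reader = maxminddb.open_database('/path/to/GeoLite2-City.mmdb', MODE_MMAP_EXT)

print(reader.metadata().node_count)     # attributes exposed via Metadata_members
print(reader.get('128.101.101.101'))    # dict built by from_entry_data_list()
reader.close()
```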
|
lib/maxminddb/file.py (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
"""For internal use only. It provides a slice-like file reader."""
|
||||
|
||||
import os
|
||||
|
||||
try:
|
||||
# pylint: disable=no-name-in-module
|
||||
from multiprocessing import Lock
|
||||
except ImportError:
|
||||
from threading import Lock
|
||||
|
||||
|
||||
class FileBuffer(object):
|
||||
|
||||
"""A slice-able file reader"""
|
||||
|
||||
def __init__(self, database):
|
||||
self._handle = open(database, 'rb')
|
||||
self._size = os.fstat(self._handle.fileno()).st_size
|
||||
if not hasattr(os, 'pread'):
|
||||
self._lock = Lock()
|
||||
|
||||
def __getitem__(self, key):
|
||||
if isinstance(key, slice):
|
||||
return self._read(key.stop - key.start, key.start)
|
||||
elif isinstance(key, int):
|
||||
return self._read(1, key)
|
||||
else:
|
||||
raise TypeError("Invalid argument type.")
|
||||
|
||||
def rfind(self, needle, start):
|
||||
"""Reverse find needle from start"""
|
||||
pos = self._read(self._size - start - 1, start).rfind(needle)
|
||||
if pos == -1:
|
||||
return pos
|
||||
return start + pos
|
||||
|
||||
def size(self):
|
||||
"""Size of file"""
|
||||
return self._size
|
||||
|
||||
def close(self):
|
||||
"""Close file"""
|
||||
self._handle.close()
|
||||
|
||||
if hasattr(os, 'pread'):
|
||||
|
||||
def _read(self, buffersize, offset):
|
||||
"""read that uses pread"""
|
||||
# pylint: disable=no-member
|
||||
return os.pread(self._handle.fileno(), buffersize, offset)
|
||||
|
||||
else:
|
||||
|
||||
def _read(self, buffersize, offset):
|
||||
"""read with a lock
|
||||
|
||||
This lock is necessary as after a fork, the different processes
|
||||
will share the same file table entry, even if we dup the fd, and
|
||||
as such the same offsets. There does not appear to be a way to
|
||||
duplicate the file table entry and we cannot re-open based on the
|
||||
original path as that file may have replaced with another or
|
||||
unlinked.
|
||||
"""
|
||||
with self._lock:
|
||||
self._handle.seek(offset)
|
||||
return self._handle.read(buffersize)
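A small sketch of the slice-style reader above (the path is a placeholder). The same calls go through ``os.pread`` where available and the locked seek/read fallback otherwise.

```python
from maxminddb.file import FileBuffer

buf = FileBuffer('/path/to/GeoLite2-City.mmdb')
print(buf.size())                       # total file size in bytes
print(buf[0:2])                         # first two bytes of the database
print(buf.rfind(b'MaxMind.com', 0))     # offset of the metadata marker, or -1
buf.close()
```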
|
lib/maxminddb/reader.py (new file, 223 lines)
@@ -0,0 +1,223 @@
|
||||
"""
|
||||
maxminddb.reader
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains the pure Python database reader and related classes.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
import mmap
|
||||
except ImportError:
|
||||
# pylint: disable=invalid-name
|
||||
mmap = None
|
||||
|
||||
import struct
|
||||
|
||||
from maxminddb.compat import byte_from_int, int_from_byte, compat_ip_address
|
||||
from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY
|
||||
from maxminddb.decoder import Decoder
|
||||
from maxminddb.errors import InvalidDatabaseError
|
||||
from maxminddb.file import FileBuffer
|
||||
|
||||
|
||||
class Reader(object):
|
||||
|
||||
"""
|
||||
Instances of this class provide a reader for the MaxMind DB format. IP
|
||||
addresses can be looked up using the ``get`` method.
|
||||
"""
|
||||
|
||||
_DATA_SECTION_SEPARATOR_SIZE = 16
|
||||
_METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com"
|
||||
|
||||
_ipv4_start = None
|
||||
|
||||
def __init__(self, database, mode=MODE_AUTO):
|
||||
"""Reader for the MaxMind DB file format
|
||||
|
||||
Arguments:
|
||||
database -- A path to a valid MaxMind DB file such as a GeoIP2
|
||||
database file.
|
||||
mode -- mode to open the database with. Valid modes are:
|
||||
* MODE_MMAP - read from memory map.
|
||||
* MODE_FILE - read database as standard file.
|
||||
* MODE_MEMORY - load database into memory.
|
||||
* MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default.
|
||||
"""
|
||||
# pylint: disable=redefined-variable-type
|
||||
if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP:
|
||||
with open(database, 'rb') as db_file:
|
||||
self._buffer = mmap.mmap(
|
||||
db_file.fileno(), 0, access=mmap.ACCESS_READ)
|
||||
self._buffer_size = self._buffer.size()
|
||||
elif mode in (MODE_AUTO, MODE_FILE):
|
||||
self._buffer = FileBuffer(database)
|
||||
self._buffer_size = self._buffer.size()
|
||||
elif mode == MODE_MEMORY:
|
||||
with open(database, 'rb') as db_file:
|
||||
self._buffer = db_file.read()
|
||||
self._buffer_size = len(self._buffer)
|
||||
else:
|
||||
raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, '
|
||||
' MODE_FILE, and MODE_MEMORY are supported by the pure Python '
|
||||
'Reader'.format(mode))
|
||||
|
||||
metadata_start = self._buffer.rfind(self._METADATA_START_MARKER,
|
||||
max(0, self._buffer_size
|
||||
- 128 * 1024))
|
||||
|
||||
if metadata_start == -1:
|
||||
self.close()
|
||||
raise InvalidDatabaseError('Error opening database file ({0}). '
|
||||
'Is this a valid MaxMind DB file?'
|
||||
''.format(database))
|
||||
|
||||
metadata_start += len(self._METADATA_START_MARKER)
|
||||
metadata_decoder = Decoder(self._buffer, metadata_start)
|
||||
(metadata, _) = metadata_decoder.decode(metadata_start)
|
||||
self._metadata = Metadata(
|
||||
**metadata) # pylint: disable=bad-option-value
|
||||
|
||||
self._decoder = Decoder(self._buffer, self._metadata.search_tree_size
|
||||
+ self._DATA_SECTION_SEPARATOR_SIZE)
|
||||
|
||||
def metadata(self):
|
||||
"""Return the metadata associated with the MaxMind DB file"""
|
||||
return self._metadata
|
||||
|
||||
def get(self, ip_address):
|
||||
"""Return the record for the ip_address in the MaxMind DB
|
||||
|
||||
|
||||
Arguments:
|
||||
ip_address -- an IP address in the standard string notation
|
||||
"""
|
||||
|
||||
address = compat_ip_address(ip_address)
|
||||
|
||||
if address.version == 6 and self._metadata.ip_version == 4:
|
||||
raise ValueError('Error looking up {0}. You attempted to look up '
|
||||
'an IPv6 address in an IPv4-only database.'.format(
|
||||
ip_address))
|
||||
pointer = self._find_address_in_tree(address)
|
||||
|
||||
return self._resolve_data_pointer(pointer) if pointer else None
|
||||
|
||||
def _find_address_in_tree(self, ip_address):
|
||||
packed = ip_address.packed
|
||||
|
||||
bit_count = len(packed) * 8
|
||||
node = self._start_node(bit_count)
|
||||
|
||||
for i in range(bit_count):
|
||||
if node >= self._metadata.node_count:
|
||||
break
|
||||
bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8))
|
||||
node = self._read_node(node, bit)
|
||||
if node == self._metadata.node_count:
|
||||
# Record is empty
|
||||
return 0
|
||||
elif node > self._metadata.node_count:
|
||||
return node
|
||||
|
||||
raise InvalidDatabaseError('Invalid node in search tree')
|
||||
|
||||
def _start_node(self, length):
|
||||
if self._metadata.ip_version != 6 or length == 128:
|
||||
return 0
|
||||
|
||||
# We are looking up an IPv4 address in an IPv6 tree. Skip over the
|
||||
# first 96 nodes.
|
||||
if self._ipv4_start:
|
||||
return self._ipv4_start
|
||||
|
||||
node = 0
|
||||
for _ in range(96):
|
||||
if node >= self._metadata.node_count:
|
||||
break
|
||||
node = self._read_node(node, 0)
|
||||
self._ipv4_start = node
|
||||
return node
|
||||
|
||||
def _read_node(self, node_number, index):
|
||||
base_offset = node_number * self._metadata.node_byte_size
|
||||
|
||||
record_size = self._metadata.record_size
|
||||
if record_size == 24:
|
||||
offset = base_offset + index * 3
|
||||
node_bytes = b'\x00' + self._buffer[offset:offset + 3]
|
||||
elif record_size == 28:
|
||||
(middle,) = struct.unpack(
|
||||
b'!B', self._buffer[base_offset + 3:base_offset + 4])
|
||||
if index:
|
||||
middle &= 0x0F
|
||||
else:
|
||||
middle = (0xF0 & middle) >> 4
|
||||
offset = base_offset + index * 4
|
||||
node_bytes = byte_from_int(
|
||||
middle) + self._buffer[offset:offset + 3]
|
||||
elif record_size == 32:
|
||||
offset = base_offset + index * 4
|
||||
node_bytes = self._buffer[offset:offset + 4]
|
||||
else:
|
||||
raise InvalidDatabaseError(
|
||||
'Unknown record size: {0}'.format(record_size))
|
||||
return struct.unpack(b'!I', node_bytes)[0]
|
||||
|
||||
def _resolve_data_pointer(self, pointer):
|
||||
resolved = pointer - self._metadata.node_count + \
|
||||
self._metadata.search_tree_size
|
||||
|
||||
if resolved > self._buffer_size:
|
||||
raise InvalidDatabaseError(
|
||||
"The MaxMind DB file's search tree is corrupt")
|
||||
|
||||
(data, _) = self._decoder.decode(resolved)
|
||||
return data
|
||||
|
||||
def close(self):
|
||||
"""Closes the MaxMind DB file and returns the resources to the system"""
|
||||
# pylint: disable=unidiomatic-typecheck
|
||||
if type(self._buffer) not in (str, bytes):
|
||||
self._buffer.close()
|
||||
|
||||
|
||||
class Metadata(object):
|
||||
|
||||
"""Metadata for the MaxMind DB reader"""
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, **kwargs):
|
||||
"""Creates new Metadata object. kwargs are key/value pairs from spec"""
|
||||
# Although I could just update __dict__, that is less obvious and it
|
||||
# doesn't work well with static analysis tools and some IDEs
|
||||
self.node_count = kwargs['node_count']
|
||||
self.record_size = kwargs['record_size']
|
||||
self.ip_version = kwargs['ip_version']
|
||||
self.database_type = kwargs['database_type']
|
||||
self.languages = kwargs['languages']
|
||||
self.binary_format_major_version = kwargs[
|
||||
'binary_format_major_version']
|
||||
self.binary_format_minor_version = kwargs[
|
||||
'binary_format_minor_version']
|
||||
self.build_epoch = kwargs['build_epoch']
|
||||
self.description = kwargs['description']
|
||||
|
||||
@property
|
||||
def node_byte_size(self):
|
||||
"""The size of a node in bytes"""
|
||||
return self.record_size // 4
|
||||
|
||||
@property
|
||||
def search_tree_size(self):
|
||||
"""The size of the search tree"""
|
||||
return self.node_count * self.node_byte_size
|
||||
|
||||
def __repr__(self):
|
||||
args = ', '.join('%s=%r' % x for x in self.__dict__.items())
|
||||
return '{module}.{class_name}({data})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=args)
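A usage sketch for the pure-Python reader and its metadata (the path is a placeholder; ``MODE_FILE`` avoids both mmap and the C extension).

```python
from maxminddb.const import MODE_FILE
from maxminddb.reader import Reader

reader = Reader('/path/to/GeoLite2-City.mmdb', MODE_FILE)

meta = reader.metadata()
print(meta.ip_version, meta.node_count, meta.record_size)
print(meta.node_byte_size, meta.search_tree_size)   # derived properties above

print(reader.get('2001:4860:4860::8888'))  # decoded dict, or None if not found
reader.close()
```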
|
@@ -72,7 +72,7 @@ def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
|
||||
rv = u = _pseudorandom(salt + _pack_int(block))
|
||||
for i in xrange(iterations - 1):
|
||||
u = _pseudorandom(''.join(map(chr, u)))
|
||||
rv = starmap(xor, izip(rv, u))
|
||||
rv = list(starmap(xor, izip(rv, u)))
|
||||
buf.extend(rv)
|
||||
return ''.join(map(chr, buf))[:keylen]
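The change above matters because ``starmap`` returns a lazy, one-shot iterator: with the bare ``starmap``, the XORs are deferred until the result is finally consumed, by which point ``u`` has been rebound by later iterations and the iterator can only be walked once. Wrapping it in ``list()`` materializes each round's output immediately so it can be reused. A small illustration of the one-shot behaviour (Python 3 spelling, ``zip`` in place of ``izip``):

```python
from itertools import starmap
from operator import xor

a, b = [1, 2, 3], [4, 5, 6]

lazy = starmap(xor, zip(a, b))
print(list(lazy))                 # [5, 7, 5]
print(list(lazy))                 # [] -- the iterator is already spent

safe = list(starmap(xor, zip(a, b)))
print(safe, safe)                 # a real list can be reused every round
```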
|
||||
|
||||
|
@@ -18,6 +18,7 @@ import time
|
||||
|
||||
import plexpy
|
||||
import activity_processor
|
||||
import datafactory
|
||||
import helpers
|
||||
import logger
|
||||
import notification_handler
|
||||
@@ -48,29 +49,53 @@ class ActivityHandler(object):
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
session_list = pms_connect.get_current_activity()
|
||||
|
||||
for session in session_list['sessions']:
|
||||
if int(session['session_key']) == self.get_session_key():
|
||||
return session
|
||||
if session_list:
|
||||
for session in session_list['sessions']:
|
||||
if int(session['session_key']) == self.get_session_key():
|
||||
return session
|
||||
|
||||
return None
|
||||
|
||||
def update_db_session(self):
|
||||
def update_db_session(self, session=None):
|
||||
# Update our session temp table values
|
||||
monitor_proc = activity_processor.ActivityProcessor()
|
||||
monitor_proc.write_session(session=self.get_live_session(), notify=False)
|
||||
monitor_proc.write_session(session=session, notify=False)
|
||||
|
||||
def on_start(self):
|
||||
if self.is_valid_session() and self.get_live_session():
|
||||
logger.debug(u"PlexPy ActivityHandler :: Session %s has started." % str(self.get_session_key()))
|
||||
|
||||
session = self.get_live_session()
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if any(d['on_play'] for d in notifiers.available_notification_agents()):
|
||||
# Fire off notifications
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=self.get_live_session(), notify_action='play')).start()
|
||||
kwargs=dict(stream_data=session, notify_action='play')).start()
|
||||
|
||||
# Write the new session to our temp session table
|
||||
self.update_db_session()
|
||||
self.update_db_session(session=session)
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if any(d['on_concurrent'] for d in notifiers.available_notification_agents()):
|
||||
# Check if any concurrent streams by the user
|
||||
ip = True if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP else None
|
||||
ap = activity_processor.ActivityProcessor()
|
||||
user_sessions = ap.get_session_by_user_id(user_id=session['user_id'], ip_address=ip)
|
||||
if len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD:
|
||||
# Push any notifications - Push it on its own thread so we don't hold up our db actions
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=session, notify_action='concurrent')).start()
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if any(d['on_newdevice'] for d in notifiers.available_notification_agents()):
|
||||
# Check if any concurrent streams by the user
|
||||
data_factory = datafactory.DataFactory()
|
||||
user_devices = data_factory.get_user_devices(user_id=session['user_id'])
|
||||
if session['machine_id'] not in user_devices:
|
||||
# Push any notifications - Push it on it's own thread so we don't hold up our db actions
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=session, notify_action='newdevice')).start()
|
||||
|
||||
def on_stop(self, force_stop=False):
|
||||
if self.is_valid_session():
|
||||
|
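The two new blocks in `on_start()` follow the same pattern: gate on whether any agent has the trigger enabled, look up the relevant state, and fire the notification on its own thread. A rough standalone sketch of that gating logic, with `get_user_sessions` and `get_known_devices` as hypothetical stand-ins for the ActivityProcessor and DataFactory calls:

```python
import threading

CONCURRENT_THRESHOLD = 2  # mirrors NOTIFY_CONCURRENT_THRESHOLD

def check_start_triggers(session, get_user_sessions, get_known_devices, notify):
    # Concurrent streams: count the user's active sessions and compare
    # against the configured threshold before notifying.
    user_sessions = get_user_sessions(session['user_id'])
    if len(user_sessions) >= CONCURRENT_THRESHOLD:
        threading.Thread(target=notify,
                         kwargs=dict(stream_data=session,
                                     notify_action='concurrent')).start()

    # New device: notify when the session's machine_id has never been
    # seen in the user's play history.
    if session['machine_id'] not in get_known_devices(session['user_id']):
        threading.Thread(target=notify,
                         kwargs=dict(stream_data=session,
                                     notify_action='newdevice')).start()
```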
@@ -24,6 +24,7 @@ import libraries
import logger
import notification_handler
import notifiers
import plextv
import pmsconnect

@@ -372,23 +373,19 @@ def check_server_updates():
with monitor_lock:
    logger.info(u"PlexPy Monitor :: Checking for PMS updates...")

    pms_connect = pmsconnect.PmsConnect()
    plex_tv = plextv.PlexTV()
    download_info = plex_tv.get_plex_downloads()

    server_identity = pms_connect.get_server_identity()
    update_status = pms_connect.get_update_staus()
    if download_info:
        logger.info(u"PlexPy Monitor :: Current PMS version: %s", plexpy.CONFIG.PMS_VERSION)

    if server_identity and update_status:
        version = server_identity['version']
        logger.info(u"PlexPy Monitor :: Current PMS version: %s", version)

        if update_status['state'] == 'available':
            update_version = update_status['version']
            logger.info(u"PlexPy Monitor :: PMS update available version: %s", update_version)
        if download_info['update_available']:
            logger.info(u"PlexPy Monitor :: PMS update available version: %s", download_info['version'])

            # Check if any notification agents have notifications enabled
            if any(d['on_pmsupdate'] for d in notifiers.available_notification_agents()):
                # Fire off notifications
                threading.Thread(target=notification_handler.notify_timeline,
                                 kwargs=dict(notify_action='pmsupdate')).start()
                                 kwargs=dict(notify_action='pmsupdate')).start()
        else:
            logger.info(u"PlexPy Monitor :: No PMS update available.")
@@ -19,6 +19,7 @@ import re

import plexpy
import database
import datafactory
import libraries
import log_reader
import logger
@@ -106,6 +107,26 @@ class ActivityProcessor(object):
ip_address = {'ip_address': ip_address}
self.db.upsert('sessions', ip_address, keys)

# Check if any notification agents have notifications enabled
if notify and any(d['on_concurrent'] for d in notifiers.available_notification_agents()):
    # Check if any concurrent streams by the user
    ip = True if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP else None
    user_sessions = self.get_session_by_user_id(user_id=session['user_id'], ip_address=ip)
    if len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD:
        # Push any notifications - Push it on it's own thread so we don't hold up our db actions
        threading.Thread(target=notification_handler.notify,
                         kwargs=dict(stream_data=values, notify_action='concurrent')).start()

# Check if any notification agents have notifications enabled
if notify and any(d['on_newdevice'] for d in notifiers.available_notification_agents()):
    # Check if any concurrent streams by the user
    data_factory = datafactory.DataFactory()
    user_devices = data_factory.get_user_devices(user_id=session['user_id'])
    if session['machine_id'] not in user_devices:
        # Push any notifications - Push it on it's own thread so we don't hold up our db actions
        threading.Thread(target=notification_handler.notify,
                         kwargs=dict(stream_data=values, notify_action='newdevice')).start()

return True

def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
@@ -219,26 +240,30 @@ class ActivityProcessor(object):
args = [session['user_id']]

result = self.db.select(query=query, args=args)

new_session = {'id': result[0]['id'],
               'rating_key': result[0]['rating_key'],
               'view_offset': result[0]['view_offset'],
               'user_id': result[0]['user_id'],
               'reference_id': result[0]['reference_id']}

if len(result) == 1:
    prev_session = None
else:
new_session = prev_session = last_id = None
if len(result) > 1:
    new_session = {'id': result[0]['id'],
                   'rating_key': result[0]['rating_key'],
                   'view_offset': result[0]['view_offset'],
                   'user_id': result[0]['user_id'],
                   'reference_id': result[0]['reference_id']}

    prev_session = {'id': result[1]['id'],
                    'rating_key': result[1]['rating_key'],
                    'view_offset': result[1]['view_offset'],
                    'user_id': result[1]['user_id'],
                    'reference_id': result[1]['reference_id']}
else:
    # Get the last insert row id
    result = self.db.select(query='SELECT last_insert_rowid() AS last_id')
    last_id = result[0]['last_id'] if result else None

query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
# If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key'] \
    and prev_session['view_offset'] <= new_session['view_offset']):
if prev_session == new_session == None:
    args = [last_id, last_id]
elif prev_session['rating_key'] == new_session['rating_key'] and prev_session['view_offset'] <= new_session['view_offset']:
    args = [prev_session['reference_id'], new_session['id']]
else:
    args = [new_session['id'], new_session['id']]
@@ -466,3 +491,13 @@ class ActivityProcessor(object):
    return last_time['buffer_last_triggered']

return None

def get_session_by_user_id(self, user_id=None, ip_address=None):
    sessions = []
    if str(user_id).isdigit():
        ip = 'GROUP BY ip_address' if ip_address else ''
        sessions = self.db.select('SELECT * '
                                  'FROM sessions '
                                  'WHERE user_id = ? %s' % ip,
                                  [user_id])
    return sessions
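`get_session_by_user_id()` builds its query by optionally appending `GROUP BY ip_address`, so with `NOTIFY_CONCURRENT_BY_IP` enabled each distinct address counts once toward the concurrent-stream threshold. A hedged sqlite3 sketch of the same idea against a simplified `sessions` table:

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE sessions (user_id INTEGER, ip_address TEXT)')
conn.executemany('INSERT INTO sessions VALUES (?, ?)',
                 [(1, '10.0.0.5'), (1, '10.0.0.5'), (1, '10.0.0.9')])

def sessions_for_user(user_id, by_ip=False):
    # Same shape as the helper: the GROUP BY collapses duplicate IPs so
    # two streams from one address count as a single "session".
    group = 'GROUP BY ip_address' if by_ip else ''
    return conn.execute('SELECT * FROM sessions WHERE user_id = ? %s' % group,
                        [user_id]).fetchall()

print(len(sessions_for_user(1)))              # 3 rows
print(len(sessions_for_user(1, by_ip=True)))  # 2 distinct IPs
```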
@@ -46,6 +46,7 @@ _CONFIG_DEFINITIONS = {
'PMS_IP': (str, 'PMS', '127.0.0.1'),
'PMS_IS_REMOTE': (int, 'PMS', 0),
'PMS_LOGS_FOLDER': (str, 'PMS', ''),
'PMS_LOGS_LINE_CAP': (int, 'PMS', 1000),
'PMS_NAME': (unicode, 'PMS', ''),
'PMS_PORT': (int, 'PMS', 32400),
'PMS_TOKEN': (str, 'PMS', ''),
@@ -54,6 +55,11 @@ _CONFIG_DEFINITIONS = {
'PMS_USE_BIF': (int, 'PMS', 0),
'PMS_UUID': (str, 'PMS', ''),
'PMS_TIMEOUT': (int, 'Advanced', 15),
'PMS_PLEXPASS': (int, 'PMS', 0),
'PMS_PLATFORM': (str, 'PMS', ''),
'PMS_VERSION': (str, 'PMS', ''),
'PMS_UPDATE_CHANNEL': (str, 'PMS', 'public'),
'PMS_UPDATE_DISTRO_BUILD': (str, 'PMS', ''),
'TIME_FORMAT': (str, 'General', 'HH:mm'),
'ANON_REDIRECT': (str, 'General', 'http://dereferer.org/?'),
'API_ENABLED': (int, 'General', 0),
@@ -74,6 +80,8 @@ _CONFIG_DEFINITIONS = {
'BOXCAR_ON_EXTUP': (int, 'Boxcar', 0),
'BOXCAR_ON_INTUP': (int, 'Boxcar', 0),
'BOXCAR_ON_PMSUPDATE': (int, 'Boxcar', 0),
'BOXCAR_ON_CONCURRENT': (int, 'Boxcar', 0),
'BOXCAR_ON_NEWDEVICE': (int, 'Boxcar', 0),
'BROWSER_ENABLED': (int, 'Boxcar', 0),
'BROWSER_AUTO_HIDE_DELAY': (int, 'Boxcar', 5),
'BROWSER_ON_PLAY': (int, 'BROWSER', 0),
@@ -88,6 +96,8 @@ _CONFIG_DEFINITIONS = {
'BROWSER_ON_EXTUP': (int, 'BROWSER', 0),
'BROWSER_ON_INTUP': (int, 'BROWSER', 0),
'BROWSER_ON_PMSUPDATE': (int, 'BROWSER', 0),
'BROWSER_ON_CONCURRENT': (int, 'BROWSER', 0),
'BROWSER_ON_NEWDEVICE': (int, 'BROWSER', 0),
'BUFFER_THRESHOLD': (int, 'Monitoring', 3),
'BUFFER_WAIT': (int, 'Monitoring', 900),
'BACKUP_DIR': (str, 'General', ''),
@@ -124,6 +134,8 @@ _CONFIG_DEFINITIONS = {
'EMAIL_ON_EXTUP': (int, 'Email', 0),
'EMAIL_ON_INTUP': (int, 'Email', 0),
'EMAIL_ON_PMSUPDATE': (int, 'Email', 0),
'EMAIL_ON_CONCURRENT': (int, 'Email', 0),
'EMAIL_ON_NEWDEVICE': (int, 'Email', 0),
'ENABLE_HTTPS': (int, 'General', 0),
'FACEBOOK_ENABLED': (int, 'Facebook', 0),
'FACEBOOK_REDIRECT_URI': (str, 'Facebook', ''),
@@ -146,8 +158,11 @@ _CONFIG_DEFINITIONS = {
'FACEBOOK_ON_EXTUP': (int, 'Facebook', 0),
'FACEBOOK_ON_INTUP': (int, 'Facebook', 0),
'FACEBOOK_ON_PMSUPDATE': (int, 'Facebook', 0),
'FACEBOOK_ON_CONCURRENT': (int, 'Facebook', 0),
'FACEBOOK_ON_NEWDEVICE': (int, 'Facebook', 0),
'FIRST_RUN_COMPLETE': (int, 'General', 0),
'FREEZE_DB': (int, 'General', 0),
'GEOIP_DB': (str, 'General', ''),
'GET_FILE_SIZES': (int, 'General', 0),
'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}),
'GIT_BRANCH': (str, 'General', 'master'),
@@ -173,6 +188,8 @@ _CONFIG_DEFINITIONS = {
'GROWL_ON_EXTUP': (int, 'Growl', 0),
'GROWL_ON_INTUP': (int, 'Growl', 0),
'GROWL_ON_PMSUPDATE': (int, 'Growl', 0),
'GROWL_ON_CONCURRENT': (int, 'Growl', 0),
'GROWL_ON_NEWDEVICE': (int, 'Growl', 0),
'HOME_SECTIONS': (list, 'General', ['current_activity','watch_stats','library_stats','recently_added']),
'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']),
'HOME_STATS_LENGTH': (int, 'General', 30),
@@ -185,6 +202,7 @@ _CONFIG_DEFINITIONS = {
'HTTPS_KEY': (str, 'General', ''),
'HTTPS_DOMAIN': (str, 'General', 'localhost'),
'HTTPS_IP': (str, 'General', '127.0.0.1'),
'HTTP_BASIC_AUTH': (int, 'General', 0),
'HTTP_ENVIRONMENT': (str, 'General', 'production'),
'HTTP_HASH_PASSWORD': (int, 'General', 0),
'HTTP_HASHED_PASSWORD': (int, 'General', 0),
@@ -194,6 +212,27 @@ _CONFIG_DEFINITIONS = {
'HTTP_PROXY': (int, 'General', 0),
'HTTP_ROOT': (str, 'General', ''),
'HTTP_USERNAME': (str, 'General', ''),
'HIPCHAT_URL': (str, 'Hipchat', ''),
'HIPCHAT_COLOR': (str, 'Hipchat', ''),
'HIPCHAT_INCL_SUBJECT': (int, 'Hipchat', 1),
'HIPCHAT_INCL_PMSLINK': (int, 'Hipchat', 0),
'HIPCHAT_INCL_POSTER': (int, 'Hipchat', 0),
'HIPCHAT_EMOTICON': (str, 'Hipchat', ''),
'HIPCHAT_ENABLED': (int, 'Hipchat', 0),
'HIPCHAT_ON_PLAY': (int, 'Hipchat', 0),
'HIPCHAT_ON_STOP': (int, 'Hipchat', 0),
'HIPCHAT_ON_PAUSE': (int, 'Hipchat', 0),
'HIPCHAT_ON_RESUME': (int, 'Hipchat', 0),
'HIPCHAT_ON_BUFFER': (int, 'Hipchat', 0),
'HIPCHAT_ON_WATCHED': (int, 'Hipchat', 0),
'HIPCHAT_ON_CREATED': (int, 'Hipchat', 0),
'HIPCHAT_ON_EXTDOWN': (int, 'Hipchat', 0),
'HIPCHAT_ON_INTDOWN': (int, 'Hipchat', 0),
'HIPCHAT_ON_EXTUP': (int, 'Hipchat', 0),
'HIPCHAT_ON_INTUP': (int, 'Hipchat', 0),
'HIPCHAT_ON_PMSUPDATE': (int, 'Hipchat', 0),
'HIPCHAT_ON_CONCURRENT': (int, 'Hipchat', 0),
'HIPCHAT_ON_NEWDEVICE': (int, 'Hipchat', 0),
'INTERFACE': (str, 'General', 'default'),
'IP_LOGGING_ENABLE': (int, 'General', 0),
'IFTTT_KEY': (str, 'IFTTT', ''),
@@ -211,10 +250,13 @@ _CONFIG_DEFINITIONS = {
'IFTTT_ON_EXTUP': (int, 'IFTTT', 0),
'IFTTT_ON_INTUP': (int, 'IFTTT', 0),
'IFTTT_ON_PMSUPDATE': (int, 'IFTTT', 0),
'IFTTT_ON_CONCURRENT': (int, 'IFTTT', 0),
'IFTTT_ON_NEWDEVICE': (int, 'IFTTT', 0),
'IMGUR_CLIENT_ID': (str, 'Monitoring', ''),
'JOIN_APIKEY': (str, 'Join', ''),
'JOIN_DEVICEID': (str, 'Join', ''),
'JOIN_ENABLED': (int, 'Join', 0),
'JOIN_INCL_SUBJECT': (int, 'Join', 1),
'JOIN_ON_PLAY': (int, 'Join', 0),
'JOIN_ON_STOP': (int, 'Join', 0),
'JOIN_ON_PAUSE': (int, 'Join', 0),
@@ -227,6 +269,8 @@ _CONFIG_DEFINITIONS = {
'JOIN_ON_EXTUP': (int, 'Join', 0),
'JOIN_ON_INTUP': (int, 'Join', 0),
'JOIN_ON_PMSUPDATE': (int, 'Join', 0),
'JOIN_ON_CONCURRENT': (int, 'Join', 0),
'JOIN_ON_NEWDEVICE': (int, 'Join', 0),
'JOURNAL_MODE': (str, 'Advanced', 'wal'),
'LAUNCH_BROWSER': (int, 'General', 1),
'LOG_BLACKLIST': (int, 'General', 1),
@@ -261,11 +305,15 @@ _CONFIG_DEFINITIONS = {
'NMA_ON_EXTUP': (int, 'NMA', 0),
'NMA_ON_INTUP': (int, 'NMA', 0),
'NMA_ON_PMSUPDATE': (int, 'NMA', 0),
'NMA_ON_CONCURRENT': (int, 'NMA', 0),
'NMA_ON_NEWDEVICE': (int, 'NMA', 0),
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
'NOTIFY_UPLOAD_POSTERS': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_DELAY': (int, 'Monitoring', 60),
'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0),
'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
'NOTIFY_WATCHED_PERCENT': (int, 'Monitoring', 85),
'NOTIFY_ON_START_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
'NOTIFY_ON_START_BODY_TEXT': (unicode, 'Monitoring', '{user} ({player}) started playing {title}.'),
@@ -291,6 +339,10 @@ _CONFIG_DEFINITIONS = {
'NOTIFY_ON_INTUP_BODY_TEXT': (unicode, 'Monitoring', 'The Plex Media Server is back up.'),
'NOTIFY_ON_PMSUPDATE_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
'NOTIFY_ON_PMSUPDATE_BODY_TEXT': (unicode, 'Monitoring', 'An update is available for the Plex Media Server (version {update_version}).'),
'NOTIFY_ON_CONCURRENT_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
'NOTIFY_ON_CONCURRENT_BODY_TEXT': (unicode, 'Monitoring', '{user} has {user_streams} concurrent streams.'),
'NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
'NOTIFY_ON_NEWDEVICE_BODY_TEXT': (unicode, 'Monitoring', '{user} is streaming from a new device: {player}.'),
'NOTIFY_SCRIPTS_ARGS_TEXT': (unicode, 'Monitoring', ''),
'OSX_NOTIFY_APP': (str, 'OSX_Notify', '/Applications/PlexPy'),
'OSX_NOTIFY_ENABLED': (int, 'OSX_Notify', 0),
@@ -306,6 +358,8 @@ _CONFIG_DEFINITIONS = {
'OSX_NOTIFY_ON_EXTUP': (int, 'OSX_Notify', 0),
'OSX_NOTIFY_ON_INTUP': (int, 'OSX_Notify', 0),
'OSX_NOTIFY_ON_PMSUPDATE': (int, 'OSX_Notify', 0),
'OSX_NOTIFY_ON_CONCURRENT': (int, 'OSX_Notify', 0),
'OSX_NOTIFY_ON_NEWDEVICE': (int, 'OSX_Notify', 0),
'PLEX_CLIENT_HOST': (str, 'Plex', ''),
'PLEX_ENABLED': (int, 'Plex', 0),
'PLEX_PASSWORD': (str, 'Plex', ''),
@@ -322,6 +376,8 @@ _CONFIG_DEFINITIONS = {
'PLEX_ON_EXTUP': (int, 'Plex', 0),
'PLEX_ON_INTUP': (int, 'Plex', 0),
'PLEX_ON_PMSUPDATE': (int, 'Plex', 0),
'PLEX_ON_CONCURRENT': (int, 'Plex', 0),
'PLEX_ON_NEWDEVICE': (int, 'Plex', 0),
'PROWL_ENABLED': (int, 'Prowl', 0),
'PROWL_KEYS': (str, 'Prowl', ''),
'PROWL_PRIORITY': (int, 'Prowl', 0),
@@ -337,6 +393,8 @@ _CONFIG_DEFINITIONS = {
'PROWL_ON_EXTUP': (int, 'Prowl', 0),
'PROWL_ON_INTUP': (int, 'Prowl', 0),
'PROWL_ON_PMSUPDATE': (int, 'Prowl', 0),
'PROWL_ON_CONCURRENT': (int, 'Prowl', 0),
'PROWL_ON_NEWDEVICE': (int, 'Prowl', 0),
'PUSHALOT_APIKEY': (str, 'Pushalot', ''),
'PUSHALOT_ENABLED': (int, 'Pushalot', 0),
'PUSHALOT_ON_PLAY': (int, 'Pushalot', 0),
@@ -351,6 +409,8 @@ _CONFIG_DEFINITIONS = {
'PUSHALOT_ON_EXTUP': (int, 'Pushalot', 0),
'PUSHALOT_ON_INTUP': (int, 'Pushalot', 0),
'PUSHALOT_ON_PMSUPDATE': (int, 'Pushalot', 0),
'PUSHALOT_ON_CONCURRENT': (int, 'Pushalot', 0),
'PUSHALOT_ON_NEWDEVICE': (int, 'Pushalot', 0),
'PUSHBULLET_APIKEY': (str, 'PushBullet', ''),
'PUSHBULLET_DEVICEID': (str, 'PushBullet', ''),
'PUSHBULLET_CHANNEL_TAG': (str, 'PushBullet', ''),
@@ -367,6 +427,8 @@ _CONFIG_DEFINITIONS = {
'PUSHBULLET_ON_EXTUP': (int, 'PushBullet', 0),
'PUSHBULLET_ON_INTUP': (int, 'PushBullet', 0),
'PUSHBULLET_ON_PMSUPDATE': (int, 'PushBullet', 0),
'PUSHBULLET_ON_CONCURRENT': (int, 'PushBullet', 0),
'PUSHBULLET_ON_NEWDEVICE': (int, 'PushBullet', 0),
'PUSHOVER_APITOKEN': (str, 'Pushover', ''),
'PUSHOVER_ENABLED': (int, 'Pushover', 0),
'PUSHOVER_HTML_SUPPORT': (int, 'Pushover', 1),
@@ -385,6 +447,8 @@ _CONFIG_DEFINITIONS = {
'PUSHOVER_ON_EXTUP': (int, 'Pushover', 0),
'PUSHOVER_ON_INTUP': (int, 'Pushover', 0),
'PUSHOVER_ON_PMSUPDATE': (int, 'Pushover', 0),
'PUSHOVER_ON_CONCURRENT': (int, 'Pushover', 0),
'PUSHOVER_ON_NEWDEVICE': (int, 'Pushover', 0),
'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12),
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
@@ -410,6 +474,8 @@ _CONFIG_DEFINITIONS = {
'SLACK_ON_EXTUP': (int, 'Slack', 0),
'SLACK_ON_INTUP': (int, 'Slack', 0),
'SLACK_ON_PMSUPDATE': (int, 'Slack', 0),
'SLACK_ON_CONCURRENT': (int, 'Slack', 0),
'SLACK_ON_NEWDEVICE': (int, 'Slack', 0),
'SCRIPTS_ENABLED': (int, 'Scripts', 0),
'SCRIPTS_FOLDER': (unicode, 'Scripts', ''),
'SCRIPTS_ON_PLAY': (int, 'Scripts', 0),
@@ -424,6 +490,8 @@ _CONFIG_DEFINITIONS = {
'SCRIPTS_ON_INTDOWN': (int, 'Scripts', 0),
'SCRIPTS_ON_INTUP': (int, 'Scripts', 0),
'SCRIPTS_ON_PMSUPDATE': (int, 'Scripts', 0),
'SCRIPTS_ON_CONCURRENT': (int, 'Scripts', 0),
'SCRIPTS_ON_NEWDEVICE': (int, 'Scripts', 0),
'SCRIPTS_ON_PLAY_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_STOP_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_PAUSE_SCRIPT': (unicode, 'Scripts', ''),
@@ -436,6 +504,8 @@ _CONFIG_DEFINITIONS = {
'SCRIPTS_ON_INTDOWN_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_INTUP_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_PMSUPDATE_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_CONCURRENT_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_NEWDEVICE_SCRIPT': (unicode, 'Scripts', ''),
'TELEGRAM_BOT_TOKEN': (str, 'Telegram', ''),
'TELEGRAM_ENABLED': (int, 'Telegram', 0),
'TELEGRAM_CHAT_ID': (str, 'Telegram', ''),
@@ -454,6 +524,8 @@ _CONFIG_DEFINITIONS = {
'TELEGRAM_ON_EXTUP': (int, 'Telegram', 0),
'TELEGRAM_ON_INTUP': (int, 'Telegram', 0),
'TELEGRAM_ON_PMSUPDATE': (int, 'Telegram', 0),
'TELEGRAM_ON_CONCURRENT': (int, 'Telegram', 0),
'TELEGRAM_ON_NEWDEVICE': (int, 'Telegram', 0),
'TV_LOGGING_ENABLE': (int, 'Monitoring', 1),
'TV_NOTIFY_ENABLE': (int, 'Monitoring', 0),
'TV_NOTIFY_ON_START': (int, 'Monitoring', 1),
@@ -478,6 +550,8 @@ _CONFIG_DEFINITIONS = {
'TWITTER_ON_EXTUP': (int, 'Twitter', 0),
'TWITTER_ON_INTUP': (int, 'Twitter', 0),
'TWITTER_ON_PMSUPDATE': (int, 'Twitter', 0),
'TWITTER_ON_CONCURRENT': (int, 'Twitter', 0),
'TWITTER_ON_NEWDEVICE': (int, 'Twitter', 0),
'UPDATE_DB_INTERVAL': (int, 'General', 24),
'UPDATE_SECTION_IDS': (int, 'General', 1),
'UPDATE_LABELS': (int, 'General', 1),
@@ -498,7 +572,9 @@ _CONFIG_DEFINITIONS = {
'XBMC_ON_INTDOWN': (int, 'XBMC', 0),
'XBMC_ON_EXTUP': (int, 'XBMC', 0),
'XBMC_ON_INTUP': (int, 'XBMC', 0),
'XBMC_ON_PMSUPDATE': (int, 'XBMC', 0)
'XBMC_ON_PMSUPDATE': (int, 'XBMC', 0),
'XBMC_ON_CONCURRENT': (int, 'XBMC', 0),
'XBMC_ON_NEWDEVICE': (int, 'XBMC', 0)
}

_BLACKLIST_KEYS = ['_APITOKEN', '_TOKEN', '_KEY', '_SECRET', '_PASSWORD', '_APIKEY', '_ID']
@@ -717,4 +793,8 @@ class Config(object):
home_sections = self.HOME_SECTIONS
home_sections.remove('library_stats')
self.HOME_SECTIONS = home_sections
self.CONFIG_VERSION = '5'
self.CONFIG_VERSION = '5'

if self.CONFIG_VERSION == '5':
    self.MONITOR_PMS_UPDATES = 0
    self.CONFIG_VERSION = '6'
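Every `_CONFIG_DEFINITIONS` entry added above follows the same `(type, section, default)` tuple shape the rest of the file uses. A minimal sketch of how such a table can be consumed; this is an illustration only, not PlexPy's actual `Config` class or `check_setting` helper:

```python
_DEFS = {
    'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
    'PMS_UPDATE_CHANNEL': (str, 'PMS', 'public'),
}

def check_setting(config, key):
    # Coerce the stored value to the declared type, falling back to the
    # default when the section/key is missing or malformed.
    kind, section, default = _DEFS[key]
    try:
        return kind(config.get(section, {}).get(key, default))
    except (TypeError, ValueError):
        return default

print(check_setting({'Monitoring': {'NOTIFY_CONCURRENT_THRESHOLD': '3'}},
                    'NOTIFY_CONCURRENT_THRESHOLD'))  # 3
```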
@@ -69,8 +69,8 @@ class DataFactory(object):
'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter',
'session_history.user_id',
'session_history.user',
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) \
 AS friendly_name',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
 THEN users.username ELSE users.friendly_name END) AS friendly_name',
'platform',
'player',
'ip_address',
@@ -198,7 +198,7 @@ class DataFactory(object):
top_tv = []
try:
    query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, t.section_id, ' \
            't.media_type, t.content_rating, t.labels, ' \
            't.media_type, t.content_rating, t.labels, t.started, ' \
            'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
            'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
            ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
@@ -210,7 +210,7 @@ class DataFactory(object):
            ' AND session_history.media_type = "episode" ' \
            ' GROUP BY %s) AS t ' \
            'GROUP BY t.grandparent_title ' \
            'ORDER BY %s DESC ' \
            'ORDER BY %s DESC, started DESC ' \
            'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
    result = monitor_db.select(query)
except Exception as e:
@@ -246,7 +246,7 @@ class DataFactory(object):
popular_tv = []
try:
    query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, t.section_id, ' \
            't.media_type, t.content_rating, t.labels, ' \
            't.media_type, t.content_rating, t.labels, t.started, ' \
            'COUNT(DISTINCT t.user_id) AS users_watched, ' \
            'MAX(t.started) AS last_watch, COUNT(t.id) as total_plays, SUM(t.d) AS total_duration ' \
            'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
@@ -259,7 +259,7 @@ class DataFactory(object):
            ' AND session_history.media_type = "episode" ' \
            ' GROUP BY %s) AS t ' \
            'GROUP BY t.grandparent_title ' \
            'ORDER BY users_watched DESC, %s DESC ' \
            'ORDER BY users_watched DESC, %s DESC, started DESC ' \
            'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
    result = monitor_db.select(query)
except Exception as e:
@@ -293,7 +293,7 @@ class DataFactory(object):
top_movies = []
try:
    query = 'SELECT t.id, t.full_title, t.rating_key, t.thumb, t.section_id, ' \
            't.media_type, t.content_rating, t.labels, ' \
            't.media_type, t.content_rating, t.labels, t.started, ' \
            'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
            'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
            ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
@@ -305,7 +305,7 @@ class DataFactory(object):
            ' AND session_history.media_type = "movie" ' \
            ' GROUP BY %s) AS t ' \
            'GROUP BY t.full_title ' \
            'ORDER BY %s DESC ' \
            'ORDER BY %s DESC, started DESC ' \
            'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
    result = monitor_db.select(query)
except Exception as e:
@@ -341,7 +341,7 @@ class DataFactory(object):
popular_movies = []
try:
    query = 'SELECT t.id, t.full_title, t.rating_key, t.thumb, t.section_id, ' \
            't.media_type, t.content_rating, t.labels, ' \
            't.media_type, t.content_rating, t.labels, t.started, ' \
            'COUNT(DISTINCT t.user_id) AS users_watched, ' \
            'MAX(t.started) AS last_watch, COUNT(t.id) as total_plays, SUM(t.d) AS total_duration ' \
            'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
@@ -354,7 +354,7 @@ class DataFactory(object):
            ' AND session_history.media_type = "movie" ' \
            ' GROUP BY %s) AS t ' \
            'GROUP BY t.full_title ' \
            'ORDER BY users_watched DESC, %s DESC ' \
            'ORDER BY users_watched DESC, %s DESC, started DESC ' \
            'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
    result = monitor_db.select(query)
except Exception as e:
@@ -388,7 +388,7 @@ class DataFactory(object):
top_music = []
try:
    query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, t.section_id, ' \
            't.media_type, t.content_rating, t.labels, ' \
            't.media_type, t.content_rating, t.labels, t.started, ' \
            'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
            'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
            ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
@@ -400,7 +400,7 @@ class DataFactory(object):
            ' AND session_history.media_type = "track" ' \
            ' GROUP BY %s) AS t ' \
            'GROUP BY t.grandparent_title ' \
            'ORDER BY %s DESC ' \
            'ORDER BY %s DESC, started DESC ' \
            'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
    result = monitor_db.select(query)
except Exception as e:
@@ -436,7 +436,7 @@ class DataFactory(object):
popular_music = []
try:
    query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, t.section_id, ' \
            't.media_type, t.content_rating, t.labels, ' \
            't.media_type, t.content_rating, t.labels, t.started, ' \
            'COUNT(DISTINCT t.user_id) AS users_watched, ' \
            'MAX(t.started) AS last_watch, COUNT(t.id) as total_plays, SUM(t.d) AS total_duration ' \
            'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
@@ -449,7 +449,7 @@ class DataFactory(object):
            ' AND session_history.media_type = "track" ' \
            ' GROUP BY %s) AS t ' \
            'GROUP BY t.grandparent_title ' \
            'ORDER BY users_watched DESC, %s DESC ' \
            'ORDER BY users_watched DESC, %s DESC, started DESC ' \
            'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
    result = monitor_db.select(query)
except Exception as e:
@@ -482,7 +482,7 @@ class DataFactory(object):
elif stat == 'top_users':
    top_users = []
    try:
        query = 'SELECT t.user, t.user_id, t.user_thumb, t.custom_thumb, ' \
        query = 'SELECT t.user, t.user_id, t.user_thumb, t.custom_thumb, t.started, ' \
                '(CASE WHEN t.friendly_name IS NULL THEN t.username ELSE t.friendly_name END) ' \
                ' AS friendly_name, ' \
                'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
@@ -496,7 +496,7 @@ class DataFactory(object):
                ' >= datetime("now", "-%s days", "localtime") ' \
                ' GROUP BY %s) AS t ' \
                'GROUP BY t.user_id ' \
                'ORDER BY %s DESC ' \
                'ORDER BY %s DESC, started DESC ' \
                'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
        result = monitor_db.select(query)
    except Exception as e:
@@ -536,7 +536,7 @@ class DataFactory(object):
top_platform = []

try:
    query = 'SELECT t.platform, ' \
    query = 'SELECT t.platform, t.started, ' \
            'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
            'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
            ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
@@ -547,7 +547,7 @@ class DataFactory(object):
            ' >= datetime("now", "-%s days", "localtime") ' \
            ' GROUP BY %s) AS t ' \
            'GROUP BY t.platform ' \
            'ORDER BY %s DESC ' \
            'ORDER BY %s DESC, started DESC ' \
            'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
    result = monitor_db.select(query)
except Exception as e:
@@ -1306,4 +1306,19 @@ class DataFactory(object):
        return True
    except Exception as e:
        logger.warn(u"PlexPy DataFactory :: Unable to execute database query for delete_notification_log: %s." % e)
        return False
    return False

def get_user_devices(self, user_id=''):
    monitor_db = database.MonitorDatabase()

    if user_id:
        try:
            query = 'SELECT machine_id FROM session_history WHERE user_id = ? GROUP BY machine_id'
            result = monitor_db.select(query=query, args=[user_id])
        except Exception as e:
            logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_user_devices: %s." % e)
            return []
    else:
        return []

    return [d['machine_id'] for d in result]
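`get_user_devices()` relies on `GROUP BY machine_id` to return one row per device the user has ever played from, which the new-device check then treats as a simple membership test. A small sqlite3 sketch of that de-duplication against a simplified schema (illustrative only):

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE session_history (user_id INTEGER, machine_id TEXT)')
conn.executemany('INSERT INTO session_history VALUES (?, ?)',
                 [(1, 'roku-abc'), (1, 'roku-abc'), (1, 'chrome-xyz')])

def get_user_devices(user_id):
    # One row per device the user has played from, mirroring the helper.
    rows = conn.execute('SELECT machine_id FROM session_history '
                        'WHERE user_id = ? GROUP BY machine_id', [user_id])
    return [r[0] for r in rows]

devices = get_user_devices(1)
print(devices)                       # ['chrome-xyz', 'roku-abc']
print('shield-123' not in devices)   # True -> would trigger a newdevice notification
```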
@@ -463,7 +463,8 @@ class Graphs(object):
if y_axis == 'plays':
    query = 'SELECT ' \
            'users.user_id, users.username, ' \
            '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
            '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
            ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
            'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \
            'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \
            'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count, ' \
@@ -479,7 +480,8 @@ class Graphs(object):
else:
    query = 'SELECT ' \
            'users.user_id, users.username, ' \
            '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
            '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
            ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
            'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \
            ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \
            'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \
@@ -904,7 +906,8 @@ class Graphs(object):
if y_axis == 'plays':
    query = 'SELECT ' \
            'users.user_id, users.username, ' \
            '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
            '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
            ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
            'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
            'THEN 1 ELSE 0 END) AS dp_count, ' \
            'SUM(CASE WHEN session_history_media_info.transcode_decision = "copy" ' \
@@ -925,7 +928,8 @@ class Graphs(object):
else:
    query = 'SELECT ' \
            'users.user_id, users.username, ' \
            '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
            '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
            ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
            'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
            'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \
            ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \
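The recurring query change in this commit swaps the plain `IS NULL` check for one that also treats an all-whitespace `friendly_name` as unset. In Python terms the new `CASE WHEN ... IS NULL OR TRIM(...) = ""` expression behaves roughly like this fallback:

```python
def display_name(username, friendly_name):
    # Fall back to the username when the friendly name is missing or blank,
    # matching the updated SQL CASE expression.
    if friendly_name is None or not friendly_name.strip():
        return username
    return friendly_name

print(display_name('plexuser', None))    # plexuser
print(display_name('plexuser', '   '))   # plexuser
print(display_name('plexuser', 'Bob'))   # Bob
```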
@@ -16,11 +16,14 @@
import base64
import datetime
from functools import wraps
import geoip2.database, geoip2.errors
import gzip
import hashlib
import imghdr
from IPy import IP
import json
import math
import maxminddb
from operator import itemgetter
import os
import re
@@ -514,6 +517,118 @@ def get_ip(host):

    return ip_address

def install_geoip_db():
    maxmind_url = 'http://geolite.maxmind.com/download/geoip/database/'
    geolite2_gz = 'GeoLite2-City.mmdb.gz'
    geolite2_md5 = 'GeoLite2-City.md5'
    geolite2_db = geolite2_gz[:-3]
    md5_checksum = ''

    temp_gz = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_gz)
    geolite2_db = plexpy.CONFIG.GEOIP_DB or os.path.join(plexpy.DATA_DIR, geolite2_db)

    # Retrieve the GeoLite2 gzip file
    logger.debug(u"PlexPy Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
    try:
        maxmind = urllib.URLopener()
        maxmind.retrieve(maxmind_url + geolite2_gz, temp_gz)
        md5_checksum = urllib2.urlopen(maxmind_url + geolite2_md5).read()
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s" % e)
        return False

    # Extract the GeoLite2 database file
    logger.debug(u"PlexPy Helpers :: Extracting GeoLite2 database...")
    try:
        with gzip.open(temp_gz, 'rb') as gz:
            with open(geolite2_db, 'wb') as db:
                db.write(gz.read())
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to extract the GeoLite2 database: %s" % e)
        return False

    # Check MD5 hash for GeoLite2 database file
    logger.debug(u"PlexPy Helpers :: Checking MD5 checksum for GeoLite2 database...")
    try:
        hash_md5 = hashlib.md5()
        with open(geolite2_db, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        md5_hash = hash_md5.hexdigest()

        if md5_hash != md5_checksum:
            logger.error(u"PlexPy Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
                         "Checksum: %s, file hash: %s" % (md5_checksum, md5_hash))
            return False
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to generate MD5 checksum for GeoLite2 database: %s" % e)
        return False

    # Delete temporary GeoLite2 gzip file
    logger.debug(u"PlexPy Helpers :: Deleting temporary GeoLite2 gzip file...")
    try:
        os.remove(temp_gz)
    except Exception as e:
        logger.warn(u"PlexPy Helpers :: Failed to remove temporary GeoLite2 gzip file: %s" % e)

    logger.debug(u"PlexPy Helpers :: GeoLite2 database installed successfully.")
    plexpy.CONFIG.__setattr__('GEOIP_DB', geolite2_db)
    plexpy.CONFIG.write()

    return True

def uninstall_geoip_db():
    logger.debug(u"PlexPy Helpers :: Uninstalling the GeoLite2 database...")
    try:
        os.remove(plexpy.CONFIG.GEOIP_DB)
        plexpy.CONFIG.__setattr__('GEOIP_DB', '')
        plexpy.CONFIG.write()
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to uninstall the GeoLite2 database: %s" % e)
        return False

    logger.debug(u"PlexPy Helpers :: GeoLite2 database uninstalled successfully.")
    return True

def geoip_lookup(ip_address):
    if not plexpy.CONFIG.GEOIP_DB:
        return 'GeoLite2 database not installed. Please install from the ' \
            '<a href="settings?install_geoip=true">Settings</a> page.'

    if not ip_address:
        return 'No IP address provided.'

    try:
        reader = geoip2.database.Reader(plexpy.CONFIG.GEOIP_DB)
        geo = reader.city(ip_address)
        reader.close()
    except IOError as e:
        return 'Missing GeoLite2 database. Please reinstall from the ' \
            '<a href="settings?install_geoip=true">Settings</a> page.'
    except ValueError as e:
        return 'Unable to read GeoLite2 database. Please reinstall from the ' \
            '<a href="settings?reinstall_geoip=true">Settings</a> page.'
    except maxminddb.InvalidDatabaseError as e:
        return 'Invalid GeoLite2 database. Please reinstall from the ' \
            '<a href="settings?reinstall_geoip=true">Settings</a> page.'
    except geoip2.errors.AddressNotFoundError as e:
        return '%s' % e
    except Exception as e:
        return 'Error: %s' % e

    geo_info = {'continent': geo.continent.name,
                'country': geo.country.name,
                'region': geo.subdivisions.most_specific.name,
                'city': geo.city.name,
                'postal_code': geo.postal.code,
                'timezone': geo.location.time_zone,
                'latitude': geo.location.latitude,
                'longitude': geo.location.longitude,
                'accuracy': geo.location.accuracy_radius
                }

    return geo_info

# Taken from SickRage
def anon_url(*url):
    """
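For reference, the new `geoip_lookup()` wraps the `geoip2` reader API. Assuming a local GeoLite2-City database file (the path below is a placeholder), the underlying calls look roughly like this:

```python
import geoip2.database
import geoip2.errors

def lookup(ip_address, db_path='GeoLite2-City.mmdb'):
    # The reader could be kept open and reused; PlexPy opens and closes it
    # per lookup, so this sketch does the same.
    reader = geoip2.database.Reader(db_path)
    try:
        geo = reader.city(ip_address)
        return {'country': geo.country.name,
                'city': geo.city.name,
                'latitude': geo.location.latitude,
                'longitude': geo.location.longitude}
    except geoip2.errors.AddressNotFoundError:
        return None  # private/unknown addresses are not in the database
    finally:
        reader.close()
```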
@@ -753,8 +753,9 @@ class Libraries(object):

try:
    if str(section_id).isdigit():
        query = 'SELECT (CASE WHEN users.friendly_name IS NULL THEN users.username ' \
                'ELSE users.friendly_name END) AS friendly_name, users.user_id, users.thumb, COUNT(user) AS user_count ' \
        query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
                'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
                'users.user_id, users.thumb, COUNT(user) AS user_count ' \
                'FROM session_history ' \
                'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
                'JOIN users ON users.user_id = session_history.user_id ' \
@@ -90,14 +90,14 @@ class PublicIPFilter(logging.Filter):

try:
    # Currently only checking for ipv4 addresses
    ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', record.msg)
    ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})', record.msg)
    for ip in ipv4:
        if helpers.is_ip_public(ip):
            record.msg = record.msg.replace(ip, ip.partition('.')[0] + '.***.***.***')

    args = []
    for arg in record.args:
        ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', arg) if isinstance(arg, basestring) else []
        ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})', arg) if isinstance(arg, basestring) else []
        for ip in ipv4:
            if helpers.is_ip_public(ip):
                arg = arg.replace(ip, ip.partition('.')[0] + '.***.***.***')
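The tightened regex adds a negative lookahead so that dotted version-style strings followed by a `-xxxxxx` suffix (for example `1.4.2.100-abc123`) are no longer mistaken for IPv4 addresses and masked. A small sketch of the pattern's effect; the public-IP check from the real filter is omitted here for brevity:

```python
import re

PATTERN = r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})'

def mask_ips(message):
    # Replace everything after the first octet, as the log filter does.
    for ip in re.findall(PATTERN, message):
        message = message.replace(ip, ip.partition('.')[0] + '.***.***.***')
    return message

print(mask_ips('request from 8.8.8.8 accepted'))
# request from 8.***.***.*** accepted
print(mask_ips('running version 1.4.2.100-abc123'))
# unchanged: the lookahead keeps version-like strings from being masked
```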
@@ -182,6 +182,46 @@ def notify(stream_data=None, notify_action=None):
                            notify_strings=notify_strings,
                            metadata=metadata)

        elif agent['on_concurrent'] and notify_action == 'concurrent':
            # Build and send notification
            notify_strings, metadata = build_notify_text(session=stream_data,
                                                         notify_action=notify_action,
                                                         agent_id=agent['id'])

            notifiers.send_notification(agent_id=agent['id'],
                                        subject=notify_strings[0],
                                        body=notify_strings[1],
                                        script_args=notify_strings[2],
                                        notify_action=notify_action,
                                        metadata=metadata)

            # Set the notification state in the db
            set_notify_state(session=stream_data,
                             notify_action=notify_action,
                             agent_info=agent,
                             notify_strings=notify_strings,
                             metadata=metadata)

        elif agent['on_newdevice'] and notify_action == 'newdevice':
            # Build and send notification
            notify_strings, metadata = build_notify_text(session=stream_data,
                                                         notify_action=notify_action,
                                                         agent_id=agent['id'])

            notifiers.send_notification(agent_id=agent['id'],
                                        subject=notify_strings[0],
                                        body=notify_strings[1],
                                        script_args=notify_strings[2],
                                        notify_action=notify_action,
                                        metadata=metadata)

            # Set the notification state in the db
            set_notify_state(session=stream_data,
                             notify_action=notify_action,
                             agent_info=agent,
                             notify_strings=notify_strings,
                             metadata=metadata)

elif (stream_data['media_type'] == 'track' and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE):

    for agent in notifiers.available_notification_agents():
@@ -285,6 +325,46 @@ def notify(stream_data=None, notify_action=None):
                            notify_strings=notify_strings,
                            metadata=metadata)

        elif agent['on_concurrent'] and notify_action == 'concurrent':
            # Build and send notification
            notify_strings, metadata = build_notify_text(session=stream_data,
                                                         notify_action=notify_action,
                                                         agent_id=agent['id'])

            notifiers.send_notification(agent_id=agent['id'],
                                        subject=notify_strings[0],
                                        body=notify_strings[1],
                                        script_args=notify_strings[2],
                                        notify_action=notify_action,
                                        metadata=metadata)

            # Set the notification state in the db
            set_notify_state(session=stream_data,
                             notify_action=notify_action,
                             agent_info=agent,
                             notify_strings=notify_strings,
                             metadata=metadata)

        elif agent['on_newdevice'] and notify_action == 'newdevice':
            # Build and send notification
            notify_strings, metadata = build_notify_text(session=stream_data,
                                                         notify_action=notify_action,
                                                         agent_id=agent['id'])

            notifiers.send_notification(agent_id=agent['id'],
                                        subject=notify_strings[0],
                                        body=notify_strings[1],
                                        script_args=notify_strings[2],
                                        notify_action=notify_action,
                                        metadata=metadata)

            # Set the notification state in the db
            set_notify_state(session=stream_data,
                             notify_action=notify_action,
                             agent_info=agent,
                             notify_strings=notify_strings,
                             metadata=metadata)

elif stream_data['media_type'] == 'clip':
    pass
else:
@@ -485,7 +565,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
pms_connect = pmsconnect.PmsConnect()
metadata_list = pms_connect.get_metadata_details(rating_key=rating_key)

stream_count = pms_connect.get_current_activity().get('stream_count', '')
current_activity = pms_connect.get_current_activity()
sessions = current_activity.get('sessions', [])
stream_count = current_activity.get('stream_count', '')
user_stream_count = sum(1 for d in sessions if d['user_id'] == session['user_id']) if session else ''

if metadata_list:
    metadata = metadata_list['metadata']
@@ -525,6 +608,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
on_watched_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT), agent_id)
on_created_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT), agent_id)
on_created_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT), agent_id)
on_concurrent_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT), agent_id)
on_concurrent_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT), agent_id)
on_newdevice_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT), agent_id)
on_newdevice_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT), agent_id)
script_args_text = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT), agent_id)
else:
on_start_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT, agent_id)
@@ -541,6 +628,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
on_watched_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT, agent_id)
on_created_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT, agent_id)
on_created_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT, agent_id)
on_concurrent_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT, agent_id)
on_concurrent_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT, agent_id)
on_newdevice_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT, agent_id)
on_newdevice_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT, agent_id)
script_args_text = strip_tag(plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT, agent_id)

# Create a title
@@ -624,7 +715,7 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
else:
    thumb = None

if thumb:
if plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS and thumb:
    # Try to retrieve a poster_url from the database
    data_factory = datafactory.DataFactory()
    poster_url = data_factory.get_poster_url(rating_key=poster_key)
@@ -676,6 +767,7 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
'timestamp': arrow.now().format(time_format),
# Stream parameters
'streams': stream_count,
'user_streams': user_stream_count,
'user': session.get('friendly_name',''),
'username': session.get('user',''),
'platform': session.get('platform',''),
@@ -727,6 +819,16 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
'track_num': metadata['media_index'].zfill(1),
'track_num00': metadata['media_index'].zfill(2),
'year': metadata['year'],
'release_date': arrow.get(metadata['originally_available_at']).format(date_format)
    if metadata['originally_available_at'] else '',
'air_date': arrow.get(metadata['originally_available_at']).format(date_format)
    if metadata['originally_available_at'] else '',
'added_date': arrow.get(metadata['added_at']).format(date_format)
    if metadata['added_at'] else '',
'updated_date': arrow.get(metadata['updated_at']).format(date_format)
    if metadata['updated_at'] else '',
'last_viewed_date': arrow.get(metadata['last_viewed_at']).format(date_format)
    if metadata['last_viewed_at'] else '',
'studio': metadata['studio'],
'content_rating': metadata['content_rating'],
'directors': ', '.join(metadata['directors']),
@@ -930,6 +1032,52 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
    except:
        logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")

    return [subject_text, body_text, script_args], metadata
else:
    return [subject_text, body_text, script_args], metadata
elif notify_action == 'concurrent':
    # Default body text
    body_text = '%s has %s concurrent streams.' % (session['friendly_name'],
                                                   user_stream_count)

    if on_concurrent_subject and on_concurrent_body:
        try:
            subject_text = unicode(on_concurrent_subject).format(**available_params)
        except LookupError as e:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e)
        except:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.")

        try:
            body_text = unicode(on_concurrent_body).format(**available_params)
        except LookupError as e:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e)
        except:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")

        return [subject_text, body_text, script_args], metadata
    else:
        return [subject_text, body_text, script_args], metadata
elif notify_action == 'newdevice':
    # Default body text
    body_text = '%s is streaming from a new device: %s.' % (session['friendly_name'],
                                                            session['player'])

    if on_newdevice_subject and on_newdevice_body:
        try:
            subject_text = unicode(on_newdevice_subject).format(**available_params)
        except LookupError as e:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e)
        except:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.")

        try:
            body_text = unicode(on_newdevice_body).format(**available_params)
        except LookupError as e:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e)
        except:
            logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")

        return [subject_text, body_text, script_args], metadata
    else:
        return [subject_text, body_text, script_args], metadata
@@ -951,8 +1099,7 @@ def build_server_notify_text(notify_action=None, agent_id=None):

update_status = {}
if notify_action == 'pmsupdate':
    pms_connect = pmsconnect.PmsConnect()
    update_status = pms_connect.get_update_staus()
    update_status = plex_tv.get_plex_downloads()

if server_times:
    updated_at = server_times['updated_at']
@@ -985,7 +1132,16 @@ def build_server_notify_text(notify_action=None, agent_id=None):
# Update parameters
'update_version': update_status.get('version',''),
'update_url': update_status.get('download_url',''),
'update_changelog': update_status.get('changelog','')}
'update_release_date': arrow.get(update_status.get('release_date','')).format(date_format)
    if update_status.get('release_date','') else '',
'update_channel': 'Plex Pass' if plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plexpass' else 'Public',
'update_platform': update_status.get('platform',''),
'update_distro': update_status.get('distro',''),
'update_distro_build': update_status.get('build',''),
'update_requirements': update_status.get('requirements',''),
'update_extra_info': update_status.get('extra_info',''),
'update_changelog_added': update_status.get('changelog_added',''),
'update_changelog_fixed': update_status.get('changelog_fixed','')}

# Default text
subject_text = 'PlexPy (%s)' % server_name
@@ -1136,10 +1292,10 @@ def strip_tag(data, agent_id=None):
elif agent_id == 13:
    # Allow tags b, i, code, pre, a[href] for Telegram
    whitelist = {'b': [],
                 'i': [],
                 'code': [],
                 'pre': [],
                 'a': ['href']}
                 'i': [],
                 'code': [],
                 'pre': [],
                 'a': ['href']}
    return bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)

else:
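The `strip_tag()` hunk above only re-indents the Telegram whitelist; the sanitising call itself is unchanged `bleach.clean` with a tag and attribute whitelist. A small sketch of that call, assuming the `bleach` package is installed (output shown is approximate and may vary slightly between bleach versions):

```python
import bleach

# Tags allowed for Telegram notifications; everything else is stripped.
whitelist = {'b': [], 'i': [], 'code': [], 'pre': [], 'a': ['href']}

text = '<b>Now playing</b> <script>x()</script> <a href="u" onclick="y">link</a>'
print(bleach.clean(text, tags=list(whitelist.keys()),
                   attributes=whitelist, strip=True))
# roughly: <b>Now playing</b> x() <a href="u">link</a>
```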
@@ -31,6 +31,7 @@ import urllib
from urllib import urlencode
import urllib2
from urlparse import urlparse
import uuid

import gntp.notifier
import facebook
@@ -62,7 +63,8 @@ AGENT_IDS = {"Growl": 0,
             "Scripts": 15,
             "Facebook": 16,
             "Browser": 17,
             "Join": 18}
             "Join": 18,
             "Hipchat": 19}


def available_notification_agents():
@@ -82,7 +84,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.GROWL_ON_INTDOWN,
'on_extup': plexpy.CONFIG.GROWL_ON_EXTUP,
'on_intup': plexpy.CONFIG.GROWL_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.GROWL_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.GROWL_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.GROWL_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.GROWL_ON_NEWDEVICE
},
{'name': 'Prowl',
 'id': AGENT_IDS['Prowl'],
@@ -100,7 +104,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PROWL_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PROWL_ON_EXTUP,
'on_intup': plexpy.CONFIG.PROWL_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.PROWL_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.PROWL_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.PROWL_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.PROWL_ON_NEWDEVICE
},
{'name': 'XBMC',
 'id': AGENT_IDS['XBMC'],
@@ -118,7 +124,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.XBMC_ON_INTDOWN,
'on_extup': plexpy.CONFIG.XBMC_ON_EXTUP,
'on_intup': plexpy.CONFIG.XBMC_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.XBMC_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.XBMC_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.XBMC_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.XBMC_ON_NEWDEVICE
},
{'name': 'Plex Home Theater',
 'id': AGENT_IDS['Plex'],
@@ -136,7 +144,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PLEX_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PLEX_ON_EXTUP,
'on_intup': plexpy.CONFIG.PLEX_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.PLEX_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.PLEX_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.PLEX_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.PLEX_ON_NEWDEVICE
},
{'name': 'NotifyMyAndroid',
 'id': AGENT_IDS['NMA'],
@@ -154,7 +164,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.NMA_ON_INTDOWN,
'on_extup': plexpy.CONFIG.NMA_ON_EXTUP,
'on_intup': plexpy.CONFIG.NMA_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.NMA_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.NMA_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.NMA_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.NMA_ON_NEWDEVICE
},
{'name': 'Pushalot',
 'id': AGENT_IDS['Pushalot'],
@@ -172,7 +184,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PUSHALOT_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PUSHALOT_ON_EXTUP,
'on_intup': plexpy.CONFIG.PUSHALOT_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.PUSHALOT_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.PUSHALOT_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.PUSHALOT_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.PUSHALOT_ON_NEWDEVICE
},
{'name': 'Pushbullet',
 'id': AGENT_IDS['Pushbullet'],
@@ -190,7 +204,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PUSHBULLET_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PUSHBULLET_ON_EXTUP,
'on_intup': plexpy.CONFIG.PUSHBULLET_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.PUSHBULLET_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.PUSHBULLET_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.PUSHBULLET_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.PUSHBULLET_ON_NEWDEVICE
},
{'name': 'Pushover',
 'id': AGENT_IDS['Pushover'],
@@ -208,7 +224,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PUSHOVER_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PUSHOVER_ON_EXTUP,
'on_intup': plexpy.CONFIG.PUSHOVER_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.PUSHOVER_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.PUSHOVER_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.PUSHOVER_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.PUSHOVER_ON_NEWDEVICE
},
{'name': 'Boxcar2',
 'id': AGENT_IDS['Boxcar2'],
@@ -226,7 +244,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.BOXCAR_ON_INTDOWN,
'on_extup': plexpy.CONFIG.BOXCAR_ON_EXTUP,
'on_intup': plexpy.CONFIG.BOXCAR_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.BOXCAR_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.BOXCAR_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.BOXCAR_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.BOXCAR_ON_NEWDEVICE
},
{'name': 'E-mail',
 'id': AGENT_IDS['Email'],
@@ -244,7 +264,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.EMAIL_ON_INTDOWN,
'on_extup': plexpy.CONFIG.EMAIL_ON_EXTUP,
'on_intup': plexpy.CONFIG.EMAIL_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.EMAIL_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.EMAIL_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.EMAIL_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.EMAIL_ON_NEWDEVICE
},
{'name': 'Twitter',
 'id': AGENT_IDS['Twitter'],
@@ -262,7 +284,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.TWITTER_ON_INTDOWN,
'on_extup': plexpy.CONFIG.TWITTER_ON_EXTUP,
'on_intup': plexpy.CONFIG.TWITTER_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.TWITTER_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.TWITTER_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.TWITTER_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.TWITTER_ON_NEWDEVICE
},
{'name': 'IFTTT',
 'id': AGENT_IDS['IFTTT'],
@@ -280,7 +304,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.IFTTT_ON_INTDOWN,
'on_extup': plexpy.CONFIG.IFTTT_ON_EXTUP,
'on_intup': plexpy.CONFIG.IFTTT_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.IFTTT_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.IFTTT_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.IFTTT_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.IFTTT_ON_NEWDEVICE
},
{'name': 'Telegram',
 'id': AGENT_IDS['Telegram'],
@@ -298,7 +324,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.TELEGRAM_ON_INTDOWN,
'on_extup': plexpy.CONFIG.TELEGRAM_ON_EXTUP,
'on_intup': plexpy.CONFIG.TELEGRAM_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.TELEGRAM_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.TELEGRAM_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.TELEGRAM_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.TELEGRAM_ON_NEWDEVICE
},
{'name': 'Slack',
 'id': AGENT_IDS['Slack'],
@@ -316,7 +344,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.SLACK_ON_INTDOWN,
'on_extup': plexpy.CONFIG.SLACK_ON_EXTUP,
'on_intup': plexpy.CONFIG.SLACK_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.SLACK_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.SLACK_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.SLACK_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.SLACK_ON_NEWDEVICE
},
{'name': 'Scripts',
 'id': AGENT_IDS['Scripts'],
@@ -334,7 +364,9 @@ def available_notification_agents():
'on_extup': plexpy.CONFIG.SCRIPTS_ON_EXTUP,
'on_intdown': plexpy.CONFIG.SCRIPTS_ON_INTDOWN,
'on_intup': plexpy.CONFIG.SCRIPTS_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.SCRIPTS_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE
},
{'name': 'Facebook',
 'id': AGENT_IDS['Facebook'],
@@ -352,7 +384,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.FACEBOOK_ON_INTDOWN,
'on_extup': plexpy.CONFIG.FACEBOOK_ON_EXTUP,
'on_intup': plexpy.CONFIG.FACEBOOK_ON_INTUP,
'on_pmsupdate': plexpy.CONFIG.FACEBOOK_ON_PMSUPDATE
'on_pmsupdate': plexpy.CONFIG.FACEBOOK_ON_PMSUPDATE,
'on_concurrent': plexpy.CONFIG.FACEBOOK_ON_CONCURRENT,
'on_newdevice': plexpy.CONFIG.FACEBOOK_ON_NEWDEVICE
},
{'name': 'Browser',
 'id': AGENT_IDS['Browser'],
@@ -370,7 +404,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.BROWSER_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.BROWSER_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.BROWSER_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.BROWSER_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.BROWSER_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.BROWSER_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.BROWSER_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Join',
|
||||
'id': AGENT_IDS['Join'],
|
||||
@@ -388,7 +424,29 @@ def available_notification_agents():
|
||||
'on_intdown': plexpy.CONFIG.JOIN_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.JOIN_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.JOIN_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.JOIN_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.JOIN_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.JOIN_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.JOIN_ON_NEWDEVICE
|
||||
},
|
||||
               {'name': 'Hipchat',
                'id': AGENT_IDS['Hipchat'],
                'config_prefix': 'hipchat',
                'has_config': True,
                'state': checked(plexpy.CONFIG.HIPCHAT_ENABLED),
                'on_play': plexpy.CONFIG.HIPCHAT_ON_PLAY,
                'on_stop': plexpy.CONFIG.HIPCHAT_ON_STOP,
                'on_pause': plexpy.CONFIG.HIPCHAT_ON_PAUSE,
                'on_resume': plexpy.CONFIG.HIPCHAT_ON_RESUME,
                'on_buffer': plexpy.CONFIG.HIPCHAT_ON_BUFFER,
                'on_watched': plexpy.CONFIG.HIPCHAT_ON_WATCHED,
                'on_created': plexpy.CONFIG.HIPCHAT_ON_CREATED,
                'on_extdown': plexpy.CONFIG.HIPCHAT_ON_EXTDOWN,
                'on_intdown': plexpy.CONFIG.HIPCHAT_ON_INTDOWN,
                'on_extup': plexpy.CONFIG.HIPCHAT_ON_EXTUP,
                'on_intup': plexpy.CONFIG.HIPCHAT_ON_INTUP,
                'on_pmsupdate': plexpy.CONFIG.HIPCHAT_ON_PMSUPDATE,
                'on_concurrent': plexpy.CONFIG.HIPCHAT_ON_CONCURRENT,
                'on_newdevice': plexpy.CONFIG.HIPCHAT_ON_NEWDEVICE
                }
               ]

@@ -411,7 +469,9 @@ def available_notification_agents():
|
||||
'on_intdown': plexpy.CONFIG.OSX_NOTIFY_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.OSX_NOTIFY_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.OSX_NOTIFY_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.OSX_NOTIFY_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.OSX_NOTIFY_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.OSX_NOTIFY_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.OSX_NOTIFY_ON_NEWDEVICE
|
||||
})
|
||||
|
||||
return agents
|
||||
@@ -478,6 +538,9 @@ def get_notification_agent_config(agent_id):
        elif agent_id == 18:
            join = JOIN()
            return join.return_config_options()
        elif agent_id == 19:
            hipchat = HIPCHAT()
            return hipchat.return_config_options()
        else:
            return []
    else:
@@ -545,11 +608,80 @@ def send_notification(agent_id, subject, body, notify_action, **kwargs):
        elif agent_id == 18:
            join = JOIN()
            return join.notify(message=body, subject=subject)
        elif agent_id == 19:
            hipchat = HIPCHAT()
            return hipchat.notify(message=body, subject=subject, **kwargs)
        else:
            logger.debug(u"PlexPy Notifiers :: Unknown agent id received.")
    else:
        logger.debug(u"PlexPy Notifiers :: Notification requested but no agent id received.")

class PrettyMetadata(object):
    def __init__(self, metadata):
        self.metadata = metadata
        self.media_type = metadata['media_type']

    def get_poster_url(self):
        self.poster_url = self.metadata.get('poster_url','')
        if not self.poster_url:
            if self.metadata['media_type'] in ['artist', 'track']:
                self.poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
            else:
                self.poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
        return self.poster_url

    def get_poster_link(self):
        self.poster_link = ''
        if self.metadata.get('thetvdb_url',''):
            self.poster_link = self.metadata.get('thetvdb_url', '')
        elif self.metadata.get('themoviedb_url',''):
            self.poster_link = self.metadata.get('themoviedb_url', '')
        elif self.metadata.get('imdb_url',''):
            self.poster_link = self.metadata.get('imdb_url', '')
        elif self.metadata.get('lastfm_url',''):
            self.poster_link = self.metadata.get('lastfm_url', '')
        return self.poster_link

    def get_caption(self):
        self.caption = ''
        if self.metadata.get('thetvdb_url',''):
            self.caption = 'View on TheTVDB'
        elif self.metadata.get('themoviedb_url',''):
            self.caption = 'View on The Movie Database'
        elif self.metadata.get('imdb_url',''):
            self.caption = 'View on IMDB'
        elif self.metadata.get('lastfm_url',''):
            self.caption = 'View on Last.fm'
        return self.caption

    def get_title(self, divider = '-'):
        self.title = None
        if self.media_type == 'movie':
            self.title = '%s (%s)' % (self.metadata['title'], self.metadata['year'])
        elif self.media_type == 'show':
            self.title = '%s (%s)' % (self.metadata['title'], self.metadata['year'])
        elif self.media_type == 'artist':
            self.title = self.metadata['title']
        elif self.media_type == 'track':
            self.title = '%s - %s' % (self.metadata['grandparent_title'], self.metadata['title'])
        elif self.media_type == 'episode':
            self.title = '%s - %s (S%s %s E%s)' % (self.metadata['grandparent_title'],
                                                   self.metadata['title'],
                                                   self.metadata['parent_media_index'],
                                                   divider,
                                                   self.metadata['media_index'])
        return self.title.encode("utf-8")

    def get_subtitle(self):
        if self.media_type == 'track':
            self.subtitle = self.metadata['parent_title']
        else:
            self.subtitle = self.metadata['summary']
        return self.subtitle.encode("utf-8")

    def get_plex_url(self):
        self.plex_url = self.metadata['plex_url']
        return self.plex_url

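For reference, a minimal sketch of how the new helper is used by the agents below. The metadata dict and the import path (plexpy.notifiers) are assumptions for illustration; only the keys PrettyMetadata actually reads are filled in.

```python
from plexpy.notifiers import PrettyMetadata  # assumed import path for this module

# Hypothetical metadata, limited to the keys PrettyMetadata reads.
metadata = {'media_type': 'episode',
            'grandparent_title': 'Game of Thrones',
            'title': 'The Red Woman',
            'parent_media_index': '6',
            'media_index': '1',
            'year': '2016',
            'summary': 'Jon Snow is dead.',
            'poster_url': '',
            'thetvdb_url': 'http://thetvdb.com/?tab=series&id=121361',
            'plex_url': 'https://app.plex.tv/web/app'}

pretty = PrettyMetadata(metadata)
pretty.get_title(divider='-')  # 'Game of Thrones - The Red Woman (S6 - E1)'
pretty.get_poster_url()        # poster_url is empty, so the bundled default poster.png is returned
pretty.get_poster_link()       # the TheTVDB URL (checked before TMDb, IMDB and Last.fm)
pretty.get_caption()           # 'View on TheTVDB'
pretty.get_subtitle()          # the summary (parent_title is only used for tracks)
```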
class GROWL(object):
|
||||
"""
|
||||
@@ -786,12 +918,13 @@ class XBMC(object):
                    raise Exception
                else:
                    logger.info(u"PlexPy Notifiers :: XBMC notification sent.")
                    return True

            except Exception:
                logger.warn(u"PlexPy Notifiers :: XBMC notification filed.")
                logger.warn(u"PlexPy Notifiers :: XBMC notification failed.")
                return False

        return True

    def return_config_options(self):
        config_option = [{'label': 'XBMC Host:Port',
                          'value': self.hosts,
@@ -870,11 +1003,12 @@ class Plex(object):
                    raise Exception
                else:
                    logger.info(u"PlexPy Notifiers :: Plex Home Theater notification sent.")
                    return True

            except Exception:
                logger.warn(u"PlexPy Notifiers :: Plex Home Theater notification filed.")
                logger.warn(u"PlexPy Notifiers :: Plex Home Theater notification failed.")
                return False

        return True

    def return_config_options(self):
        config_option = [{'label': 'Plex Home Theater Host:Port',
@@ -1262,7 +1396,7 @@ class TwitterNotifier(object):
            poster_url = metadata.get('poster_url','')

        if self.incl_subject:
            self._send_tweet(subject + ': ' + message, attachment=poster_url)
            self._send_tweet(subject + '\r\n' + message, attachment=poster_url)
        else:
            self._send_tweet(message, attachment=poster_url)

@@ -1719,18 +1853,21 @@ class TELEGRAM(object):
        data = {'chat_id': self.chat_id}

        if self.incl_subject:
            text = event.encode('utf-8') + ': ' + message.encode('utf-8')
            text = event.encode('utf-8') + '\r\n' + message.encode('utf-8')
        else:
            text = message.encode('utf-8')

        if self.incl_poster and 'metadata' in kwargs:
            poster_data = {'chat_id': self.chat_id,
                           'disable_notification': True}

            metadata = kwargs['metadata']
            poster_url = metadata.get('poster_url','')

            if poster_url:
                files = {'photo': (poster_url, urllib.urlopen(poster_url).read())}
                response = requests.post('https://api.telegram.org/bot%s/%s' % (self.bot_token, 'sendPhoto'),
                                         data=data,
                                         data=poster_data,
                                         files=files)
                request_status = response.status_code
                request_content = json.loads(response.text)
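For context, the separate poster_data payload lets the photo upload go out silently (disable_notification) so only the text message that follows rings the client. A rough standalone sketch of the two calls; the bot token and chat id are placeholders:

```python
import requests

bot_token = 'BOT_TOKEN'   # placeholder
chat_id = '123456789'     # placeholder

# Poster first, pushed without a notification sound.
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
poster_data = {'chat_id': chat_id, 'disable_notification': True}
files = {'photo': (poster_url, requests.get(poster_url).content)}
requests.post('https://api.telegram.org/bot%s/sendPhoto' % bot_token, data=poster_data, files=files)

# Then the actual text notification.
requests.post('https://api.telegram.org/bot%s/sendMessage' % bot_token,
              data={'chat_id': chat_id, 'text': 'PlexPy (Play)\r\nUser started playing something.'})
```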
@@ -1838,7 +1975,7 @@ class SLACK(object):
            return

        if self.incl_subject:
            text = event.encode('utf-8') + ': ' + message.encode("utf-8")
            text = event.encode('utf-8') + '\r\n' + message.encode("utf-8")
        else:
            text = message.encode("utf-8")

@@ -1852,81 +1989,42 @@ class SLACK(object):
|
||||
data['icon_url'] = self.icon_emoji
|
||||
|
||||
if self.incl_poster and 'metadata' in kwargs:
|
||||
attachment = {}
|
||||
metadata = kwargs['metadata']
|
||||
poster_url = metadata.get('poster_url','')
|
||||
poster_link = ''
|
||||
caption = ''
|
||||
# Grab formatted metadata
|
||||
pretty_metadata = PrettyMetadata(kwargs['metadata'])
|
||||
poster_url = pretty_metadata.get_poster_url()
|
||||
plex_url = pretty_metadata.get_plex_url()
|
||||
poster_link = pretty_metadata.get_poster_link()
|
||||
caption = pretty_metadata.get_caption()
|
||||
title = pretty_metadata.get_title()
|
||||
subtitle = pretty_metadata.get_subtitle()
|
||||
|
||||
# Use default posters if no poster_url
|
||||
if not poster_url:
|
||||
if metadata['media_type'] in ['artist', 'track']:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
|
||||
else:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
|
||||
# Build Slack post attachment
|
||||
attachment = {'fallback': 'Image for %s' % title,
|
||||
'title': title,
|
||||
'text': subtitle,
|
||||
'image_url': poster_url,
|
||||
'thumb_url': poster_url
|
||||
}
|
||||
|
||||
if metadata['media_type'] == 'movie':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
if metadata.get('imdb_url',''):
|
||||
poster_link = metadata.get('imdb_url', '')
|
||||
caption = 'View on IMDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'show':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'episode':
|
||||
title = '%s - %s (S%s - E%s)' % (metadata['grandparent_title'],
|
||||
metadata['title'],
|
||||
metadata['parent_media_index'],
|
||||
metadata['media_index'])
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'artist':
|
||||
title = metadata['title']
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
|
||||
elif metadata['media_type'] == 'track':
|
||||
title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
|
||||
# Build Facebook post attachment
|
||||
if self.incl_pmslink:
|
||||
caption = 'View on Plex Web'
|
||||
attachment['title_link'] = metadata['plex_url']
|
||||
attachment['text'] = caption
|
||||
elif poster_link:
|
||||
fields = []
|
||||
if poster_link:
|
||||
attachment['title_link'] = poster_link
|
||||
attachment['text'] = caption
|
||||
|
||||
attachment['fallback'] = 'Image for %s' % title
|
||||
attachment['title'] = title
|
||||
attachment['image_url'] = poster_url
|
||||
fields.append({'value': '<%s|%s>' % (poster_link, caption),
|
||||
'short': True})
|
||||
if self.incl_pmslink:
|
||||
fields.append({'value': '<%s|%s>' % (plex_url, 'View on Plex Web'),
|
||||
'short': True})
|
||||
if fields:
|
||||
attachment['fields'] = fields
|
||||
|
||||
data['attachments'] = [attachment]
|
||||
|
||||
url = urlparse(self.slack_hook).path
|
||||
slackhost = urlparse(self.slack_hook).hostname
|
||||
slackpath = urlparse(self.slack_hook).path
|
||||
|
||||
http_handler = HTTPSConnection("hooks.slack.com")
|
||||
http_handler = HTTPSConnection(slackhost)
|
||||
http_handler.request("POST",
|
||||
url,
|
||||
slackpath,
|
||||
headers={'Content-type': "application/x-www-form-urlencoded"},
|
||||
body=json.dumps(data))
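For reference, with a poster included the webhook body now ends up roughly like the sketch below; all values are hypothetical stand-ins for the PrettyMetadata getters and the agent settings.

```python
import json

title = 'Game of Thrones (2011)'
subtitle = 'Nine noble families fight for control of Westeros.'
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
poster_link = 'http://thetvdb.com/?tab=series&id=121361'
plex_url = 'https://app.plex.tv/web/app'

attachment = {'fallback': 'Image for %s' % title,
              'title': title,
              'text': subtitle,
              'image_url': poster_url,
              'thumb_url': poster_url,
              'fields': [{'value': '<%s|%s>' % (poster_link, 'View on TheTVDB'), 'short': True},
                         {'value': '<%s|%s>' % (plex_url, 'View on Plex Web'), 'short': True}]}

data = {'channel': '#plexpy',            # from the agent settings
        'username': 'PlexPy',
        'text': 'PlexPy (Play)\r\nUser started playing something.',
        'attachments': [attachment]}

body = json.dumps(data)  # POSTed to the hostname and path parsed out of the configured webhook URL
```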
|
||||
|
||||
@@ -2088,6 +2186,12 @@ class Scripts(object):
        elif notify_action == 'pmsupdate':
            script = plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE_SCRIPT

        elif notify_action == 'concurrent':
            script = plexpy.CONFIG.SCRIPTS_ON_CONCURRENT_SCRIPT

        elif notify_action == 'newdevice':
            script = plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE_SCRIPT

        else:
            # For manual scripts
            script = kwargs.get('script', '')
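The two new branches follow the existing action-to-config mapping; an equivalent, purely hypothetical table-driven lookup makes the shape of that mapping explicit:

```python
# Hypothetical refactor for illustration only; the real code keeps the elif chain above.
ACTION_SCRIPT_SETTINGS = {
    'pmsupdate': 'SCRIPTS_ON_PMSUPDATE_SCRIPT',
    'concurrent': 'SCRIPTS_ON_CONCURRENT_SCRIPT',
    'newdevice': 'SCRIPTS_ON_NEWDEVICE_SCRIPT',
    # ...one entry per supported notify_action
}

def script_for_action(config, notify_action, **kwargs):
    setting = ACTION_SCRIPT_SETTINGS.get(notify_action)
    if setting:
        return getattr(config, setting)
    # For manual scripts
    return kwargs.get('script', '')
```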
@@ -2264,6 +2368,20 @@ class Scripts(object):
|
||||
'description': 'Choose the script for Plex update available.',
|
||||
'input_type': 'select',
|
||||
'select_options': self.list_scripts()
|
||||
},
|
||||
{'label': 'User Concurrent Streams',
|
||||
'value': plexpy.CONFIG.SCRIPTS_ON_CONCURRENT_SCRIPT,
|
||||
'name': 'scripts_on_concurrent_script',
|
||||
'description': 'Choose the script for user concurrent streams.',
|
||||
'input_type': 'select',
|
||||
'select_options': self.list_scripts()
|
||||
},
|
||||
{'label': 'User New Device',
|
||||
'value': plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE_SCRIPT,
|
||||
'name': 'scripts_on_newdevice_script',
|
||||
'description': 'Choose the script for user new device.',
|
||||
'input_type': 'select',
|
||||
'select_options': self.list_scripts()
|
||||
}
|
||||
]
|
||||
|
||||
@@ -2289,71 +2407,19 @@ class FacebookNotifier(object):
|
||||
attachment = {}
|
||||
|
||||
if self.incl_poster and 'metadata' in kwargs:
|
||||
metadata = kwargs['metadata']
|
||||
poster_url = metadata.get('poster_url','')
|
||||
poster_link = ''
|
||||
caption = ''
|
||||
|
||||
# Use default posters if no poster_url
|
||||
if not poster_url:
|
||||
if metadata['media_type'] in ['artist', 'track']:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
|
||||
else:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
|
||||
|
||||
if metadata['media_type'] == 'movie':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('imdb_url',''):
|
||||
poster_link = metadata.get('imdb_url', '')
|
||||
caption = 'View on IMDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'show':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'episode':
|
||||
title = '%s - %s (S%s %s E%s)' % (metadata['grandparent_title'],
|
||||
metadata['title'],
|
||||
metadata['parent_media_index'],
|
||||
'\xc2\xb7'.decode('utf8'),
|
||||
metadata['media_index'])
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'artist':
|
||||
title = metadata['title']
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
|
||||
elif metadata['media_type'] == 'track':
|
||||
title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
|
||||
subtitle = metadata['parent_title']
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
# Grab formatted metadata
|
||||
pretty_metadata = PrettyMetadata(kwargs['metadata'])
|
||||
poster_url = pretty_metadata.get_poster_url()
|
||||
plex_url = pretty_metadata.get_plex_url()
|
||||
poster_link = pretty_metadata.get_poster_link()
|
||||
caption = pretty_metadata.get_caption()
|
||||
title = pretty_metadata.get_title('\xc2\xb7'.decode('utf8'))
|
||||
subtitle = pretty_metadata.get_subtitle()
|
||||
|
||||
# Build Facebook post attachment
|
||||
if self.incl_pmslink:
|
||||
caption = 'View on Plex Web'
|
||||
attachment['link'] = metadata['plex_url']
|
||||
attachment['caption'] = caption
|
||||
attachment['link'] = plex_url
|
||||
attachment['caption'] = 'View on Plex Web'
|
||||
elif poster_link:
|
||||
attachment['link'] = poster_link
|
||||
attachment['caption'] = caption
|
||||
@@ -2365,7 +2431,7 @@ class FacebookNotifier(object):
|
||||
attachment['description'] = subtitle
|
||||
|
||||
if self.incl_subject:
|
||||
self._post_facebook(subject + ': ' + message, attachment=attachment)
|
||||
self._post_facebook(subject + '\r\n' + message, attachment=attachment)
|
||||
else:
|
||||
self._post_facebook(message, attachment=attachment)
|
||||
|
||||
@@ -2423,13 +2489,14 @@ class FacebookNotifier(object):
|
||||
config_option = [{'label': 'Instructions',
|
||||
'description': 'Step 1: Visit <a href="' + helpers.anon_url('https://developers.facebook.com/apps') + '" target="_blank"> \
|
||||
Facebook Developers</a> to add a new app using <strong>basic setup</strong>.<br>\
|
||||
Step 2: Go to <strong>Settings > Advanced</strong> and fill in \
|
||||
<strong>Valid OAuth redirect URIs</strong> with your PlexPy URL (e.g. http://localhost:8181).<br>\
|
||||
Step 3: Go to <strong>App Review</strong> and toggle public to <strong>Yes</strong>.<br>\
|
||||
Step 4: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 3.<br>\
|
||||
Step 5: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
|
||||
Step 6: Click the <strong>Request Authorization</strong> button below.<br>\
|
||||
Step 7: Fill in your <strong>Group ID</strong> below.',
|
||||
Step 2: Click <strong>Add Product</strong> on the left, then <strong>Get Started</strong> \
|
||||
for <strong>Facebook Login</strong>.<br>\
|
||||
Step 3: Fill in <strong>Valid OAuth redirect URIs</strong> with your PlexPy URL (e.g. http://localhost:8181).<br>\
|
||||
Step 4: Click <strong>App Review</strong> on the left and toggle "make public" to <strong>Yes</strong>.<br>\
|
||||
Step 5: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 3.<br>\
|
||||
Step 6: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
|
||||
Step 7: Click the <strong>Request Authorization</strong> button below.<br>\
|
||||
Step 8: Fill in your <strong>Group ID</strong> below.',
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'PlexPy URL',
|
||||
@@ -2552,6 +2619,7 @@ class JOIN(object):
|
||||
def __init__(self):
|
||||
self.apikey = plexpy.CONFIG.JOIN_APIKEY
|
||||
self.deviceid = plexpy.CONFIG.JOIN_DEVICEID
|
||||
self.incl_subject = plexpy.CONFIG.JOIN_INCL_SUBJECT
|
||||
|
||||
def conf(self, options):
|
||||
return cherrypy.config['config'].get('PUSHBULLET', options)
|
||||
@@ -2564,21 +2632,17 @@ class JOIN(object):

        data = {'apikey': self.apikey,
                deviceid_key: self.deviceid,
                'title': subject.encode("utf-8"),
                'text': message.encode("utf-8")}

        http_handler = HTTPSConnection("joinjoaomgcd.appspot.com")
        http_handler.request("POST",
                             "/_ah/api/messaging/v1/sendPush?%s" % urlencode(data))
        if self.incl_subject:
            data['title'] = subject.encode("utf-8")

        response = http_handler.getresponse()
        request_status = response.status
        # logger.debug(u"PushBullet response status: %r" % request_status)
        # logger.debug(u"PushBullet response headers: %r" % response.getheaders())
        # logger.debug(u"PushBullet response body: %r" % response.read())
        response = requests.post('https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush',
                                 params=data)
        request_status = response.status_code

        if request_status == 200:
            data = json.loads(response.read())
            data = json.loads(response.text)
            if data.get('success'):
                logger.info(u"PlexPy Notifiers :: Join notification sent.")
                return True
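The push now goes through requests instead of a raw HTTPSConnection. A standalone sketch of the same call; the API key and device id are placeholders, and the device-id parameter name is whatever deviceid_key resolves to in notify():

```python
import requests

data = {'apikey': 'JOIN_API_KEY',   # placeholder
        'deviceId': 'DEVICE_ID',    # placeholder; the real key name comes from deviceid_key
        'text': 'User started playing something.'}
data['title'] = 'PlexPy (Play)'     # only added when join_incl_subject is enabled

r = requests.post('https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush', params=data)
sent = r.status_code == 200 and r.json().get('success')
```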
@@ -2632,7 +2696,10 @@ class JOIN(object):
|
||||
return {'': ''}
|
||||
|
||||
def return_config_options(self):
|
||||
devices = '<br>'.join(['%s: %s' % (v, k) for k, v in self.get_devices().iteritems() if k])
|
||||
devices = '<br>'.join(['%s: <span class="inline-pre">%s</span>'
|
||||
% (v, k) for k, v in self.get_devices().iteritems() if k])
|
||||
if not devices:
|
||||
devices = 'Enter your Join API key to load your device list.'
|
||||
|
||||
config_option = [{'label': 'Join API Key',
|
||||
'value': self.apikey,
|
||||
@@ -2650,7 +2717,160 @@ class JOIN(object):
|
||||
{'label': 'Your Devices IDs',
|
||||
'description': devices,
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'Include Subject Line',
|
||||
'value': self.incl_subject,
|
||||
'name': 'join_incl_subject',
|
||||
'description': 'Include the subject line with the notifications.',
|
||||
'input_type': 'checkbox'
|
||||
}
|
||||
]
|
||||
|
||||
return config_option
|
||||
|
||||
class HIPCHAT(object):
|
||||
|
||||
def __init__(self):
|
||||
self.apiurl = plexpy.CONFIG.HIPCHAT_URL
|
||||
self.color = plexpy.CONFIG.HIPCHAT_COLOR
|
||||
self.emoticon = plexpy.CONFIG.HIPCHAT_EMOTICON
|
||||
self.incl_pmslink = plexpy.CONFIG.HIPCHAT_INCL_PMSLINK
|
||||
self.incl_poster = plexpy.CONFIG.HIPCHAT_INCL_POSTER
|
||||
self.incl_subject = plexpy.CONFIG.HIPCHAT_INCL_SUBJECT
|
||||
|
||||
def notify(self, message, subject, **kwargs):
|
||||
if not message or not subject:
|
||||
return
|
||||
|
||||
data = {'notify': 'false'}
|
||||
|
||||
text = message.encode('utf-8')
|
||||
|
||||
if self.incl_subject:
|
||||
data['from'] = subject.encode('utf-8')
|
||||
|
||||
if self.color:
|
||||
data['color'] = self.color
|
||||
|
||||
if self.incl_poster and 'metadata' in kwargs:
|
||||
pretty_metadata = PrettyMetadata(kwargs['metadata'])
|
||||
poster_url = pretty_metadata.get_poster_url()
|
||||
poster_link = pretty_metadata.get_poster_link()
|
||||
caption = pretty_metadata.get_caption()
|
||||
title = pretty_metadata.get_title()
|
||||
subtitle = pretty_metadata.get_subtitle()
|
||||
plex_url = pretty_metadata.get_plex_url()
|
||||
|
||||
card = {'title': title,
|
||||
'format': 'medium',
|
||||
'style': 'application',
|
||||
'id': uuid.uuid4().hex,
|
||||
'activity': {'html': text,
|
||||
'icon': {'url': poster_url}},
|
||||
'description': {'format': 'text',
|
||||
'value': subtitle},
|
||||
'thumbnail': {'url': poster_url}
|
||||
}
|
||||
|
||||
attributes = []
|
||||
if poster_link:
|
||||
card['url'] = poster_link
|
||||
attributes.append({'value': {'label': caption,
|
||||
'url': poster_link}})
|
||||
if self.incl_pmslink:
|
||||
attributes.append({'value': {'label': 'View on Plex Web',
|
||||
'url': plex_url}})
|
||||
if attributes:
|
||||
card['attributes'] = attributes
|
||||
|
||||
data['message'] = text
|
||||
data['card'] = card
|
||||
|
||||
else:
|
||||
if self.emoticon:
|
||||
text = self.emoticon + ' ' + text
|
||||
data['message'] = text
|
||||
data['message_format'] = 'text'
|
||||
|
||||
hiphost = urlparse(self.apiurl).hostname
|
||||
hipfullq = urlparse(self.apiurl).path + '?' + urlparse(self.apiurl).query
|
||||
|
||||
http_handler = HTTPSConnection(hiphost)
|
||||
http_handler.request("POST",
|
||||
hipfullq,
|
||||
headers={'Content-type': "application/json"},
|
||||
body=json.dumps(data))
|
||||
response = http_handler.getresponse()
|
||||
request_status = response.status
|
||||
|
||||
if request_status == 200 or request_status == 204:
|
||||
logger.info(u"PlexPy Notifiers :: Hipchat notification sent.")
|
||||
return True
|
||||
elif request_status >= 400 and request_status < 500:
|
||||
logger.warn(u"PlexPy Notifiers :: Hipchat notification failed: [%s] %s" % (request_status, response.reason))
|
||||
return False
|
||||
else:
|
||||
logger.warn(u"PlexPy Notifiers :: Hipchat notification failed.")
|
||||
return False
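For reference, when a poster is included the room notification carries a card payload shaped roughly like this; the values are hypothetical stand-ins for the PrettyMetadata getters:

```python
import json
import uuid

poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
poster_link = 'http://thetvdb.com/?tab=series&id=121361'

card = {'title': 'Game of Thrones (2011)',
        'format': 'medium',
        'style': 'application',
        'id': uuid.uuid4().hex,
        'url': poster_link,
        'activity': {'html': 'PlexPy (Play): User started playing something.',
                     'icon': {'url': poster_url}},
        'description': {'format': 'text',
                        'value': 'Nine noble families fight for control of Westeros.'},
        'thumbnail': {'url': poster_url},
        'attributes': [{'value': {'label': 'View on TheTVDB', 'url': poster_link}}]}

data = {'notify': 'false', 'color': 'purple',
        'message': 'User started playing something.', 'card': card}
body = json.dumps(data)  # POSTed to the room notification URL configured in hipchat_url
```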
|
||||
|
||||
def test(self, apiurl, color, hipchat_emoticon, hipchat_incl_subject):
|
||||
|
||||
self.enabled = True
|
||||
self.apiurl = apiurl
|
||||
self.color = color
|
||||
self.emoticon = hipchat_emoticon
|
||||
self.incl_subject = hipchat_incl_subject
|
||||
|
||||
return self.notify('PlexPy', 'Test Message')
|
||||
|
||||
def return_config_options(self):
|
||||
config_option = [{'label': 'Hipchat Custom Integrations Full URL',
|
||||
'value': self.apiurl,
|
||||
'name': 'hipchat_url',
|
||||
'description': 'Your Hipchat BYO integration URL. You can get a key from'
|
||||
' <a href="' + helpers.anon_url('https://www.hipchat.com/addons/') + '" target="_blank">here</a>.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Hipchat Color',
|
||||
'value': self.color,
|
||||
'name': 'hipchat_color',
|
||||
'description': 'Background color for the message.',
|
||||
'input_type': 'select',
|
||||
'select_options': {'': '',
|
||||
'gray': 'gray',
|
||||
'green': 'green',
|
||||
'purple': 'purple',
|
||||
'random': 'random',
|
||||
'red': 'red',
|
||||
'yellow': 'yellow'
|
||||
}
|
||||
},
|
||||
{'label': 'Hipchat Emoticon',
|
||||
'value': self.emoticon,
|
||||
'name': 'hipchat_emoticon',
|
||||
'description': 'Include an emoticon tag at the beginning of text notifications (e.g. (taco)). Leave blank for none.'
|
||||
' Use a stock emoticon or create a custom emoticon'
|
||||
' <a href="' + helpers.anon_url('https://www.hipchat.com/emoticons/') + '" target="_blank">here</a>.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Include Poster',
|
||||
'value': self.incl_poster,
|
||||
'name': 'hipchat_incl_poster',
|
||||
'description': 'Include a poster in the notifications.<br>This will change the notification type to HTML and emoticons will no longer work.',
|
||||
'input_type': 'checkbox'
|
||||
},
|
||||
{'label': 'Include Link to Plex Web',
|
||||
'value': self.incl_pmslink,
|
||||
'name': 'hipchat_incl_pmslink',
|
||||
'description': 'Include a link to the media in Plex Web with the notifications.',
|
||||
'input_type': 'checkbox'
|
||||
},
|
||||
{'label': 'Include Subject Line',
|
||||
'value': self.incl_subject,
|
||||
'name': 'hipchat_incl_subject',
|
||||
'description': 'Includes the subject with the notifications.',
|
||||
'input_type': 'checkbox'
|
||||
}
|
||||
]
|
||||
|
||||
return config_option
|
@@ -100,6 +100,7 @@ def extract_plexivity_xml(xml=None):
            video_resolution = helpers.get_xml_attr(c, 'videoResolution')
            width = helpers.get_xml_attr(c, 'width')

            ip_address = ''
            machine_id = ''
            platform = ''
            player = ''
@@ -107,7 +108,7 @@ def extract_plexivity_xml(xml=None):
            if a.getElementsByTagName('Player'):
                player_elem = a.getElementsByTagName('Player')
                for d in player_elem:
                    ip_address = helpers.get_xml_attr(d, 'address')
                    ip_address = helpers.get_xml_attr(d, 'address').split('::ffff:')[-1]
                    machine_id = helpers.get_xml_attr(d, 'machineIdentifier')
                    platform = helpers.get_xml_attr(d, 'platform')
                    player = helpers.get_xml_attr(d, 'title')

plexpy/plextv.py
@@ -17,6 +17,7 @@
|
||||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import json
|
||||
from xml.dom import minidom
|
||||
|
||||
import plexpy
|
||||
@@ -44,7 +45,11 @@ def refresh_users():
|
||||
if user_tokens and user_tokens['server_token']:
|
||||
pms_connect = pmsconnect.PmsConnect(token=user_tokens['server_token'])
|
||||
library_details = pms_connect.get_server_children()
|
||||
shared_libraries = ';'.join(d['section_id'] for d in library_details['libraries_list'])
|
||||
|
||||
if library_details:
|
||||
shared_libraries = ';'.join(d['section_id'] for d in library_details['libraries_list'])
|
||||
else:
|
||||
shared_libraries = ''
|
||||
|
||||
control_value_dict = {"user_id": item['user_id']}
|
||||
new_value_dict = {"username": item['username'],
|
||||
@@ -91,24 +96,31 @@ def get_real_pms_url():
|
||||
|
||||
fallback_url = 'http://' + plexpy.CONFIG.PMS_IP + ':' + str(plexpy.CONFIG.PMS_PORT)
|
||||
|
||||
if plexpy.CONFIG.PMS_SSL:
|
||||
result = PlexTV().get_server_urls(include_https=True)
|
||||
else:
|
||||
result = PlexTV().get_server_urls(include_https=False)
|
||||
plex_tv = PlexTV()
|
||||
result = plex_tv.get_server_urls(include_https=plexpy.CONFIG.PMS_SSL)
|
||||
plexpass = plex_tv.get_plexpass_status()
|
||||
|
||||
connections = []
|
||||
if result:
|
||||
plexpy.CONFIG.__setattr__('PMS_VERSION', result['version'])
|
||||
plexpy.CONFIG.__setattr__('PMS_PLATFORM', result['platform'])
|
||||
plexpy.CONFIG.__setattr__('PMS_PLEXPASS', plexpass)
|
||||
connections = result['connections']
|
||||
|
||||
# Only need to retrieve PMS_URL if using SSL
|
||||
if plexpy.CONFIG.PMS_SSL:
|
||||
if result:
|
||||
if connections:
|
||||
if plexpy.CONFIG.PMS_IS_REMOTE:
|
||||
# Get all remote connections
|
||||
connections = [c for c in result if c['local'] == '0' and 'plex.direct' in c['uri']]
|
||||
conns = [c for c in connections if c['local'] == '0' and 'plex.direct' in c['uri']]
|
||||
else:
|
||||
# Get all local connections
|
||||
connections = [c for c in result if c['local'] == '1' and 'plex.direct' in c['uri']]
|
||||
conns = [c for c in connections if c['local'] == '1' and 'plex.direct' in c['uri']]
|
||||
|
||||
if connections:
|
||||
if conns:
|
||||
# Get connection with matching address, otherwise return first connection
|
||||
conn = next((c for c in connections if c['address'] == plexpy.CONFIG.PMS_IP), connections[0])
|
||||
conn = next((c for c in conns if c['address'] == plexpy.CONFIG.PMS_IP
|
||||
and c['port'] == str(plexpy.CONFIG.PMS_PORT)), conns[0])
|
||||
plexpy.CONFIG.__setattr__('PMS_URL', conn['uri'])
|
||||
plexpy.CONFIG.write()
|
||||
logger.info(u"PlexPy PlexTV :: Server URL retrieved.")
|
||||
@@ -142,13 +154,15 @@ class PlexTV(object):
|
||||
if session.get_session_user_id():
|
||||
user_data = users.Users()
|
||||
user_tokens = user_data.get_tokens(user_id=session.get_session_user_id())
|
||||
token = user_tokens['server_token']
|
||||
self.token = user_tokens['server_token']
|
||||
else:
|
||||
token = plexpy.CONFIG.PMS_TOKEN
|
||||
self.token = plexpy.CONFIG.PMS_TOKEN
|
||||
else:
|
||||
self.token = token
|
||||
|
||||
self.request_handler = http_handler.HTTPHandler(host='plex.tv',
|
||||
port=443,
|
||||
token=token,
|
||||
token=self.token,
|
||||
ssl_verify=self.ssl_verify)
|
||||
|
||||
def get_plex_auth(self, output_format='raw'):
|
||||
@@ -266,6 +280,18 @@ class PlexTV(object):
|
||||
|
||||
return request
|
||||
|
||||
def get_plextv_downloads(self, plexpass=False, output_format=''):
|
||||
if plexpass:
|
||||
uri = '/api/downloads/1.json?channel=plexpass'
|
||||
else:
|
||||
uri = '/api/downloads/1.json'
|
||||
request = self.request_handler.make_request(uri=uri,
|
||||
proto=self.protocol,
|
||||
request_type='GET',
|
||||
output_format=output_format)
|
||||
|
||||
return request
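The new helper wraps the plex.tv downloads feed. A rough standalone equivalent; the token header name is an assumption here, since the request handler normally attaches the token itself:

```python
import json
import requests

headers = {'X-Plex-Token': 'PLEX_TV_TOKEN'}   # placeholder token
params = {'channel': 'plexpass'}              # omit for the public update channel
r = requests.get('https://plex.tv/api/downloads/1.json', params=params, headers=headers)

downloads = json.loads(r.text)
# get_plex_downloads() below looks the server platform up under 'computer' or 'nas'.
```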
|
||||
|
||||
def get_full_users_list(self):
|
||||
friends_list = self.get_plextv_friends()
|
||||
own_account = self.get_plextv_user_details()
|
||||
@@ -447,7 +473,7 @@ class PlexTV(object):
|
||||
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
else:
|
||||
logger.error(u"PlexPy PlexTV :: Unable to retrieve server identity.")
|
||||
return []
|
||||
return {}
|
||||
|
||||
plextv_resources = self.get_plextv_resources(include_https=include_https)
|
||||
|
||||
@@ -455,22 +481,26 @@ class PlexTV(object):
|
||||
xml_parse = minidom.parseString(plextv_resources)
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s" % e)
|
||||
return []
|
||||
return {}
|
||||
except:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls.")
|
||||
return []
|
||||
return {}
|
||||
|
||||
try:
|
||||
xml_head = xml_parse.getElementsByTagName('Device')
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
|
||||
return []
|
||||
return {}
|
||||
|
||||
# Function to get all connections for a device
|
||||
def get_connections(device):
|
||||
conn = []
|
||||
connections = device.getElementsByTagName('Connection')
|
||||
|
||||
server = {"platform": helpers.get_xml_attr(device, 'platform'),
|
||||
"version": helpers.get_xml_attr(device, 'productVersion')
|
||||
}
|
||||
|
||||
for c in connections:
|
||||
server_details = {"protocol": helpers.get_xml_attr(c, 'protocol'),
|
||||
"address": helpers.get_xml_attr(c, 'address'),
|
||||
@@ -480,18 +510,19 @@ class PlexTV(object):
|
||||
}
|
||||
conn.append(server_details)
|
||||
|
||||
return conn
|
||||
server['connections'] = conn
|
||||
return server
|
||||
|
||||
server_urls = []
|
||||
server = {}
|
||||
|
||||
# Try to match the device
|
||||
for a in xml_head:
|
||||
if helpers.get_xml_attr(a, 'clientIdentifier') == server_id:
|
||||
server_urls = get_connections(a)
|
||||
server = get_connections(a)
|
||||
break
|
||||
|
||||
# Else no device match found
|
||||
if not server_urls:
|
||||
if not server:
|
||||
# Try to match the PMS_IP and PMS_PORT
|
||||
for a in xml_head:
|
||||
if helpers.get_xml_attr(a, 'provides') == 'server':
|
||||
@@ -504,16 +535,16 @@ class PlexTV(object):
|
||||
plexpy.CONFIG.PMS_IDENTIFIER = helpers.get_xml_attr(a, 'clientIdentifier')
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s." % \
|
||||
(server_id, plexpy.CONFIG.PMS_IDENTIFIER))
|
||||
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s."
|
||||
% (server_id, plexpy.CONFIG.PMS_IDENTIFIER))
|
||||
|
||||
server_urls = get_connections(a)
|
||||
server = get_connections(a)
|
||||
break
|
||||
|
||||
if server_urls:
|
||||
if server.get('connections'):
|
||||
break
|
||||
|
||||
return server_urls
|
||||
return server
|
||||
|
||||
def get_server_times(self):
|
||||
servers = self.get_plextv_server_list(output_format='xml')
|
||||
@@ -581,4 +612,81 @@ class PlexTV(object):
|
||||
}
|
||||
clean_servers.append(server)
|
||||
|
||||
return clean_servers
|
||||
return clean_servers
|
||||
|
||||
    def get_plex_downloads(self):
        logger.debug(u"PlexPy PlexTV :: Plex update channel is %s." % plexpy.CONFIG.PMS_UPDATE_CHANNEL)
        plex_downloads = self.get_plextv_downloads(plexpass=(plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plexpass'))

        try:
            available_downloads = json.loads(plex_downloads)
        except Exception as e:
            logger.warn(u"PlexPy PlexTV :: Unable to load JSON for get_plex_updates.")
            return {}

        # Get the updates for the platform
        platform_downloads = available_downloads.get('computer').get(plexpy.CONFIG.PMS_PLATFORM) or \
            available_downloads.get('nas').get(plexpy.CONFIG.PMS_PLATFORM)

        if not platform_downloads:
            logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
                         % plexpy.CONFIG.PMS_PLATFORM)
            return {}

        v_old = plexpy.CONFIG.PMS_VERSION.split('-')[0].split('.')
        v_new = platform_downloads.get('version', '').split('-')[0].split('.')

        if len(v_old) < 4:
            logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
                         % plexpy.CONFIG.PMS_VERSION)
            return {}
        if len(v_new) < 4:
            logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
                         % platform_downloads.get('version'))
            return {}

        # Compare versions
        if v_new[0] > v_old[0] or \
                v_new[0] == v_old[0] and v_new[1] > v_old[1] or \
                v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] > v_old[2] or \
                v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] == v_old[2] and v_new[3] > v_old[3]:
            update_available = True
        else:
            update_available = False

        # Get proper download
        releases = platform_downloads.get('releases', [{}])
        release = next((r for r in releases if r['build'] == plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])

        download_info = {'update_available': update_available,
                         'platform': platform_downloads.get('name'),
                         'release_date': platform_downloads.get('release_date'),
                         'version': platform_downloads.get('version'),
                         'requirements': platform_downloads.get('requirements'),
                         'extra_info': platform_downloads.get('extra_info'),
                         'changelog_added': platform_downloads.get('items_added'),
                         'changelog_fixed': platform_downloads.get('items_fixed'),
                         'label': release.get('label'),
                         'distro': release.get('distro'),
                         'distro_build': release.get('build'),
                         'download_url': release.get('url'),
                         }

        return download_info

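A worked example of the four-part comparison above, with hypothetical version strings. Note the components compare as strings here, mirroring the code; casting them to int would also handle components like '10' vs. '9'.

```python
v_old = '0.9.16.6-1993aa996'.split('-')[0].split('.')    # ['0', '9', '16', '6']
v_new = '1.3.3.3148-b38628e'.split('-')[0].split('.')    # ['1', '3', '3', '3148']

update_available = (v_new[0] > v_old[0] or
                    v_new[0] == v_old[0] and v_new[1] > v_old[1] or
                    v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] > v_old[2] or
                    v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] == v_old[2] and v_new[3] > v_old[3])
# update_available -> True, since the major component went from '0' to '1'
```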
    def get_plexpass_status(self):
        account_data = self.get_plextv_user_details(output_format='xml')

        try:
            subscription = account_data.getElementsByTagName('subscription')
        except Exception as e:
            logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_plexpass_status: %s." % e)
            return False

        if subscription and helpers.get_xml_attr(subscription[0], 'active') == '1':
            return True
        else:
            logger.debug(u"PlexPy PlexTV :: Plex Pass subscription not found.")
            plexpy.CONFIG.__setattr__('PMS_PLEXPASS', 0)
            plexpy.CONFIG.write()
            return False
@@ -14,7 +14,7 @@
|
||||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import threading
|
||||
import urllib2
|
||||
import urllib
|
||||
from urlparse import urlparse
|
||||
|
||||
import plexpy
|
||||
@@ -121,13 +121,15 @@ class PmsConnect(object):
|
||||
if session.get_session_user_id():
|
||||
user_data = users.Users()
|
||||
user_tokens = user_data.get_tokens(user_id=session.get_session_user_id())
|
||||
token = user_tokens['server_token']
|
||||
self.token = user_tokens['server_token']
|
||||
else:
|
||||
token = plexpy.CONFIG.PMS_TOKEN
|
||||
self.token = plexpy.CONFIG.PMS_TOKEN
|
||||
else:
|
||||
self.token = token
|
||||
|
||||
self.request_handler = http_handler.HTTPHandler(host=hostname,
|
||||
port=port,
|
||||
token=token)
|
||||
token=self.token)
|
||||
|
||||
def get_sessions(self, output_format=''):
|
||||
"""
|
||||
@@ -179,7 +181,7 @@ class PmsConnect(object):
|
||||
|
||||
return request
|
||||
|
||||
def get_recently_added(self, count='0', output_format=''):
|
||||
def get_recently_added(self, start='0', count='0', output_format=''):
|
||||
"""
|
||||
Return list of recently added items.
|
||||
|
||||
@@ -188,7 +190,7 @@ class PmsConnect(object):
|
||||
|
||||
Output: array
|
||||
"""
|
||||
uri = '/library/recentlyAdded?X-Plex-Container-Start=0&X-Plex-Container-Size=' + count
|
||||
uri = '/library/recentlyAdded?X-Plex-Container-Start=%s&X-Plex-Container-Size=%s' % (start, count)
|
||||
request = self.request_handler.make_request(uri=uri,
|
||||
proto=self.protocol,
|
||||
request_type='GET',
|
||||
@@ -196,7 +198,7 @@ class PmsConnect(object):
|
||||
|
||||
return request
|
||||
|
||||
def get_library_recently_added(self, section_id='', count='0', output_format=''):
|
||||
def get_library_recently_added(self, section_id='', start='0', count='0', output_format=''):
|
||||
"""
|
||||
Return list of recently added items.
|
||||
|
||||
@@ -205,7 +207,7 @@ class PmsConnect(object):
|
||||
|
||||
Output: array
|
||||
"""
|
||||
uri = '/library/sections/' + section_id + '/recentlyAdded?X-Plex-Container-Start=0&X-Plex-Container-Size=' + count
|
||||
uri = '/library/sections/%s/recentlyAdded?X-Plex-Container-Start=%s&X-Plex-Container-Size=%s' % (section_id, start, count)
|
||||
request = self.request_handler.make_request(uri=uri,
|
||||
proto=self.protocol,
|
||||
request_type='GET',
|
||||
@@ -387,7 +389,7 @@ class PmsConnect(object):
|
||||
|
||||
Output: array
|
||||
"""
|
||||
uri = '/search?query=' + urllib2.quote(query.encode('utf8')) + track
|
||||
uri = '/search?query=' + urllib.quote(query.encode('utf8')) + track
|
||||
request = self.request_handler.make_request(uri=uri,
|
||||
proto=self.protocol,
|
||||
request_type='GET',
|
||||
@@ -458,7 +460,7 @@ class PmsConnect(object):
|
||||
|
||||
return request
|
||||
|
||||
def get_recently_added_details(self, section_id='', count='0'):
|
||||
def get_recently_added_details(self, section_id='', start='0', count='0'):
|
||||
"""
|
||||
Return processed and validated list of recently added items.
|
||||
|
||||
@@ -467,9 +469,9 @@ class PmsConnect(object):
|
||||
Output: array
|
||||
"""
|
||||
if section_id:
|
||||
recent = self.get_library_recently_added(section_id, count, output_format='xml')
|
||||
recent = self.get_library_recently_added(section_id, start, count, output_format='xml')
|
||||
else:
|
||||
recent = self.get_recently_added(count, output_format='xml')
|
||||
recent = self.get_recently_added(start, count, output_format='xml')
|
||||
|
||||
try:
|
||||
xml_head = recent.getElementsByTagName('MediaContainer')
|
||||
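With the new start parameter the recently-added lookups can be paged instead of always reading from the top; a hypothetical usage sketch (module path assumed):

```python
from plexpy.pmsconnect import PmsConnect  # assumed import path

pms = PmsConnect()
page_1 = pms.get_recently_added_details(start='0', count='25')
page_2 = pms.get_recently_added_details(start='25', count='25')                   # next 25 items
movies = pms.get_recently_added_details(section_id='1', start='0', count='25')    # a single library section
```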
@@ -1021,6 +1023,8 @@ class PmsConnect(object):
|
||||
session_output = self.get_session_each(session_type, session_)
|
||||
session_list.append(session_output)
|
||||
|
||||
session_list = sorted(session_list, key=lambda k: k['session_key'])
|
||||
|
||||
output = {'stream_count': helpers.get_xml_attr(xml_head[0], 'size'),
|
||||
'sessions': session.mask_session_info(session_list)
|
||||
}
|
||||
@@ -1902,10 +1906,12 @@ class PmsConnect(object):
|
||||
"""
|
||||
|
||||
if img:
|
||||
uri = '/photo/:/transcode?url=http://127.0.0.1:32400%s' % img
|
||||
params = {'url': 'http://127.0.0.1:32400%s?%s' % (img, urllib.urlencode({'X-Plex-Token': self.token}))}
|
||||
if width.isdigit() and height.isdigit():
|
||||
uri += '&width=%s&height=%s' % (width, height)
|
||||
params['width'] = width
|
||||
params['height'] = height
|
||||
|
||||
uri = '/photo/:/transcode?%s' % urllib.urlencode(params)
|
||||
result = self.request_handler.make_request(uri=uri,
|
||||
proto=self.protocol,
|
||||
request_type='GET',
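The proxied image URI is now built with urlencode so the server token rides along as a query parameter of the inner URL; a standalone sketch with placeholder values:

```python
import urllib  # Python 2, matching the module used above

token = 'PMS_TOKEN'                               # placeholder
img = '/library/metadata/1234/thumb/1469898000'   # hypothetical thumb path
width, height = '300', '450'

params = {'url': 'http://127.0.0.1:32400%s?%s' % (img, urllib.urlencode({'X-Plex-Token': token}))}
if width.isdigit() and height.isdigit():
    params['width'] = width
    params['height'] = height

uri = '/photo/:/transcode?%s' % urllib.urlencode(params)
```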
|
||||
|
@@ -181,8 +181,8 @@ class Users(object):
|
||||
'session_history_media_info.transcode_decision',
|
||||
'session_history.user',
|
||||
'session_history.user_id as custom_user_id',
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE \
|
||||
users.friendly_name END) AS friendly_name'
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
|
||||
THEN users.username ELSE users.friendly_name END) AS friendly_name'
|
||||
]
|
||||
|
||||
try:
|
||||
@@ -717,8 +717,8 @@ class Users(object):
|
||||
'user_login.host',
|
||||
'user_login.user_agent',
|
||||
'user_login.timestamp',
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN user_login.user ELSE users.friendly_name END) \
|
||||
AS friendly_name'
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
|
||||
THEN users.username ELSE users.friendly_name END) AS friendly_name'
|
||||
]
|
||||
|
||||
try:
|
||||
|
@@ -1,2 +1,2 @@
PLEXPY_VERSION = "master"
PLEXPY_RELEASE_VERSION = "1.4.0"
PLEXPY_RELEASE_VERSION = "1.4.7"
|
||||
|
@@ -86,7 +86,7 @@ def run():
|
||||
|
||||
# successfully received data, reset reconnects counter
|
||||
reconnects = 0
|
||||
except websocket.WebSocketConnectionClosedException:
|
||||
except (websocket.WebSocketConnectionClosedException, Exception):
|
||||
if reconnects <= 15:
|
||||
reconnects += 1
|
||||
|
||||
@@ -94,7 +94,7 @@ def run():
|
||||
if reconnects > 1:
|
||||
time.sleep(5)
|
||||
|
||||
logger.warn(u"PlexPy WebSocket :: Connection has closed, reconnecting...")
|
||||
logger.warn(u"PlexPy WebSocket :: Connection has closed, reconnection attempt %s." % reconnects)
|
||||
try:
|
||||
ws = create_connection(uri, header=header)
|
||||
except IOError as e:
|
||||
|
@@ -36,7 +36,7 @@ from plexpy.plextv import PlexTV
|
||||
SESSION_KEY = '_cp_username'
|
||||
|
||||
def user_login(username=None, password=None):
|
||||
if not username and not password:
|
||||
if not username or not password:
|
||||
return None
|
||||
|
||||
# Try to login to Plex.tv to check if the user has a vaild account
|
||||
@@ -119,7 +119,7 @@ def check_auth(*args, **kwargs):
|
||||
if not condition():
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT)
|
||||
else:
|
||||
raise cherrypy.HTTPRedirect("auth/logout")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/logout")
|
||||
|
||||
def requireAuth(*conditions):
|
||||
"""A decorator that appends conditions to the auth.require config
|
||||
@@ -204,14 +204,14 @@ class AuthController(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
raise cherrypy.HTTPRedirect("login")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/login")
|
||||
|
||||
@cherrypy.expose
|
||||
def login(self, username=None, password=None, remember_me='0', admin_login='0'):
|
||||
if not cherrypy.config.get('tools.sessions.on'):
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT)
|
||||
|
||||
if username is None or password is None:
|
||||
if not username and not password:
|
||||
return self.get_loginform()
|
||||
|
||||
(vaild_login, user_group) = check_credentials(username, password, admin_login)
|
||||
@@ -257,4 +257,4 @@ class AuthController(object):
|
||||
if _session and _session['user']:
|
||||
cherrypy.request.login = None
|
||||
self.on_logout(_session['user'], _session['user_group'])
|
||||
raise cherrypy.HTTPRedirect("login")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "auth/login")
|
@@ -34,6 +34,7 @@ import config
|
||||
import database
|
||||
import datafactory
|
||||
import graphs
|
||||
import helpers
|
||||
import http_handler
|
||||
import libraries
|
||||
import log_reader
|
||||
@@ -82,9 +83,9 @@ class WebInterface(object):
|
||||
@requireAuth()
|
||||
def index(self):
|
||||
if plexpy.CONFIG.FIRST_RUN_COMPLETE:
|
||||
raise cherrypy.HTTPRedirect("home")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "home")
|
||||
else:
|
||||
raise cherrypy.HTTPRedirect("welcome")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "welcome")
|
||||
|
||||
|
||||
##### Welcome #####
|
||||
@@ -118,7 +119,7 @@ class WebInterface(object):
|
||||
# The setup wizard just refreshes the page on submit so we must redirect to home if config set.
|
||||
if plexpy.CONFIG.FIRST_RUN_COMPLETE:
|
||||
plexpy.initialize_scheduler()
|
||||
raise cherrypy.HTTPRedirect("home")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "home")
|
||||
else:
|
||||
return serve_template(templatename="welcome.html", title="Welcome", config=config)
|
||||
|
||||
@@ -268,7 +269,7 @@ class WebInterface(object):
|
||||
else:
|
||||
if s['video_decision'] == 'transcode' or s['audio_decision'] == 'transcode':
|
||||
data['transcode'] += 1
|
||||
elif s['video_decision'] == 'direct copy' or s['audio_decision'] == 'copy play':
|
||||
elif s['video_decision'] == 'copy' or s['audio_decision'] == 'copy':
|
||||
data['direct_stream'] += 1
|
||||
else:
|
||||
data['direct_play'] += 1
|
||||
@@ -477,9 +478,9 @@ class WebInterface(object):
|
||||
"get_file_sizes_hold": plexpy.CONFIG.GET_FILE_SIZES_HOLD
|
||||
}
|
||||
|
||||
library_data = libraries.Libraries()
|
||||
if section_id:
|
||||
try:
|
||||
library_data = libraries.Libraries()
|
||||
library_details = library_data.get_details(section_id=section_id)
|
||||
except:
|
||||
logger.warn(u"Unable to retrieve library details for section_id %s " % section_id)
|
||||
@@ -493,8 +494,8 @@ class WebInterface(object):
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def edit_library_dialog(self, section_id=None, **kwargs):
|
||||
library_data = libraries.Libraries()
|
||||
if section_id:
|
||||
library_data = libraries.Libraries()
|
||||
result = library_data.get_details(section_id=section_id)
|
||||
status_message = ''
|
||||
else:
|
||||
@@ -528,9 +529,9 @@ class WebInterface(object):
|
||||
do_notify_created = kwargs.get('do_notify_created', 0)
|
||||
keep_history = kwargs.get('keep_history', 0)
|
||||
|
||||
library_data = libraries.Libraries()
|
||||
if section_id:
|
||||
try:
|
||||
library_data = libraries.Libraries()
|
||||
library_data.set_config(section_id=section_id,
|
||||
custom_thumb=custom_thumb,
|
||||
do_notify=do_notify,
|
||||
@@ -543,7 +544,7 @@ class WebInterface(object):
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
def get_library_watch_time_stats(self, section_id=None, **kwargs):
|
||||
def library_watch_time_stats(self, section_id=None, **kwargs):
|
||||
if not allow_session_library(section_id):
|
||||
return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
|
||||
|
||||
@@ -556,12 +557,12 @@ class WebInterface(object):
|
||||
if result:
|
||||
return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats")
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_library_watch_time_stats.")
|
||||
logger.warn(u"Unable to retrieve data for library_watch_time_stats.")
|
||||
return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
def get_library_user_stats(self, section_id=None, **kwargs):
|
||||
def library_user_stats(self, section_id=None, **kwargs):
|
||||
if not allow_session_library(section_id):
|
||||
return serve_template(templatename="library_user_stats.html", data=None, title="Player Stats")
|
||||
|
||||
@@ -574,12 +575,12 @@ class WebInterface(object):
|
||||
if result:
|
||||
return serve_template(templatename="library_user_stats.html", data=result, title="Player Stats")
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_library_user_stats.")
|
||||
logger.warn(u"Unable to retrieve data for library_user_stats.")
|
||||
return serve_template(templatename="library_user_stats.html", data=None, title="Player Stats")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
def get_library_recently_watched(self, section_id=None, limit='10', **kwargs):
|
||||
def library_recently_watched(self, section_id=None, limit='10', **kwargs):
|
||||
if not allow_session_library(section_id):
|
||||
return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched")
|
||||
|
||||
@@ -592,12 +593,12 @@ class WebInterface(object):
|
||||
if result:
|
||||
return serve_template(templatename="user_recently_watched.html", data=result, title="Recently Watched")
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_library_recently_watched.")
|
||||
logger.warn(u"Unable to retrieve data for library_recently_watched.")
|
||||
return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
def get_library_recently_added(self, section_id=None, limit='10', **kwargs):
|
||||
def library_recently_added(self, section_id=None, limit='10', **kwargs):
|
||||
if not allow_session_library(section_id):
|
||||
return serve_template(templatename="library_recently_added.html", data=None, title="Recently Added")
|
||||
|
||||
@@ -610,7 +611,7 @@ class WebInterface(object):
|
||||
if result:
|
||||
return serve_template(templatename="library_recently_added.html", data=result['recently_added'], title="Recently Added")
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_library_recently_added.")
|
||||
logger.warn(u"Unable to retrieve data for library_recently_added.")
|
||||
return serve_template(templatename="library_recently_added.html", data=None, title="Recently Added")
|
||||
|
||||
@cherrypy.expose
|
||||
@@ -733,6 +734,132 @@ class WebInterface(object):
|
||||
|
||||
return {'success': result}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def get_library(self, section_id=None, **kwargs):
|
||||
""" Get a library's details.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
{"child_count": null,
|
||||
"count": 887,
|
||||
"do_notify": 1,
|
||||
"do_notify_created": 1,
|
||||
"keep_history": 1,
|
||||
"library_art": "/:/resources/movie-fanart.jpg",
|
||||
"library_thumb": "/:/resources/movie.png",
|
||||
"parent_count": null,
|
||||
"section_id": 1,
|
||||
"section_name": "Movies",
|
||||
"section_type": "movie"
|
||||
}
|
||||
```
|
||||
"""
|
||||
if section_id:
|
||||
library_data = libraries.Libraries()
|
||||
library_details = library_data.get_details(section_id=section_id)
|
||||
if library_details:
|
||||
return library_details
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_library.")
|
||||
else:
|
||||
logger.warn(u"Library details requested but no section_id received.")
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def get_library_watch_time_stats(self, section_id=None, **kwargs):
|
||||
""" Get a library's watch time statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"query_days": 1,
|
||||
"total_plays": 0,
|
||||
"total_time": 0
|
||||
},
|
||||
{"query_days": 7,
|
||||
"total_plays": 3,
|
||||
"total_time": 15694
|
||||
},
|
||||
{"query_days": 30,
|
||||
"total_plays": 35,
|
||||
"total_time": 63054
|
||||
},
|
||||
{"query_days": 0,
|
||||
"total_plays": 508,
|
||||
"total_time": 1183080
|
||||
}
|
||||
]
|
||||
```
|
||||
"""
|
||||
if section_id:
|
||||
library_data = libraries.Libraries()
|
||||
result = library_data.get_watch_time_stats(section_id=section_id)
|
||||
if result:
|
||||
return result
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_library_watch_time_stats.")
|
||||
else:
|
||||
logger.warn(u"Library watch time stats requested but no section_id received.")
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def get_library_user_stats(self, section_id=None, **kwargs):
|
||||
""" Get a library's user statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"friendly_name": "Jon Snow",
|
||||
"total_plays": 170,
|
||||
"user_id": 133788,
|
||||
"user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar"
|
||||
},
|
||||
{"platform_type": "DanyKhaleesi69",
|
||||
"total_plays": 42,
|
||||
"user_id": 8008135,
|
||||
"user_thumb": "https://plex.tv/users/568gwwoib5t98a3a/avatar"
|
||||
},
|
||||
{...},
|
||||
{...}
|
||||
]
|
||||
```
|
||||
"""
|
||||
if section_id:
|
||||
library_data = libraries.Libraries()
|
||||
result = library_data.get_user_stats(section_id=section_id)
|
||||
if result:
|
||||
return result
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_library_user_stats.")
|
||||
else:
|
||||
logger.warn(u"Library user stats requested but no section_id received.")
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@@ -977,9 +1104,9 @@ class WebInterface(object):
|
||||
if not allow_session_user(user_id):
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT)
|
||||
|
||||
user_data = users.Users()
|
||||
if user_id:
|
||||
try:
|
||||
user_data = users.Users()
|
||||
user_details = user_data.get_details(user_id=user_id)
|
||||
except:
|
||||
logger.warn(u"Unable to retrieve user details for user_id %s " % user_id)
|
||||
@@ -993,8 +1120,8 @@ class WebInterface(object):
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def edit_user_dialog(self, user=None, user_id=None, **kwargs):
|
||||
user_data = users.Users()
|
||||
if user_id:
|
||||
user_data = users.Users()
|
||||
result = user_data.get_details(user_id=user_id)
|
||||
status_message = ''
|
||||
else:
|
||||
@@ -1030,9 +1157,9 @@ class WebInterface(object):
|
||||
keep_history = kwargs.get('keep_history', 0)
|
||||
allow_guest = kwargs.get('allow_guest', 0)
|
||||
|
||||
user_data = users.Users()
|
||||
if user_id:
|
||||
try:
|
||||
user_data = users.Users()
|
||||
user_data.set_config(user_id=user_id,
|
||||
friendly_name=friendly_name,
|
||||
custom_thumb=custom_thumb,
|
||||
@@ -1047,7 +1174,7 @@ class WebInterface(object):
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
def get_user_watch_time_stats(self, user=None, user_id=None, **kwargs):
|
||||
def user_watch_time_stats(self, user=None, user_id=None, **kwargs):
|
||||
if not allow_session_user(user_id):
|
||||
return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
|
||||
|
||||
@@ -1060,12 +1187,12 @@ class WebInterface(object):
|
||||
if result:
|
||||
return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats")
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_user_watch_time_stats.")
|
||||
logger.warn(u"Unable to retrieve data for user_watch_time_stats.")
|
||||
return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
def get_user_player_stats(self, user=None, user_id=None, **kwargs):
|
||||
def user_player_stats(self, user=None, user_id=None, **kwargs):
|
||||
if not allow_session_user(user_id):
|
||||
return serve_template(templatename="user_player_stats.html", data=None, title="Player Stats")
|
||||
|
||||
@@ -1078,7 +1205,7 @@ class WebInterface(object):
|
||||
if result:
|
||||
return serve_template(templatename="user_player_stats.html", data=result, title="Player Stats")
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_user_player_stats.")
|
||||
logger.warn(u"Unable to retrieve data for user_player_stats.")
|
||||
return serve_template(templatename="user_player_stats.html", data=None, title="Player Stats")
|
||||
|
||||
@cherrypy.expose
|
||||
@@ -1170,7 +1297,7 @@ class WebInterface(object):
|
||||
@requireAuth()
|
||||
@addtoapi()
|
||||
def get_user_logins(self, user_id=None, **kwargs):
|
||||
""" Get the data on PlexPy user login table.
|
||||
""" Get the data on PlexPy user login table.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
@@ -1189,15 +1316,15 @@ class WebInterface(object):
|
||||
"recordsTotal": 2344,
|
||||
"recordsFiltered": 10,
|
||||
"data":
|
||||
[{"browser": "Safari 7.0.3",
|
||||
"friendly_name": "Jon Snow",
|
||||
"host": "http://plexpy.castleblack.com",
|
||||
"ip_address": "xxx.xxx.xxx.xxx",
|
||||
"os": "Mac OS X",
|
||||
"timestamp": 1462591869,
|
||||
"user": "LordCommanderSnow",
|
||||
"user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A",
|
||||
"user_group": "guest",
|
||||
[{"browser": "Safari 7.0.3",
|
||||
"friendly_name": "Jon Snow",
|
||||
"host": "http://plexpy.castleblack.com",
|
||||
"ip_address": "xxx.xxx.xxx.xxx",
|
||||
"os": "Mac OS X",
|
||||
"timestamp": 1462591869,
|
||||
"user": "LordCommanderSnow",
|
||||
"user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A",
|
||||
"user_group": "guest",
|
||||
"user_id": 133788
|
||||
},
|
||||
{...},
|
||||
@@ -1222,6 +1349,134 @@ class WebInterface(object):
|
||||
|
||||
return history
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def get_user(self, user_id=None, **kwargs):
|
||||
""" Get a user's details.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
user_id (str): The id of the Plex user
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
{"allow_guest": 1,
|
||||
"deleted_user": 0,
|
||||
"do_notify": 1,
|
||||
"email": "Jon.Snow.1337@CastleBlack.com",
|
||||
"friendly_name": "Jon Snow",
|
||||
"is_allow_sync": 1,
|
||||
"is_home_user": 1,
|
||||
"is_restricted": 0,
|
||||
"keep_history": 1,
|
||||
"shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
|
||||
"user_id": 133788,
|
||||
"user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
|
||||
"username": "LordCommanderSnow"
|
||||
}
|
||||
```
|
||||
"""
|
||||
if user_id:
|
||||
user_data = users.Users()
|
||||
user_details = user_data.get_details(user_id=user_id)
|
||||
if user_details:
|
||||
return user_details
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_user.")
|
||||
else:
|
||||
logger.warn(u"User details requested but no user_id received.")
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def get_user_watch_time_stats(self, user_id=None, **kwargs):
|
||||
""" Get a user's watch time statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
user_id (str): The id of the Plex user
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"query_days": 1,
|
||||
"total_plays": 0,
|
||||
"total_time": 0
|
||||
},
|
||||
{"query_days": 7,
|
||||
"total_plays": 3,
|
||||
"total_time": 15694
|
||||
},
|
||||
{"query_days": 30,
|
||||
"total_plays": 35,
|
||||
"total_time": 63054
|
||||
},
|
||||
{"query_days": 0,
|
||||
"total_plays": 508,
|
||||
"total_time": 1183080
|
||||
}
|
||||
]
|
||||
```
|
||||
"""
|
||||
if user_id:
|
||||
user_data = users.Users()
|
||||
result = user_data.get_watch_time_stats(user_id=user_id)
|
||||
if result:
|
||||
return result
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_user_watch_time_stats.")
|
||||
else:
|
||||
logger.warn(u"User watch time stats requested but no user_id received.")
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def get_user_player_stats(self, user_id=None, **kwargs):
|
||||
""" Get a user's player statistics.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
user_id (str): The id of the Plex user
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
[{"platform_type": "Chrome",
|
||||
"player_name": "Plex Web (Chrome)",
|
||||
"result_id": 1,
|
||||
"total_plays": 170
|
||||
},
|
||||
{"platform_type": "Chromecast",
|
||||
"player_name": "Chromecast",
|
||||
"result_id": 2,
|
||||
"total_plays": 42
|
||||
},
|
||||
{...},
|
||||
{...}
|
||||
]
|
||||
```
|
||||
"""
|
||||
if user_id:
|
||||
user_data = users.Users()
|
||||
result = user_data.get_player_stats(user_id=user_id)
|
||||
if result:
|
||||
return result
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_user_player_stats.")
|
||||
else:
|
||||
logger.warn(u"User watch time stats requested but no user_id received.")
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@@ -1240,9 +1495,8 @@ class WebInterface(object):
|
||||
None
|
||||
```
|
||||
"""
|
||||
user_data = users.Users()
|
||||
|
||||
if user_id:
|
||||
user_data = users.Users()
|
||||
delete_row = user_data.delete_all_history(user_id=user_id)
|
||||
if delete_row:
|
||||
return {'message': delete_row}
|
||||
@@ -1267,11 +1521,9 @@ class WebInterface(object):
|
||||
None
|
||||
```
|
||||
"""
|
||||
user_data = users.Users()
|
||||
|
||||
if user_id:
|
||||
user_data = users.Users()
|
||||
delete_row = user_data.delete(user_id=user_id)
|
||||
|
||||
if delete_row:
|
||||
return {'message': delete_row}
|
||||
else:
|
||||
@@ -1296,16 +1548,14 @@ class WebInterface(object):
|
||||
None
|
||||
```
|
||||
"""
|
||||
user_data = users.Users()
|
||||
|
||||
if user_id:
|
||||
user_data = users.Users()
|
||||
delete_row = user_data.undelete(user_id=user_id)
|
||||
|
||||
if delete_row:
|
||||
return {'message': delete_row}
|
||||
elif username:
|
||||
user_data = users.Users()
|
||||
delete_row = user_data.undelete(username=username)
|
||||
|
||||
if delete_row:
|
||||
return {'message': delete_row}
|
||||
else:
|
||||
@@ -1984,55 +2234,54 @@ class WebInterface(object):
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def getLog(self, start=0, length=100, **kwargs):
|
||||
start = int(start)
|
||||
length = int(length)
|
||||
search_value = ""
|
||||
search_regex = ""
|
||||
order_column = 0
|
||||
order_dir = "desc"
|
||||
def getLog(self, **kwargs):
|
||||
json_data = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
|
||||
log_level = kwargs.get('log_level', "")
|
||||
|
||||
if 'order[0][dir]' in kwargs:
|
||||
order_dir = kwargs.get('order[0][dir]', "desc")
|
||||
|
||||
if 'order[0][column]' in kwargs:
|
||||
order_column = kwargs.get('order[0][column]', "0")
|
||||
|
||||
if 'search[value]' in kwargs:
|
||||
search_value = kwargs.get('search[value]', "")
|
||||
|
||||
if 'search[regex]' in kwargs:
|
||||
search_regex = kwargs.get('search[regex]', "")
|
||||
start = json_data['start']
|
||||
length = json_data['length']
|
||||
order_column = json_data['order'][0]['column']
|
||||
order_dir = json_data['order'][0]['dir']
|
||||
search_value = json_data['search']['value']
|
||||
sortcolumn = 0
|
||||
|
||||
filt = []
|
||||
filtered = []
|
||||
fa = filt.append
|
||||
with open(os.path.join(plexpy.CONFIG.LOG_DIR, logger.FILENAME)) as f:
|
||||
for l in f.readlines():
|
||||
try:
|
||||
temp_loglevel_and_time = l.split(' - ', 1)
|
||||
loglvl = temp_loglevel_and_time[1].split(' ::', 1)[0].strip()
|
||||
msg = l.split(' : ', 1)[1].replace('\n', '')
|
||||
msg = unicode(l.split(' : ', 1)[1].replace('\n', ''), 'utf-8')
|
||||
fa([temp_loglevel_and_time[0], loglvl, msg])
|
||||
except IndexError:
|
||||
# Add traceback message to previous msg.
|
||||
tl = (len(filt) - 1)
|
||||
n = len(l) - len(l.lstrip(' '))
|
||||
l = ' ' * (2*n) + l[n:]
|
||||
l = ' ' * (2 * n) + l[n:]
|
||||
filt[tl][2] += '<br>' + l
|
||||
continue
|
||||
|
||||
filtered = []
|
||||
if search_value == '':
|
||||
filtered = filt
|
||||
log_levels = ['DEBUG', 'INFO', 'WARN', 'ERROR']
|
||||
if log_level in log_levels:
|
||||
log_levels = log_levels[log_levels.index(log_level)::]
|
||||
filtered = [row for row in filt if row[1] in log_levels]
|
||||
else:
|
||||
filtered = [row for row in filt for column in row if search_value.lower() in column.lower()]
|
||||
filtered = filt
|
||||
|
||||
if search_value:
|
||||
filtered = [row for row in filtered for column in row if search_value.lower() in column.lower()]
|
||||
|
||||
sortcolumn = 0
|
||||
if order_column == '1':
|
||||
sortcolumn = 2
|
||||
elif order_column == '2':
|
||||
sortcolumn = 1
|
||||
filtered.sort(key=lambda x: x[sortcolumn], reverse=order_dir == "desc")
|
||||
|
||||
filtered.sort(key=lambda x: x[sortcolumn])
|
||||
|
||||
if order_dir == 'desc':
|
||||
filtered = filtered[::-1]
|
||||
|
||||
rows = filtered[start:(start + length)]
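The reworked `getLog` reads the DataTables parameters out of `json_data` and then filters, sorts and pages the parsed rows in plain Python. A condensed sketch of that pipeline on already-parsed `[timestamp, level, message]` rows; the search step uses `any()` per row, a slight simplification of the list comprehension above:

```
# Condensed sketch of the filter/sort/page pipeline used by getLog,
# operating on rows of [timestamp, level, message].
LOG_LEVELS = ['DEBUG', 'INFO', 'WARN', 'ERROR']

def page_log_rows(rows, log_level='', search_value='', order_column='0',
                  order_dir='desc', start=0, length=100):
    if log_level in LOG_LEVELS:
        # Keep the selected level and everything more severe.
        allowed = LOG_LEVELS[LOG_LEVELS.index(log_level):]
        rows = [row for row in rows if row[1] in allowed]
    if search_value:
        rows = [row for row in rows
                if any(search_value.lower() in column.lower() for column in row)]
    # Table column 1 sorts by message, column 2 by level, anything else by timestamp.
    sortcolumn = {'1': 2, '2': 1}.get(order_column, 0)
    rows = sorted(rows, key=lambda row: row[sortcolumn], reverse=(order_dir == 'desc'))
    return rows[start:start + length]
```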
@@ -2046,7 +2295,7 @@ class WebInterface(object):
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def get_plex_log(self, window=1000, **kwargs):
|
||||
def get_plex_log(self, **kwargs):
|
||||
""" Get the PMS logs.
|
||||
|
||||
```
|
||||
@@ -2068,6 +2317,7 @@ class WebInterface(object):
|
||||
]
|
||||
```
|
||||
"""
|
||||
window = int(kwargs.get('window', plexpy.CONFIG.PMS_LOGS_LINE_CAP))
|
||||
log_lines = []
|
||||
log_type = kwargs.get('log_type', 'server')
|
||||
|
||||
@@ -2215,7 +2465,7 @@ class WebInterface(object):
|
||||
log_dir=plexpy.CONFIG.LOG_DIR, verbose=plexpy.VERBOSE)
|
||||
logger.info(u"Verbose toggled, set to %s", plexpy.VERBOSE)
|
||||
logger.debug(u"If you read this message, debug logging is available")
|
||||
raise cherrypy.HTTPRedirect("logs")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "logs")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
@@ -2241,7 +2491,7 @@ class WebInterface(object):
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def settings(self):
|
||||
def settings(self, **kwargs):
|
||||
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
|
||||
interface_list = [name for name in os.listdir(interface_dir) if
|
||||
os.path.isdir(os.path.join(interface_dir, name))]
|
||||
@@ -2255,6 +2505,7 @@ class WebInterface(object):
|
||||
|
||||
config = {
|
||||
"allow_guest_access": checked(plexpy.CONFIG.ALLOW_GUEST_ACCESS),
|
||||
"http_basic_auth": checked(plexpy.CONFIG.HTTP_BASIC_AUTH),
|
||||
"http_hash_password": checked(plexpy.CONFIG.HTTP_HASH_PASSWORD),
|
||||
"http_hashed_password": plexpy.CONFIG.HTTP_HASHED_PASSWORD,
|
||||
"http_host": plexpy.CONFIG.HTTP_HOST,
|
||||
@@ -2262,6 +2513,7 @@ class WebInterface(object):
|
||||
"http_port": plexpy.CONFIG.HTTP_PORT,
|
||||
"http_password": http_password,
|
||||
"http_root": plexpy.CONFIG.HTTP_ROOT,
|
||||
"http_proxy": checked(plexpy.CONFIG.HTTP_PROXY),
|
||||
"launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER),
|
||||
"enable_https": checked(plexpy.CONFIG.ENABLE_HTTPS),
|
||||
"https_create_cert": checked(plexpy.CONFIG.HTTPS_CREATE_CERT),
|
||||
@@ -2317,6 +2569,8 @@ class WebInterface(object):
|
||||
"notify_recently_added": checked(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED),
|
||||
"notify_recently_added_grandparent": checked(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT),
|
||||
"notify_recently_added_delay": plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY,
|
||||
"notify_concurrent_by_ip": plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP,
|
||||
"notify_concurrent_threshold": plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD,
|
||||
"notify_watched_percent": plexpy.CONFIG.NOTIFY_WATCHED_PERCENT,
|
||||
"notify_on_start_subject_text": plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT,
|
||||
"notify_on_start_body_text": plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT,
|
||||
@@ -2342,6 +2596,10 @@ class WebInterface(object):
|
||||
"notify_on_intup_body_text": plexpy.CONFIG.NOTIFY_ON_INTUP_BODY_TEXT,
|
||||
"notify_on_pmsupdate_subject_text": plexpy.CONFIG.NOTIFY_ON_PMSUPDATE_SUBJECT_TEXT,
|
||||
"notify_on_pmsupdate_body_text": plexpy.CONFIG.NOTIFY_ON_PMSUPDATE_BODY_TEXT,
|
||||
"notify_on_concurrent_subject_text": plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT,
|
||||
"notify_on_concurrent_body_text": plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT,
|
||||
"notify_on_newdevice_subject_text": plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT,
|
||||
"notify_on_newdevice_body_text": plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT,
|
||||
"notify_scripts_args_text": plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT,
|
||||
"home_sections": json.dumps(plexpy.CONFIG.HOME_SECTIONS),
|
||||
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
|
||||
@@ -2354,10 +2612,11 @@ class WebInterface(object):
|
||||
"group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES),
|
||||
"git_token": plexpy.CONFIG.GIT_TOKEN,
|
||||
"imgur_client_id": plexpy.CONFIG.IMGUR_CLIENT_ID,
|
||||
"cache_images": checked(plexpy.CONFIG.CACHE_IMAGES)
|
||||
"cache_images": checked(plexpy.CONFIG.CACHE_IMAGES),
|
||||
"pms_version": plexpy.CONFIG.PMS_VERSION
|
||||
}
|
||||
|
||||
return serve_template(templatename="settings.html", title="Settings", config=config)
|
||||
return serve_template(templatename="settings.html", title="Settings", config=config, kwargs=kwargs)
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -2374,7 +2633,7 @@ class WebInterface(object):
|
||||
"ip_logging_enable", "movie_logging_enable", "tv_logging_enable", "music_logging_enable",
|
||||
"notify_consecutive", "notify_upload_posters", "notify_recently_added", "notify_recently_added_grandparent",
|
||||
"monitor_pms_updates", "monitor_remote_access", "get_file_sizes", "log_blacklist", "http_hash_password",
|
||||
"allow_guest_access", "cache_images"
|
||||
"allow_guest_access", "cache_images", "http_proxy", "http_basic_auth"
|
||||
]
|
||||
for checked_config in checked_configs:
|
||||
if checked_config not in kwargs:
|
||||
@@ -2512,11 +2771,27 @@ class WebInterface(object):
|
||||
else:
|
||||
return {'result': 'error', 'message': 'Config backup failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_configuration_table(self, **kwargs):
|
||||
return serve_template(templatename="configuration_table.html")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_scheduler_table(self, **kwargs):
|
||||
return serve_template(templatename="scheduler_table.html")
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_server_update_params(self):
|
||||
plex_tv = plextv.PlexTV()
|
||||
plexpass = plex_tv.get_plexpass_status()
|
||||
return {'plexpass': plexpass,
|
||||
'pms_platform': plexpy.CONFIG.PMS_PLATFORM,
|
||||
'pms_update_channel': plexpy.CONFIG.PMS_UPDATE_CHANNEL,
|
||||
'pms_update_distro_build': plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@@ -2530,6 +2805,34 @@ class WebInterface(object):
|
||||
else:
|
||||
return {'result': 'error', 'message': 'Database backup failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def install_geoip_db(self):
|
||||
""" Downloads and installs the GeoLite2 database """
|
||||
|
||||
result = helpers.install_geoip_db()
|
||||
|
||||
if result:
|
||||
return {'result': 'success', 'message': 'GeoLite2 database installed successfully.'}
|
||||
else:
|
||||
return {'result': 'error', 'message': 'GeoLite2 database install failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def uninstall_geoip_db(self):
|
||||
""" Uninstalls the GeoLite2 database """
|
||||
|
||||
result = helpers.uninstall_geoip_db()
|
||||
|
||||
if result:
|
||||
return {'result': 'success', 'message': 'GeoLite2 database uninstalled successfully.'}
|
||||
else:
|
||||
return {'result': 'error', 'message': 'GeoLite2 database uninstall failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_notification_agent_config(self, agent_id, **kwargs):
|
||||
@@ -2576,6 +2879,25 @@ class WebInterface(object):
|
||||
```
|
||||
Required parameters:
|
||||
agent_id(str): The id of the notification agent to use
|
||||
9 # Boxcar2
|
||||
17 # Browser
|
||||
10 # Email
|
||||
16 # Facebook
|
||||
0 # Growl
|
||||
19 # Hipchat
|
||||
12 # IFTTT
|
||||
18 # Join
|
||||
4 # NotifyMyAndroid
|
||||
3 # Plex Home Theater
|
||||
1 # Prowl
|
||||
5 # Pushalot
|
||||
6 # Pushbullet
|
||||
7 # Pushover
|
||||
15 # Scripts
|
||||
14 # Slack
|
||||
13 # Telegram
|
||||
11 # Twitter
|
||||
2 # XBMC
|
||||
subject(str): The subject of the message
|
||||
body(str): The body of the message
|
||||
|
||||
@@ -2857,7 +3179,7 @@ class WebInterface(object):
|
||||
@requireAuth(member_of("admin"))
|
||||
def checkGithub(self):
|
||||
versioncheck.checkGithub()
|
||||
raise cherrypy.HTTPRedirect("home")
|
||||
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "home")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
@@ -2938,13 +3260,19 @@ class WebInterface(object):
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth()
|
||||
def pms_image_proxy(self, img='', rating_key=None, width='0', height='0', fallback=None, **kwargs):
|
||||
def pms_image_proxy(self, img='', rating_key=None, width='0', height='0',
|
||||
fallback=None, refresh=False, **kwargs):
|
||||
|
||||
""" Gets an image from the PMS and saves it to the image cache directory. """
|
||||
|
||||
if not img and not rating_key:
|
||||
logger.error('No image input received.')
|
||||
return
|
||||
|
||||
if refresh:
|
||||
mo = member_of('admin')
|
||||
refresh = True if mo() else False
|
||||
|
||||
if rating_key and not img:
|
||||
img = '/library/metadata/%s/thumb/1337' % rating_key
|
||||
|
||||
@@ -2959,8 +3287,9 @@ class WebInterface(object):
|
||||
os.mkdir(c_dir)
|
||||
|
||||
try:
|
||||
if 'indexes' in img:
|
||||
if not plexpy.CONFIG.CACHE_IMAGES or refresh or 'indexes' in img:
|
||||
raise NotFound
|
||||
|
||||
return serve_file(path=ffp, content_type='image/jpeg')
|
||||
|
||||
except NotFound:
|
||||
@@ -2980,7 +3309,7 @@ class WebInterface(object):
|
||||
raise Exception(u'PMS image request failed')
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(u'Failed to get image %s, falling back to %s.' % (img, fallback))
|
||||
logger.warn(u'Failed to get image %s, falling back to %s.' % (img, fallback))
|
||||
fbi = None
|
||||
if fallback == 'poster':
|
||||
fbi = common.DEFAULT_POSTER_THUMB
|
||||
@@ -3007,6 +3336,30 @@ class WebInterface(object):
|
||||
|
||||
return serve_download(os.path.join(plexpy.CONFIG.LOG_DIR, log_file), name=log_file)
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def download_plex_log(self, **kwargs):
|
||||
""" Download the Plex log file. """
|
||||
log_type = kwargs.get('log_type', 'server')
|
||||
|
||||
log_file = ""
|
||||
if plexpy.CONFIG.PMS_LOGS_FOLDER:
|
||||
if log_type == "server":
|
||||
log_file = 'Plex Media Server.log'
|
||||
log_file_path = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, log_file)
|
||||
elif log_type == "scanner":
|
||||
log_file = 'Plex Media Scanner.log'
|
||||
log_file_path = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, log_file)
|
||||
else:
|
||||
return "Plex log folder not set in the settings."
|
||||
|
||||
|
||||
if log_file and os.path.isfile(log_file_path):
|
||||
return serve_download(log_file_path, name=log_file)
|
||||
else:
|
||||
return "Plex %s log file not found." % log_type
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@@ -3336,7 +3689,7 @@ class WebInterface(object):
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi("get_recently_added")
|
||||
def get_recently_added_details(self, count='0', section_id='', **kwargs):
|
||||
def get_recently_added_details(self, start='0', count='0', section_id='', **kwargs):
|
||||
""" Get all items that where recelty added to plex.
|
||||
|
||||
```
|
||||
@@ -3344,6 +3697,7 @@ class WebInterface(object):
|
||||
count (str): Number of items to return
|
||||
|
||||
Optional parameters:
|
||||
start (str): The item number to start at
|
||||
section_id (str): The id of the Plex library section
|
||||
|
||||
Returns:
|
||||
@@ -3373,7 +3727,7 @@ class WebInterface(object):
|
||||
```
|
||||
"""
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
result = pms_connect.get_recently_added_details(count=count, section_id=section_id)
|
||||
result = pms_connect.get_recently_added_details(start=start, count=count, section_id=section_id)
|
||||
|
||||
if result:
|
||||
return result
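The new `start` parameter makes the recently-added listing pageable. A hedged sketch of walking pages by bumping `start` by `count`; `fetch` stands in for whatever transport is used (the internal `PmsConnect` call or the API command), and the `recently_added` key matches the result handling shown above:

```
# Sketch: page through recently added items using the new 'start'
# parameter. fetch(start=..., count=...) is a caller-supplied function;
# the 'recently_added' key mirrors how the result is unpacked above.
def iter_recently_added(fetch, count=25):
    start = 0
    while True:
        result = fetch(start=start, count=count)
        items = (result or {}).get('recently_added', [])
        if not items:
            break
        for item in items:
            yield item
        start += count
```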
@@ -3615,19 +3969,22 @@ class WebInterface(object):
|
||||
}
|
||||
```
|
||||
"""
|
||||
pms_connect = pmsconnect.PmsConnect(token=plexpy.CONFIG.PMS_TOKEN)
|
||||
result = pms_connect.get_current_activity()
|
||||
try:
|
||||
pms_connect = pmsconnect.PmsConnect(token=plexpy.CONFIG.PMS_TOKEN)
|
||||
result = pms_connect.get_current_activity()
|
||||
|
||||
data_factory = datafactory.DataFactory()
|
||||
for session in result['sessions']:
|
||||
if not session['ip_address']:
|
||||
ip_address = data_factory.get_session_ip(session['session_key'])
|
||||
session['ip_address'] = ip_address
|
||||
if result:
|
||||
data_factory = datafactory.DataFactory()
|
||||
for session in result['sessions']:
|
||||
if not session['ip_address']:
|
||||
ip_address = data_factory.get_session_ip(session['session_key'])
|
||||
session['ip_address'] = ip_address
|
||||
|
||||
if result:
|
||||
return result
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_activity.")
|
||||
return result
|
||||
else:
|
||||
logger.warn(u"Unable to retrieve data for get_activity.")
|
||||
except Exception as e:
|
||||
logger.exception(u"Unable to retrieve data for get_activity: %s" % e)
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -3899,7 +4256,7 @@ class WebInterface(object):
|
||||
'Can you hurry up. My horse is getting tired.',
|
||||
'What killed the dinosaurs? The Ice Age!',
|
||||
'That\'s for sleeping with my wife!',
|
||||
'Remember when I said I’d kill you last... I lied!',
|
||||
'Remember when I said I\'d kill you last... I lied!',
|
||||
'You want to be a farmer? Here\'s a couple of acres',
|
||||
'Now, this is the plan. Get your ass to Mars.',
|
||||
'I just had a terrible thought... What if this is a dream?'
|
||||
@@ -3926,3 +4283,39 @@ class WebInterface(object):
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
result = pms_connect.get_update_staus()
|
||||
return result
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth()
|
||||
@addtoapi()
|
||||
def get_geoip_lookup(self, ip_address='', **kwargs):
|
||||
""" Get the geolocation info for an IP address. The GeoLite2 database must be installed.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
ip_address (str): The IP address to look up
|
||||
|
||||
Optional parameters:
|
||||
None
|
||||
|
||||
Returns:
|
||||
json:
|
||||
{"continent": "North America",
|
||||
"country": "United States",
|
||||
"region": "California",
|
||||
"city": "Mountain View",
|
||||
"postal_code": "94035",
|
||||
"timezone": "America/Los_Angeles",
|
||||
"latitude": 37.386,
|
||||
"longitude": -122.0838,
|
||||
"accuracy": 1000
|
||||
}
|
||||
json:
|
||||
{"error": "The address 127.0.0.1 is not in the database."
|
||||
}
|
||||
```
|
||||
"""
|
||||
geo_info = helpers.geoip_lookup(ip_address)
|
||||
if isinstance(geo_info, basestring):
|
||||
return {'error': geo_info}
|
||||
return geo_info
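`get_geoip_lookup` returns either the geolocation fields or a dict with an `error` key, as the two sample responses above show. A small sketch that handles both shapes:

```
# Sketch: handle both documented response shapes of get_geoip_lookup.
def describe_location(geo_info):
    if 'error' in geo_info:
        # e.g. the GeoLite2 database is not installed, or the address is not in it.
        return geo_info['error']
    return '%s, %s, %s (%s)' % (geo_info['city'], geo_info['region'],
                                geo_info['country'], geo_info['timezone'])

print(describe_location({'error': 'The address 127.0.0.1 is not in the database.'}))
```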
@@ -66,10 +66,15 @@ def initialize(options):
|
||||
|
||||
if options['http_password']:
|
||||
logger.info(u"PlexPy WebStart :: Web server authentication is enabled, username is '%s'", options['http_username'])
|
||||
options_dict['tools.sessions.on'] = auth_enabled = session_enabled = True
|
||||
cherrypy.tools.auth = cherrypy.Tool('before_handler', webauth.check_auth)
|
||||
if options['http_basic_auth']:
|
||||
auth_enabled = session_enabled = False
|
||||
basic_auth_enabled = True
|
||||
else:
|
||||
options_dict['tools.sessions.on'] = auth_enabled = session_enabled = True
|
||||
basic_auth_enabled = False
|
||||
cherrypy.tools.auth = cherrypy.Tool('before_handler', webauth.check_auth)
|
||||
else:
|
||||
auth_enabled = session_enabled = False
|
||||
auth_enabled = session_enabled = basic_auth_enabled = False
|
||||
|
||||
if not options['http_root'] or options['http_root'] == '/':
|
||||
plexpy.HTTP_ROOT = options['http_root'] = '/'
|
||||
@@ -88,7 +93,14 @@ def initialize(options):
|
||||
'application/javascript'],
|
||||
'tools.auth.on': auth_enabled,
|
||||
'tools.sessions.on': session_enabled,
|
||||
'tools.sessions.timeout': 30 * 24 * 60 # 30 days
|
||||
'tools.sessions.timeout': 30 * 24 * 60, # 30 days
|
||||
'tools.auth_basic.on': basic_auth_enabled,
|
||||
'tools.auth_basic.realm': 'PlexPy web server',
|
||||
'tools.auth_basic.checkpassword': cherrypy.lib.auth_basic.checkpassword_dict({
|
||||
options['http_username']: options['http_password']})
|
||||
},
|
||||
'/api': {
|
||||
'tools.auth_basic.on': False
|
||||
},
|
||||
'/interfaces': {
|
||||
'tools.staticdir.on': True,
|
||||
@@ -178,17 +190,17 @@ def initialize(options):
|
||||
'tools.auth.on': False,
|
||||
'tools.sessions.on': False
|
||||
},
|
||||
'/pms_image_proxy': {
|
||||
'tools.staticdir.on': True,
|
||||
'tools.staticdir.dir': os.path.join(plexpy.CONFIG.CACHE_DIR, 'images'),
|
||||
'tools.caching.on': True,
|
||||
'tools.caching.force': True,
|
||||
'tools.caching.delay': 0,
|
||||
'tools.expires.on': True,
|
||||
'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
|
||||
'tools.auth.on': False,
|
||||
'tools.sessions.on': False
|
||||
},
|
||||
#'/pms_image_proxy': {
|
||||
# 'tools.staticdir.on': True,
|
||||
# 'tools.staticdir.dir': os.path.join(plexpy.CONFIG.CACHE_DIR, 'images'),
|
||||
# 'tools.caching.on': True,
|
||||
# 'tools.caching.force': True,
|
||||
# 'tools.caching.delay': 0,
|
||||
# 'tools.expires.on': True,
|
||||
# 'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
|
||||
# 'tools.auth.on': False,
|
||||
# 'tools.sessions.on': False
|
||||
#},
|
||||
'/favicon.ico': {
|
||||
'tools.staticfile.on': True,
|
||||
'tools.staticfile.filename': os.path.abspath(os.path.join(plexpy.PROG_DIR, 'data/interfaces/default/images/favicon.ico')),
|
||||
@@ -199,7 +211,7 @@ def initialize(options):
|
||||
'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
|
||||
'tools.auth.on': False,
|
||||
'tools.sessions.on': False
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
# Prevent time-outs
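The webstart changes above switch PlexPy to CherryPy's built-in `auth_basic` tool when HTTP basic auth is enabled and exempt `/api` so API-key access keeps working. A minimal standalone sketch of the same CherryPy configuration, with the credentials as placeholders:

```
# Minimal standalone sketch of the basic-auth setup used above: auth_basic
# is enabled globally with one username/password pair and switched off
# again for the /api mount point. Credentials are placeholders.
import cherrypy
from cherrypy.lib import auth_basic

class Root(object):
    @cherrypy.expose
    def index(self):
        return 'protected by basic auth'

    @cherrypy.expose
    def api(self):
        return 'left open for API-key auth instead'

USERS = {'admin': 'password'}

conf = {
    '/': {
        'tools.auth_basic.on': True,
        'tools.auth_basic.realm': 'PlexPy web server',
        'tools.auth_basic.checkpassword': auth_basic.checkpassword_dict(USERS),
    },
    '/api': {
        'tools.auth_basic.on': False,
    },
}

if __name__ == '__main__':
    cherrypy.quickstart(Root(), '/', conf)
```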