Compare commits

...

130 Commits

Author SHA1 Message Date
JonnyWong16
6542997520 Merge branch 'dev' 2016-02-21 23:07:33 -08:00
JonnyWong16
a58b2e2038 v1.3.9 2016-02-21 23:06:22 -08:00
JonnyWong16
6860e348dc Fix typo in setting recently added notification state 2016-02-21 23:01:18 -08:00
JonnyWong16
5e094e7597 Merge pull request #477 from elseym/pushover-html-support
Pushover HTML Support
2016-02-21 21:35:07 -08:00
JonnyWong16
2f2cb8386b Change wording for enable posters in notification help text 2016-02-21 20:41:18 -08:00
JonnyWong16
965fd170bd Merge branch 'dev' 2016-02-21 20:32:14 -08:00
JonnyWong16
2610d29b60 v1.3.8 2016-02-21 20:31:51 -08:00
JonnyWong16
064131c842 Uncheck monitor remote access if remote access disabled on server 2016-02-21 17:20:18 -08:00
JonnyWong16
00b6bf8394 Return default ip_address/poster_url if database query fails 2016-02-21 17:11:49 -08:00
JonnyWong16
2a885d709d Allow disabling poster upload to Imgur
* Disabled by default
2016-02-21 17:06:05 -08:00
JonnyWong16
5bed46c0aa Encode poster title to UTF-8 for Imgur upload 2016-02-21 17:05:10 -08:00
JonnyWong16
8b27c7e01a Remove poster url from notification logs table 2016-02-21 16:42:29 -08:00
JonnyWong16
177902a286 Remove media tags from script_args for server notifications 2016-02-21 16:42:08 -08:00
JonnyWong16
48b0f7dc27 Fix NoneType error in set_notify_state 2016-02-21 16:33:42 -08:00
JonnyWong16
d5f4a1a48a Make readme consistent with settings page 2016-02-21 16:18:26 -08:00
JonnyWong16
de9f60aa7f Add notification log table 2016-02-21 15:45:28 -08:00
JonnyWong16
c93b65b299 Rework notify_log table to save each notification separately 2016-02-21 15:44:21 -08:00
JonnyWong16
3c6a6cdc5b Fix wording on settings page 2016-02-21 14:56:19 -08:00
JonnyWong16
b669f3d715 Fix regression unable to clear the http password 2016-02-21 09:58:27 -08:00
JonnyWong16
f663fac220 Save Imgur URL to database 2016-02-21 09:34:51 -08:00
JonnyWong16
bc42e79bb5 Catch HTTP errors for Imgur upload 2016-02-21 09:33:31 -08:00
JonnyWong16
ca29333cd0 Log if opening secure websocket 2016-02-20 20:50:20 -08:00
JonnyWong16
f9f478e100 Update CONTRIBUTING.md with info about issue reporting and feature requests 2016-02-20 20:47:33 -08:00
JonnyWong16
97c414d1ad Merge branch 'dev' 2016-02-20 19:54:37 -08:00
JonnyWong16
7afbd98d17 v1.3.7 2016-02-20 19:53:21 -08:00
JonnyWong16
1f5c60588e Change Facebook help text 2016-02-20 08:45:00 -08:00
JonnyWong16
284ab45a17 Upload Plex posters to Imgur for notifications 2016-02-19 23:25:33 -08:00
JonnyWong16
eab6365af9 Disable IP logging checkbox depending on server version 2016-02-19 21:02:48 -08:00
JonnyWong16
de86516a0a Disable monitor remote access checkbox if remote access is disabled
* And anonymize URLs
2016-02-18 22:48:02 -08:00
JonnyWong16
3e50e11933 Simplify log_type 2016-02-18 22:26:13 -08:00
JonnyWong16
e2ac8be451 Cleanup save settings 2016-02-18 22:24:19 -08:00
JonnyWong16
0e53252a27 Move get poster to notification handler 2016-02-18 21:09:07 -08:00
JonnyWong16
b1ecff3d10 Add TV posters to Facebook notifications 2016-02-18 18:52:07 -08:00
JonnyWong16
0fee4fee2a Fix typo from e38e98d9e7 2016-02-18 12:03:28 -08:00
drzoidberg33
66282d817c Merge pull request #551 from drzoidberg33/scanner-log-view
Add Plex Media Scanner log files to Log viewer.
2016-02-18 18:08:11 +02:00
Tim Van
932c93e573 Ensure we default to the server log. 2016-02-18 18:06:36 +02:00
Tim Van
71d30af582 Add Plex Media Scanner log files to Log viewer. 2016-02-18 18:01:42 +02:00
JonnyWong16
1c8428c3ea Add backup back to api 2016-02-18 06:53:07 -08:00
JonnyWong16
e38e98d9e7 Some code cleanup for libraries and users 2016-02-17 22:10:00 -08:00
JonnyWong16
85b3f081bf Add scheduled database backups 2016-02-17 18:41:55 -08:00
JonnyWong16
3926d97fc6 Open settings links in new tabs 2016-02-17 18:41:16 -08:00
JonnyWong16
13ac8f2ea4 Revert homepage watch statistic back to "last watched" 2016-02-17 18:33:01 -08:00
JonnyWong16
d94f991ab5 Add icon to scheduler status 2016-02-17 18:32:17 -08:00
JonnyWong16
d476d2e96a Merge pull request #541 from JonnyWong16/ssl-certificates
Create self-signed HTTPS certificates
2016-02-15 18:38:15 -08:00
JonnyWong16
635bf364ac Hide HTTPS Domains and IPs if not creating self-signed certificate 2016-02-15 18:36:01 -08:00
JonnyWong16
e1c7a37f62 Only create self-signed certificate if enabled 2016-02-15 18:36:01 -08:00
JonnyWong16
9d780701f5 Create self-signed HTTPS certificates 2016-02-15 18:36:01 -08:00
JonnyWong16
0bd40405b5 Test poster images for Facebook notifications 2016-02-14 22:36:19 -08:00
JonnyWong16
25c2f95e48 Separate out scheduler table to allow reloading 2016-02-14 21:02:14 -08:00
JonnyWong16
5d738e58eb Schedule PlexPy database backup task 2016-02-14 18:25:58 -08:00
JonnyWong16
70325f9247 Bold "bell icon" on notification agents page 2016-02-14 18:25:36 -08:00
JonnyWong16
38c9c5a6ea Add configuration and scheduler info to settings page 2016-02-14 17:51:14 -08:00
JonnyWong16
c90dd147bb Rename config_id to agent_id 2016-02-14 11:39:03 -08:00
JonnyWong16
322f106e75 Log JS errors from the WebUI 2016-02-14 11:35:14 -08:00
JonnyWong16
91a5529438 Some APIv2 cleanup 2016-02-14 11:03:32 -08:00
JonnyWong16
8f7dd2df6a Merge pull request #377 from Hellowlol/api2
Api2
2016-02-13 09:30:29 -08:00
Hellowlol
2fcd55eb60 API2 2016-02-10 22:09:41 +01:00
JonnyWong16
9359567a8a Add optional subject line to notification agents 2016-02-09 23:00:10 -08:00
JonnyWong16
42bfacfb19 Add IMDB, TVDB, TMDb, last.fm, and trakt to notification options 2016-02-09 22:20:17 -08:00
JonnyWong16
71131c699e Add total duration to libraries and users tables 2016-02-09 17:58:56 -08:00
JonnyWong16
6ebfc516a6 Add ETA to current activity 2016-02-09 17:08:59 -08:00
JonnyWong16
5c952b1d86 Fix regression where {stream_duration} not reported 2016-02-09 17:08:44 -08:00
JonnyWong16
1d9a4e0b99 Add view_offset to history grouping logic 2016-02-08 17:34:24 -08:00
JonnyWong16
ebae628d8d Fix typo in notification exclusion tag usage modal 2016-02-07 23:23:36 -08:00
JonnyWong16
9865460fe5 Move PMS_SSL to correct section in config file 2016-02-07 16:12:37 -08:00
drzoidberg33
39884b71fe Merge pull request #529 from drzoidberg33/machine-id-fix
Fix bad SSL connections.
2016-02-08 02:08:51 +02:00
Tim Van
82b7128c04 Allow secure websocket connections. 2016-02-08 01:29:57 +02:00
Tim Van
16756ddb8c Don't choose a custom URL when picking a hostname for local SSL configs. 2016-02-08 00:21:40 +02:00
JonnyWong16
877002961f Use custom library icons in library statistics 2016-02-07 12:42:24 -08:00
JonnyWong16
7e9e68ecd8 Fix video media flags for tracks 2016-02-07 12:42:07 -08:00
Tim Van
6419190272 Revert silly naming bug. 2016-02-07 22:34:38 +02:00
Tim Van
ac42563c5e Refresh PMS URL when changing is_remote option in settings. 2016-02-07 22:28:04 +02:00
Tim Van
98c1063e07 Allows us to retrieve the serverId again if we have secure connections required. 2016-02-07 22:21:46 +02:00
Tim Van
a4dfc57cbe Fix some issues with possible mismatching serverIDs causing bad ssl connections. 2016-02-07 15:00:06 +02:00
JonnyWong16
db543b8912 Add {machine_id} to notification options 2016-02-04 08:34:15 -08:00
JonnyWong16
49fb4540a2 Merge branch 'dev' 2016-02-03 20:55:10 -08:00
JonnyWong16
e2120393a2 v1.3.6 2016-02-03 20:54:28 -08:00
JonnyWong16
0b301fff3f Fix regression where duration not reported as min 2016-02-03 09:32:32 -08:00
JonnyWong16
eeb351e991 Update readme 2016-02-02 22:34:46 -08:00
JonnyWong16
1095e29b4d Fix FreeBSD and FreeNAS init scripts daemonizing 2016-02-02 21:23:48 -08:00
JonnyWong16
be058eaff7 Merge branch 'dev' 2016-02-02 21:13:34 -08:00
JonnyWong16
f409dda2ef v1.3.5 2016-02-02 21:12:53 -08:00
JonnyWong16
f409cdda8f Merge pull request #502 from JonnyWong16/startup-tasks-after-daemonizing
Run startup tasks after daemonizing
2016-02-02 21:03:52 -08:00
JonnyWong16
9cd6396c35 Add method to delete duplicate libraries 2016-02-02 20:54:34 -08:00
JonnyWong16
ee754ea533 Remove trailing slash from Facebook redirect URI 2016-02-02 20:38:16 -08:00
JonnyWong16
36de20dd75 Fix getting new pms_identifier for server only 2016-02-02 20:33:47 -08:00
JonnyWong16
a957e8eb4f Clean up time formats for server notifications 2016-02-02 20:33:08 -08:00
JonnyWong16
14a90d84ec Add {stream_time}, {remaining_time}, and {progress_time} to notification options 2016-01-31 16:15:06 -08:00
JonnyWong16
fae9bc618a Initialize PlexPy after daemonizing 2016-01-31 15:13:35 -08:00
JonnyWong16
3248e6500e Clean up build_notify_text
* session is now a dict, so no need for "default values"
2016-01-31 13:34:51 -08:00
JonnyWong16
c17bf79d79 Fix server verification for unpublished servers 2016-01-31 11:32:44 -08:00
JonnyWong16
1ff1270bfa Clean up powershell for scripts 2016-01-30 16:18:45 -08:00
JonnyWong16
b1a2cf33d8 Merge pull request #498 from Hellowlol/ps1
add support for powershell
2016-01-30 15:54:34 -08:00
Hellowlol
b2292e98c1 add support for powershell 2016-01-31 00:18:58 +01:00
JonnyWong16
4d156a8911 Allow expanding of media info table when missing added at date 2016-01-30 00:48:51 -08:00
JonnyWong16
7193b6518b Fix removing unique constraints from database 2016-01-30 00:40:06 -08:00
JonnyWong16
cff6b44109 Merge branch 'dev' 2016-01-29 21:32:37 -08:00
JonnyWong16
fb7ad9438e v1.3.4 2016-01-29 21:31:25 -08:00
JonnyWong16
afc265a188 Fix schedulers not starting with library update 2016-01-29 21:26:27 -08:00
JonnyWong16
01fe7bf612 Reorganize notification options 2016-01-29 19:06:08 -08:00
JonnyWong16
1cb75bd053 Remove unnecessary quoting of script arguments 2016-01-29 18:47:12 -08:00
JonnyWong16
0eaea4d011 Fix empty libraries not added 2016-01-29 18:38:19 -08:00
JonnyWong16
67377a2561 Fix server verification in settings 2016-01-27 23:32:21 -08:00
JonnyWong16
a8aae9f1f5 Fix libraries and users refresh 2016-01-27 23:32:01 -08:00
JonnyWong16
a9ce92decb Change Telegram wording 2016-01-27 21:30:35 -08:00
JonnyWong16
c19162295a Update Facebook instructions 2016-01-27 21:20:04 -08:00
JonnyWong16
58796c45ed Remove built in Twitter consumer key and secret 2016-01-27 21:19:18 -08:00
JonnyWong16
d94b348780 Fix buffer notifications even when disabled with websockets 2016-01-27 19:52:30 -08:00
JonnyWong16
95f92bd292 Add unique identifiers to notification options 2016-01-27 19:51:58 -08:00
JonnyWong16
bc52ac3559 Remove media type toggles from recently added notifications 2016-01-27 19:51:36 -08:00
JonnyWong16
8bbc6a6611 Fix libraries without section_id in database 2016-01-27 19:51:10 -08:00
JonnyWong16
8902b93a26 Merge branch 'dev' 2016-01-26 00:14:38 -08:00
JonnyWong16
ae36af807d v1.3.3 2016-01-26 00:13:58 -08:00
JonnyWong16
fd256625c6 Fix Plays by Month graph not loading 2016-01-25 18:43:51 -08:00
JonnyWong16
bee543a25a Disable datatables caching 2016-01-25 18:30:30 -08:00
JonnyWong16
55eb79cb52 Even faster library updating 2016-01-25 12:01:59 -08:00
JonnyWong16
35965a8320 Merge branch 'dev' 2016-01-24 22:52:47 -08:00
JonnyWong16
8a902ae3e6 v1.3.2 2016-01-24 22:51:36 -08:00
JonnyWong16
52bed5bf98 Attempt at improved library updating 2016-01-24 22:19:48 -08:00
JonnyWong16
9e83f6d779 Another fix for 'datestamp' and 'timestamp' 2016-01-24 09:52:20 -08:00
elseym
0873beaed2 enable pushover html support by default, introduce option to deactivate 2016-01-24 18:06:36 +01:00
JonnyWong16
0ba5012464 Merge branch 'dev' 2016-01-23 22:59:46 -08:00
JonnyWong16
73ff28465d v1.3.1 2016-01-23 22:58:44 -08:00
JonnyWong16
7484d65dbb Fix datestamp and timestamp notification options 2016-01-23 22:53:13 -08:00
JonnyWong16
4a120e7a54 Fix unable to startup if library refresh fails 2016-01-23 21:06:41 -08:00
JonnyWong16
8d63d85821 Fix star rating overlapping text 2016-01-23 19:14:54 -08:00
JonnyWong16
5cec84a802 More descriptive libraries updating message 2016-01-23 19:13:16 -08:00
JonnyWong16
48da41690d Fix empty brackets on tables 2016-01-23 19:12:13 -08:00
JonnyWong16
1c82241f30 Fix notifier config ajax calls for reverse proxies 2016-01-23 16:23:20 -08:00
JonnyWong16
b1ea3bcd4e Rename last watched to last played 2016-01-23 14:06:25 -08:00
73 changed files with 13684 additions and 1489 deletions

5
.gitignore vendored
View File

@@ -23,6 +23,9 @@ cache/*
*.csr
*.pem
# Mergetool
*.orgin
# OS generated files #
######################
.DS_Store?
@@ -32,7 +35,7 @@ Icon?
Thumbs.db
#Ignore files generated by PyCharm
.idea/*
*.idea/*
#Ignore files generated by vi
*.swp

View File

@@ -1,5 +1,110 @@
# Changelog
## v1.3.9 (2016-02-21)
* Fix: Recently added notification not sent to all notification agents.
* Add: Pushover HTML support. (Thanks @elseym)
## v1.3.8 (2016-02-21)
* Fix: Regression unable to clear HTTP password.
* Fix: Remove media tags from script arguments for server notifications.
* Fix: Encode poster titles to UTF-8 for Imgur upload.
* Fix: Allow notifications to send without poster if Imgur upload fails.
* Add: Notification Logs table in the Logs tab.
* Add: Toggle in settings to enable posters in notifications. (Disabled by default.)
* Change: Save Imgur poster URL to database so upload is not needed every time.
* Change: Notify log in database to log each event as a separate entry.
* Change: Monitor remote access is unchecked if remote access is disabled on server.
## v1.3.7 (2016-02-20)
* Fix: Verifying server with SSL enabled.
* Fix: Regression where {stream_duration} reported as 0.
* Fix: Video metadata flags showing up for track info.
* Fix: Custom library icons not applied to Library Statistics.
* Fix: Typos in the Web UI.
* Add: ETA to Current Activity overlay.
* Add: Total duration to Libraries and Users tables.
* Add: {machine_id} to notification options.
* Add: IMDB, TVDB, TMDb, Last.fm, and Trakt IDs/URLs to notification options.
* Add: {poster_url} to notification options using Imgur.
* Add: Poster and link for Facebook notifications.
* Add: Log javascript errors from the Web UI.
* Add: Configuration and Scheduler info to the settings page.
* Add: Schedule background task to backup the PlexPy database.
* Add: URL anonymizer for external links.
* Add: Plex Media Scanner log file to Log viewer.
* Add: API v2 (still very experimental). (Thanks @Hellowlol)
* Change: Allow secure websocket connections.
* Change: History grouping now accounts for the view offset.
* Change: Subject line can be toggled off for Facebook, Slack, Telegram, and Twitter.
* Change: Create self-signed SSL certificates when enabling HTTPS.
* Change: Revert homepage "Last Played" to "Last Watched".
* Change: Disable monitor remote access checkbox if remote access is not enabled on the PMS.
* Change: Disable IP logging checkbox if PMS version is 0.9.14 or greater.
## v1.3.6 (2016-02-03)
* Fix: Regression where {duration} not reported in minutes.
* Fix: Proper daemonizing in FreeBSD and FreeNAS init scripts.
* Change: Update readme documentation.
## v1.3.5 (2016-02-02)
* Fix: Removing unique constraints from database.
* Fix: Unable to expand media info table when missing "Added At" date.
* Fix: Server verification for unpublished servers.
* Fix: Updating PMS identifier for server change.
* Add: {stream_time}, {remaining_time}, and {progress_time} to notification options.
* Add: Powershell script support. (Thanks @Hellowlol)
* Add: Method to delete duplicate libraries.
* Change: Daemonize before running start up tasks.
## v1.3.4 (2016-01-29)
* Fix: Activity checker not starting with library update (history not logging).
* Fix: Libraries duplicated in database.
* Fix: Buffer notifications even when disabled when using websockets.
* Fix: Libraries and Users lists not refreshing.
* Fix: Server verification in settings.
* Fix: Empty libraries not added to database.
* Add: Unique identifiers to notification options.
* Remove: Requirement of media type toggles for recently added notifications.
* Remove: Built in Twitter key and secret.
* Change: Unnecessary quoting of script arguments.
* Change: Facebook notification instructions.
## v1.3.3 (2016-01-26)
* Fix: Plays by Month graph not loading.
* Change: Disable caching for datatables.
* Change: Improved updating library data in the database again.
## v1.3.2 (2016-01-24)
* Fix: 'datestamp' and 'timestamp' for server notifications.
* Change: New method for updating library data in database.
## v1.3.1 (2016-01-23)
* Fix: Notifiers authorization popups for reverse proxies.
* Fix: Empty brackets in titles on tables.
* Fix: Star rating overlapping text.
* Fix: Unable to startup when library refresh fails.
* Fix: Unable to parse 'datestamp' and 'timestamp' format.
* Change: Rename "Last Watched" to "Last Played".
* Change: More descriptive libraries updating message.
## v1.3.0 (2016-01-23)
* Add: Brand new Libraries section.

View File

@@ -3,10 +3,43 @@
## Issues
In case you read this because you are posting an issue, please take a minute and consider the points below. The issue tracker is not a support forum. It is primarily intended for submitting bugs, improvements, or feature requests. However, we are glad to help you and to make sure the problem is not caused by PlexPy, but don't expect step-by-step answers.
* Use the search function. Chances are that your problem is already discussed. Do not append to (closed) issues if your problem does not fit the discussion.
* Visit the [Troubleshooting](../../wiki/TroubleShooting) wiki first.
* Use [proper formatting](https://help.github.com/articles/github-flavored-markdown/). Paste your logs in code blocks.
* Close your issue if you resolved it.
##### Many issues can simply be solved by:
- Making sure you update to the latest version.
- Turning your device off and on again.
- Analyzing your logs, you just might find the solution yourself!
- Using the **search** function to see if this issue has already been reported/solved.
- Checking the [Wiki](https://github.com/drzoidberg33/plexpy/wiki) for
[ [Installation] ](https://github.com/drzoidberg33/plexpy/wiki/Installation) and
[ [FAQs] ](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
- For basic questions try asking on [Gitter](https://gitter.im/drzoidberg33/plexpy) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
##### If nothing has worked:
1. Open a new issue on the GitHub [issue tracker](http://github.com/drzoidberg33/plexpy/issues).
2. Provide a clear title to easily help identify your problem.
3. Use proper [markdown syntax](https://help.github.com/articles/github-flavored-markdown) to structure your post (i.e. code/log in code blocks).
4. Make sure you provide the following information:
- [ ] Version
- [ ] Branch
- [ ] Commit hash
- [ ] Operating system
- [ ] Python version
- [ ] What you did?
- [ ] What happened?
- [ ] What you expected?
- [ ] How can we reproduce your issue?
- [ ] What are your (relevant) settings?
- [ ] Include a link to your **FULL** (not just a few lines!) log file that has the error. Please use [Gist](http://gist.github.com) or [Pastebin](http://pastebin.com/).
5. Close your issue when it's solved! If you found the solution yourself please comment so that others benefit from it.
## Feature Requests
1. Search for similar existing 'issues', feature requests can be recognized by the blue `enhancement` label.
2. If a similar request exists, post a comment (+1, or add a new idea to the existing request).
3. If no similar requests exist, you can create a new one.
4. Provide a clear title to easily identify the feature request.
5. Tag your feature request with `[Feature Request]` so it can be identified easily.
## Pull Requests
If you think you can contribute code to the PlexPy repository, do not hesitate to submit a pull request.

View File

@@ -153,12 +153,12 @@ def main():
# Put the database in the DATA_DIR
plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, 'plexpy.db')
# Read config and start logging
plexpy.initialize(config_file)
if plexpy.DAEMON:
plexpy.daemonize()
# Read config and start logging
plexpy.initialize(config_file)
# Force the http port if necessary
if args.port:
http_port = args.port

168
README.md
View File

@@ -1,121 +1,77 @@
#PlexPy
# PlexPy
[![Join the chat at https://gitter.im/drzoidberg33/plexpy](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/drzoidberg33/plexpy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
A python based web application for monitoring, analytics and notifications for Plex Media Server (www.plex.tv).
This project is based on code from Headphones (https://github.com/rembo10/headphones) and PlexWatchWeb (https://github.com/ecleese/plexWatchWeb).
This project is based on code from [Headphones](https://github.com/rembo10/headphones) and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
* PlexPy forum thread: https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program
* PlexPy [forum thread](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program)
## Features
* Responsive web design viewable on desktop, tablet and mobile web browsers.
* Themed to complement Plex/Web.
* Easy configuration setup (no separate web server required).
* Monitor current Plex Media Server activity.
* Fully customizable notifications for stream activity and recently added media.
* Top statistics on home page with configurable duration and measurement metric.
* Global watching history with search/filtering & dynamic column sorting.
* Full user list with general information and comparison stats.
* Individual user information including devices' IP addresses.
* Complete library statistics and media file information.
* Rich analytics presented using Highcharts graphing.
* Beautiful content information pages.
* Full sync list data on all users syncing items from your library.
* And many more!!
###Support
-----------
* PlexPy Wiki: https://github.com/drzoidberg33/plexpy/wiki
## Installation and Support
###Features
-----------
* Responsive web design viewable on desktop, tablet and mobile web browsers
* Themed to complement Plex/Web
* Easy configuration setup via html form
* Current Plex Media Server viewing activity including:
* number of current users
* title
* progress
* platform
* user
* state (playing, paused, buffering, etc)
* stream type (direct, transcoded)
* video type & resolution
* audio type & channel count.
* Top statistics on home page with configurable duration and measurement metric:
* Most watched TV
* Most popular TV
* Most watched Movie
* Most popular Movie
* Most active user
* Most active platform
* Recently added media and how long ago it was added
* Global watching history with search/filtering & dynamic column sorting
* date
* user
* platform
* ip address
* title
* stream information details
* start time
* paused duration length
* stop time
* duration length
* watched progress
* show/hide columns
* delete mode - allows deletion of specific history items
* Full user list with general information and comparison stats
* Individual user information
* username and gravatar (if available)
* daily, weekly, monthly, all time stats for play count and duration length
* individual platform stats for each user
* public ip address history with last seen date and geo tag location
* recently watched content
* watching history
* synced items
* assign users custom friendly names within PlexPy
* assign users custom avatar URL within PlexPy
* disable history logging per user
* disable notifications per user
* option to purge all history per user.
* Rich analytics presented using Highcharts graphing
* user-selectable time periods of 30, 90 or 365 days
* daily watch count and duration
* totals by day of week and hours of the day
* totals by top 10 platform
* totals by top 10 users
* detailed breakdown by transcode decision
* source and stream resolutions
* transcode decision counts by user and platform
* total monthly counts
* Content information pages
* movies (includes watching history)
* tv shows (includes watching history)
* tv seasons
* tv episodes (includes watching history)
* Full sync list data on all users syncing items from your library
## Installation and Notes
* [Installation page](../../wiki/Installation) shows you how to install PlexPy.
* [Usage guide](../../wiki/Usage-guide) introduces you to PlexPy.
* [Troubleshooting page](../../wiki/TroubleShooting) in the wiki can help you with common problems.
**Issues** can be reported on the GitHub issue tracker considering these rules:
1. Analyze your log, you just might find the solution yourself!
2. You read the wiki and searched existing issues, but this is not solving your problem.
3. Post the issue with a clear title, description and the HP log and use [proper markdown syntax](https://help.github.com/articles/github-flavored-markdown) to structure your text (code/log in code blocks).
4. Close your issue when it's solved! If you found the solution yourself please comment so that others benefit from it.
**Feature requests** can be reported on the GitHub issue tracker too:
1. Search for similar existing 'issues', feature requests can be recognized by the label 'Request'.
2. If a similar Request exists, post a comment (+1, or add a new idea to the existing request), otherwise you can create a new one.
If you **comply with these rules** you can [post your request/issue](http://github.com/drzoidberg33/plexpy/issues).
* [Installation Guides](https://github.com/drzoidberg33/plexpy/wiki/Installation) shows you how to install PlexPy.
* [FAQs](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)) in the wiki can help you with common problems.
**Support** the project by implementing new features, solving support tickets and providing bug fixes.
## Issues
##### Many issues can simply be solved by:
- Making sure you update to the latest version.
- Turning your device off and on again.
- Analyzing your logs, you just might find the solution yourself!
- Using the **search** function to see if this issue has already been reported/solved.
- Checking the [Wiki](https://github.com/drzoidberg33/plexpy/wiki) for
[ [Installation] ](https://github.com/drzoidberg33/plexpy/wiki/Installation) and
[ [FAQs] ](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
- For basic questions try asking on [Gitter](https://gitter.im/drzoidberg33/plexpy) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
##### If nothing has worked:
1. Open a new issue on the GitHub [issue tracker](http://github.com/drzoidberg33/plexpy/issues).
2. Provide a clear title to easily help identify your problem.
3. Use proper [markdown syntax](https://help.github.com/articles/github-flavored-markdown) to structure your post (i.e. code/log in code blocks).
4. Make sure you provide the following information:
- [ ] Version
- [ ] Branch
- [ ] Commit hash
- [ ] Operating system
- [ ] Python version
- [ ] What you did?
- [ ] What happened?
- [ ] What you expected?
- [ ] How can we reproduce your issue?
- [ ] What are your (relevant) settings?
- [ ] Include a link to your **FULL** (not just a few lines!) log file that has the error. Please use [Gist](http://gist.github.com) or [Pastebin](http://pastebin.com/).
5. Close your issue when it's solved! If you found the solution yourself please comment so that others benefit from it.
## Feature Requests
1. Search for similar existing 'issues', feature requests can be recognized by the blue `enhancement` label.
2. If a similar request exists, post a comment (+1, or add a new idea to the existing request).
3. If no similar requests exist, you can create a new one.
4. Provide a clear title to easily identify the feature request.
5. Tag your feature request with `[Feature Request]` so it can be identified easily.
## License
This is free software under the GPL v3 open source license. Feel free to do with it what you wish, but any modification must be open sourced. A copy of the license is included.
This software includes Highsoft software libraries which you may freely distribute for non-commercial use. Commercial users must license this software, for more information visit https://shop.highsoft.com/faq/non-commercial#non-commercial-redistribution.

View File

@@ -501,7 +501,8 @@ textarea.form-control:focus {
.libraries-poster-face {
overflow: hidden;
float: left;
background-size: contain;
background-size: cover;
background-position: center;
height: 40px;
width: 40px;
/*-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
@@ -1326,8 +1327,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
white-space: nowrap;
margin-top: 3px;
height: 21px;
position: absolute;
right: 0;
float: right;
}
.star-rating .star-icon {
width: auto;
@@ -1718,7 +1718,8 @@ a:hover .item-children-poster {
float: left;
margin-top: 15px;
margin-right: 15px;
background-size: contain;
background-size: cover;
background-position: center;
height: 80px;
width: 80px;
/*-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
@@ -2179,6 +2180,10 @@ a .home-platforms-instance-list-oval:hover,
.refresh-libraries-button {
float: right;
}
.refresh-users-button,
.refresh-libraries-button {
margin-right: 5px;
}
.nav-settings,
.nav-settings ul {
margin: 0px 0px 20px 0px;
@@ -2713,4 +2718,44 @@ table[id^='media_info_child'] table[id^='media_info_child'] thead th {
}
.selectize-input input[type='text'] {
height: 20px;
}
.small-muted {
font-size: small;
color: #777;
}
.config-info-table,
.config-scheduler-table {
width: 100%
}
.config-info-table td,
.config-info-table th,
.config-scheduler-table td,
.config-scheduler-table th {
padding-bottom: 5px;
}
.config-info-table td:first-child {
width: 150px;
}
.config-scheduler-table td:first-child {
width: 225px;
}
.config-scheduler-table th {
color: #fff;
}
a.no-highlight {
color: #777;
}
a.no-highlight:hover {
color: #fff;
}
.top-line {
border-top: 1px dotted #777;
padding-top: 5px;
}
.help-bold {
font-weight: bold;
color: #fff;
}
.save-button {
margin-top: 15px;
}

View File

@@ -198,6 +198,13 @@ DOCUMENTATION :: END
% else:
<span>IP: N/A</span>
% endif
<br />
ETA:
<span id="stream-eta-${a['session_key']}">
<script>
$("#stream-eta-${a['session_key']}").html(moment().add(parseInt(${a['duration']}) - parseInt(${a['view_offset']}), 'milliseconds').format(time_format));
</script>
</span>
</div>
<div class="dashboard-activity-poster-info-time">
<span class="progress_time">${a['view_offset']}</span>/<span class="progress_time">${a['duration']}</span>

View File

@@ -171,10 +171,10 @@ DOCUMENTATION :: END
% endif
% if data['media_type'] == 'movie' or data['media_type'] == 'episode' or data['media_type'] == 'track':
<div class="summary-content-media-info-wrapper">
% if data['video_codec']:
% if data['media_type'] != 'track' and data['video_codec']:
<img class="summary-content-media-flag" title="${data['video_codec']}" src="interfaces/default/images/media_flags/video_codec/${data['video_codec'] | vf}.png" />
% endif
% if data['video_resolution']:
% if data['media_type'] != 'track' and data['video_resolution']:
<img class="summary-content-media-flag" title="${data['video_resolution']}" src="interfaces/default/images/media_flags/video_resolution/${data['video_resolution']}.png" />
% endif
% if data['audio_codec']:

View File

@@ -54,7 +54,7 @@ function showMsg(msg,loader,timeout,ms,error) {
}
}
function doAjaxCall(url,elem,reload,form) {
function doAjaxCall(url, elem, reload, form, callback) {
// Set Message
feedback = $("#ajaxMsg");
update = $("#updatebar");
@@ -157,6 +157,9 @@ function doAjaxCall(url,elem,reload,form) {
complete: function(jqXHR, textStatus) {
// Remove loaders and stuff, ajax request is complete!
loader.remove();
if (typeof callback === "function") {
callback();
}
}
});
}
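doAjaxCall() now takes an optional fifth callback argument that runs in the AJAX complete handler, after the loader is removed. A minimal usage sketch follows; the endpoint name and table variable are illustrative assumptions, not part of this changeset.

// Hypothetical caller: redraw a table once the request finishes.
// 'delete_notification_log' and notification_log_table are assumed names.
doAjaxCall('delete_notification_log', $(this), 'table', true, function () {
    notification_log_table.draw();
});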
@@ -252,13 +255,13 @@ function isPrivateIP(ip_address) {
function humanTime(seconds) {
if (seconds >= 86400) {
text = '<h3>' + Math.floor(moment.duration(seconds, 'seconds').asDays()) +
'</h3><p> days </p><h3>' + Math.floor(moment.duration((seconds % 86400), 'seconds').asHours()) +
'</h3><p> hrs</p><h3>' + Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + '</h3><p> mins</p>';
text = '<h3>' + Math.floor(moment.duration(seconds, 'seconds').asDays()) + '</h3><p> days</p>' +
'<h3>' + Math.floor(moment.duration((seconds % 86400), 'seconds').asHours()) + '</h3><p> hrs</p>' +
'<h3>' + Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + '</h3><p> mins</p>';
return text;
} else if (seconds >= 3600) {
text = '<h3>' + Math.floor(moment.duration((seconds % 86400), 'seconds').asHours()) +
'</h3><p>hrs</p><h3>' + Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + '</h3><p> mins</p>';
text = '<h3>' + Math.floor(moment.duration((seconds % 86400), 'seconds').asHours()) + '</h3><p> hrs</p>' +
'<h3>' + Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + '</h3><p> mins</p>';
return text;
} else if (seconds >= 60) {
text = '<h3>' + Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + '</h3><p> mins</p>';
@@ -269,6 +272,25 @@ function humanTime(seconds) {
}
}
function humanTimeClean(seconds) {
if (seconds >= 86400) {
text = Math.floor(moment.duration(seconds, 'seconds').asDays()) + ' days ' +
Math.floor(moment.duration((seconds % 86400), 'seconds').asHours()) + ' hrs ' +
Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + ' mins';
return text;
} else if (seconds >= 3600) {
text = Math.floor(moment.duration((seconds % 86400), 'seconds').asHours()) + ' hrs ' +
Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + ' mins';
return text;
} else if (seconds >= 60) {
text = Math.floor(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes()) + ' mins';
return text;
} else {
text = '0';
return text;
}
}
String.prototype.toProperCase = function () {
return this.replace(/\w\S*/g, function(txt){return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();});
};
@@ -372,3 +394,16 @@ function clearSearchButton(tableName, table) {
table.search('').draw();
});
}
// Taken from https://github.com/Hellowlol/HTPC-Manager
window.onerror = function (message, file, line) {
var e = {
'page': window.location.href,
'message': message,
'file': file,
'line': line
};
$.post("log_js_errors", e, function (data) {
});
};
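window.onerror only sees uncaught exceptions, so an error thrown from a timer callback (outside any try/catch) is one way to confirm the handler posts to log_js_errors; the snippet below is a sketch for manual testing only.

// Illustrative check: an uncaught error in a timer callback reaches
// window.onerror, which then POSTs the details to log_js_errors.
setTimeout(function () {
    throw new Error('PlexPy JS error logging test');
}, 0);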

View File

@@ -22,7 +22,7 @@ history_table_options = {
"emptyTable": "No data in table"
},
"pagingType": "bootstrap",
"stateSave": true,
"stateSave": false,
"processing": false,
"serverSide": true,
"pageLength": 25,
@@ -133,20 +133,23 @@ history_table_options = {
"data":"full_title",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
var parent_info = '';
var media_type = '';
var thumb_popover = '';
if (rowData['media_type'] === 'movie') {
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' (' + rowData['year'] + ')</span>'
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'episode') {
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '&middot; E' + rowData['media_index'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' \
(S' + rowData['parent_media_index'] + '&middot; E' + rowData['media_index'] + ')</span>'
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'track') {
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + ' (' + rowData['parent_title'] + ')</span>'
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else {
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');

View File

@@ -97,21 +97,24 @@ history_table_modal_options = {
"data":"full_title",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
var parent_info = '';
var media_type = '';
var thumb_popover = '';
if (rowData['media_type'] === 'movie') {
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' (' + rowData['year'] + ')</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'episode') {
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '&middot; E' + rowData['media_index'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' \
(S' + rowData['parent_media_index'] + '&middot; E' + rowData['media_index'] + ')</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'track') {
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + ' (' + rowData['parent_title'] + ')</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else {
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
}

View File

@@ -16,7 +16,7 @@ libraries_list_table_options = {
"pageLength": 10,
"order": [ 2, 'asc'],
"autoWidth": true,
"stateSave": true,
"stateSave": false,
"pagingType": "bootstrap",
"columnDefs": [
{
@@ -133,23 +133,27 @@ libraries_list_table_options = {
},
{
"targets": [8],
"data":"last_watched",
"data":"last_played",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
var parent_info = '';
var media_type = '';
var thumb_popover = ''
var thumb_popover = '';
if (rowData['media_type'] === 'movie') {
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'episode') {
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '&middot; E' + rowData['media_index'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'track') {
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=80&fallback=poster" data-height="80" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type']) {
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
}
@@ -157,12 +161,28 @@ libraries_list_table_options = {
$(td).html('n/a');
}
},
"width": "25%",
"width": "18%",
"className": "hidden-sm hidden-xs"
},
{
"targets": [9],
"data": "plays",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
$(td).html(cellData);
}
},
"searchable": false,
"width": "7%"
},
{
"targets": [10],
"data": "duration",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
$(td).html(humanTimeClean(cellData));
}
},
"searchable": false,
"width": "10%"
}

View File

@@ -5,7 +5,7 @@ var log_table_options = {
"pagingType": "bootstrap",
"order": [ 0, 'desc'],
"pageLength": 50,
"stateSave": true,
"stateSave": false,
"language": {
"search":"Search: ",
"lengthMenu":"Show _MENU_ lines per page",

View File

@@ -34,9 +34,12 @@ media_info_table_options = {
"targets": [0],
"data": "added_at",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
if (rowData) {
var expand_details = '';
var date = moment(cellData, "X").format(date_format);
var date = '';
if (cellData !== null && cellData !== '') {
date = moment(cellData, "X").format(date_format);
}
if (rowData['media_type'] === 'show') {
expand_details = '<span class="expand-media-info-tooltip" data-toggle="tooltip" title="Show Seasons"><i class="fa fa-plus-circle fa-fw"></i></span>';
$(td).html('<div><a href="#"><div style="float: left;">' + expand_details + '&nbsp;' + date + '</div></a></div>');
@@ -66,11 +69,13 @@ media_info_table_options = {
"data": "title",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
var parent_info = '';
var media_type = '';
var thumb_popover = '';
if (rowData['media_type'] === 'movie') {
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' (' + rowData['year'] + ')</span>'
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'show') {
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="TV Show"><i class="fa fa-television fa-fw"></i></span>';
@@ -199,7 +204,7 @@ media_info_table_options = {
},
{
"targets": [10],
"data": "last_watched",
"data": "last_played",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
date = moment(cellData, "X").format(date_format);
@@ -427,7 +432,7 @@ function childTableFormatMedia(rowData) {
'<th align="left" id="audio_codec">Audio Codec</th>' +
'<th align="left" id="audio_channels">Audio Channels</th>' +
'<th align="left" id="file_size">File Size</th>' +
'<th align="left" id="last_watched">Last Watched</th>' +
'<th align="left" id="last_played">Last Played</th>' +
'<th align="left" id="total_plays">Total Plays</th>' +
'</tr>' +
'</thead>' +

View File

@@ -0,0 +1,89 @@
notification_log_table_options = {
"destroy": true,
"serverSide": true,
"processing": false,
"pagingType": "bootstrap",
"order": [ 0, 'desc'],
"pageLength": 50,
"stateSave": false,
"language": {
"search":"Search: ",
"lengthMenu":"Show _MENU_ lines per page",
"emptyTable": "No log information available",
"info":"Showing _START_ to _END_ of _TOTAL_ lines",
"infoEmpty":"Showing 0 to 0 of 0 lines",
"infoFiltered":"(filtered from _MAX_ total lines)"},
"columnDefs": [
{
"targets": [0],
"data": "timestamp",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(moment(cellData, "X").format('YYYY-MM-DD HH:mm:ss'));
}
},
"width": "10%",
"className": "no-wrap hidden-xs"
},
{
"targets": [1],
"data": "agent_name",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(cellData);
}
},
"width": "7%",
"className": "no-wrap hidden-sm hidden-xs"
},
{
"targets": [2],
"data": "notify_action",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(cellData);
}
},
"width": "5%"
},
{
"targets": [3],
"data": "subject_text",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(cellData);
}
},
"width": "20%"
},
{
"targets": [4],
"data": "body_text",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(cellData);
}
},
"width": "38%"
},
{
"targets": [5],
"data": "script_args",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(cellData);
}
},
"width": "20%"
}
],
"drawCallback": function (settings) {
// Jump to top of page
//$('html,body').scrollTop(0);
$('#ajaxMsg').fadeOut();
},
"preDrawCallback": function(settings) {
var msg = "<i class='fa fa-refresh fa-spin'></i>&nbspFetching rows...";
showMsg(msg, false, false, 0)
}
}
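notification_log_table_options is a server-side DataTables config, so it still needs an ajax source and a target element when the table is built. The sketch below shows one way this could be wired up with the standard DataTables API; the element id, endpoint name, and request wrapper are assumptions, not part of this diff.

// Assumed wiring: attach a server-side data source and build the table.
notification_log_table_options.ajax = {
    url: 'get_notification_log',  // assumed endpoint name
    type: 'post',
    data: function (d) { return { json_data: JSON.stringify(d) }; }
};
var notification_log_table = $('#notification_log_table').DataTable(notification_log_table_options);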

View File

@@ -5,7 +5,7 @@ var plex_log_table_options = {
"pagingType": "bootstrap",
"order": [ 0, 'desc'],
"pageLength": 50,
"stateSave": true,
"stateSave": false,
"language": {
"search":"Search: ",
"lengthMenu":"Show _MENU_ lines per page",

View File

@@ -4,7 +4,7 @@ sync_table_options = {
"pagingType": "bootstrap",
"order": [ [ 0, 'desc'], [ 1, 'asc'], [2, 'asc'] ],
"pageLength": 25,
"stateSave": true,
"stateSave": false,
"language": {
"search":"Search: ",
"lengthMenu":"Show _MENU_ lines per page",

View File

@@ -8,7 +8,7 @@ user_ip_table_options = {
"infoFiltered":"(filtered from _MAX_ total entries)",
"emptyTable": "No data in table",
},
"stateSave": true,
"stateSave": false,
"pagingType": "bootstrap",
"processing": false,
"serverSide": true,
@@ -64,12 +64,12 @@ user_ip_table_options = {
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData) {
var transcode_dec = '';
if (rowData['video_decision'] === 'transcode') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>&nbsp';
} else if (rowData['video_decision'] === 'copy') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>&nbsp';
} else if (rowData['video_decision'] === 'direct play' || rowData['video_decision'] === '') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>&nbsp';
if (rowData['video_decision'] === 'transcode' || rowData['audio_decision'] === 'transcode') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
} else if (rowData['video_decision'] === 'copy' || rowData['audio_decision'] === 'copy') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
} else if (rowData['video_decision'] === 'direct play' || rowData['audio_decision'] === 'direct play') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
}
$(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + '&nbsp' + cellData + '</div></a></div>');
} else {
@@ -81,23 +81,27 @@ user_ip_table_options = {
},
{
"targets": [4],
"data":"last_watched",
"data":"last_played",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
var parent_info = '';
var media_type = '';
var thumb_popover = ''
var thumb_popover = '';
if (rowData['media_type'] === 'movie') {
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'episode') {
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '&middot; E' + rowData['media_index'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'track') {
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=80&fallback=poster" data-height="80" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type']) {
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
} else {

View File

@@ -16,7 +16,7 @@ users_list_table_options = {
"pageLength": 10,
"order": [ 2, 'asc'],
"autoWidth": true,
"stateSave": true,
"stateSave": false,
"pagingType": "bootstrap",
"columnDefs": [
{
@@ -137,23 +137,27 @@ users_list_table_options = {
},
{
"targets": [7],
"data":"last_watched",
"data":"last_played",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
var parent_info = '';
var media_type = '';
var thumb_popover = ''
var thumb_popover = '';
if (rowData['media_type'] === 'movie') {
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'episode') {
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '&middot; E' + rowData['media_index'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'track') {
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=80&fallback=poster" data-height="80" data-width="80">' + cellData + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp' + thumb_popover + '</div></a></div>');
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + '&nbsp;' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type']) {
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
}
@@ -161,12 +165,28 @@ users_list_table_options = {
$(td).html('n/a');
}
},
"width": "30%",
"width": "23%",
"className": "hidden-sm hidden-xs"
},
{
"targets": [8],
"data": "plays",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
$(td).html(cellData);
}
},
"searchable": false,
"width": "7%"
},
{
"targets": [9],
"data": "duration",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== null && cellData !== '') {
$(td).html(humanTimeClean(cellData));
}
},
"searchable": false,
"width": "10%"
}

View File

@@ -2,6 +2,7 @@
<%def name="headIncludes()">
<link rel="stylesheet" href="interfaces/default/css/dataTables.bootstrap.css">
<link rel="stylesheet" href="interfaces/default/css/dataTables.colVis.css">
<link rel="stylesheet" href="interfaces/default/css/plexpy-dataTables.css">
</%def>
@@ -13,7 +14,7 @@
</div>
% elif config['update_section_ids'] == -1:
<div id="update_section_ids_message" style="text-align: center; margin-top: 20px;">
<i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes depending on the size of your database.
<i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes to hours depending on the size of your database.
<br />
You may leave this page and come back later.
</div>
@@ -23,6 +24,7 @@
<span><i class="fa fa-book"></i> All Libraries</span>
</div>
<div class="button-bar">
<div class="colvis-button-bar hidden-xs"></div>
% if config['update_section_ids'] == -1:
<button class="btn btn-dark refresh-libraries-button" id="refresh-libraries-list" disabled><i class="fa fa-refresh"></i> Refresh libraries</button>
% else:
@@ -46,8 +48,9 @@
<th align="left" id="parent_count">Total Seasons / Albums</th>
<th align="left" id="child_count">Total Episodes / Tracks</th>
<th align="left" id="last_accessed">Last Accessed</th>
<th align="left" id="last_watched">Last Watched</th>
<th align="left" id="last_played">Last Played</th>
<th align="left" id="total_plays">Total Plays</th>
<th align="left" id="total_duration">Total Duration</th>
</tr>
</thead>
<tbody>
@@ -79,6 +82,7 @@
<%def name="javascriptIncludes()">
<script src="interfaces/default/js/jquery.dataTables.min.js"></script>
<script src="interfaces/default/js/dataTables.colVis.js"></script>
<script src="interfaces/default/js/dataTables.bootstrap.min.js"></script>
<script src="interfaces/default/js/dataTables.bootstrap.pagination.js"></script>
<script src="interfaces/default/js/moment-with-locale.js"></script>
@@ -96,6 +100,8 @@
}
libraries_list_table = $('#libraries_list_table').DataTable(libraries_list_table_options);
var colvis = new $.fn.dataTable.ColVis(libraries_list_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 1] });
$(colvis.button()).appendTo('div.colvis-button-bar');
clearSearchButton('libraries_list_table', libraries_list_table);
@@ -177,7 +183,7 @@
$("#refresh-libraries-list").click(function () {
if ("${config['update_section_ids']}" == "1") {
$('#update_section_ids_message').html(
'<i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes depending on the size of your database.' +
'<i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes to hours depending on the size of your database.' +
'<br />' +
'You may leave this page and come back later.');
$(this).prop('disabled', true);

View File

@@ -37,7 +37,9 @@ DOCUMENTATION :: END
% if data:
<div class="container-fluid">
<div class="row">
% if data['library_art']:
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['library_art']}&width=1920&height=1080)"></div>
% endif
<div class="summary-container">
<div class="summary-navbar">
<div class="col-md-12">
@@ -52,7 +54,7 @@ DOCUMENTATION :: END
<div class="col-md-12">
<div class="table-card-back">
<div class="user-info-wrapper">
% if data['library_thumb'][:4] == 'http':
% if data['library_thumb'][:4] == 'http' or data['library_thumb'][:10] == 'interfaces':
<div class="library-info-poster-face" style="background-image: url(${data['library_thumb']});"></div>
% else:
<div class="library-info-poster-face" style="background-image: url(pms_image_proxy?img=${data['library_thumb']}&width=80&height=80&fallback=cover);"></div>
@@ -240,7 +242,7 @@ DOCUMENTATION :: END
<th align="left" id="audio_codec">Audio Codec</th>
<th align="left" id="audio_channels">Audio Channels</th>
<th align="left" id="file_size">File Size</th>
<th align="left" id="last_watched">Last Watched</th>
<th align="left" id="last_played">Last Played</th>
<th align="left" id="total_plays">Total Plays</th>
</tr>
</thead>

View File

@@ -91,6 +91,6 @@ DOCUMENTATION :: END
</ul>
</div>
% else:
<div class="text-muted">Unable to retrieve data from database.
<div class="text-muted">No stats to show.
</div><br>
% endif

View File

@@ -75,13 +75,13 @@ DOCUMENTATION :: END
</div>
% endif
</div>
% if library['thumb']:
% if library['thumb'].startswith("http"):
<div class="home-platforms-instance-poster">
<div class="home-platforms-library-thumb" style="background-image: url(pms_image_proxy?img=${library['thumb']}&width=300&height=300&fallback=poster);"></div>
<div class="home-platforms-library-thumb" style="background-image: url(${library['thumb']});"></div>
</div>
% else:
<div class="home-platforms-instance-poster">
<div class="home-platforms-library-thumb" style="background-image: url(interfaces/default/images/poster.png);"></div>
<div class="home-platforms-library-thumb" style="background-image: url(pms_image_proxy?img=${library['thumb']}&width=300&height=300&fallback=cover);"></div>
</div>
% endif
</li>

View File

@@ -29,6 +29,8 @@ from plexpy import helpers
<ul class="nav nav-pills" role="tablist">
<li role="presentation" class="active"><a id="plexpy-logs-btn" href="#tabs-1" aria-controls="tabs-1" role="tab" data-toggle="tab">PlexPy Logs</a></li>
<li role="presentation"><a id="plex-logs-btn" href="#tabs-2" aria-controls="tabs-2" role="tab" data-toggle="tab">Plex Media Server Logs</a></li>
<li role="presentation"><a id="plex-scanner-logs-btn" href="#tabs-3" aria-controls="tabs-3" role="tab" data-toggle="tab">Plex Media Scanner Logs</a></li>
<li role="presentation"><a id="notification-logs-btn" href="#tabs-4" aria-controls="tabs-4" role="tab" data-toggle="tab">Notification Logs</a></li>
</ul>
<div class="tab-content">
<div role="tabpanel" class="tab-pane active" id="tabs-1">
@@ -57,6 +59,33 @@ from plexpy import helpers
</tbody>
</table>
</div>
<div role="tabpanel" class="tab-pane" id="tabs-3">
<table class="display" id="plex_scanner_log_table" width="100%">
<thead>
<tr>
<th align='left' id="plex_scanner_timestamp">Timestamp</th>
<th align='left' id="plex_scanner_level">Level</th>
<th align='left' id="plex_scanner_message">Message</th>
</tr>
</thead>
<tbody></tbody>
</table>
</div>
<div role="tabpanel" class="tab-pane" id="tabs-4">
<table class="display" id="notification_log_table" width="100%">
<thead>
<tr>
<th align='left' id="notification_timestamp">Timestamp</th>
<th align='left' id="notification_agent_name">Agent</th>
<th align='left' id="notification_action">Action</th>
<th align='left' id="notification_poster_url">Subject Text</th>
<th align='left' id="notification_poster_url">Body Text</th>
<th align='left' id="notification_poster_url">Script Args</th>
</tr>
</thead>
<tbody></tbody>
</table>
</div>
</div>
</div>
</div>
@@ -80,8 +109,10 @@ from plexpy import helpers
<script src="interfaces/default/js/jquery.dataTables.min.js"></script>
<script src="interfaces/default/js/dataTables.bootstrap.min.js"></script>
<script src="interfaces/default/js/dataTables.bootstrap.pagination.js"></script>
<script src="interfaces/default/js/moment-with-locale.js"></script>
<script src="interfaces/default/js/tables/logs.js"></script>
<script src="interfaces/default/js/tables/plex_logs.js"></script>
<script src="interfaces/default/js/tables/notification_logs.js"></script>
<script>
$(document).ready(function() {
@@ -91,31 +122,63 @@ from plexpy import helpers
function LoadPlexPyLogs() {
log_table_options.ajax = {
"url": "getLog"
url: "getLog"
}
log_table = $('#log_table').DataTable(log_table_options);
}
function LoadPlexLogs() {
plex_log_table_options.ajax = {
"url": "get_plex_log"
url: "get_plex_log?log_type=server"
}
plex_log_table = $('#plex_log_table').DataTable(plex_log_table_options);
}
$("#plexpy-logs-btn").click(function() {
function LoadPlexScannerLogs() {
plex_log_table_options.ajax = {
url: "get_plex_log?log_type=scanner"
}
plex_scanner_log_table = $('#plex_scanner_log_table').DataTable(plex_log_table_options);
}
function LoadNotificationLogs() {
notification_log_table_options.ajax = {
url: "get_notification_log",
type: 'post',
data: function (d) {
return {
json_data: JSON.stringify(d)
};
}
}
notification_log_table = $('#notification_log_table').DataTable(notification_log_table_options);
}
$("#plexpy-logs-btn").click(function () {
$("#clear-logs").show();
LoadPlexPyLogs();
clearSearchButton('log_table', log_table);
});
$("#plex-logs-btn").click(function() {
$("#plex-logs-btn").click(function () {
$("#clear-logs").hide();
LoadPlexLogs();
clearSearchButton('plex_log_table', plex_log_table);
});
$("#clear-logs").click(function() {
$("#plex-scanner-logs-btn").click(function () {
$("#clear-logs").hide();
LoadPlexScannerLogs();
clearSearchButton('plex_scanner_log_table', plex_scanner_log_table);
});
$("#notification-logs-btn").click(function () {
$("#clear-logs").hide();
LoadNotificationLogs();
clearSearchButton('notification_log_table', notification_log_table);
});
$("#clear-logs").click(function () {
var r = confirm("Are you sure you want to clear the PlexPy log?");
if (r == true) {
window.location.href = "clearLogs";
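The Plex Media Server and Plex Media Scanner tabs above share the same plex_log_table_options; only the log_type query string sent to get_plex_log changes. A minimal server-side sketch of how that parameter could be resolved (the log directory path, the window size, and the return shape are assumptions for illustration; only the endpoint name and the server/scanner values come from the code above):

import os

PLEX_LOG_DIR = '/path/to/Plex Media Server/Logs'  # assumed location of the Plex Logs folder

LOG_FILES = {
    'server': 'Plex Media Server.log',
    'scanner': 'Plex Media Scanner.log',
}

def get_plex_log(log_type='server', window=1000):
    # Fall back to the server log for unknown log_type values, mirroring the default tab.
    log_file = os.path.join(PLEX_LOG_DIR, LOG_FILES.get(log_type, LOG_FILES['server']))
    with open(log_file, 'r') as f:
        # Return the last `window` raw lines; the real endpoint would parse each line
        # into (timestamp, level, message) rows for the DataTable columns above.
        return f.readlines()[-window:]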

View File

@@ -132,7 +132,7 @@ from plexpy import helpers
function reloadModal() {
$.ajax({
url: 'get_notification_agent_config',
data: { config_id: '${agent["id"]}' },
data: { agent_id: '${agent["id"]}' },
cache: false,
async: true,
complete: function (xhr, status) {
@@ -143,13 +143,12 @@ from plexpy import helpers
$('#osxnotifyregister').click(function () {
var osx_notify_app = $('#osx_notify_app').val();
$.get('/osxnotifyregister', { 'app': osx_notify_app }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
$.get('osxnotifyregister', { 'app': osx_notify_app }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
})
$('#save-notification-item').click(function () {
doAjaxCall('set_notification_config', $(this), 'tabs', true);
// Reload modal to update certain fields
reloadModal();
doAjaxCall('set_notification_config', $(this), 'tabs', true, reloadModal);
return false;
});
@@ -163,12 +162,12 @@ from plexpy import helpers
});
$('#twitterStep1').click(function () {
$.get('/twitterStep1', function (data) {window.open(data); })
$.get('twitterStep1', function (data) {window.open(data); })
.done(function () { showMsg('<i class="fa fa-check"></i> Confirm Authorization. Check pop-up blocker if no response.', false, true, 3000); });
});
$('#twitterStep2').click(function () {
var twitter_key = $('#twitter_key').val();
$.get('/twitterStep2', { 'key': twitter_key }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
$.get('twitterStep2', { 'key': twitter_key }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
});
function disableFacebookRequest() {
@@ -181,8 +180,12 @@ from plexpy import helpers
});
$('#facebookStep1').click(function () {
// Remove trailing '/' from Facebook redirect URI
if ($('#facebook_redirect_uri') && $('#facebook_redirect_uri').val().endsWith('/')) {
$('#facebook_redirect_uri').val($('#facebook_redirect_uri').val().slice(0, -1));
}
doAjaxCall('set_notification_config', $(this), 'tabs', true);
$.get('/facebookStep1', function (data) { window.open(data); })
$.get('facebookStep1', function (data) { window.open(data); })
.done(function () { showMsg('<i class="fa fa-check"></i> Confirm Authorization. Check pop-up blocker if no response.', false, true, 3000); });
});
@@ -191,7 +194,7 @@ from plexpy import helpers
$.ajax({
url: 'test_notifier',
data: {
config_id: '${agent["id"]}',
agent_id: '${agent["id"]}',
subject: $('#test_subject').val(),
body: $('#test_body').val(),
script: $('#test_script').val(),
@@ -207,8 +210,8 @@ from plexpy import helpers
});
$('#pushbullet_apikey, #pushover_apitoken, #scripts_folder').on('change', function () {
doAjaxCall('set_notification_config', $(this), 'tabs', true);
reloadModal();
// Reload modal to update certain fields
doAjaxCall('set_notification_config', $(this), 'tabs', true, reloadModal);
return false;
});

View File

@@ -64,13 +64,6 @@ from plexpy import helpers
</label>
<p class="help-block">Trigger notification when a media item is added to the Plex Media Server.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_extdown" ${helpers.checked(data['on_extdown'])} class="toggle-switches">
Notify on Plex remote access down
</label>
<p class="help-block">Trigger notification when the Plex Media Server cannot be reached externally.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_intdown" ${helpers.checked(data['on_intdown'])} class="toggle-switches">
@@ -78,13 +71,6 @@ from plexpy import helpers
</label>
<p class="help-block">Trigger notification when the Plex Media Server cannot be reached internally.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_extup" ${helpers.checked(data['on_extup'])} class="toggle-switches">
Notify on Plex remote access back up
</label>
<p class="help-block">Trigger notification when the Plex Media Server can be reached externally after being down.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_intup" ${helpers.checked(data['on_intup'])} class="toggle-switches">
@@ -92,6 +78,20 @@ from plexpy import helpers
</label>
<p class="help-block">Trigger notification when the Plex Media Server can be reached internally after being down.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_extdown" ${helpers.checked(data['on_extdown'])} class="toggle-switches">
Notify on Plex remote access down
</label>
<p class="help-block">Trigger notification when the Plex Media Server cannot be reached externally.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_extup" ${helpers.checked(data['on_extup'])} class="toggle-switches">
Notify on Plex remote access back up
</label>
<p class="help-block">Trigger notification when the Plex Media Server can be reached externally after being down.</p>
</div>
</div>
</div>
</div>

View File

@@ -0,0 +1,64 @@
<%doc>
USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE
For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/
Filename: scheduler_table.html
Version: 0.1
DOCUMENTATION :: END
</%doc>
<%!
import arrow
import plexpy
from plexpy import common
scheduled_jobs = [j.id for j in plexpy.SCHED.get_jobs()]
%>
<table class="config-scheduler-table small-muted">
<thead>
<tr>
<th>Scheduled Task</th>
<th>State</th>
<th>Interval</th>
<th>Next Run In</th>
<th>Next Run Time</th>
</tr>
</thead>
<tbody>
% for job in common.SCHEDULER_LIST:
% if job in scheduled_jobs:
<%
sched_job = plexpy.SCHED.get_job(job)
run_interval = arrow.get(str(sched_job.trigger.interval), ['H:mm:ss', 'HH:mm:ss'])
next_run_interval = arrow.get(sched_job.next_run_time).timestamp - arrow.now().timestamp
%>
<tr>
<td>${sched_job.id}</td>
<td><i class="fa fa-sm fa-fw fa-check"></i> Active</td>
<td>${arrow.get(run_interval).format('HH:mm:ss')}</td>
<td>${arrow.get(next_run_interval).format('HH:mm:ss')}</td>
<td>${arrow.get(sched_job.next_run_time).format('YYYY-MM-DD HH:mm:ss')}</td>
</tr>
% elif job == 'Check for active sessions' and plexpy.CONFIG.MONITORING_USE_WEBSOCKET and not plexpy.POLLING_FAILOVER:
<tr>
<td>${job}</td>
<td><i class="fa fa-sm fa-fw fa-check"></i> Using Websocket</td>
<td>N/A</td>
<td>N/A</td>
<td>N/A</td>
</tr>
% else:
<tr>
<td>${job}</td>
<td><i class="fa fa-sm fa-fw fa-times"></i> Inactive</td>
<td>N/A</td>
<td>N/A</td>
<td>N/A</td>
</tr>
% endif
% endfor
</tbody>
</table>
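The two arrow conversions in scheduler_table.html are easy to miss: str() of the APScheduler interval yields an 'H:MM:SS'-style string that arrow.get() parses against the two candidate formats, and the next-run countdown is a plain difference of .timestamp values fed back through arrow.get() purely to format it as HH:mm:ss. A standalone sketch, assuming an APScheduler-style job exposing .trigger.interval (a timedelta) and .next_run_time (an aware datetime); values are illustrative:

import datetime
import arrow

interval = datetime.timedelta(hours=12)                     # stands in for sched_job.trigger.interval
next_run_time = arrow.utcnow().replace(hours=+3).datetime   # stands in for sched_job.next_run_time

# str(timedelta) gives '12:00:00', which matches the 'HH:mm:ss' candidate format.
run_interval = arrow.get(str(interval), ['H:mm:ss', 'HH:mm:ss'])
print(run_interval.format('HH:mm:ss'))                      # '12:00:00'

# Seconds until the next run; arrow.get(seconds) treats it as a Unix timestamp,
# so formatting it as HH:mm:ss only works for intervals shorter than a day.
next_run_interval = arrow.get(next_run_time).timestamp - arrow.now().timestamp
print(arrow.get(next_run_interval).format('HH:mm:ss'))      # roughly '03:00:00'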

File diff suppressed because it is too large

View File

@@ -147,7 +147,7 @@ from plexpy import helpers
<th align="left">IP Address</th>
<th align="left">Last Platform</th>
<th align="left">Last Player</th>
<th align="left">Last Watched</th>
<th align="left">Last Played</th>
<th align="left">Play Count</th>
</tr>
</thead>

View File

@@ -2,6 +2,7 @@
<%def name="headIncludes()">
<link rel="stylesheet" href="interfaces/default/css/dataTables.bootstrap.css">
<link rel="stylesheet" href="interfaces/default/css/dataTables.colVis.css">
<link rel="stylesheet" href="interfaces/default/css/plexpy-dataTables.css">
</%def>
@@ -12,6 +13,7 @@
<span><i class="fa fa-group"></i> All Users</span>
</div>
<div class="button-bar">
<div class="colvis-button-bar hidden-xs"></div>
<button class="btn btn-dark refresh-users-button" id="refresh-users-list"><i class="fa fa-refresh"></i> Refresh users</button>
<button class="btn btn-danger btn-edit" data-toggle="button" aria-pressed="false" autocomplete="off" id="row-edit-mode">
<i class="fa fa-pencil"></i> Edit mode
@@ -30,8 +32,9 @@
<th align="left" id="last_known_ip">Last Known IP</th>
<th align="left" id="last_platform">Last Platform</th>
<th align="left" id="last_player">Last Player</th>
<th align="left" id="last_watched">Last Watched</th>
<th align="left" id="last_played">Last Played</th>
<th align="left" id="total_plays">Total Plays</th>
<th align="left" id="total_duration">Total Duration</th>
</tr>
</thead>
<tbody>
@@ -67,6 +70,7 @@
<%def name="javascriptIncludes()">
<script src="interfaces/default/js/jquery.dataTables.min.js"></script>
<script src="interfaces/default/js/dataTables.colVis.js"></script>
<script src="interfaces/default/js/dataTables.bootstrap.min.js"></script>
<script src="interfaces/default/js/dataTables.bootstrap.pagination.js"></script>
<script src="interfaces/default/js/moment-with-locale.js"></script>
@@ -84,6 +88,8 @@
}
users_list_table = $('#users_list_table').DataTable(users_list_table_options);
var colvis = new $.fn.dataTable.ColVis(users_list_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 1] });
$(colvis.button()).appendTo('div.colvis-button-bar');
clearSearchButton('users_list_table', users_list_table);

View File

@@ -83,7 +83,7 @@ from plexpy import common
<div class="col-xs-4">
<div class="checkbox">
<label>
<input type="checkbox" id="pms_ssl" name="pms_ssl" value="1"> Force SSL
<input type="checkbox" id="pms_ssl" name="pms_ssl" value="1"> Use SSL
</label>
</div>
</div>
@@ -169,6 +169,7 @@ from plexpy import common
<input type="checkbox" name="launch_browser" id="launch_browser" value="1" ${config['launch_browser']}>
<input type="checkbox" name="refresh_users_on_startup" id="refresh_users_on_startup" value="1" ${config['refresh_users_on_startup']}>
<input type="checkbox" name="refresh_libraries_on_startup" id="refresh_libraries_on_startup" value="1" ${config['refresh_libraries_on_startup']}>
<input type="checkbox" name="server_changed" id="server_changed" value="1" checked>
<input type="checkbox" name="first_run_complete" id="first_run_complete" value="1" checked>
<input type="checkbox" name="check_github" id="check_github" value="1" checked>
<input type="text" name="home_stats_cards" id="home_stats_cards" value="first_run_wizard">
@@ -243,7 +244,7 @@ from plexpy import common
},
render: {
option: function (item, escape) {
return '<div data-use_ssl="' + item.httpsRequired + '" data-local="' + item.local + '" data-ci="' + item.clientIdentifier + '" data-ip="' + item.ip + '" data-port="' + item.port + '">' + item.value + '</div>';
return '<div data-use_ssl="' + item.httpsRequired + '" data-local="' + item.local + '" data-ci="' + item.clientIdentifier + '" data-ip="' + item.ip + '" data-port="' + item.port + '" data-label="' + item.label + '">' + item.value + ' (' + item.label + ')</div>';
},
item: function (item, escape) {
// first item is rendered before initialization bug?
@@ -253,7 +254,7 @@ from plexpy import common
.filter('[value="' + item.value + '"]').data());
}
return '<div data-use_ssl="' + item.httpsRequired + '" data-local="' + item.local + '" data-ci="' + item.clientIdentifier + '" data-ip="' + item.ip + '" data-port="' + item.port + '">' + item.value + '</div>';
return '<div data-use_ssl="' + item.httpsRequired + '" data-local="' + item.local + '" data-ci="' + item.clientIdentifier + '" data-ip="' + item.ip + '" data-port="' + item.port + '" data-label="' + item.label + '">' + item.value + ' (' + item.label + ')</div>';
}
},
onChange: function (item) {
@@ -377,8 +378,8 @@ from plexpy import common
var pms_ip = $("#pms_ip").val().trim();
var pms_port = $("#pms_port").val().trim();
var pms_identifier = $("#pms_identifier").val();
var pms_ssl = $("#pms_ssl").val();
var pms_is_remote = $("#pms_is_remote").val();
var pms_ssl = $("#pms_ssl").is(':checked') ? 1 : 0;
var pms_is_remote = $("#pms_is_remote").is(':checked') ? 1 : 0;
if ((pms_ip !== '') || (pms_port !== '')) {
$("#pms-verify-status").html('<i class="fa fa-refresh fa-spin"></i> Validating server...');
$('#pms-verify-status').fadeIn('fast');
@@ -392,9 +393,10 @@ from plexpy import common
$("#pms-verify-status").html('<i class="fa fa-exclamation-circle"></i> This is not a Plex Server!');
$('#pms-verify-status').fadeIn('fast');
},
success: function (xml) {
if ($(xml).find('MediaContainer').attr('machineIdentifier')) {
$("#pms_identifier").val($(xml).find('MediaContainer').attr('machineIdentifier'));
success: function (json) {
var machine_identifier = json;
if (machine_identifier) {
$("#pms_identifier").val(machine_identifier);
$("#pms-verify-status").html('<i class="fa fa-check"></i> Server found!');
$('#pms-verify-status').fadeIn('fast');
pms_verified = true;

View File

@@ -38,7 +38,7 @@ status_cmd="${name}_status"
stop_cmd="${name}_stop"
command="/usr/sbin/daemon"
command_args="-f -p ${plexpy_pid} python ${plexpy_dir}/PlexPy.py ${plexpy_flags} --quiet --nolaunch"
command_args="python2 ${plexpy_dir}/PlexPy.py --daemon --pidfile ${plexpy_pid} --quiet --nolaunch"
# Ensure user is root when running this script.
if [ `id -u` != "0" ]; then

View File

@@ -38,7 +38,7 @@ status_cmd="${name}_status"
stop_cmd="${name}_stop"
command="/usr/sbin/daemon"
command_args="-f -p ${plexpy_pid} python2 ${plexpy_dir}/PlexPy.py ${plexpy_flags} --quiet --nolaunch"
command_args="python2 ${plexpy_dir}/PlexPy.py --daemon --pidfile ${plexpy_pid} --quiet --nolaunch"
# Ensure user is root when running this script.
if [ `id -u` != "0" ]; then

1652
lib/IPy.py Normal file

File diff suppressed because it is too large

8
lib/arrow/__init__.py Normal file
View File

@@ -0,0 +1,8 @@
# -*- coding: utf-8 -*-
from .arrow import Arrow
from .factory import ArrowFactory
from .api import get, now, utcnow
__version__ = '0.7.0'
VERSION = __version__

55
lib/arrow/api.py Normal file
View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
'''
Provides the default implementation of :class:`ArrowFactory <arrow.factory.ArrowFactory>`
methods for use as a module API.
'''
from __future__ import absolute_import
from arrow.factory import ArrowFactory
# internal default factory.
_factory = ArrowFactory()
def get(*args, **kwargs):
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
``get`` method.
'''
return _factory.get(*args, **kwargs)
def utcnow():
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
``utcnow`` method.
'''
return _factory.utcnow()
def now(tz=None):
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
``now`` method.
'''
return _factory.now(tz)
def factory(type):
''' Returns an :class:`.ArrowFactory` for the specified :class:`Arrow <arrow.arrow.Arrow>`
or derived type.
:param type: the type, :class:`Arrow <arrow.arrow.Arrow>` or derived.
'''
return ArrowFactory(type)
__all__ = ['get', 'utcnow', 'now', 'factory']
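The functions above just proxy a shared module-level ArrowFactory, so the package can be used without touching the factory class directly. A short usage sketch (outputs are illustrative); the LoudArrow subclass is a hypothetical example of the factory(type) hook:

from arrow import Arrow
from arrow.api import get, now, utcnow, factory

print(utcnow())                               # e.g. <Arrow [2016-02-22T07:07:33.123456+00:00]>
print(now('US/Pacific'))                      # the same instant in Pacific time
print(get('2016-02-21T23:07:33-08:00'))       # parse an ISO-8601 string

# factory() returns an ArrowFactory bound to a custom Arrow subclass.
class LoudArrow(Arrow):
    def shout(self):
        return self.isoformat().upper()

print(factory(LoudArrow).utcnow().shout())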

896
lib/arrow/arrow.py Normal file
View File

@@ -0,0 +1,896 @@
# -*- coding: utf-8 -*-
'''
Provides the :class:`Arrow <arrow.arrow.Arrow>` class, an enhanced ``datetime``
replacement.
'''
from __future__ import absolute_import
from datetime import datetime, timedelta, tzinfo
from dateutil import tz as dateutil_tz
from dateutil.relativedelta import relativedelta
import calendar
import sys
from arrow import util, locales, parser, formatter
class Arrow(object):
'''An :class:`Arrow <arrow.arrow.Arrow>` object.
Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing
additional functionality.
:param year: the calendar year.
:param month: the calendar month.
:param day: the calendar day.
:param hour: (optional) the hour. Defaults to 0.
:param minute: (optional) the minute. Defaults to 0.
:param second: (optional) the second. Defaults to 0.
:param microsecond: (optional) the microsecond. Defaults to 0.
:param tzinfo: (optional) the ``tzinfo`` object. Defaults to ``None``.
If tzinfo is None, it is assumed to be UTC on creation.
Usage::
>>> import arrow
>>> arrow.Arrow(2013, 5, 5, 12, 30, 45)
<Arrow [2013-05-05T12:30:45+00:00]>
'''
resolution = datetime.resolution
_ATTRS = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']
_ATTRS_PLURAL = ['{0}s'.format(a) for a in _ATTRS]
def __init__(self, year, month, day, hour=0, minute=0, second=0, microsecond=0,
tzinfo=None):
if util.isstr(tzinfo):
tzinfo = parser.TzinfoParser.parse(tzinfo)
tzinfo = tzinfo or dateutil_tz.tzutc()
self._datetime = datetime(year, month, day, hour, minute, second,
microsecond, tzinfo)
# factories: single object, both original and from datetime.
@classmethod
def now(cls, tzinfo=None):
'''Constructs an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now".
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time.
'''
utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc())
dt = utc.astimezone(dateutil_tz.tzlocal() if tzinfo is None else tzinfo)
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.microsecond, dt.tzinfo)
@classmethod
def utcnow(cls):
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC
time.
'''
dt = datetime.utcnow()
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.microsecond, dateutil_tz.tzutc())
@classmethod
def fromtimestamp(cls, timestamp, tzinfo=None):
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a timestamp.
:param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either.
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time.
'''
tzinfo = tzinfo or dateutil_tz.tzlocal()
timestamp = cls._get_timestamp_from_input(timestamp)
dt = datetime.fromtimestamp(timestamp, tzinfo)
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.microsecond, tzinfo)
@classmethod
def utcfromtimestamp(cls, timestamp):
'''Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a timestamp, in UTC time.
:param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either.
'''
timestamp = cls._get_timestamp_from_input(timestamp)
dt = datetime.utcfromtimestamp(timestamp)
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.microsecond, dateutil_tz.tzutc())
@classmethod
def fromdatetime(cls, dt, tzinfo=None):
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a ``datetime`` and optional
``tzinfo`` object.
:param dt: the ``datetime``
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to UTC.
'''
tzinfo = tzinfo or dt.tzinfo or dateutil_tz.tzutc()
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.microsecond, tzinfo)
@classmethod
def fromdate(cls, date, tzinfo=None):
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a ``date`` and optional
``tzinfo`` object. Time values are set to 0.
:param date: the ``date``
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to UTC.
'''
tzinfo = tzinfo or dateutil_tz.tzutc()
return cls(date.year, date.month, date.day, tzinfo=tzinfo)
@classmethod
def strptime(cls, date_str, fmt, tzinfo=None):
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a date string and format,
in the style of ``datetime.strptime``.
:param date_str: the date string.
:param fmt: the format string.
:param tzinfo: (optional) an optional ``tzinfo``
'''
dt = datetime.strptime(date_str, fmt)
tzinfo = tzinfo or dt.tzinfo
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.microsecond, tzinfo)
# factories: ranges and spans
@classmethod
def range(cls, frame, start, end=None, tz=None, limit=None):
''' Returns an array of :class:`Arrow <arrow.arrow.Arrow>` objects, representing
an iteration of time between two inputs.
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
:param start: A datetime expression, the start of the range.
:param end: (optional) A datetime expression, the end of the range.
:param tz: (optional) A timezone expression. Defaults to UTC.
:param limit: (optional) A maximum number of tuples to return.
**NOTE**: the **end** or **limit** must be provided. Call with **end** alone to
return the entire range, with **limit** alone to return a maximum # of results from the
start, and with both to cap a range at a maximum # of results.
Supported frame values: year, quarter, month, week, day, hour, minute, second
Recognized datetime expressions:
- An :class:`Arrow <arrow.arrow.Arrow>` object.
- A ``datetime`` object.
Recognized timezone expressions:
- A ``tzinfo`` object.
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
- A ``str`` in ISO-8601 style, as in '+07:00'.
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
Usage:
>>> start = datetime(2013, 5, 5, 12, 30)
>>> end = datetime(2013, 5, 5, 17, 15)
>>> for r in arrow.Arrow.range('hour', start, end):
... print repr(r)
...
<Arrow [2013-05-05T12:30:00+00:00]>
<Arrow [2013-05-05T13:30:00+00:00]>
<Arrow [2013-05-05T14:30:00+00:00]>
<Arrow [2013-05-05T15:30:00+00:00]>
<Arrow [2013-05-05T16:30:00+00:00]>
'''
_, frame_relative, relative_steps = cls._get_frames(frame)
tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz)
start = cls._get_datetime(start).replace(tzinfo=tzinfo)
end, limit = cls._get_iteration_params(end, limit)
end = cls._get_datetime(end).replace(tzinfo=tzinfo)
current = cls.fromdatetime(start)
results = []
while current <= end and len(results) < limit:
results.append(current)
values = [getattr(current, f) for f in cls._ATTRS]
current = cls(*values, tzinfo=tzinfo) + relativedelta(**{frame_relative: relative_steps})
return results
@classmethod
def span_range(cls, frame, start, end, tz=None, limit=None):
''' Returns an array of tuples, each :class:`Arrow <arrow.arrow.Arrow>` objects,
representing a series of timespans between two inputs.
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
:param start: A datetime expression, the start of the range.
:param end: (optional) A datetime expression, the end of the range.
:param tz: (optional) A timezone expression. Defaults to UTC.
:param limit: (optional) A maximum number of tuples to return.
**NOTE**: the **end** or **limit** must be provided. Call with **end** alone to
return the entire range, with **limit** alone to return a maximum # of results from the
start, and with both to cap a range at a maximum # of results.
Supported frame values: year, quarter, month, week, day, hour, minute, second
Recognized datetime expressions:
- An :class:`Arrow <arrow.arrow.Arrow>` object.
- A ``datetime`` object.
Recognized timezone expressions:
- A ``tzinfo`` object.
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
- A ``str`` in ISO-8601 style, as in '+07:00'.
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
Usage:
>>> start = datetime(2013, 5, 5, 12, 30)
>>> end = datetime(2013, 5, 5, 17, 15)
>>> for r in arrow.Arrow.span_range('hour', start, end):
... print r
...
(<Arrow [2013-05-05T12:00:00+00:00]>, <Arrow [2013-05-05T12:59:59.999999+00:00]>)
(<Arrow [2013-05-05T13:00:00+00:00]>, <Arrow [2013-05-05T13:59:59.999999+00:00]>)
(<Arrow [2013-05-05T14:00:00+00:00]>, <Arrow [2013-05-05T14:59:59.999999+00:00]>)
(<Arrow [2013-05-05T15:00:00+00:00]>, <Arrow [2013-05-05T15:59:59.999999+00:00]>)
(<Arrow [2013-05-05T16:00:00+00:00]>, <Arrow [2013-05-05T16:59:59.999999+00:00]>)
'''
tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz)
start = cls.fromdatetime(start, tzinfo).span(frame)[0]
_range = cls.range(frame, start, end, tz, limit)
return [r.span(frame) for r in _range]
# representations
def __repr__(self):
dt = self._datetime
attrs = ', '.join([str(i) for i in [dt.year, dt.month, dt.day, dt.hour, dt.minute,
dt.second, dt.microsecond]])
return '<{0} [{1}]>'.format(self.__class__.__name__, self.__str__())
def __str__(self):
return self._datetime.isoformat()
def __format__(self, formatstr):
if len(formatstr) > 0:
return self.format(formatstr)
return str(self)
def __hash__(self):
return self._datetime.__hash__()
# attributes & properties
def __getattr__(self, name):
if name == 'week':
return self.isocalendar()[1]
if not name.startswith('_'):
value = getattr(self._datetime, name, None)
if value is not None:
return value
return object.__getattribute__(self, name)
@property
def tzinfo(self):
''' Gets the ``tzinfo`` of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
return self._datetime.tzinfo
@tzinfo.setter
def tzinfo(self, tzinfo):
''' Sets the ``tzinfo`` of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
self._datetime = self._datetime.replace(tzinfo=tzinfo)
@property
def datetime(self):
''' Returns a datetime representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
return self._datetime
@property
def naive(self):
''' Returns a naive datetime representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
return self._datetime.replace(tzinfo=None)
@property
def timestamp(self):
''' Returns a timestamp representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
return calendar.timegm(self._datetime.utctimetuple())
@property
def float_timestamp(self):
''' Returns a floating-point representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
return self.timestamp + float(self.microsecond) / 1000000
# mutation and duplication.
def clone(self):
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, cloned from the current one.
Usage:
>>> arw = arrow.utcnow()
>>> cloned = arw.clone()
'''
return self.fromdatetime(self._datetime)
def replace(self, **kwargs):
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object with attributes updated
according to inputs.
Use single property names to set their value absolutely:
>>> import arrow
>>> arw = arrow.utcnow()
>>> arw
<Arrow [2013-05-11T22:27:34.787885+00:00]>
>>> arw.replace(year=2014, month=6)
<Arrow [2014-06-11T22:27:34.787885+00:00]>
Use plural property names to shift their current value relatively:
>>> arw.replace(years=1, months=-1)
<Arrow [2014-04-11T22:27:34.787885+00:00]>
You can also provide a timezone expression, which will replace the existing tzinfo:
>>> arw.replace(tzinfo=tz.tzlocal())
<Arrow [2013-05-11T22:27:34.787885-07:00]>
Recognized timezone expressions:
- A ``tzinfo`` object.
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
- A ``str`` in ISO-8601 style, as in '+07:00'.
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
'''
absolute_kwargs = {}
relative_kwargs = {}
for key, value in kwargs.items():
if key in self._ATTRS:
absolute_kwargs[key] = value
elif key in self._ATTRS_PLURAL or key == 'weeks':
relative_kwargs[key] = value
elif key == 'week':
raise AttributeError('setting absolute week is not supported')
elif key !='tzinfo':
raise AttributeError()
current = self._datetime.replace(**absolute_kwargs)
current += relativedelta(**relative_kwargs)
tzinfo = kwargs.get('tzinfo')
if tzinfo is not None:
tzinfo = self._get_tzinfo(tzinfo)
current = current.replace(tzinfo=tzinfo)
return self.fromdatetime(current)
def to(self, tz):
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, converted to the target
timezone.
:param tz: an expression representing a timezone.
Recognized timezone expressions:
- A ``tzinfo`` object.
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
- A ``str`` in ISO-8601 style, as in '+07:00'.
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
Usage::
>>> utc = arrow.utcnow()
>>> utc
<Arrow [2013-05-09T03:49:12.311072+00:00]>
>>> utc.to('US/Pacific')
<Arrow [2013-05-08T20:49:12.311072-07:00]>
>>> utc.to(tz.tzlocal())
<Arrow [2013-05-08T20:49:12.311072-07:00]>
>>> utc.to('-07:00')
<Arrow [2013-05-08T20:49:12.311072-07:00]>
>>> utc.to('local')
<Arrow [2013-05-08T20:49:12.311072-07:00]>
>>> utc.to('local').to('utc')
<Arrow [2013-05-09T03:49:12.311072+00:00]>
'''
if not isinstance(tz, tzinfo):
tz = parser.TzinfoParser.parse(tz)
dt = self._datetime.astimezone(tz)
return self.__class__(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.microsecond, dt.tzinfo)
def span(self, frame, count=1):
''' Returns two new :class:`Arrow <arrow.arrow.Arrow>` objects, representing the timespan
of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
:param count: (optional) the number of frames to span.
Supported frame values: year, quarter, month, week, day, hour, minute, second
Usage::
>>> arrow.utcnow()
<Arrow [2013-05-09T03:32:36.186203+00:00]>
>>> arrow.utcnow().span('hour')
(<Arrow [2013-05-09T03:00:00+00:00]>, <Arrow [2013-05-09T03:59:59.999999+00:00]>)
>>> arrow.utcnow().span('day')
(<Arrow [2013-05-09T00:00:00+00:00]>, <Arrow [2013-05-09T23:59:59.999999+00:00]>)
>>> arrow.utcnow().span('day', count=2)
(<Arrow [2013-05-09T00:00:00+00:00]>, <Arrow [2013-05-10T23:59:59.999999+00:00]>)
'''
frame_absolute, frame_relative, relative_steps = self._get_frames(frame)
if frame_absolute == 'week':
attr = 'day'
elif frame_absolute == 'quarter':
attr = 'month'
else:
attr = frame_absolute
index = self._ATTRS.index(attr)
frames = self._ATTRS[:index + 1]
values = [getattr(self, f) for f in frames]
for i in range(3 - len(values)):
values.append(1)
floor = self.__class__(*values, tzinfo=self.tzinfo)
if frame_absolute == 'week':
floor = floor + relativedelta(days=-(self.isoweekday() - 1))
elif frame_absolute == 'quarter':
floor = floor + relativedelta(months=-((self.month - 1) % 3))
ceil = floor + relativedelta(
**{frame_relative: count * relative_steps}) + relativedelta(microseconds=-1)
return floor, ceil
def floor(self, frame):
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, representing the "floor"
of the timespan of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
Equivalent to the first element in the 2-tuple returned by
:func:`span <arrow.arrow.Arrow.span>`.
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
Usage::
>>> arrow.utcnow().floor('hour')
<Arrow [2013-05-09T03:00:00+00:00]>
'''
return self.span(frame)[0]
def ceil(self, frame):
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, representing the "ceiling"
of the timespan of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
Equivalent to the second element in the 2-tuple returned by
:func:`span <arrow.arrow.Arrow.span>`.
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
Usage::
>>> arrow.utcnow().ceil('hour')
<Arrow [2013-05-09T03:59:59.999999+00:00]>
'''
return self.span(frame)[1]
# string output and formatting.
def format(self, fmt='YYYY-MM-DD HH:mm:ssZZ', locale='en_us'):
''' Returns a string representation of the :class:`Arrow <arrow.arrow.Arrow>` object,
formatted according to a format string.
:param fmt: the format string.
Usage::
>>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ')
'2013-05-09 03:56:47 -00:00'
>>> arrow.utcnow().format('X')
'1368071882'
>>> arrow.utcnow().format('MMMM DD, YYYY')
'May 09, 2013'
>>> arrow.utcnow().format()
'2013-05-09 03:56:47 -00:00'
'''
return formatter.DateTimeFormatter(locale).format(self._datetime, fmt)
def humanize(self, other=None, locale='en_us', only_distance=False):
''' Returns a localized, humanized representation of a relative difference in time.
:param other: (optional) an :class:`Arrow <arrow.arrow.Arrow>` or ``datetime`` object.
Defaults to now in the current :class:`Arrow <arrow.arrow.Arrow>` object's timezone.
:param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'.
:param only_distance: (optional) returns only the time difference, e.g. "11 seconds", without the "in" or "ago" part.
Usage::
>>> earlier = arrow.utcnow().replace(hours=-2)
>>> earlier.humanize()
'2 hours ago'
>>> later = earlier.replace(hours=4)
>>> later.humanize(earlier)
'in 4 hours'
'''
locale = locales.get_locale(locale)
if other is None:
utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc())
dt = utc.astimezone(self._datetime.tzinfo)
elif isinstance(other, Arrow):
dt = other._datetime
elif isinstance(other, datetime):
if other.tzinfo is None:
dt = other.replace(tzinfo=self._datetime.tzinfo)
else:
dt = other.astimezone(self._datetime.tzinfo)
else:
raise TypeError()
delta = int(util.total_seconds(self._datetime - dt))
sign = -1 if delta < 0 else 1
diff = abs(delta)
delta = diff
if diff < 10:
return locale.describe('now', only_distance=only_distance)
if diff < 45:
return locale.describe('seconds', sign, only_distance=only_distance)
elif diff < 90:
return locale.describe('minute', sign, only_distance=only_distance)
elif diff < 2700:
minutes = sign * int(max(delta / 60, 2))
return locale.describe('minutes', minutes, only_distance=only_distance)
elif diff < 5400:
return locale.describe('hour', sign, only_distance=only_distance)
elif diff < 79200:
hours = sign * int(max(delta / 3600, 2))
return locale.describe('hours', hours, only_distance=only_distance)
elif diff < 129600:
return locale.describe('day', sign, only_distance=only_distance)
elif diff < 2160000:
days = sign * int(max(delta / 86400, 2))
return locale.describe('days', days, only_distance=only_distance)
elif diff < 3888000:
return locale.describe('month', sign, only_distance=only_distance)
elif diff < 29808000:
self_months = self._datetime.year * 12 + self._datetime.month
other_months = dt.year * 12 + dt.month
months = sign * abs(other_months - self_months)
return locale.describe('months', months, only_distance=only_distance)
elif diff < 47260800:
return locale.describe('year', sign, only_distance=only_distance)
else:
years = sign * int(max(delta / 31536000, 2))
return locale.describe('years', years, only_distance=only_distance)
# math
def __add__(self, other):
if isinstance(other, (timedelta, relativedelta)):
return self.fromdatetime(self._datetime + other, self._datetime.tzinfo)
raise TypeError()
def __radd__(self, other):
return self.__add__(other)
def __sub__(self, other):
if isinstance(other, timedelta):
return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)
elif isinstance(other, datetime):
return self._datetime - other
elif isinstance(other, Arrow):
return self._datetime - other._datetime
raise TypeError()
def __rsub__(self, other):
return self.__sub__(other)
# comparisons
def _cmperror(self, other):
raise TypeError('can\'t compare \'{0}\' to \'{1}\''.format(
type(self), type(other)))
def __eq__(self, other):
if not isinstance(other, (Arrow, datetime)):
return False
other = self._get_datetime(other)
return self._datetime == self._get_datetime(other)
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
if not isinstance(other, (Arrow, datetime)):
self._cmperror(other)
return self._datetime > self._get_datetime(other)
def __ge__(self, other):
if not isinstance(other, (Arrow, datetime)):
self._cmperror(other)
return self._datetime >= self._get_datetime(other)
def __lt__(self, other):
if not isinstance(other, (Arrow, datetime)):
self._cmperror(other)
return self._datetime < self._get_datetime(other)
def __le__(self, other):
if not isinstance(other, (Arrow, datetime)):
self._cmperror(other)
return self._datetime <= self._get_datetime(other)
# datetime methods
def date(self):
''' Returns a ``date`` object with the same year, month and day. '''
return self._datetime.date()
def time(self):
''' Returns a ``time`` object with the same hour, minute, second, microsecond. '''
return self._datetime.time()
def timetz(self):
''' Returns a ``time`` object with the same hour, minute, second, microsecond and tzinfo. '''
return self._datetime.timetz()
def astimezone(self, tz):
''' Returns a ``datetime`` object, adjusted to the specified tzinfo.
:param tz: a ``tzinfo`` object.
'''
return self._datetime.astimezone(tz)
def utcoffset(self):
''' Returns a ``timedelta`` object representing the whole number of minutes difference from UTC time. '''
return self._datetime.utcoffset()
def dst(self):
''' Returns the daylight savings time adjustment. '''
return self._datetime.dst()
def timetuple(self):
''' Returns a ``time.struct_time``, in the current timezone. '''
return self._datetime.timetuple()
def utctimetuple(self):
''' Returns a ``time.struct_time``, in UTC time. '''
return self._datetime.utctimetuple()
def toordinal(self):
''' Returns the proleptic Gregorian ordinal of the date. '''
return self._datetime.toordinal()
def weekday(self):
''' Returns the day of the week as an integer (0-6). '''
return self._datetime.weekday()
def isoweekday(self):
''' Returns the ISO day of the week as an integer (1-7). '''
return self._datetime.isoweekday()
def isocalendar(self):
''' Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). '''
return self._datetime.isocalendar()
def isoformat(self, sep='T'):
'''Returns an ISO 8601 formatted representation of the date and time. '''
return self._datetime.isoformat(sep)
def ctime(self):
''' Returns a ctime formatted representation of the date and time. '''
return self._datetime.ctime()
def strftime(self, format):
''' Formats in the style of ``datetime.strftime``.
:param format: the format string.
'''
return self._datetime.strftime(format)
def for_json(self):
'''Serializes for the ``for_json`` protocol of simplejson.'''
return self.isoformat()
# internal tools.
@staticmethod
def _get_tzinfo(tz_expr):
if tz_expr is None:
return dateutil_tz.tzutc()
if isinstance(tz_expr, tzinfo):
return tz_expr
else:
try:
return parser.TzinfoParser.parse(tz_expr)
except parser.ParserError:
raise ValueError('\'{0}\' not recognized as a timezone'.format(
tz_expr))
@classmethod
def _get_datetime(cls, expr):
if isinstance(expr, Arrow):
return expr.datetime
if isinstance(expr, datetime):
return expr
try:
expr = float(expr)
return cls.utcfromtimestamp(expr).datetime
except:
raise ValueError(
'\'{0}\' not recognized as a timestamp or datetime'.format(expr))
@classmethod
def _get_frames(cls, name):
if name in cls._ATTRS:
return name, '{0}s'.format(name), 1
elif name in ['week', 'weeks']:
return 'week', 'weeks', 1
elif name in ['quarter', 'quarters']:
return 'quarter', 'months', 3
raise AttributeError()
@classmethod
def _get_iteration_params(cls, end, limit):
if end is None:
if limit is None:
raise Exception('one of \'end\' or \'limit\' is required')
return cls.max, limit
else:
return end, sys.maxsize
@staticmethod
def _get_timestamp_from_input(timestamp):
try:
return float(timestamp)
except:
raise ValueError('cannot parse \'{0}\' as a timestamp'.format(timestamp))
Arrow.min = Arrow.fromdatetime(datetime.min)
Arrow.max = Arrow.fromdatetime(datetime.max)
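A few illustrative calls against the Arrow class above, tying it back to how this changeset uses it (outputs depend on when you run it):

import arrow

now = arrow.utcnow()
print(now.span('hour'))                        # (floor, ceiling) tuple for the current hour
print(now.floor('day'))                        # midnight UTC of the current day
print(now.replace(hours=-2).humanize())        # '2 hours ago'
print(now.to('local').format('YYYY-MM-DD HH:mm:ss'))   # local wall-clock time

# timestamp is a property, not a method, which is why scheduler_table.html can
# subtract two .timestamp values to get a countdown in seconds.
print(now.replace(minutes=+30).timestamp - arrow.now().timestamp)   # roughly 1800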

254
lib/arrow/factory.py Normal file
View File

@@ -0,0 +1,254 @@
# -*- coding: utf-8 -*-
"""
Implements the :class:`ArrowFactory <arrow.factory.ArrowFactory>` class,
providing factory methods for common :class:`Arrow <arrow.arrow.Arrow>`
construction scenarios.
"""
from __future__ import absolute_import
from arrow.arrow import Arrow
from arrow import parser
from arrow.util import is_timestamp, isstr
from datetime import datetime, tzinfo, date
from dateutil import tz as dateutil_tz
from time import struct_time
import calendar
class ArrowFactory(object):
''' A factory for generating :class:`Arrow <arrow.arrow.Arrow>` objects.
:param type: (optional) the :class:`Arrow <arrow.arrow.Arrow>`-based class to construct from.
Defaults to :class:`Arrow <arrow.arrow.Arrow>`.
'''
def __init__(self, type=Arrow):
self.type = type
def get(self, *args, **kwargs):
''' Returns an :class:`Arrow <arrow.arrow.Arrow>` object based on flexible inputs.
Usage::
>>> import arrow
**No inputs** to get current UTC time::
>>> arrow.get()
<Arrow [2013-05-08T05:51:43.316458+00:00]>
**None** to also get current UTC time::
>>> arrow.get(None)
<Arrow [2013-05-08T05:51:43.316458+00:00]>
**One** :class:`Arrow <arrow.arrow.Arrow>` object, to get a copy.
>>> arw = arrow.utcnow()
>>> arrow.get(arw)
<Arrow [2013-10-23T15:21:54.354846+00:00]>
**One** ``str``, ``float``, or ``int``, convertible to a floating-point timestamp, to get that timestamp in UTC::
>>> arrow.get(1367992474.293378)
<Arrow [2013-05-08T05:54:34.293378+00:00]>
>>> arrow.get(1367992474)
<Arrow [2013-05-08T05:54:34+00:00]>
>>> arrow.get('1367992474.293378')
<Arrow [2013-05-08T05:54:34.293378+00:00]>
>>> arrow.get('1367992474')
<Arrow [2013-05-08T05:54:34+00:00]>
**One** ISO-8601-formatted ``str``, to parse it::
>>> arrow.get('2013-09-29T01:26:43.830580')
<Arrow [2013-09-29T01:26:43.830580+00:00]>
**One** ``tzinfo``, to get the current time in that timezone::
>>> arrow.get(tz.tzlocal())
<Arrow [2013-05-07T22:57:28.484717-07:00]>
**One** naive ``datetime``, to get that datetime in UTC::
>>> arrow.get(datetime(2013, 5, 5))
<Arrow [2013-05-05T00:00:00+00:00]>
**One** aware ``datetime``, to get that datetime::
>>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal()))
<Arrow [2013-05-05T00:00:00-07:00]>
**One** naive ``date``, to get that date in UTC::
>>> arrow.get(date(2013, 5, 5))
<Arrow [2013-05-05T00:00:00+00:00]>
**Two** arguments, a naive or aware ``datetime``, and a timezone expression (as above)::
>>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')
<Arrow [2013-05-05T00:00:00-07:00]>
**Two** arguments, a naive ``date``, and a timezone expression (as above)::
>>> arrow.get(date(2013, 5, 5), 'US/Pacific')
<Arrow [2013-05-05T00:00:00-07:00]>
**Two** arguments, both ``str``, to parse the first according to the format of the second::
>>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss')
<Arrow [2013-05-05T12:30:45+00:00]>
**Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try::
>>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss'])
<Arrow [2013-05-05T12:30:45+00:00]>
**Three or more** arguments, as for the constructor of a ``datetime``::
>>> arrow.get(2013, 5, 5, 12, 30, 45)
<Arrow [2013-05-05T12:30:45+00:00]>
**One** ``time.struct_time``::
>>> arrow.get(gmtime(0))
<Arrow [1970-01-01T00:00:00+00:00]>
'''
arg_count = len(args)
locale = kwargs.get('locale', 'en_us')
tz = kwargs.get('tzinfo', None)
# () -> now, @ utc.
if arg_count == 0:
if isinstance(tz, tzinfo):
return self.type.now(tz)
return self.type.utcnow()
if arg_count == 1:
arg = args[0]
# (None) -> now, @ utc.
if arg is None:
return self.type.utcnow()
# try (int, float, str(int), str(float)) -> utc, from timestamp.
if is_timestamp(arg):
return self.type.utcfromtimestamp(arg)
# (Arrow) -> from the object's datetime.
if isinstance(arg, Arrow):
return self.type.fromdatetime(arg.datetime)
# (datetime) -> from datetime.
if isinstance(arg, datetime):
return self.type.fromdatetime(arg)
# (date) -> from date.
if isinstance(arg, date):
return self.type.fromdate(arg)
# (tzinfo) -> now, @ tzinfo.
elif isinstance(arg, tzinfo):
return self.type.now(arg)
# (str) -> now, @ tzinfo.
elif isstr(arg):
dt = parser.DateTimeParser(locale).parse_iso(arg)
return self.type.fromdatetime(dt)
# (struct_time) -> from struct_time
elif isinstance(arg, struct_time):
return self.type.utcfromtimestamp(calendar.timegm(arg))
else:
raise TypeError('Can\'t parse single argument type of \'{0}\''.format(type(arg)))
elif arg_count == 2:
arg_1, arg_2 = args[0], args[1]
if isinstance(arg_1, datetime):
# (datetime, tzinfo) -> fromdatetime @ tzinfo/string.
if isinstance(arg_2, tzinfo) or isstr(arg_2):
return self.type.fromdatetime(arg_1, arg_2)
else:
raise TypeError('Can\'t parse two arguments of types \'datetime\', \'{0}\''.format(
type(arg_2)))
# (date, tzinfo/str) -> fromdate @ tzinfo/string.
elif isinstance(arg_1, date):
if isinstance(arg_2, tzinfo) or isstr(arg_2):
return self.type.fromdate(arg_1, tzinfo=arg_2)
else:
raise TypeError('Can\'t parse two arguments of types \'date\', \'{0}\''.format(
type(arg_2)))
# (str, format) -> parse.
elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)):
dt = parser.DateTimeParser(locale).parse(args[0], args[1])
return self.type.fromdatetime(dt, tzinfo=tz)
else:
raise TypeError('Can\'t parse two arguments of types \'{0}\', \'{1}\''.format(
type(arg_1), type(arg_2)))
# 3+ args -> datetime-like via constructor.
else:
return self.type(*args, **kwargs)
def utcnow(self):
'''Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC time.
Usage::
>>> import arrow
>>> arrow.utcnow()
<Arrow [2013-05-08T05:19:07.018993+00:00]>
'''
return self.type.utcnow()
def now(self, tz=None):
'''Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now".
:param tz: (optional) An expression representing a timezone. Defaults to local time.
Recognized timezone expressions:
- A ``tzinfo`` object.
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
- A ``str`` in ISO-8601 style, as in '+07:00'.
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
Usage::
>>> import arrow
>>> arrow.now()
<Arrow [2013-05-07T22:19:11.363410-07:00]>
>>> arrow.now('US/Pacific')
<Arrow [2013-05-07T22:19:15.251821-07:00]>
>>> arrow.now('+02:00')
<Arrow [2013-05-08T07:19:25.618646+02:00]>
>>> arrow.now('local')
<Arrow [2013-05-07T22:19:39.130059-07:00]>
'''
if tz is None:
tz = dateutil_tz.tzlocal()
elif not isinstance(tz, tzinfo):
tz = parser.TzinfoParser.parse(tz)
return self.type.now(tz)
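
A minimal usage sketch (an illustration, not part of the diff): the CustomArrow subclass below is hypothetical, and the module-level arrow.get()/arrow.utcnow() helpers seen in the docstrings presumably delegate to a default factory like this one.

from arrow.arrow import Arrow
from arrow.factory import ArrowFactory

class CustomArrow(Arrow):
    # hypothetical subclass; any Arrow-derived type can be passed as `type`
    pass

factory = ArrowFactory(type=CustomArrow)
print(factory.utcnow())                                   # "now" in UTC as a CustomArrow
print(factory.get('2013-09-29T01:26:43.830580'))          # one ISO-8601 str -> parse_iso path
print(factory.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss'))  # two-str path: explicit format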

lib/arrow/formatter.py (Normal file, +105)

@@ -0,0 +1,105 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import calendar
import re
from dateutil import tz as dateutil_tz
from arrow import util, locales
class DateTimeFormatter(object):
_FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?|a|A|X)')
def __init__(self, locale='en_us'):
self.locale = locales.get_locale(locale)
def format(cls, dt, fmt):
return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt)
def _format_token(self, dt, token):
if token == 'YYYY':
return self.locale.year_full(dt.year)
if token == 'YY':
return self.locale.year_abbreviation(dt.year)
if token == 'MMMM':
return self.locale.month_name(dt.month)
if token == 'MMM':
return self.locale.month_abbreviation(dt.month)
if token == 'MM':
return '{0:02d}'.format(dt.month)
if token == 'M':
return str(dt.month)
if token == 'DDDD':
return '{0:03d}'.format(dt.timetuple().tm_yday)
if token == 'DDD':
return str(dt.timetuple().tm_yday)
if token == 'DD':
return '{0:02d}'.format(dt.day)
if token == 'D':
return str(dt.day)
if token == 'Do':
return self.locale.ordinal_number(dt.day)
if token == 'dddd':
return self.locale.day_name(dt.isoweekday())
if token == 'ddd':
return self.locale.day_abbreviation(dt.isoweekday())
if token == 'd':
return str(dt.isoweekday())
if token == 'HH':
return '{0:02d}'.format(dt.hour)
if token == 'H':
return str(dt.hour)
if token == 'hh':
return '{0:02d}'.format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
if token == 'h':
return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
if token == 'mm':
return '{0:02d}'.format(dt.minute)
if token == 'm':
return str(dt.minute)
if token == 'ss':
return '{0:02d}'.format(dt.second)
if token == 's':
return str(dt.second)
if token == 'SSSSSS':
return str('{0:06d}'.format(int(dt.microsecond)))
if token == 'SSSSS':
return str('{0:05d}'.format(int(dt.microsecond / 10)))
if token == 'SSSS':
return str('{0:04d}'.format(int(dt.microsecond / 100)))
if token == 'SSS':
return str('{0:03d}'.format(int(dt.microsecond / 1000)))
if token == 'SS':
return str('{0:02d}'.format(int(dt.microsecond / 10000)))
if token == 'S':
return str(int(dt.microsecond / 100000))
if token == 'X':
return str(calendar.timegm(dt.utctimetuple()))
if token in ['ZZ', 'Z']:
separator = ':' if token == 'ZZ' else ''
tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo
total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60)
sign = '+' if total_minutes > 0 else '-'
total_minutes = abs(total_minutes)
hour, minute = divmod(total_minutes, 60)
return '{0}{1:02d}{2}{3:02d}'.format(sign, hour, separator, minute)
if token in ('a', 'A'):
return self.locale.meridian(dt.hour, token)
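
For reference, a small sketch of how the token table above behaves when driven directly (normally the formatter is reached through Arrow's own format method; the commented values are what the en_us locale would produce):

from datetime import datetime
from arrow.formatter import DateTimeFormatter

formatter = DateTimeFormatter(locale='en_us')
dt = datetime(2016, 2, 21, 23, 7, 33)
print(formatter.format(dt, 'YYYY-MM-DD HH:mm:ss'))  # 2016-02-21 23:07:33
print(formatter.format(dt, 'dddd, MMMM Do'))        # Sunday, February 21st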

lib/arrow/locales.py (Normal file, +1703)

File diff suppressed because it is too large.
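
Although the diff is suppressed, the locales module supplies the per-language lookup tables that the formatter above and the parser below call into; a small sketch of that surface (method names taken from the calls visible in formatter.py and parser.py):

from arrow import locales

locale = locales.get_locale('en_us')
print(locale.month_name(2))           # February
print(locale.month_abbreviation(2))   # Feb
print(locale.day_name(7))             # Sunday (ISO weekday: 1=Monday ... 7=Sunday)
print(locale.ordinal_number(21))      # 21st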

lib/arrow/parser.py (Normal file, +308)

@@ -0,0 +1,308 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from datetime import datetime
from dateutil import tz
import re
from arrow import locales
class ParserError(RuntimeError):
pass
class DateTimeParser(object):
_FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X)')
_ONE_THROUGH_SIX_DIGIT_RE = re.compile('\d{1,6}')
_ONE_THROUGH_FIVE_DIGIT_RE = re.compile('\d{1,5}')
_ONE_THROUGH_FOUR_DIGIT_RE = re.compile('\d{1,4}')
_ONE_TWO_OR_THREE_DIGIT_RE = re.compile('\d{1,3}')
_ONE_OR_TWO_DIGIT_RE = re.compile('\d{1,2}')
_FOUR_DIGIT_RE = re.compile('\d{4}')
_TWO_DIGIT_RE = re.compile('\d{2}')
_TZ_RE = re.compile('[+\-]?\d{2}:?\d{2}')
_TZ_NAME_RE = re.compile('\w[\w+\-/]+')
_BASE_INPUT_RE_MAP = {
'YYYY': _FOUR_DIGIT_RE,
'YY': _TWO_DIGIT_RE,
'MM': _TWO_DIGIT_RE,
'M': _ONE_OR_TWO_DIGIT_RE,
'DD': _TWO_DIGIT_RE,
'D': _ONE_OR_TWO_DIGIT_RE,
'HH': _TWO_DIGIT_RE,
'H': _ONE_OR_TWO_DIGIT_RE,
'hh': _TWO_DIGIT_RE,
'h': _ONE_OR_TWO_DIGIT_RE,
'mm': _TWO_DIGIT_RE,
'm': _ONE_OR_TWO_DIGIT_RE,
'ss': _TWO_DIGIT_RE,
's': _ONE_OR_TWO_DIGIT_RE,
'X': re.compile('\d+'),
'ZZZ': _TZ_NAME_RE,
'ZZ': _TZ_RE,
'Z': _TZ_RE,
'SSSSSS': _ONE_THROUGH_SIX_DIGIT_RE,
'SSSSS': _ONE_THROUGH_FIVE_DIGIT_RE,
'SSSS': _ONE_THROUGH_FOUR_DIGIT_RE,
'SSS': _ONE_TWO_OR_THREE_DIGIT_RE,
'SS': _ONE_OR_TWO_DIGIT_RE,
'S': re.compile('\d'),
}
MARKERS = ['YYYY', 'MM', 'DD']
SEPARATORS = ['-', '/', '.']
def __init__(self, locale='en_us'):
self.locale = locales.get_locale(locale)
self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
self._input_re_map.update({
'MMMM': self._choice_re(self.locale.month_names[1:], re.IGNORECASE),
'MMM': self._choice_re(self.locale.month_abbreviations[1:],
re.IGNORECASE),
'Do': re.compile(self.locale.ordinal_day_re),
'a': self._choice_re(
(self.locale.meridians['am'], self.locale.meridians['pm'])
),
# note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to
# ensure backwards compatibility of this token
'A': self._choice_re(self.locale.meridians.values())
})
def parse_iso(self, string):
has_time = 'T' in string or ' ' in string.strip()
space_divider = ' ' in string.strip()
if has_time:
if space_divider:
date_string, time_string = string.split(' ', 1)
else:
date_string, time_string = string.split('T', 1)
time_parts = re.split('[+-]', time_string, 1)
has_tz = len(time_parts) > 1
has_seconds = time_parts[0].count(':') > 1
has_subseconds = '.' in time_parts[0]
if has_subseconds:
subseconds_token = 'S' * min(len(re.split('\D+', time_parts[0].split('.')[1], 1)[0]), 6)
formats = ['YYYY-MM-DDTHH:mm:ss.%s' % subseconds_token]
elif has_seconds:
formats = ['YYYY-MM-DDTHH:mm:ss']
else:
formats = ['YYYY-MM-DDTHH:mm']
else:
has_tz = False
# generate required formats: YYYY-MM-DD, YYYY-MM, YYYY
# using various separators: -, /, .
l = len(self.MARKERS)
formats = [separator.join(self.MARKERS[:l-i])
for i in range(l)
for separator in self.SEPARATORS]
if has_time and has_tz:
formats = [f + 'Z' for f in formats]
if space_divider:
formats = [item.replace('T', ' ', 1) for item in formats]
return self._parse_multiformat(string, formats)
def parse(self, string, fmt):
if isinstance(fmt, list):
return self._parse_multiformat(string, fmt)
# fmt is a string of tokens like 'YYYY-MM-DD'
# we construct a new string by replacing each
# token by its pattern:
# 'YYYY-MM-DD' -> '(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>\d{2})'
fmt_pattern = fmt
tokens = []
offset = 0
for m in self._FORMAT_RE.finditer(fmt):
token = m.group(0)
try:
input_re = self._input_re_map[token]
except KeyError:
raise ParserError('Unrecognized token \'{0}\''.format(token))
input_pattern = '(?P<{0}>{1})'.format(token, input_re.pattern)
tokens.append(token)
# a pattern doesn't have the same length as the token
# it replaces! We keep the difference in the offset variable.
# This works because the string is scanned left-to-right and matches
# are returned in the order found by finditer.
fmt_pattern = fmt_pattern[:m.start() + offset] + input_pattern + fmt_pattern[m.end() + offset:]
offset += len(input_pattern) - (m.end() - m.start())
match = re.search(fmt_pattern, string, flags=re.IGNORECASE)
if match is None:
raise ParserError('Failed to match \'{0}\' when parsing \'{1}\''.format(fmt_pattern, string))
parts = {}
for token in tokens:
if token == 'Do':
value = match.group('value')
else:
value = match.group(token)
self._parse_token(token, value, parts)
return self._build_datetime(parts)
def _parse_token(self, token, value, parts):
if token == 'YYYY':
parts['year'] = int(value)
elif token == 'YY':
value = int(value)
parts['year'] = 1900 + value if value > 68 else 2000 + value
elif token in ['MMMM', 'MMM']:
parts['month'] = self.locale.month_number(value.lower())
elif token in ['MM', 'M']:
parts['month'] = int(value)
elif token in ['DD', 'D']:
parts['day'] = int(value)
elif token in ['Do']:
parts['day'] = int(value)
elif token.upper() in ['HH', 'H']:
parts['hour'] = int(value)
elif token in ['mm', 'm']:
parts['minute'] = int(value)
elif token in ['ss', 's']:
parts['second'] = int(value)
elif token == 'SSSSSS':
parts['microsecond'] = int(value)
elif token == 'SSSSS':
parts['microsecond'] = int(value) * 10
elif token == 'SSSS':
parts['microsecond'] = int(value) * 100
elif token == 'SSS':
parts['microsecond'] = int(value) * 1000
elif token == 'SS':
parts['microsecond'] = int(value) * 10000
elif token == 'S':
parts['microsecond'] = int(value) * 100000
elif token == 'X':
parts['timestamp'] = int(value)
elif token in ['ZZZ', 'ZZ', 'Z']:
parts['tzinfo'] = TzinfoParser.parse(value)
elif token in ['a', 'A']:
if value in (
self.locale.meridians['am'],
self.locale.meridians['AM']
):
parts['am_pm'] = 'am'
elif value in (
self.locale.meridians['pm'],
self.locale.meridians['PM']
):
parts['am_pm'] = 'pm'
@staticmethod
def _build_datetime(parts):
timestamp = parts.get('timestamp')
if timestamp:
tz_utc = tz.tzutc()
return datetime.fromtimestamp(timestamp, tz=tz_utc)
am_pm = parts.get('am_pm')
hour = parts.get('hour', 0)
if am_pm == 'pm' and hour < 12:
hour += 12
elif am_pm == 'am' and hour == 12:
hour = 0
return datetime(year=parts.get('year', 1), month=parts.get('month', 1),
day=parts.get('day', 1), hour=hour, minute=parts.get('minute', 0),
second=parts.get('second', 0), microsecond=parts.get('microsecond', 0),
tzinfo=parts.get('tzinfo'))
def _parse_multiformat(self, string, formats):
_datetime = None
for fmt in formats:
try:
_datetime = self.parse(string, fmt)
break
except:
pass
if _datetime is None:
raise ParserError('Could not match input to any of {0} on \'{1}\''.format(formats, string))
return _datetime
@staticmethod
def _map_lookup(input_map, key):
try:
return input_map[key]
except KeyError:
raise ParserError('Could not match "{0}" to {1}'.format(key, input_map))
@staticmethod
def _try_timestamp(string):
try:
return float(string)
except:
return None
@staticmethod
def _choice_re(choices, flags=0):
return re.compile('({0})'.format('|'.join(choices)), flags=flags)
class TzinfoParser(object):
_TZINFO_RE = re.compile('([+\-])?(\d\d):?(\d\d)')
@classmethod
def parse(cls, string):
tzinfo = None
if string == 'local':
tzinfo = tz.tzlocal()
elif string in ['utc', 'UTC']:
tzinfo = tz.tzutc()
else:
iso_match = cls._TZINFO_RE.match(string)
if iso_match:
sign, hours, minutes = iso_match.groups()
seconds = int(hours) * 3600 + int(minutes) * 60
if sign == '-':
seconds *= -1
tzinfo = tz.tzoffset(None, seconds)
else:
tzinfo = tz.gettz(string)
if tzinfo is None:
raise ParserError('Could not parse timezone expression "{0}"'.format(string))
return tzinfo
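
A short sketch of the two parsing entry points defined above, plus the offset parser (expected results per the token map and regexes in this file):

from arrow.parser import DateTimeParser, TzinfoParser

p = DateTimeParser(locale='en_us')
print(p.parse_iso('2013-09-29T01:26:43.830580'))  # 2013-09-29 01:26:43.830580
print(p.parse('05/08/2013', 'MM/DD/YYYY'))        # 2013-05-08 00:00:00
print(TzinfoParser.parse('+07:00'))               # tzoffset(None, 25200)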

lib/arrow/util.py (Normal file, +45)

@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
# python 2.6 / 2.7 definitions for total_seconds function.
def _total_seconds_27(td): # pragma: no cover
return td.total_seconds()
def _total_seconds_26(td):
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6
# get version info and assign correct total_seconds function.
version = '{0}.{1}.{2}'.format(*sys.version_info[:3])
if version < '2.7': # pragma: no cover
total_seconds = _total_seconds_26
else: # pragma: no cover
total_seconds = _total_seconds_27
def is_timestamp(value):
try:
float(value)
return True
except:
return False
# python 2.7 / 3.0+ definitions for isstr function.
try: # pragma: no cover
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: #pragma: no cover
def isstr(s):
return isinstance(s, str)
__all__ = ['total_seconds', 'is_timestamp', 'isstr']
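
A quick illustration of the three helpers, which factory.py above uses to sniff argument types:

from datetime import timedelta
from arrow.util import total_seconds, is_timestamp, isstr

print(total_seconds(timedelta(days=1, seconds=30)))  # 86430.0
print(is_timestamp('1367992474.293378'))             # True
print(is_timestamp('not-a-number'))                  # False
print(isstr(u'hello'))                               # True
print(isstr(42))                                     # False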


@@ -1,26 +1,21 @@
# -*- coding: latin-1 -*-
#
# Copyright (C) Martin Sjögren and AB Strakt 2001, All rights reserved
# Copyright (C) Jean-Paul Calderone 2008, All rights reserved
# This file is licenced under the GNU LESSER GENERAL PUBLIC LICENSE Version 2.1 or later (aka LGPL v2.1)
# Please see LGPL2.1.txt for more information
# Copyright (C) AB Strakt
# Copyright (C) Jean-Paul Calderone
# See LICENSE for details.
"""
Certificate generation module.
"""
from OpenSSL import crypto
import time
TYPE_RSA = crypto.TYPE_RSA
TYPE_DSA = crypto.TYPE_DSA
serial = int(time.time())
def createKeyPair(type, bits):
"""
Create a public/private key pair.
Arguments: type - Key type, must be one of TYPE_RSA and TYPE_DSA
bits - Number of bits to use in the key
Returns: The public/private key pair in a PKey object
@@ -29,12 +24,11 @@ def createKeyPair(type, bits):
pkey.generate_key(type, bits)
return pkey
def createCertRequest(pkey, digest="md5", **name):
def createCertRequest(pkey, digest="sha256", **name):
"""
Create a certificate request.
Arguments: pkey - The key to associate with the request
digest - Digestion method to use for signing, default is md5
digest - Digestion method to use for signing, default is sha256
**name - The name of the subject of the request, possible
arguments are:
C - Country name
@@ -49,18 +43,17 @@ def createCertRequest(pkey, digest="md5", **name):
req = crypto.X509Req()
subj = req.get_subject()
for (key,value) in name.items():
for key, value in name.items():
setattr(subj, key, value)
req.set_pubkey(pkey)
req.sign(pkey, digest)
return req
def createCertificate(req, (issuerCert, issuerKey), serial, (notBefore, notAfter), digest="md5"):
def createCertificate(req, issuerCertKey, serial, validityPeriod, digest="sha256"):
"""
Generate a certificate given a certificate request.
Arguments: req - Certificate reqeust to use
Arguments: req - Certificate request to use
issuerCert - The certificate of the issuer
issuerKey - The private key of the issuer
serial - Serial number for the certificate
@@ -68,9 +61,11 @@ def createCertificate(req, (issuerCert, issuerKey), serial, (notBefore, notAfter
starts being valid
notAfter - Timestamp (relative to now) when the certificate
stops being valid
digest - Digest method to use for signing, default is md5
digest - Digest method to use for signing, default is sha256
Returns: The signed certificate in an X509 object
"""
issuerCert, issuerKey = issuerCertKey
notBefore, notAfter = validityPeriod
cert = crypto.X509()
cert.set_serial_number(serial)
cert.gmtime_adj_notBefore(notBefore)
@@ -80,3 +75,32 @@ def createCertificate(req, (issuerCert, issuerKey), serial, (notBefore, notAfter
cert.set_pubkey(req.get_pubkey())
cert.sign(issuerKey, digest)
return cert
def createSelfSignedCertificate((issuerName, issuerKey), serial, (notBefore, notAfter), altNames, digest="sha256"):
"""
Generate a self-signed certificate.
Arguments: issuerName - The name of the issuer
issuerKey - The private key of the issuer
serial - Serial number for the certificate
notBefore - Timestamp (relative to now) when the certificate
starts being valid
notAfter - Timestamp (relative to now) when the certificate
stops being valid
altNames - The alternative names
digest - Digest method to use for signing, default is sha256
Returns: The signed certificate in an X509 object
"""
cert = crypto.X509()
cert.set_version(2)
cert.set_serial_number(serial)
cert.get_subject().CN = issuerName
cert.gmtime_adj_notBefore(notBefore)
cert.gmtime_adj_notAfter(notAfter)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(issuerKey)
if altNames:
cert.add_extensions([crypto.X509Extension("subjectAltName", False, altNames)])
cert.sign(issuerKey, digest)
return cert
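
A sketch of how the new self-signed helper might be driven (Python 2 syntax, matching the tuple-unpacking signatures above; it assumes it runs alongside the helpers in this module, and the file names and subjectAltName values are purely illustrative):

from OpenSSL import crypto
import time

pkey = createKeyPair(TYPE_RSA, 2048)
serial = int(time.time())
# valid from now (0 seconds) for one year; altNames is the raw subjectAltName data string
cert = createSelfSignedCertificate(("localhost", pkey), serial, (0, 60 * 60 * 24 * 365),
                                   "DNS:localhost, IP:127.0.0.1")

with open("server.key", "w") as f:
    f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
with open("server.crt", "w") as f:
    f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))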

lib/dateutil/__init__.py (Normal file, +2)

@@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
__version__ = "2.4.2"

lib/dateutil/easter.py (Normal file, +89)

@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
"""
This module offers a generic easter computing method for any given year, using
Western, Orthodox or Julian algorithms.
"""
import datetime
__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]
EASTER_JULIAN = 1
EASTER_ORTHODOX = 2
EASTER_WESTERN = 3
def easter(year, method=EASTER_WESTERN):
"""
This method was ported from the work done by GM Arts,
on top of the algorithm by Claus Tondering, which was
based in part on the algorithm of Oudin (1940), as
quoted in "Explanatory Supplement to the Astronomical
Almanac", P. Kenneth Seidelmann, editor.
This algorithm implements three different easter
calculation methods:
1 - Original calculation in Julian calendar, valid in
dates after 326 AD
2 - Original method, with date converted to Gregorian
calendar, valid in years 1583 to 4099
3 - Revised method, in Gregorian calendar, valid in
years 1583 to 4099 as well
These methods are represented by the constants:
EASTER_JULIAN = 1
EASTER_ORTHODOX = 2
EASTER_WESTERN = 3
The default method is method 3.
More about the algorithm may be found at:
http://users.chariot.net.au/~gmarts/eastalg.htm
and
http://www.tondering.dk/claus/calendar.html
"""
if not (1 <= method <= 3):
raise ValueError("invalid method")
# g - Golden year - 1
# c - Century
# h - (23 - Epact) mod 30
# i - Number of days from March 21 to Paschal Full Moon
# j - Weekday for PFM (0=Sunday, etc)
# p - Number of days from March 21 to Sunday on or before PFM
# (-6 to 28 methods 1 & 3, to 56 for method 2)
# e - Extra days to add for method 2 (converting Julian
# date to Gregorian date)
y = year
g = y % 19
e = 0
if method < 3:
# Old method
i = (19*g + 15) % 30
j = (y + y//4 + i) % 7
if method == 2:
# Extra dates to convert Julian to Gregorian date
e = 10
if y > 1600:
e = e + y//100 - 16 - (y//100 - 16)//4
else:
# New method
c = y//100
h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30
i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11))
j = (y + y//4 + i + 2 - c + c//4) % 7
# p can be from -6 to 56 corresponding to dates 22 March to 23 May
# (later dates apply to method 2, although 23 May never actually occurs)
p = i - j + e
d = 1 + (p + 27 + (p + 6)//40) % 31
m = 3 + (p + 26)//30
return datetime.date(int(y), int(m), int(d))
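
Worked results for one year, per the three methods above (method 1 returns the date in the Julian calendar itself, method 2 converts it to the Gregorian calendar, method 3 is the Western default):

from dateutil.easter import easter, EASTER_JULIAN, EASTER_ORTHODOX

print(easter(2016))                   # 2016-03-27 (Western, the default)
print(easter(2016, EASTER_ORTHODOX))  # 2016-05-01
print(easter(2016, EASTER_JULIAN))    # 2016-04-18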

lib/dateutil/parser.py (Normal file, +1205)

File diff suppressed because it is too large.
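
The suppressed module is dateutil's free-form date parser; a brief sketch of its central entry point (standard dateutil behaviour, not specific to this diff):

from dateutil import parser

print(parser.parse("2016-02-21 23:07:33"))        # 2016-02-21 23:07:33
print(parser.parse("Sun, 21 Feb 2016 11:07 PM"))  # 2016-02-21 23:07:00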

lib/dateutil/relativedelta.py (Normal file, +450)

@@ -0,0 +1,450 @@
# -*- coding: utf-8 -*-
import datetime
import calendar
from six import integer_types
__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
class weekday(object):
__slots__ = ["weekday", "n"]
def __init__(self, weekday, n=None):
self.weekday = weekday
self.n = n
def __call__(self, n):
if n == self.n:
return self
else:
return self.__class__(self.weekday, n)
def __eq__(self, other):
try:
if self.weekday != other.weekday or self.n != other.n:
return False
except AttributeError:
return False
return True
def __repr__(self):
s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
if not self.n:
return s
else:
return "%s(%+d)" % (s, self.n)
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
class relativedelta(object):
"""
The relativedelta type is based on the specification of the excellent
work done by M.-A. Lemburg in his
`mx.DateTime <http://www.egenix.com/files/python/mxDateTime.html>`_ extension.
However, notice that this type does *NOT* implement the same algorithm as
his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
There are two different ways to build a relativedelta instance. The
first one is passing it two date/datetime classes::
relativedelta(datetime1, datetime2)
The second one is passing it any number of the following keyword arguments::
relativedelta(arg1=x,arg2=y,arg3=z...)
year, month, day, hour, minute, second, microsecond:
Absolute information (argument is singular); adding or subtracting a
relativedelta with absolute information does not perform an arithmetic
operation, but rather REPLACES the corresponding value in the
original datetime with the value(s) in relativedelta.
years, months, weeks, days, hours, minutes, seconds, microseconds:
Relative information, may be negative (argument is plural); adding
or subtracting a relativedelta with relative information performs
the corresponding arithmetic operation on the original datetime value
with the information in the relativedelta.
weekday:
One of the weekday instances (MO, TU, etc). These instances may
receive a parameter N, specifying the Nth weekday, which could
be positive or negative (like MO(+1) or MO(-2)). Not specifying
it is the same as specifying +1. You can also use an integer,
where 0=MO.
leapdays:
Will add given days to the date found, if year is a leap
year, and the date found is after 28 February.
yearday, nlyearday:
Set the yearday or the non-leap year day (jump leap days).
These are converted to day/month/leapdays information.
Here is the behavior of operations with relativedelta:
1. Calculate the absolute year, using the 'year' argument, or the
original datetime year, if the argument is not present.
2. Add the relative 'years' argument to the absolute year.
3. Do steps 1 and 2 for month/months.
4. Calculate the absolute day, using the 'day' argument, or the
original datetime day, if the argument is not present. Then,
subtract from the day until it fits in the year and month
found after their operations.
5. Add the relative 'days' argument to the absolute day. Notice
that the 'weeks' argument is multiplied by 7 and added to
'days'.
6. Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds,
microsecond/microseconds.
7. If the 'weekday' argument is present, calculate the weekday,
with the given (wday, nth) tuple. wday is the index of the
weekday (0-6, 0=Mon), and nth is the number of weeks to add
forward or backward, depending on its signal. Notice that if
the calculated date is already Monday, for example, using
(0, 1) or (0, -1) won't change the day.
"""
def __init__(self, dt1=None, dt2=None,
years=0, months=0, days=0, leapdays=0, weeks=0,
hours=0, minutes=0, seconds=0, microseconds=0,
year=None, month=None, day=None, weekday=None,
yearday=None, nlyearday=None,
hour=None, minute=None, second=None, microsecond=None):
if dt1 and dt2:
# datetime is a subclass of date. So both must be date
if not (isinstance(dt1, datetime.date) and
isinstance(dt2, datetime.date)):
raise TypeError("relativedelta only diffs datetime/date")
# We allow two dates, or two datetimes, so we coerce them to be
# of the same type
if (isinstance(dt1, datetime.datetime) !=
isinstance(dt2, datetime.datetime)):
if not isinstance(dt1, datetime.datetime):
dt1 = datetime.datetime.fromordinal(dt1.toordinal())
elif not isinstance(dt2, datetime.datetime):
dt2 = datetime.datetime.fromordinal(dt2.toordinal())
self.years = 0
self.months = 0
self.days = 0
self.leapdays = 0
self.hours = 0
self.minutes = 0
self.seconds = 0
self.microseconds = 0
self.year = None
self.month = None
self.day = None
self.weekday = None
self.hour = None
self.minute = None
self.second = None
self.microsecond = None
self._has_time = 0
months = (dt1.year*12+dt1.month)-(dt2.year*12+dt2.month)
self._set_months(months)
dtm = self.__radd__(dt2)
if dt1 < dt2:
while dt1 > dtm:
months += 1
self._set_months(months)
dtm = self.__radd__(dt2)
else:
while dt1 < dtm:
months -= 1
self._set_months(months)
dtm = self.__radd__(dt2)
delta = dt1 - dtm
self.seconds = delta.seconds+delta.days*86400
self.microseconds = delta.microseconds
else:
self.years = years
self.months = months
self.days = days+weeks*7
self.leapdays = leapdays
self.hours = hours
self.minutes = minutes
self.seconds = seconds
self.microseconds = microseconds
self.year = year
self.month = month
self.day = day
self.hour = hour
self.minute = minute
self.second = second
self.microsecond = microsecond
if isinstance(weekday, integer_types):
self.weekday = weekdays[weekday]
else:
self.weekday = weekday
yday = 0
if nlyearday:
yday = nlyearday
elif yearday:
yday = yearday
if yearday > 59:
self.leapdays = -1
if yday:
ydayidx = [31, 59, 90, 120, 151, 181, 212,
243, 273, 304, 334, 366]
for idx, ydays in enumerate(ydayidx):
if yday <= ydays:
self.month = idx+1
if idx == 0:
self.day = yday
else:
self.day = yday-ydayidx[idx-1]
break
else:
raise ValueError("invalid year day (%d)" % yday)
self._fix()
def _fix(self):
if abs(self.microseconds) > 999999:
s = self.microseconds//abs(self.microseconds)
div, mod = divmod(self.microseconds*s, 1000000)
self.microseconds = mod*s
self.seconds += div*s
if abs(self.seconds) > 59:
s = self.seconds//abs(self.seconds)
div, mod = divmod(self.seconds*s, 60)
self.seconds = mod*s
self.minutes += div*s
if abs(self.minutes) > 59:
s = self.minutes//abs(self.minutes)
div, mod = divmod(self.minutes*s, 60)
self.minutes = mod*s
self.hours += div*s
if abs(self.hours) > 23:
s = self.hours//abs(self.hours)
div, mod = divmod(self.hours*s, 24)
self.hours = mod*s
self.days += div*s
if abs(self.months) > 11:
s = self.months//abs(self.months)
div, mod = divmod(self.months*s, 12)
self.months = mod*s
self.years += div*s
if (self.hours or self.minutes or self.seconds or self.microseconds
or self.hour is not None or self.minute is not None or
self.second is not None or self.microsecond is not None):
self._has_time = 1
else:
self._has_time = 0
def _set_months(self, months):
self.months = months
if abs(self.months) > 11:
s = self.months//abs(self.months)
div, mod = divmod(self.months*s, 12)
self.months = mod*s
self.years = div*s
else:
self.years = 0
def __add__(self, other):
if isinstance(other, relativedelta):
return relativedelta(years=other.years+self.years,
months=other.months+self.months,
days=other.days+self.days,
hours=other.hours+self.hours,
minutes=other.minutes+self.minutes,
seconds=other.seconds+self.seconds,
microseconds=(other.microseconds +
self.microseconds),
leapdays=other.leapdays or self.leapdays,
year=other.year or self.year,
month=other.month or self.month,
day=other.day or self.day,
weekday=other.weekday or self.weekday,
hour=other.hour or self.hour,
minute=other.minute or self.minute,
second=other.second or self.second,
microsecond=(other.microsecond or
self.microsecond))
if not isinstance(other, datetime.date):
raise TypeError("unsupported type for add operation")
elif self._has_time and not isinstance(other, datetime.datetime):
other = datetime.datetime.fromordinal(other.toordinal())
year = (self.year or other.year)+self.years
month = self.month or other.month
if self.months:
assert 1 <= abs(self.months) <= 12
month += self.months
if month > 12:
year += 1
month -= 12
elif month < 1:
year -= 1
month += 12
day = min(calendar.monthrange(year, month)[1],
self.day or other.day)
repl = {"year": year, "month": month, "day": day}
for attr in ["hour", "minute", "second", "microsecond"]:
value = getattr(self, attr)
if value is not None:
repl[attr] = value
days = self.days
if self.leapdays and month > 2 and calendar.isleap(year):
days += self.leapdays
ret = (other.replace(**repl)
+ datetime.timedelta(days=days,
hours=self.hours,
minutes=self.minutes,
seconds=self.seconds,
microseconds=self.microseconds))
if self.weekday:
weekday, nth = self.weekday.weekday, self.weekday.n or 1
jumpdays = (abs(nth)-1)*7
if nth > 0:
jumpdays += (7-ret.weekday()+weekday) % 7
else:
jumpdays += (ret.weekday()-weekday) % 7
jumpdays *= -1
ret += datetime.timedelta(days=jumpdays)
return ret
def __radd__(self, other):
return self.__add__(other)
def __rsub__(self, other):
return self.__neg__().__radd__(other)
def __sub__(self, other):
if not isinstance(other, relativedelta):
raise TypeError("unsupported type for sub operation")
return relativedelta(years=self.years-other.years,
months=self.months-other.months,
days=self.days-other.days,
hours=self.hours-other.hours,
minutes=self.minutes-other.minutes,
seconds=self.seconds-other.seconds,
microseconds=self.microseconds-other.microseconds,
leapdays=self.leapdays or other.leapdays,
year=self.year or other.year,
month=self.month or other.month,
day=self.day or other.day,
weekday=self.weekday or other.weekday,
hour=self.hour or other.hour,
minute=self.minute or other.minute,
second=self.second or other.second,
microsecond=self.microsecond or other.microsecond)
def __neg__(self):
return relativedelta(years=-self.years,
months=-self.months,
days=-self.days,
hours=-self.hours,
minutes=-self.minutes,
seconds=-self.seconds,
microseconds=-self.microseconds,
leapdays=self.leapdays,
year=self.year,
month=self.month,
day=self.day,
weekday=self.weekday,
hour=self.hour,
minute=self.minute,
second=self.second,
microsecond=self.microsecond)
def __bool__(self):
return not (not self.years and
not self.months and
not self.days and
not self.hours and
not self.minutes and
not self.seconds and
not self.microseconds and
not self.leapdays and
self.year is None and
self.month is None and
self.day is None and
self.weekday is None and
self.hour is None and
self.minute is None and
self.second is None and
self.microsecond is None)
# Compatibility with Python 2.x
__nonzero__ = __bool__
def __mul__(self, other):
f = float(other)
return relativedelta(years=int(self.years*f),
months=int(self.months*f),
days=int(self.days*f),
hours=int(self.hours*f),
minutes=int(self.minutes*f),
seconds=int(self.seconds*f),
microseconds=int(self.microseconds*f),
leapdays=self.leapdays,
year=self.year,
month=self.month,
day=self.day,
weekday=self.weekday,
hour=self.hour,
minute=self.minute,
second=self.second,
microsecond=self.microsecond)
__rmul__ = __mul__
def __eq__(self, other):
if not isinstance(other, relativedelta):
return False
if self.weekday or other.weekday:
if not self.weekday or not other.weekday:
return False
if self.weekday.weekday != other.weekday.weekday:
return False
n1, n2 = self.weekday.n, other.weekday.n
if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
return False
return (self.years == other.years and
self.months == other.months and
self.days == other.days and
self.hours == other.hours and
self.minutes == other.minutes and
self.seconds == other.seconds and
self.leapdays == other.leapdays and
self.year == other.year and
self.month == other.month and
self.day == other.day and
self.hour == other.hour and
self.minute == other.minute and
self.second == other.second and
self.microsecond == other.microsecond)
def __ne__(self, other):
return not self.__eq__(other)
def __div__(self, other):
return self.__mul__(1/float(other))
__truediv__ = __div__
def __repr__(self):
l = []
for attr in ["years", "months", "days", "leapdays",
"hours", "minutes", "seconds", "microseconds"]:
value = getattr(self, attr)
if value:
l.append("%s=%+d" % (attr, value))
for attr in ["year", "month", "day", "weekday",
"hour", "minute", "second", "microsecond"]:
value = getattr(self, attr)
if value is not None:
l.append("%s=%s" % (attr, repr(value)))
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
# vim:ts=4:sw=4:et
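
A brief sketch of the semantics documented in the docstring above: plural arguments shift the datetime, singular arguments replace fields, and diffing two datetimes produces a normalized delta.

from datetime import datetime
from dateutil.relativedelta import relativedelta, MO

dt = datetime(2016, 2, 21)                       # a Sunday
print(dt + relativedelta(months=+1))             # 2016-03-21 00:00:00 (relative shift)
print(dt + relativedelta(month=1, day=31))       # 2016-01-31 00:00:00 (absolute replacement)
print(dt + relativedelta(weekday=MO(+1)))        # 2016-02-22 00:00:00 (next Monday)
print(relativedelta(datetime(2016, 3, 1), dt))   # relativedelta(days=+9)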

lib/dateutil/rrule.py (Normal file, +1375)

File diff suppressed because it is too large.
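
Although suppressed here, rrule implements RFC 2445 recurrence rules, which tz.py below imports lazily for tzstr/tzical transitions; a sketch of typical standalone use (standard dateutil behaviour):

from datetime import datetime
from dateutil.rrule import rrule, WEEKLY, SU

# every Sunday, three occurrences, starting 2016-02-21 (itself a Sunday)
for occurrence in rrule(WEEKLY, count=3, byweekday=SU, dtstart=datetime(2016, 2, 21)):
    print(occurrence)   # 2016-02-21, 2016-02-28, 2016-03-06 (all at 00:00:00)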

lib/dateutil/tz.py (Normal file, +986)

@@ -0,0 +1,986 @@
# -*- coding: utf-8 -*-
"""
This module offers timezone implementations subclassing the abstract
:py:`datetime.tzinfo` type. There are classes to handle tzfile format files
(usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, etc), TZ
environment string (in all known formats), given ranges (with help from
relative deltas), local machine timezone, fixed offset timezone, and UTC
timezone.
"""
import datetime
import struct
import time
import sys
import os
from six import string_types, PY3
try:
from dateutil.tzwin import tzwin, tzwinlocal
except ImportError:
tzwin = tzwinlocal = None
relativedelta = None
parser = None
rrule = None
__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
"tzstr", "tzical", "tzwin", "tzwinlocal", "gettz"]
def tzname_in_python2(myfunc):
"""Change unicode output into bytestrings in Python 2
tzname() API changed in Python 3. It used to return bytes, but was changed
to unicode strings
"""
def inner_func(*args, **kwargs):
if PY3:
return myfunc(*args, **kwargs)
else:
return myfunc(*args, **kwargs).encode()
return inner_func
ZERO = datetime.timedelta(0)
EPOCHORDINAL = datetime.datetime.utcfromtimestamp(0).toordinal()
class tzutc(datetime.tzinfo):
def utcoffset(self, dt):
return ZERO
def dst(self, dt):
return ZERO
@tzname_in_python2
def tzname(self, dt):
return "UTC"
def __eq__(self, other):
return (isinstance(other, tzutc) or
(isinstance(other, tzoffset) and other._offset == ZERO))
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s()" % self.__class__.__name__
__reduce__ = object.__reduce__
class tzoffset(datetime.tzinfo):
def __init__(self, name, offset):
self._name = name
self._offset = datetime.timedelta(seconds=offset)
def utcoffset(self, dt):
return self._offset
def dst(self, dt):
return ZERO
@tzname_in_python2
def tzname(self, dt):
return self._name
def __eq__(self, other):
return (isinstance(other, tzoffset) and
self._offset == other._offset)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s(%s, %s)" % (self.__class__.__name__,
repr(self._name),
self._offset.days*86400+self._offset.seconds)
__reduce__ = object.__reduce__
class tzlocal(datetime.tzinfo):
_std_offset = datetime.timedelta(seconds=-time.timezone)
if time.daylight:
_dst_offset = datetime.timedelta(seconds=-time.altzone)
else:
_dst_offset = _std_offset
def utcoffset(self, dt):
if self._isdst(dt):
return self._dst_offset
else:
return self._std_offset
def dst(self, dt):
if self._isdst(dt):
return self._dst_offset-self._std_offset
else:
return ZERO
@tzname_in_python2
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
# We can't use mktime here. It is unstable when deciding if
# the hour near to a change is DST or not.
#
# timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour,
# dt.minute, dt.second, dt.weekday(), 0, -1))
# return time.localtime(timestamp).tm_isdst
#
# The code above yields the following result:
#
# >>> import tz, datetime
# >>> t = tz.tzlocal()
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
# 'BRDT'
# >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname()
# 'BRST'
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
# 'BRST'
# >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname()
# 'BRDT'
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
# 'BRDT'
#
# Here is a more stable implementation:
#
timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400
+ dt.hour * 3600
+ dt.minute * 60
+ dt.second)
return time.localtime(timestamp+time.timezone).tm_isdst
def __eq__(self, other):
if not isinstance(other, tzlocal):
return False
return (self._std_offset == other._std_offset and
self._dst_offset == other._dst_offset)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s()" % self.__class__.__name__
__reduce__ = object.__reduce__
class _ttinfo(object):
__slots__ = ["offset", "delta", "isdst", "abbr", "isstd", "isgmt"]
def __init__(self):
for attr in self.__slots__:
setattr(self, attr, None)
def __repr__(self):
l = []
for attr in self.__slots__:
value = getattr(self, attr)
if value is not None:
l.append("%s=%s" % (attr, repr(value)))
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
def __eq__(self, other):
if not isinstance(other, _ttinfo):
return False
return (self.offset == other.offset and
self.delta == other.delta and
self.isdst == other.isdst and
self.abbr == other.abbr and
self.isstd == other.isstd and
self.isgmt == other.isgmt)
def __ne__(self, other):
return not self.__eq__(other)
def __getstate__(self):
state = {}
for name in self.__slots__:
state[name] = getattr(self, name, None)
return state
def __setstate__(self, state):
for name in self.__slots__:
if name in state:
setattr(self, name, state[name])
class tzfile(datetime.tzinfo):
# http://www.twinsun.com/tz/tz-link.htm
# ftp://ftp.iana.org/tz/tz*.tar.gz
def __init__(self, fileobj, filename=None):
file_opened_here = False
if isinstance(fileobj, string_types):
self._filename = fileobj
fileobj = open(fileobj, 'rb')
file_opened_here = True
elif filename is not None:
self._filename = filename
elif hasattr(fileobj, "name"):
self._filename = fileobj.name
else:
self._filename = repr(fileobj)
# From tzfile(5):
#
# The time zone information files used by tzset(3)
# begin with the magic characters "TZif" to identify
# them as time zone information files, followed by
# sixteen bytes reserved for future use, followed by
# six four-byte values of type long, written in a
# ``standard'' byte order (the high-order byte
# of the value is written first).
try:
if fileobj.read(4).decode() != "TZif":
raise ValueError("magic not found")
fileobj.read(16)
(
# The number of UTC/local indicators stored in the file.
ttisgmtcnt,
# The number of standard/wall indicators stored in the file.
ttisstdcnt,
# The number of leap seconds for which data is
# stored in the file.
leapcnt,
# The number of "transition times" for which data
# is stored in the file.
timecnt,
# The number of "local time types" for which data
# is stored in the file (must not be zero).
typecnt,
# The number of characters of "time zone
# abbreviation strings" stored in the file.
charcnt,
) = struct.unpack(">6l", fileobj.read(24))
# The above header is followed by tzh_timecnt four-byte
# values of type long, sorted in ascending order.
# These values are written in ``standard'' byte order.
# Each is used as a transition time (as returned by
# time(2)) at which the rules for computing local time
# change.
if timecnt:
self._trans_list = struct.unpack(">%dl" % timecnt,
fileobj.read(timecnt*4))
else:
self._trans_list = []
# Next come tzh_timecnt one-byte values of type unsigned
# char; each one tells which of the different types of
# ``local time'' types described in the file is associated
# with the same-indexed transition time. These values
# serve as indices into an array of ttinfo structures that
# appears next in the file.
if timecnt:
self._trans_idx = struct.unpack(">%dB" % timecnt,
fileobj.read(timecnt))
else:
self._trans_idx = []
# Each ttinfo structure is written as a four-byte value
# for tt_gmtoff of type long, in a standard byte
# order, followed by a one-byte value for tt_isdst
# and a one-byte value for tt_abbrind. In each
# structure, tt_gmtoff gives the number of
# seconds to be added to UTC, tt_isdst tells whether
# tm_isdst should be set by localtime(3), and
# tt_abbrind serves as an index into the array of
# time zone abbreviation characters that follow the
# ttinfo structure(s) in the file.
ttinfo = []
for i in range(typecnt):
ttinfo.append(struct.unpack(">lbb", fileobj.read(6)))
abbr = fileobj.read(charcnt).decode()
# Then there are tzh_leapcnt pairs of four-byte
# values, written in standard byte order; the
# first value of each pair gives the time (as
# returned by time(2)) at which a leap second
# occurs; the second gives the total number of
# leap seconds to be applied after the given time.
# The pairs of values are sorted in ascending order
# by time.
# Not used, for now
# if leapcnt:
# leap = struct.unpack(">%dl" % (leapcnt*2),
# fileobj.read(leapcnt*8))
# Then there are tzh_ttisstdcnt standard/wall
# indicators, each stored as a one-byte value;
# they tell whether the transition times associated
# with local time types were specified as standard
# time or wall clock time, and are used when
# a time zone file is used in handling POSIX-style
# time zone environment variables.
if ttisstdcnt:
isstd = struct.unpack(">%db" % ttisstdcnt,
fileobj.read(ttisstdcnt))
# Finally, there are tzh_ttisgmtcnt UTC/local
# indicators, each stored as a one-byte value;
# they tell whether the transition times associated
# with local time types were specified as UTC or
# local time, and are used when a time zone file
# is used in handling POSIX-style time zone envi-
# ronment variables.
if ttisgmtcnt:
isgmt = struct.unpack(">%db" % ttisgmtcnt,
fileobj.read(ttisgmtcnt))
# ** Everything has been read **
finally:
if file_opened_here:
fileobj.close()
# Build ttinfo list
self._ttinfo_list = []
for i in range(typecnt):
gmtoff, isdst, abbrind = ttinfo[i]
# Round to full-minutes if that's not the case. Python's
# datetime doesn't accept sub-minute timezones. Check
# http://python.org/sf/1447945 for some information.
gmtoff = (gmtoff+30)//60*60
tti = _ttinfo()
tti.offset = gmtoff
tti.delta = datetime.timedelta(seconds=gmtoff)
tti.isdst = isdst
tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)]
tti.isstd = (ttisstdcnt > i and isstd[i] != 0)
tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0)
self._ttinfo_list.append(tti)
# Replace ttinfo indexes for ttinfo objects.
trans_idx = []
for idx in self._trans_idx:
trans_idx.append(self._ttinfo_list[idx])
self._trans_idx = tuple(trans_idx)
# Set standard, dst, and before ttinfos. before will be
# used when a given time is before any transitions,
# and will be set to the first non-dst ttinfo, or to
# the first dst, if all of them are dst.
self._ttinfo_std = None
self._ttinfo_dst = None
self._ttinfo_before = None
if self._ttinfo_list:
if not self._trans_list:
self._ttinfo_std = self._ttinfo_first = self._ttinfo_list[0]
else:
for i in range(timecnt-1, -1, -1):
tti = self._trans_idx[i]
if not self._ttinfo_std and not tti.isdst:
self._ttinfo_std = tti
elif not self._ttinfo_dst and tti.isdst:
self._ttinfo_dst = tti
if self._ttinfo_std and self._ttinfo_dst:
break
else:
if self._ttinfo_dst and not self._ttinfo_std:
self._ttinfo_std = self._ttinfo_dst
for tti in self._ttinfo_list:
if not tti.isdst:
self._ttinfo_before = tti
break
else:
self._ttinfo_before = self._ttinfo_list[0]
# Now fix transition times to become relative to wall time.
#
# I'm not sure about this. In my tests, the tz source file
# is set up for wall time, and in the binary file isstd and
# isgmt are off, so it should be in wall time. OTOH, it's
# always in gmt time. Let me know if you have comments
# about this.
laststdoffset = 0
self._trans_list = list(self._trans_list)
for i in range(len(self._trans_list)):
tti = self._trans_idx[i]
if not tti.isdst:
# This is std time.
self._trans_list[i] += tti.offset
laststdoffset = tti.offset
else:
# This is dst time. Convert to std.
self._trans_list[i] += laststdoffset
self._trans_list = tuple(self._trans_list)
def _find_ttinfo(self, dt, laststd=0):
timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400
+ dt.hour * 3600
+ dt.minute * 60
+ dt.second)
idx = 0
for trans in self._trans_list:
if timestamp < trans:
break
idx += 1
else:
return self._ttinfo_std
if idx == 0:
return self._ttinfo_before
if laststd:
while idx > 0:
tti = self._trans_idx[idx-1]
if not tti.isdst:
return tti
idx -= 1
else:
return self._ttinfo_std
else:
return self._trans_idx[idx-1]
def utcoffset(self, dt):
if not self._ttinfo_std:
return ZERO
return self._find_ttinfo(dt).delta
def dst(self, dt):
if not self._ttinfo_dst:
return ZERO
tti = self._find_ttinfo(dt)
if not tti.isdst:
return ZERO
# The documentation says that utcoffset()-dst() must
# be constant for every dt.
return tti.delta-self._find_ttinfo(dt, laststd=1).delta
# An alternative for that would be:
#
# return self._ttinfo_dst.offset-self._ttinfo_std.offset
#
# However, this class stores historical changes in the
# dst offset, so I believe that this wouldn't be the right
# way to implement this.
@tzname_in_python2
def tzname(self, dt):
if not self._ttinfo_std:
return None
return self._find_ttinfo(dt).abbr
def __eq__(self, other):
if not isinstance(other, tzfile):
return False
return (self._trans_list == other._trans_list and
self._trans_idx == other._trans_idx and
self._ttinfo_list == other._ttinfo_list)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self._filename))
def __reduce__(self):
if not os.path.isfile(self._filename):
raise ValueError("Unpickable %s class" % self.__class__.__name__)
return (self.__class__, (self._filename,))
class tzrange(datetime.tzinfo):
def __init__(self, stdabbr, stdoffset=None,
dstabbr=None, dstoffset=None,
start=None, end=None):
global relativedelta
if not relativedelta:
from dateutil import relativedelta
self._std_abbr = stdabbr
self._dst_abbr = dstabbr
if stdoffset is not None:
self._std_offset = datetime.timedelta(seconds=stdoffset)
else:
self._std_offset = ZERO
if dstoffset is not None:
self._dst_offset = datetime.timedelta(seconds=dstoffset)
elif dstabbr and stdoffset is not None:
self._dst_offset = self._std_offset+datetime.timedelta(hours=+1)
else:
self._dst_offset = ZERO
if dstabbr and start is None:
self._start_delta = relativedelta.relativedelta(
hours=+2, month=4, day=1, weekday=relativedelta.SU(+1))
else:
self._start_delta = start
if dstabbr and end is None:
self._end_delta = relativedelta.relativedelta(
hours=+1, month=10, day=31, weekday=relativedelta.SU(-1))
else:
self._end_delta = end
def utcoffset(self, dt):
if self._isdst(dt):
return self._dst_offset
else:
return self._std_offset
def dst(self, dt):
if self._isdst(dt):
return self._dst_offset-self._std_offset
else:
return ZERO
@tzname_in_python2
def tzname(self, dt):
if self._isdst(dt):
return self._dst_abbr
else:
return self._std_abbr
def _isdst(self, dt):
if not self._start_delta:
return False
year = datetime.datetime(dt.year, 1, 1)
start = year+self._start_delta
end = year+self._end_delta
dt = dt.replace(tzinfo=None)
if start < end:
return dt >= start and dt < end
else:
return dt >= start or dt < end
def __eq__(self, other):
if not isinstance(other, tzrange):
return False
return (self._std_abbr == other._std_abbr and
self._dst_abbr == other._dst_abbr and
self._std_offset == other._std_offset and
self._dst_offset == other._dst_offset and
self._start_delta == other._start_delta and
self._end_delta == other._end_delta)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s(...)" % self.__class__.__name__
__reduce__ = object.__reduce__
class tzstr(tzrange):
def __init__(self, s):
global parser
if not parser:
from dateutil import parser
self._s = s
res = parser._parsetz(s)
if res is None:
raise ValueError("unknown string format")
# Here we break the compatibility with the TZ variable handling.
# GMT-3 actually *means* the timezone -3.
if res.stdabbr in ("GMT", "UTC"):
res.stdoffset *= -1
# We must initialize it first, since _delta() needs
# _std_offset and _dst_offset set. Use False in start/end
# to avoid building it two times.
tzrange.__init__(self, res.stdabbr, res.stdoffset,
res.dstabbr, res.dstoffset,
start=False, end=False)
if not res.dstabbr:
self._start_delta = None
self._end_delta = None
else:
self._start_delta = self._delta(res.start)
if self._start_delta:
self._end_delta = self._delta(res.end, isend=1)
def _delta(self, x, isend=0):
kwargs = {}
if x.month is not None:
kwargs["month"] = x.month
if x.weekday is not None:
kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week)
if x.week > 0:
kwargs["day"] = 1
else:
kwargs["day"] = 31
elif x.day:
kwargs["day"] = x.day
elif x.yday is not None:
kwargs["yearday"] = x.yday
elif x.jyday is not None:
kwargs["nlyearday"] = x.jyday
if not kwargs:
# Default is to start on first sunday of april, and end
# on last sunday of october.
if not isend:
kwargs["month"] = 4
kwargs["day"] = 1
kwargs["weekday"] = relativedelta.SU(+1)
else:
kwargs["month"] = 10
kwargs["day"] = 31
kwargs["weekday"] = relativedelta.SU(-1)
if x.time is not None:
kwargs["seconds"] = x.time
else:
# Default is 2AM.
kwargs["seconds"] = 7200
if isend:
# Convert to standard time, to follow the documented way
# of working with the extra hour. See the documentation
# of the tzinfo class.
delta = self._dst_offset-self._std_offset
kwargs["seconds"] -= delta.seconds+delta.days*86400
return relativedelta.relativedelta(**kwargs)
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self._s))
class _tzicalvtzcomp(object):
def __init__(self, tzoffsetfrom, tzoffsetto, isdst,
tzname=None, rrule=None):
self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom)
self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto)
self.tzoffsetdiff = self.tzoffsetto-self.tzoffsetfrom
self.isdst = isdst
self.tzname = tzname
self.rrule = rrule
class _tzicalvtz(datetime.tzinfo):
def __init__(self, tzid, comps=[]):
self._tzid = tzid
self._comps = comps
self._cachedate = []
self._cachecomp = []
def _find_comp(self, dt):
if len(self._comps) == 1:
return self._comps[0]
dt = dt.replace(tzinfo=None)
try:
return self._cachecomp[self._cachedate.index(dt)]
except ValueError:
pass
lastcomp = None
lastcompdt = None
for comp in self._comps:
if not comp.isdst:
# Handle the extra hour in DST -> STD
compdt = comp.rrule.before(dt-comp.tzoffsetdiff, inc=True)
else:
compdt = comp.rrule.before(dt, inc=True)
if compdt and (not lastcompdt or lastcompdt < compdt):
lastcompdt = compdt
lastcomp = comp
if not lastcomp:
# RFC says nothing about what to do when a given
# time is before the first onset date. We'll look for the
# first standard component, or the first component, if
# none is found.
for comp in self._comps:
if not comp.isdst:
lastcomp = comp
break
else:
lastcomp = self._comps[0]  # fall back to the first component
self._cachedate.insert(0, dt)
self._cachecomp.insert(0, lastcomp)
if len(self._cachedate) > 10:
self._cachedate.pop()
self._cachecomp.pop()
return lastcomp
def utcoffset(self, dt):
return self._find_comp(dt).tzoffsetto
def dst(self, dt):
comp = self._find_comp(dt)
if comp.isdst:
return comp.tzoffsetdiff
else:
return ZERO
@tzname_in_python2
def tzname(self, dt):
return self._find_comp(dt).tzname
def __repr__(self):
return "<tzicalvtz %s>" % repr(self._tzid)
__reduce__ = object.__reduce__
class tzical(object):
def __init__(self, fileobj):
global rrule
if not rrule:
from dateutil import rrule
if isinstance(fileobj, string_types):
self._s = fileobj
# ical should be encoded in UTF-8 with CRLF
fileobj = open(fileobj, 'r')
elif hasattr(fileobj, "name"):
self._s = fileobj.name
else:
self._s = repr(fileobj)
self._vtz = {}
self._parse_rfc(fileobj.read())
def keys(self):
return list(self._vtz.keys())
def get(self, tzid=None):
if tzid is None:
keys = list(self._vtz.keys())
if len(keys) == 0:
raise ValueError("no timezones defined")
elif len(keys) > 1:
raise ValueError("more than one timezone available")
tzid = keys[0]
return self._vtz.get(tzid)
def _parse_offset(self, s):
s = s.strip()
if not s:
raise ValueError("empty offset")
if s[0] in ('+', '-'):
signal = (-1, +1)[s[0] == '+']
s = s[1:]
else:
signal = +1
if len(s) == 4:
return (int(s[:2])*3600+int(s[2:])*60)*signal
elif len(s) == 6:
return (int(s[:2])*3600+int(s[2:4])*60+int(s[4:]))*signal
else:
raise ValueError("invalid offset: "+s)
def _parse_rfc(self, s):
lines = s.splitlines()
if not lines:
raise ValueError("empty string")
# Unfold
i = 0
while i < len(lines):
line = lines[i].rstrip()
if not line:
del lines[i]
elif i > 0 and line[0] == " ":
lines[i-1] += line[1:]
del lines[i]
else:
i += 1
tzid = None
comps = []
invtz = False
comptype = None
for line in lines:
if not line:
continue
name, value = line.split(':', 1)
parms = name.split(';')
if not parms:
raise ValueError("empty property name")
name = parms[0].upper()
parms = parms[1:]
if invtz:
if name == "BEGIN":
if value in ("STANDARD", "DAYLIGHT"):
# Process component
pass
else:
raise ValueError("unknown component: "+value)
comptype = value
founddtstart = False
tzoffsetfrom = None
tzoffsetto = None
rrulelines = []
tzname = None
elif name == "END":
if value == "VTIMEZONE":
if comptype:
raise ValueError("component not closed: "+comptype)
if not tzid:
raise ValueError("mandatory TZID not found")
if not comps:
raise ValueError(
"at least one component is needed")
# Process vtimezone
self._vtz[tzid] = _tzicalvtz(tzid, comps)
invtz = False
elif value == comptype:
if not founddtstart:
raise ValueError("mandatory DTSTART not found")
if tzoffsetfrom is None:
raise ValueError(
"mandatory TZOFFSETFROM not found")
if tzoffsetto is None:
raise ValueError(
"mandatory TZOFFSETFROM not found")
# Process component
rr = None
if rrulelines:
rr = rrule.rrulestr("\n".join(rrulelines),
compatible=True,
ignoretz=True,
cache=True)
comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto,
(comptype == "DAYLIGHT"),
tzname, rr)
comps.append(comp)
comptype = None
else:
raise ValueError("invalid component end: "+value)
elif comptype:
if name == "DTSTART":
rrulelines.append(line)
founddtstart = True
elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"):
rrulelines.append(line)
elif name == "TZOFFSETFROM":
if parms:
raise ValueError(
"unsupported %s parm: %s " % (name, parms[0]))
tzoffsetfrom = self._parse_offset(value)
elif name == "TZOFFSETTO":
if parms:
raise ValueError(
"unsupported TZOFFSETTO parm: "+parms[0])
tzoffsetto = self._parse_offset(value)
elif name == "TZNAME":
if parms:
raise ValueError(
"unsupported TZNAME parm: "+parms[0])
tzname = value
elif name == "COMMENT":
pass
else:
raise ValueError("unsupported property: "+name)
else:
if name == "TZID":
if parms:
raise ValueError(
"unsupported TZID parm: "+parms[0])
tzid = value
elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"):
pass
else:
raise ValueError("unsupported property: "+name)
elif name == "BEGIN" and value == "VTIMEZONE":
tzid = None
comps = []
invtz = True
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self._s))
if sys.platform != "win32":
TZFILES = ["/etc/localtime", "localtime"]
TZPATHS = ["/usr/share/zoneinfo", "/usr/lib/zoneinfo", "/etc/zoneinfo"]
else:
TZFILES = []
TZPATHS = []
def gettz(name=None):
tz = None
if not name:
try:
name = os.environ["TZ"]
except KeyError:
pass
if name is None or name == ":":
for filepath in TZFILES:
if not os.path.isabs(filepath):
filename = filepath
for path in TZPATHS:
filepath = os.path.join(path, filename)
if os.path.isfile(filepath):
break
else:
continue
if os.path.isfile(filepath):
try:
tz = tzfile(filepath)
break
except (IOError, OSError, ValueError):
pass
else:
tz = tzlocal()
else:
if name.startswith(":"):
name = name[1:]  # strip the leading ':'
if os.path.isabs(name):
if os.path.isfile(name):
tz = tzfile(name)
else:
tz = None
else:
for path in TZPATHS:
filepath = os.path.join(path, name)
if not os.path.isfile(filepath):
filepath = filepath.replace(' ', '_')
if not os.path.isfile(filepath):
continue
try:
tz = tzfile(filepath)
break
except (IOError, OSError, ValueError):
pass
else:
tz = None
if tzwin is not None:
try:
tz = tzwin(name)
except WindowsError:
tz = None
if not tz:
from dateutil.zoneinfo import gettz
tz = gettz(name)
if not tz:
for c in name:
# name must have at least one offset to be a tzstr
if c in "0123456789":
try:
tz = tzstr(name)
except ValueError:
pass
break
else:
if name in ("GMT", "UTC"):
tz = tzutc()
elif name in time.tzname:
tz = tzlocal()
return tz
# vim:ts=4:sw=4:et
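
A sketch of the usual entry points into this module: gettz() resolves a zone name through the TZFILES/TZPATHS search above, and tzutc()/tzlocal() can be used directly (the example assumes a system zoneinfo database is present):

from datetime import datetime
from dateutil import tz

pacific = tz.gettz('US/Pacific')              # tzfile from /usr/share/zoneinfo, if available
release = datetime(2016, 2, 21, 23, 7, 33, tzinfo=pacific)
print(release.isoformat())                    # 2016-02-21T23:07:33-08:00 (PST)
print(release.astimezone(tz.tzutc()).isoformat())  # 2016-02-22T07:07:33+00:00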

lib/dateutil/tzwin.py (Normal file, +184)

@@ -0,0 +1,184 @@
# This code was originally contributed by Jeffrey Harris.
import datetime
import struct
from six.moves import winreg
__all__ = ["tzwin", "tzwinlocal"]
ONEWEEK = datetime.timedelta(7)
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
def _settzkeyname():
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
try:
winreg.OpenKey(handle, TZKEYNAMENT).Close()
TZKEYNAME = TZKEYNAMENT
except WindowsError:
TZKEYNAME = TZKEYNAME9X
handle.Close()
return TZKEYNAME
TZKEYNAME = _settzkeyname()
class tzwinbase(datetime.tzinfo):
"""tzinfo class based on win32's timezones available in the registry."""
def utcoffset(self, dt):
if self._isdst(dt):
return datetime.timedelta(minutes=self._dstoffset)
else:
return datetime.timedelta(minutes=self._stdoffset)
def dst(self, dt):
if self._isdst(dt):
minutes = self._dstoffset - self._stdoffset
return datetime.timedelta(minutes=minutes)
else:
return datetime.timedelta(0)
def tzname(self, dt):
if self._isdst(dt):
return self._dstname
else:
return self._stdname
def list():
"""Return a list of all time zones known to the system."""
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
tzkey = winreg.OpenKey(handle, TZKEYNAME)
result = [winreg.EnumKey(tzkey, i)
for i in range(winreg.QueryInfoKey(tzkey)[0])]
tzkey.Close()
handle.Close()
return result
list = staticmethod(list)
def display(self):
return self._display
def _isdst(self, dt):
if not self._dstmonth:
# dstmonth == 0 signals the zone has no daylight saving time
return False
dston = picknthweekday(dt.year, self._dstmonth, self._dstdayofweek,
self._dsthour, self._dstminute,
self._dstweeknumber)
dstoff = picknthweekday(dt.year, self._stdmonth, self._stddayofweek,
self._stdhour, self._stdminute,
self._stdweeknumber)
if dston < dstoff:
return dston <= dt.replace(tzinfo=None) < dstoff
else:
return not dstoff <= dt.replace(tzinfo=None) < dston
class tzwin(tzwinbase):
def __init__(self, name):
self._name = name
# multiple contexts only possible in 2.7 and 3.1, we still support 2.6
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
with winreg.OpenKey(handle,
"%s\%s" % (TZKEYNAME, name)) as tzkey:
keydict = valuestodict(tzkey)
self._stdname = keydict["Std"].encode("iso-8859-1")
self._dstname = keydict["Dlt"].encode("iso-8859-1")
self._display = keydict["Display"]
# See http://ww1.jsiinc.com/SUBA/tip0300/rh0398.htm
tup = struct.unpack("=3l16h", keydict["TZI"])
self._stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1
self._dstoffset = self._stdoffset-tup[2] # + DaylightBias * -1
# for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
(self._stdmonth,
self._stddayofweek, # Sunday = 0
self._stdweeknumber, # Last = 5
self._stdhour,
self._stdminute) = tup[4:9]
(self._dstmonth,
self._dstdayofweek, # Sunday = 0
self._dstweeknumber, # Last = 5
self._dsthour,
self._dstminute) = tup[12:17]
def __repr__(self):
return "tzwin(%s)" % repr(self._name)
def __reduce__(self):
return (self.__class__, (self._name,))
class tzwinlocal(tzwinbase):
def __init__(self):
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
keydict = valuestodict(tzlocalkey)
self._stdname = keydict["StandardName"].encode("iso-8859-1")
self._dstname = keydict["DaylightName"].encode("iso-8859-1")
try:
with winreg.OpenKey(
handle, "%s\%s" % (TZKEYNAME, self._stdname)) as tzkey:
_keydict = valuestodict(tzkey)
self._display = _keydict["Display"]
except OSError:
self._display = None
self._stdoffset = -keydict["Bias"]-keydict["StandardBias"]
self._dstoffset = self._stdoffset-keydict["DaylightBias"]
# See http://ww1.jsiinc.com/SUBA/tip0300/rh0398.htm
tup = struct.unpack("=8h", keydict["StandardStart"])
(self._stdmonth,
self._stddayofweek, # Sunday = 0
self._stdweeknumber, # Last = 5
self._stdhour,
self._stdminute) = tup[1:6]
tup = struct.unpack("=8h", keydict["DaylightStart"])
(self._dstmonth,
self._dstdayofweek, # Sunday = 0
self._dstweeknumber, # Last = 5
self._dsthour,
self._dstminute) = tup[1:6]
def __reduce__(self):
return (self.__class__, ())
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
"""dayofweek == 0 means Sunday, whichweek 5 means last instance"""
first = datetime.datetime(year, month, 1, hour, minute)
weekdayone = first.replace(day=((dayofweek-first.isoweekday()) % 7+1))
for n in range(whichweek):
dt = weekdayone+(whichweek-n-1)*ONEWEEK  # whichweek-th occurrence, stepping back a week while out of the month
if dt.month == month:
return dt
def valuestodict(key):
"""Convert a registry key's values to a dictionary."""
dict = {}
size = winreg.QueryInfoKey(key)[1]
for i in range(size):
data = winreg.EnumValue(key, i)
dict[data[0]] = data[1]
return dict
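A minimal, Windows-only usage sketch of the registry-backed classes above. This is illustrative and not part of the file; the zone name is one of the keys normally returned by tzwinbase.list() and may not exist on every machine.
import datetime
from dateutil.tzwin import tzwin, tzwinlocal

names = tzwin.list()                        # registry key names, e.g. "Eastern Standard Time"
eastern = tzwin("Eastern Standard Time")    # assumes this key exists in the registry
dt = datetime.datetime(2016, 2, 21, 12, 0, tzinfo=eastern)
print dt.tzname(), dt.utcoffset()           # standard name and offset (minutes taken from the TZI blob)
print tzwinlocal().display()                # the zone currently configured in Windows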

lib/profilehooks.py Normal file

@@ -0,0 +1,732 @@
"""
Profiling hooks
This module contains a couple of decorators (`profile` and `coverage`) that
can be used to wrap functions and/or methods to produce profiles and line
coverage reports. There's a third convenient decorator (`timecall`) that
measures the duration of function execution without the extra profiling
overhead.
Usage example (Python 2.4 or newer)::
from profilehooks import profile, coverage
@profile # or @coverage
def fn(n):
if n < 2: return 1
else: return n * fn(n-1)
print fn(42)
Usage example (Python 2.3 or older)::
from profilehooks import profile, coverage
def fn(n):
if n < 2: return 1
else: return n * fn(n-1)
# Now wrap that function in a decorator
fn = profile(fn) # or coverage(fn)
print fn(42)
Reports for all thusly decorated functions will be printed to sys.stdout
on program termination. You can alternatively request immediate
reports for each call by passing immediate=True to the profile decorator.
There's also a @timecall decorator for printing the time to sys.stderr
every time a function is called, when you just want to get a rough measure
instead of a detailed (but costly) profile.
Caveats
A thread on python-dev convinced me that hotshot produces bogus numbers.
See http://mail.python.org/pipermail/python-dev/2005-November/058264.html
I don't know what will happen if a decorated function tries to call
another decorated function. All decorators probably need to explicitly
support nested profiling (currently TraceFuncCoverage is the only one
that supports this, while HotShotFuncProfile has support for recursive
functions.)
Profiling with hotshot creates temporary files (*.prof for profiling,
*.cprof for coverage) in the current directory. These files are not
cleaned up. Exception: when you specify a filename to the profile
decorator (to store the pstats.Stats object for later inspection),
the temporary file will be the filename you specified with '.raw'
appended at the end.
Coverage analysis with hotshot seems to miss some executions resulting
in lower line counts and some lines erroneously marked as never
executed. For this reason coverage analysis now uses trace.py which is
slower, but more accurate.
Copyright (c) 2004--2008 Marius Gedminas <marius@pov.lt>
Copyright (c) 2007 Hanno Schlichting
Copyright (c) 2008 Florian Schulze
Released under the MIT licence since December 2006:
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
(Previously it was distributed under the GNU General Public Licence.)
"""
# $Id: profilehooks.py 29 2010-08-13 16:29:20Z mg $
__author__ = "Marius Gedminas (marius@gedmin.as)"
__copyright__ = "Copyright 2004-2009 Marius Gedminas"
__license__ = "MIT"
__version__ = "1.4"
__date__ = "2009-03-31"
import atexit
import inspect
import sys
import re
# For profiling
from profile import Profile
import pstats
# For hotshot profiling (inaccurate!)
try:
import hotshot
import hotshot.stats
except ImportError:
hotshot = None
# For trace.py coverage
import trace
# For hotshot coverage (inaccurate!; uses undocumented APIs; might break)
if hotshot is not None:
import _hotshot
import hotshot.log
# For cProfile profiling (best)
try:
import cProfile
except ImportError:
cProfile = None
# For timecall
import time
# registry of available profilers
AVAILABLE_PROFILERS = {}
def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False,
sort=None, entries=40,
profiler=('cProfile', 'profile', 'hotshot')):
"""Mark `fn` for profiling.
If `skip` is > 0, first `skip` calls to `fn` will not be profiled.
If `immediate` is False, profiling results will be printed to
sys.stdout on program termination. Otherwise results will be printed
after each call.
If `dirs` is False only the name of the file will be printed.
Otherwise the full path is used.
`sort` can be a list of sort keys (defaulting to ['cumulative',
'time', 'calls']). The following ones are recognized::
'calls' -- call count
'cumulative' -- cumulative time
'file' -- file name
'line' -- line number
'module' -- file name
'name' -- function name
'nfl' -- name/file/line
'pcalls' -- call count
'stdname' -- standard name
'time' -- internal time
`entries` limits the output to the first N entries.
`profiler` can be used to select the preferred profiler, or specify a
sequence of them, in order of preference. The default is ('cProfile',
'profile', 'hotshot').
If `filename` is specified, the profile stats will be stored in the
named file. You can load them with pstats.Stats(filename).
Usage::
def fn(...):
...
fn = profile(fn, skip=1)
If you are using Python 2.4, you should be able to use the decorator
syntax::
@profile(skip=3)
def fn(...):
...
or just ::
@profile
def fn(...):
...
"""
if fn is None: # @profile() syntax -- we are a decorator maker
def decorator(fn):
return profile(fn, skip=skip, filename=filename,
immediate=immediate, dirs=dirs,
sort=sort, entries=entries,
profiler=profiler)
return decorator
# @profile syntax -- we are a decorator.
if isinstance(profiler, str):
profiler = [profiler]
for p in profiler:
if p in AVAILABLE_PROFILERS:
profiler_class = AVAILABLE_PROFILERS[p]
break
else:
raise ValueError('only these profilers are available: %s'
% ', '.join(AVAILABLE_PROFILERS))
fp = profiler_class(fn, skip=skip, filename=filename,
immediate=immediate, dirs=dirs,
sort=sort, entries=entries)
# fp = HotShotFuncProfile(fn, skip=skip, filename=filename, ...)
# or HotShotFuncProfile
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
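A short, hypothetical usage sketch of the decorator-maker form described in the docstring above (the function and the numbers are made up for illustration):
from profilehooks import profile

@profile(immediate=True, sort=['time'], entries=10)
def slow_sum(n):
    """Toy workload so the profiler has something to report."""
    total = 0
    for i in xrange(n):
        total += i * i
    return total

slow_sum(100000)    # prints "*** PROFILER RESULTS ***" for this call right away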
def coverage(fn):
"""Mark `fn` for line coverage analysis.
Results will be printed to sys.stdout on program termination.
Usage::
def fn(...):
...
fn = coverage(fn)
If you are using Python 2.4, you should be able to use the decorator
syntax::
@coverage
def fn(...):
...
"""
fp = TraceFuncCoverage(fn) # or HotShotFuncCoverage
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
def coverage_with_hotshot(fn):
"""Mark `fn` for line coverage analysis.
Uses the 'hotshot' module for fast coverage analysis.
BUG: Produces inaccurate results.
See the docstring of `coverage` for usage examples.
"""
fp = HotShotFuncCoverage(fn)
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
class FuncProfile(object):
"""Profiler for a function (uses profile)."""
# This flag is shared between all instances
in_profiler = False
Profile = Profile
def __init__(self, fn, skip=0, filename=None, immediate=False, dirs=False,
sort=None, entries=40):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
FuncProfile registers an atexit handler that prints profiling
information to sys.stderr when the program terminates.
"""
self.fn = fn
self.skip = skip
self.filename = filename
self.immediate = immediate
self.dirs = dirs
self.sort = sort or ('cumulative', 'time', 'calls')
if isinstance(self.sort, str):
self.sort = (self.sort, )
self.entries = entries
self.reset_stats()
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a single call to the function."""
self.ncalls += 1
if self.skip > 0:
self.skip -= 1
self.skipped += 1
return self.fn(*args, **kw)
if FuncProfile.in_profiler:
# handle recursive calls
return self.fn(*args, **kw)
# You cannot reuse the same profiler for many calls and accumulate
# stats that way. :-/
profiler = self.Profile()
try:
FuncProfile.in_profiler = True
return profiler.runcall(self.fn, *args, **kw)
finally:
FuncProfile.in_profiler = False
self.stats.add(profiler)
if self.immediate:
self.print_stats()
self.reset_stats()
def print_stats(self):
"""Print profile information to sys.stdout."""
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** PROFILER RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls,
if self.skipped:
print "(%d calls not profiled)" % self.skipped
else:
print
print
stats = self.stats
if self.filename:
stats.dump_stats(self.filename)
if not self.dirs:
stats.strip_dirs()
stats.sort_stats(*self.sort)
stats.print_stats(self.entries)
def reset_stats(self):
"""Reset accumulated profiler statistics."""
# Note: not using self.Profile, since pstats.Stats() fails then
self.stats = pstats.Stats(Profile())
self.ncalls = 0
self.skipped = 0
def atexit(self):
"""Stop profiling and print profile information to sys.stdout.
This function is registered as an atexit hook.
"""
if not self.immediate:
self.print_stats()
AVAILABLE_PROFILERS['profile'] = FuncProfile
if cProfile is not None:
class CProfileFuncProfile(FuncProfile):
"""Profiler for a function (uses cProfile)."""
Profile = cProfile.Profile
AVAILABLE_PROFILERS['cProfile'] = CProfileFuncProfile
if hotshot is not None:
class HotShotFuncProfile(object):
"""Profiler for a function (uses hotshot)."""
# This flag is shared between all instances
in_profiler = False
def __init__(self, fn, skip=0, filename=None):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
HotShotFuncProfile registers an atexit handler that prints
profiling information to sys.stderr when the program terminates.
The log file is not removed and remains there to clutter the
current working directory.
"""
self.fn = fn
self.filename = filename
if self.filename:
self.logfilename = filename + ".raw"
else:
self.logfilename = fn.__name__ + ".prof"
self.profiler = hotshot.Profile(self.logfilename)
self.ncalls = 0
self.skip = skip
self.skipped = 0
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a single call to the function."""
self.ncalls += 1
if self.skip > 0:
self.skip -= 1
self.skipped += 1
return self.fn(*args, **kw)
if HotShotFuncProfile.in_profiler:
# handle recursive calls
return self.fn(*args, **kw)
try:
HotShotFuncProfile.in_profiler = True
return self.profiler.runcall(self.fn, *args, **kw)
finally:
HotShotFuncProfile.in_profiler = False
def atexit(self):
"""Stop profiling and print profile information to sys.stderr.
This function is registered as an atexit hook.
"""
self.profiler.close()
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** PROFILER RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls,
if self.skipped:
print "(%d calls not profiled)" % self.skipped
else:
print
print
stats = hotshot.stats.load(self.logfilename)
# hotshot.stats.load takes ages, and the .prof file eats megabytes, but
# a saved stats object is small and fast
if self.filename:
stats.dump_stats(self.filename)
# it is best to save before strip_dirs
stats.strip_dirs()
stats.sort_stats('cumulative', 'time', 'calls')
stats.print_stats(40)
AVAILABLE_PROFILERS['hotshot'] = HotShotFuncProfile
class HotShotFuncCoverage:
"""Coverage analysis for a function (uses _hotshot).
HotShot coverage is reportedly faster than trace.py, but it appears to
have problems with exceptions; also line counts in coverage reports
are generally lower than line counts produced by TraceFuncCoverage.
Is this my bug, or is it a problem with _hotshot?
"""
def __init__(self, fn):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
HotShotFuncCoverage registers an atexit handler that prints
profiling information to sys.stderr when the program terminates.
The log file is not removed and remains there to clutter the
current working directory.
"""
self.fn = fn
self.logfilename = fn.__name__ + ".cprof"
self.profiler = _hotshot.coverage(self.logfilename)
self.ncalls = 0
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a single call to the function."""
self.ncalls += 1
return self.profiler.runcall(self.fn, args, kw)
def atexit(self):
"""Stop profiling and print profile information to sys.stderr.
This function is registered as an atexit hook.
"""
self.profiler.close()
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** COVERAGE RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls
print
fs = FuncSource(self.fn)
reader = hotshot.log.LogReader(self.logfilename)
for what, (filename, lineno, funcname), tdelta in reader:
if filename != fs.filename:
continue
if what == hotshot.log.LINE:
fs.mark(lineno)
if what == hotshot.log.ENTER:
# hotshot gives us the line number of the function definition
# and never gives us a LINE event for the first statement in
# a function, so if we didn't perform this mapping, the first
# statement would be marked as never executed
if lineno == fs.firstlineno:
lineno = fs.firstcodelineno
fs.mark(lineno)
reader.close()
print fs
class TraceFuncCoverage:
"""Coverage analysis for a function (uses trace module).
HotShot coverage analysis is reportedly faster, but it appears to have
problems with exceptions.
"""
# Shared between all instances so that nested calls work
tracer = trace.Trace(count=True, trace=False,
ignoredirs=[sys.prefix, sys.exec_prefix])
# This flag is also shared between all instances
tracing = False
def __init__(self, fn):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
TraceFuncCoverage registers an atexit handler that prints
profiling information to sys.stderr when the program terminates.
The log file is not removed and remains there to clutter the
current working directory.
"""
self.fn = fn
self.logfilename = fn.__name__ + ".cprof"
self.ncalls = 0
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a single call to the function."""
self.ncalls += 1
if TraceFuncCoverage.tracing:
return self.fn(*args, **kw)
try:
TraceFuncCoverage.tracing = True
return self.tracer.runfunc(self.fn, *args, **kw)
finally:
TraceFuncCoverage.tracing = False
def atexit(self):
"""Stop profiling and print profile information to sys.stderr.
This function is registered as an atexit hook.
"""
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** COVERAGE RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls
print
fs = FuncSource(self.fn)
for (filename, lineno), count in self.tracer.counts.items():
if filename != fs.filename:
continue
fs.mark(lineno, count)
print fs
never_executed = fs.count_never_executed()
if never_executed:
print "%d lines were not executed." % never_executed
class FuncSource:
"""Source code annotator for a function."""
blank_rx = re.compile(r"^\s*finally:\s*(#.*)?$")
def __init__(self, fn):
self.fn = fn
self.filename = inspect.getsourcefile(fn)
self.source, self.firstlineno = inspect.getsourcelines(fn)
self.sourcelines = {}
self.firstcodelineno = self.firstlineno
self.find_source_lines()
def find_source_lines(self):
"""Mark all executable source lines in fn as executed 0 times."""
strs = trace.find_strings(self.filename)
lines = trace.find_lines_from_code(self.fn.func_code, strs)
self.firstcodelineno = sys.maxint
for lineno in lines:
self.firstcodelineno = min(self.firstcodelineno, lineno)
self.sourcelines.setdefault(lineno, 0)
if self.firstcodelineno == sys.maxint:
self.firstcodelineno = self.firstlineno
def mark(self, lineno, count=1):
"""Mark a given source line as executed count times.
Multiple calls to mark for the same lineno add up.
"""
self.sourcelines[lineno] = self.sourcelines.get(lineno, 0) + count
def count_never_executed(self):
"""Count statements that were never executed."""
lineno = self.firstlineno
counter = 0
for line in self.source:
if self.sourcelines.get(lineno) == 0:
if not self.blank_rx.match(line):
counter += 1
lineno += 1
return counter
def __str__(self):
"""Return annotated source code for the function."""
lines = []
lineno = self.firstlineno
for line in self.source:
counter = self.sourcelines.get(lineno)
if counter is None:
prefix = ' ' * 7
elif counter == 0:
if self.blank_rx.match(line):
prefix = ' ' * 7
else:
prefix = '>' * 6 + ' '
else:
prefix = '%5d: ' % counter
lines.append(prefix + line)
lineno += 1
return ''.join(lines)
def timecall(fn=None, immediate=True, timer=time.time):
"""Wrap `fn` and print its execution time.
Example::
@timecall
def somefunc(x, y):
time.sleep(x * y)
somefunc(2, 3)
will print the time taken by somefunc on every call. If you want just
a summary at program termination, use
@timecall(immediate=False)
You can also choose a timing method other than the default ``time.time()``,
e.g.:
@timecall(timer=time.clock)
"""
if fn is None: # @timecall() syntax -- we are a decorator maker
def decorator(fn):
return timecall(fn, immediate=immediate, timer=timer)
return decorator
# @timecall syntax -- we are a decorator.
fp = FuncTimer(fn, immediate=immediate, timer=timer)
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
class FuncTimer(object):
def __init__(self, fn, immediate, timer):
self.fn = fn
self.ncalls = 0
self.totaltime = 0
self.immediate = immediate
self.timer = timer
if not immediate:
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a single call to the function."""
fn = self.fn
timer = self.timer
self.ncalls += 1
try:
start = timer()
return fn(*args, **kw)
finally:
duration = timer() - start
self.totaltime += duration
if self.immediate:
funcname = fn.__name__
filename = fn.func_code.co_filename
lineno = fn.func_code.co_firstlineno
print >> sys.stderr, "\n %s (%s:%s):\n %.3f seconds\n" % (
funcname, filename, lineno, duration)
def atexit(self):
if not self.ncalls:
return
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print ("\n %s (%s:%s):\n"
" %d calls, %.3f seconds (%.3f seconds per call)\n" % (
funcname, filename, lineno, self.ncalls,
self.totaltime, self.totaltime / self.ncalls))
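And a corresponding hypothetical sketch for @timecall, covering both the per-call and the summary-at-exit forms (the sleep durations are arbitrary):
import time
from profilehooks import timecall

@timecall                       # per-call timing printed to sys.stderr
def fetch(pause):
    time.sleep(pause)

@timecall(immediate=False)      # a single summary is printed at program exit instead
def batch(pause):
    time.sleep(pause)

fetch(0.1)
batch(0.1)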

View File

@@ -59,6 +59,7 @@ started = False
DATA_DIR = None
CONFIG = None
CONFIG_FILE = None
DB_FILE = None
@@ -73,17 +74,19 @@ UMASK = None
POLLING_FAILOVER = False
def initialize(config_file):
with INIT_LOCK:
global CONFIG
global CONFIG_FILE
global _INITIALIZED
global CURRENT_VERSION
global LATEST_VERSION
global UMASK
global POLLING_FAILOVER
CONFIG = plexpy.config.Config(config_file)
CONFIG_FILE = config_file
assert CONFIG is not None
@@ -117,6 +120,15 @@ def initialize(config_file):
logger.initLogger(console=not QUIET, log_dir=CONFIG.LOG_DIR,
verbose=VERBOSE)
if not CONFIG.BACKUP_DIR.startswith(os.path.abspath(DATA_DIR)):
# Put the backup dir in the data dir for now
CONFIG.BACKUP_DIR = os.path.join(DATA_DIR, 'backups')
if not os.path.exists(CONFIG.BACKUP_DIR):
try:
os.makedirs(CONFIG.BACKUP_DIR)
except OSError as e:
logger.error("Could not create backup dir '%s': %s", CONFIG.BACKUP_DIR, e)
if not CONFIG.CACHE_DIR.startswith(os.path.abspath(DATA_DIR)):
# Put the cache dir in the data dir for now
CONFIG.CACHE_DIR = os.path.join(DATA_DIR, 'cache')
@@ -176,7 +188,7 @@ def initialize(config_file):
plextv.refresh_users()
# Refresh the libraries list on startup
if CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
pmsconnect.refresh_libraries()
# Store the original umask
@@ -186,7 +198,6 @@ def initialize(config_file):
_INITIALIZED = True
return True
def daemonize():
if threading.activeCount() != 1:
logger.warn(
@@ -282,10 +293,10 @@ def initialize_scheduler():
else:
seconds = 0
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.UPDATE_SECTION_IDS != -1:
schedule_job(plextv.get_real_pms_url, 'Refresh Plex Server URLs',
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
schedule_job(plextv.get_real_pms_url, 'Refresh Plex server URLs',
hours=12, minutes=0, seconds=0)
schedule_job(pmsconnect.get_server_friendly_name, 'Refresh Plex Server Name',
schedule_job(pmsconnect.get_server_friendly_name, 'Refresh Plex server name',
hours=12, minutes=0, seconds=0)
if CONFIG.NOTIFY_RECENTLY_ADDED:
@@ -296,10 +307,10 @@ def initialize_scheduler():
hours=0, minutes=0, seconds=0)
if CONFIG.MONITOR_REMOTE_ACCESS:
schedule_job(activity_pinger.check_server_response, 'Check for server response',
schedule_job(activity_pinger.check_server_response, 'Check for Plex remote access',
hours=0, minutes=0, seconds=seconds)
else:
schedule_job(activity_pinger.check_server_response, 'Check for server response',
schedule_job(activity_pinger.check_server_response, 'Check for Plex remote access',
hours=0, minutes=0, seconds=0)
# If we're not using websockets then fall back to polling
@@ -322,6 +333,8 @@ def initialize_scheduler():
schedule_job(pmsconnect.refresh_libraries, 'Refresh libraries list',
hours=hours, minutes=0, seconds=0)
schedule_job(database.make_backup, 'Backup PlexPy database', hours=6, minutes=0, seconds=0, args=(True, True))
# Start scheduler
if start_jobs and len(SCHED.get_jobs()):
try:
@@ -333,7 +346,7 @@ def initialize_scheduler():
#SCHED.print_jobs()
def schedule_job(function, name, hours=0, minutes=0, seconds=0):
def schedule_job(function, name, hours=0, minutes=0, seconds=0, args=None):
"""
Start scheduled job if starting or restarting plexpy.
Reschedule job if Interval Settings have changed.
@@ -348,11 +361,11 @@ def schedule_job(function, name, hours=0, minutes=0, seconds=0):
logger.info("Removed background task: %s", name)
elif job.trigger.interval != datetime.timedelta(hours=hours, minutes=minutes):
SCHED.reschedule_job(name, trigger=IntervalTrigger(
hours=hours, minutes=minutes, seconds=seconds))
hours=hours, minutes=minutes, seconds=seconds), args=args)
logger.info("Re-scheduled background task: %s", name)
elif hours > 0 or minutes > 0 or seconds > 0:
SCHED.add_job(function, id=name, trigger=IntervalTrigger(
hours=hours, minutes=minutes, seconds=seconds))
hours=hours, minutes=minutes, seconds=seconds), args=args)
logger.info("Scheduled background task: %s", name)
@@ -430,10 +443,10 @@ def dbcheck():
# notify_log table :: This is a table which logs notifications sent
c_db.execute(
'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'session_key INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
'agent_id INTEGER, agent_name TEXT, on_play INTEGER, on_stop INTEGER, on_watched INTEGER, '
'on_pause INTEGER, on_resume INTEGER, on_buffer INTEGER, on_created INTEGER)'
'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, '
'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
'user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, '
'subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)'
)
# library_sections table :: This table keeps record of the servers library sections
@@ -711,10 +724,57 @@ def dbcheck():
'ALTER TABLE notify_log ADD COLUMN on_created INTEGER'
)
# Upgrade notify_log table from earlier versions
try:
c_db.execute('SELECT poster_url FROM notify_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notify_log.")
c_db.execute(
'ALTER TABLE notify_log ADD COLUMN poster_url TEXT'
)
# Upgrade notify_log table from earlier versions (populate table with data from notify_log)
try:
c_db.execute('SELECT timestamp FROM notify_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notify_log.")
c_db.execute(
'CREATE TABLE IF NOT EXISTS notify_log_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, '
'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
'user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, '
'subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)'
)
c_db.execute(
'INSERT INTO notify_log_temp (session_key, rating_key, user_id, user, agent_id, agent_name, '
'poster_url, timestamp, notify_action) '
'SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, timestamp, '
'notify_action FROM notify_log_temp '
'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
'on_play, "play" FROM notify_log WHERE on_play '
'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
'on_stop, "stop" FROM notify_log WHERE on_stop '
'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
'on_watched, "watched" FROM notify_log WHERE on_watched '
'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
'on_pause, "pause" FROM notify_log WHERE on_pause '
'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
'on_resume, "resume" FROM notify_log WHERE on_resume '
'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
'on_buffer, "buffer" FROM notify_log WHERE on_buffer '
'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, '
'on_created, "created" FROM notify_log WHERE on_created '
'ORDER BY timestamp ')
c_db.execute(
'DROP TABLE notify_log'
)
c_db.execute(
'ALTER TABLE notify_log_temp RENAME TO notify_log'
)
# Upgrade library_sections table from earlier versions (remove UNIQUE constraint on section_id)
try:
result = c_db.execute('PRAGMA index_xinfo("sqlite_autoindex_library_sections_1")')
if result and 'server_id' not in [row[2] for row in result]:
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="library_sections"').fetchone()
if 'section_id INTEGER UNIQUE' in result[0]:
logger.debug(u"Altering database. Removing unique constraint on section_id from library_sections table.")
c_db.execute(
'CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
@@ -739,7 +799,7 @@ def dbcheck():
'ALTER TABLE library_sections_temp RENAME TO library_sections'
)
except sqlite3.OperationalError:
logger.debug(u"Unable to remove section_id unique constraint from library_sections.")
logger.warn(u"Unable to remove section_id unique constraint from library_sections.")
try:
c_db.execute(
'DROP TABLE library_sections_temp'
@@ -747,10 +807,21 @@ def dbcheck():
except:
pass
# Upgrade library_sections table from earlier versions (remove duplicated libraries)
try:
result = c_db.execute('SELECT * FROM library_sections WHERE server_id = ""')
if result.rowcount > 0:
logger.debug(u"Altering database. Removing duplicate libraries from library_sections table.")
c_db.execute(
'DELETE FROM library_sections WHERE server_id = ""'
)
except sqlite3.OperationalError:
logger.warn(u"Unable to remove duplicate libraries from library_sections table.")
# Upgrade users table from earlier versions (remove UNIQUE constraint on username)
try:
result = c_db.execute('PRAGMA index_xinfo("sqlite_autoindex_users_2")')
if result and 'username' in [row[2] for row in result]:
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="users"').fetchone()
if 'username TEXT NOT NULL UNIQUE' in result[0]:
logger.debug(u"Altering database. Removing unique constraint on username from users table.")
c_db.execute(
'CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
@@ -773,7 +844,7 @@ def dbcheck():
'ALTER TABLE users_temp RENAME TO users'
)
except sqlite3.OperationalError:
logger.debug(u"Unable to remove username unique constraint from users.")
logger.warn(u"Unable to remove username unique constraint from users.")
try:
c_db.execute(
'DROP TABLE users_temp'
@@ -790,6 +861,7 @@ def dbcheck():
conn_db.commit()
c_db.close()
def shutdown(restart=False, update=False):
cherrypy.engine.exit()
SCHED.shutdown(wait=False)
@@ -822,6 +894,7 @@ def shutdown(restart=False, update=False):
os._exit(0)
def generate_uuid():
logger.debug(u"Generating UUID...")
return uuid.uuid4().hex

View File

@@ -156,8 +156,8 @@ class ActivityHandler(object):
(self.get_session_key(), buffer_last_triggered))
time_since_last_trigger = int(time.time()) - int(buffer_last_triggered)
if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
if plexpy.CONFIG.BUFFER_THRESHOLD > 0 and (current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and \
time_since_last_trigger == 0 or time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT):
ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
threading.Thread(target=notification_handler.notify,
kwargs=dict(stream_data=db_stream, notify_action='buffer')).start()

View File

@@ -182,7 +182,7 @@ class ActivityProcessor(object):
self.db.action(query=query, args=args)
# Check if we should group the session, select the last two rows from the user
query = 'SELECT id, rating_key, user_id, reference_id FROM session_history \
query = 'SELECT id, rating_key, view_offset, user_id, reference_id FROM session_history \
WHERE user_id = ? ORDER BY id DESC LIMIT 2 '
args = [session['user_id']]
@@ -191,6 +191,7 @@ class ActivityProcessor(object):
new_session = {'id': result[0]['id'],
'rating_key': result[0]['rating_key'],
'view_offset': result[0]['view_offset'],
'user_id': result[0]['user_id'],
'reference_id': result[0]['reference_id']}
@@ -199,12 +200,14 @@ class ActivityProcessor(object):
else:
prev_session = {'id': result[1]['id'],
'rating_key': result[1]['rating_key'],
'view_offset': result[1]['view_offset'],
'user_id': result[1]['user_id'],
'reference_id': result[1]['reference_id']}
query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
# If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key']):
if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key'] \
and prev_session['view_offset'] <= new_session['view_offset']):
args = [prev_session['reference_id'], new_session['id']]
else:
args = [new_session['id'], new_session['id']]

plexpy/api2.py Normal file

@@ -0,0 +1,491 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import inspect
import json
import os
import random
import re
import time
import traceback
import cherrypy
import xmltodict
import database
import logger
import plexpy
class API2:
def __init__(self, **kwargs):
self._api_valid_methods = self._api_docs().keys()
self._api_authenticated = False
self._api_out_type = 'json' # default
self._api_msg = None
self._api_debug = None
self._api_cmd = None
self._api_apikey = None
self._api_callback = None # JSONP
self._api_result_type = 'failed'
self._api_profileme = None # For profiling the api call
self._api_kwargs = None # Cleaned kwargs
def _api_docs(self, md=False):
""" Makes the api docs """
docs = {}
for f, _ in inspect.getmembers(self, predicate=inspect.ismethod):
if not f.startswith('_') and not f.startswith('_api'):
if md is True:
docs[f] = inspect.getdoc(getattr(self, f)) if inspect.getdoc(getattr(self, f)) else None
else:
docs[f] = ' '.join(inspect.getdoc(getattr(self, f)).split()) if inspect.getdoc(getattr(self, f)) else None
return docs
def docs_md(self):
""" Return an API.md to simplify the api docs because of the decorator. """
return self._api_make_md()
def docs(self):
""" Returns a dict where commands are keys, docstring are value. """
return self._api_docs()
def _api_validate(self, *args, **kwargs):
""" Sets class vars and removes unneeded parameters. """
if not plexpy.CONFIG.API_ENABLED:
self._api_msg = 'API not enabled'
elif not plexpy.CONFIG.API_KEY:
self._api_msg = 'API key not generated'
elif len(plexpy.CONFIG.API_KEY) != 32:
self._api_msg = 'API key not generated correctly'
elif 'apikey' not in kwargs:
self._api_msg = 'Parameter apikey is required'
elif kwargs.get('apikey', '') != plexpy.CONFIG.API_KEY:
self._api_msg = 'Invalid apikey'
elif 'cmd' not in kwargs:
self._api_msg = 'Parameter cmd is required. Possible commands are: %s' % ', '.join(self._api_valid_methods)
elif 'cmd' in kwargs and kwargs.get('cmd') not in self._api_valid_methods:
self._api_msg = 'Unknown command: %s. Possible commands are: %s' % (kwargs.get('cmd', ''), ', '.join(self._api_valid_methods))
self._api_callback = kwargs.pop('callback', None)
self._api_apikey = kwargs.pop('apikey', None)
self._api_cmd = kwargs.pop('cmd', None)
self._api_debug = kwargs.pop('debug', False)
self._api_profileme = kwargs.pop('profileme', None)
# Allow override for the api.
self._api_out_type = kwargs.pop('out_type', 'json')
if self._api_apikey == plexpy.CONFIG.API_KEY and plexpy.CONFIG.API_ENABLED and self._api_cmd in self._api_valid_methods:
self._api_authenticated = True
self._api_msg = None
self._api_kwargs = kwargs
elif self._api_cmd in ('get_apikey', 'docs', 'docs_md') and plexpy.CONFIG.API_ENABLED:
self._api_authenticated = True
# Remove the old error msg
self._api_msg = None
self._api_kwargs = kwargs
logger.debug(u'PlexPy APIv2 :: Cleaned kwargs %s' % self._api_kwargs)
return self._api_kwargs
def get_logs(self, sort='', search='', order='desc', regex='', start=0, end=0, **kwargs):
"""
Returns the log
Args:
sort(string, optional): time, thread, msg, loglevel
search(string, optional): 'string'
order(string, optional): desc, asc
regex(string, optional): 'regexstring'
start(int, optional): int
end(int, optional): int
Returns:
```{"response":
{"msg": "Hey",
"result": "success"},
"data": [
{"time": "29-sept.2015",
"thread": "MainThread",
"msg": "Called x from y",
"loglevel": "DEBUG"
}
]
}
```
"""
logfile = os.path.join(plexpy.CONFIG.LOG_DIR, 'plexpy.log')
templog = []
start = int(start)
end = int(end)
if regex:
logger.debug(u'PlexPy APIv2 :: Filtering log using regex %s' % regex)
reg = re.compile(regex, flags=re.I)
for line in open(logfile, 'r').readlines():
temp_loglevel_and_time = None
try:
temp_loglevel_and_time = line.split('- ')
loglvl = temp_loglevel_and_time[1].split(' :')[0].strip()
tl_tread = line.split(' :: ')
if loglvl is None:
msg = line.replace('\n', '')
else:
msg = line.split(' : ')[1].replace('\n', '')
thread = tl_tread[1].split(' : ')[0]
except IndexError:
# We assume this is a traceback
tl = (len(templog) - 1)
templog[tl]['msg'] += line.replace('\n', '')
continue
if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
d = {
'time': temp_loglevel_and_time[0],
'loglevel': loglvl,
'msg': msg.replace('\n', ''),
'thread': thread
}
templog.append(d)
if end > 0 or start > 0:
logger.debug(u'PlexPy APIv2 :: Slicing the log from %s to %s' % (start, end))
templog = templog[start:end]
if sort:
logger.debug(u'PlexPy APIv2 :: Sorting log based on %s' % sort)
templog = sorted(templog, key=lambda k: k[sort])
if search:
logger.debug(u'PlexPy APIv2 :: Searching log values for %s' % search)
tt = [d for d in templog for k, v in d.items() if search.lower() in v.lower()]
if len(tt):
templog = tt
if regex:
tt = []
for l in templog:
stringdict = ' '.join('{}{}'.format(k, v) for k, v in l.items())
if reg.search(stringdict):
tt.append(l)
if len(tt):
templog = tt
if order == 'desc':
templog = templog[::-1]
self.data = templog
return templog
def get_settings(self, key=''):
""" Fetches all settings from the config file
Args:
key(string, optional): 'Run it without args to see all args'
Returns:
json:
```
{General: {api_enabled: true, ...}
Advanced: {cache_sizemb: "32", ...}}
```
"""
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
interface_list = [name for name in os.listdir(interface_dir) if
os.path.isdir(os.path.join(interface_dir, name))]
conf = plexpy.CONFIG._config
config = {}
# Truthify the dict
for k, v in conf.iteritems():
if isinstance(v, dict):
d = {}
for kk, vv in v.iteritems():
if vv == '0' or vv == '1':
d[kk] = bool(vv)
else:
d[kk] = vv
config[k] = d
if k == 'General':
config[k]['interface'] = interface_dir
config[k]['interface_list'] = interface_list
if key:
return config.get(key, None)
return config
def sql(self, query=''):
""" Query the db with raw sql; makes a backup of
the db if the existing backup is older than 24 h
"""
if not plexpy.CONFIG.API_SQL or not query:
return
# allow the user to shoot themselves
# in the foot but not in the head..
if not len(os.listdir(plexpy.BACKUP_DIR)):
self.backupdb()
else:
# If any existing backup is more than 24 h old, make a new backup
if any([os.path.getctime(os.path.join(plexpy.BACKUP_DIR, file_)) <
(time.time() - 86400) for file_ in os.listdir(plexpy.BACKUP_DIR)]):
self.backupdb()
db = database.MonitorDatabase()
rows = db.select(query)
self.data = rows
return rows
def backupdb(self):
""" Creates a manual backup of the plexpy.db file """
data = database.make_backup()
if data:
self.result_type = 'success'
else:
self.result_type = 'failed'
return data
def restart(self, **kwargs):
""" Restarts plexpy """
plexpy.SIGNAL = 'restart'
self.msg = 'Restarting plexpy'
self.result_type = 'success'
def update(self, **kwargs):
""" Check for updates on Github """
plexpy.SIGNAL = 'update'
self.msg = 'Updating plexpy'
self.result_type = 'success'
def _api_make_md(self):
""" Tries to make a API.md to simplify the api docs """
head = '''# API Reference\n
The API is still pretty new and needs some serious cleaning up on the backend but should be reasonably functional. There are no error codes yet.
## General structure
The API endpoint is `http://ip:port + HTTP_ROOT + /api?apikey=$apikey&cmd=$command`
Response example
```
{
"response": {
"data": [
{
"loglevel": "INFO",
"msg": "Signal 2 caught, saving and exiting...",
"thread": "MainThread",
"time": "22-sep-2015 01:42:56 "
}
],
"message": null,
"result": "success"
}
}
```
General parameters:
out_type: 'xml',
callback: 'pong',
'debug': 1
## API methods'''
body = ''
doc = self._api_docs(md=True)
for k in sorted(doc):
v = doc.get(k)
body += '### %s\n' % k
body += '' if not v else v + '\n'
body += '\n\n'
result = head + '\n\n' + body
return '<div style="white-space: pre-wrap">' + result + '</div>'
def get_apikey(self, username='', password=''):
""" Fetches apikey
Args:
username(string, optional): Your username
password(string, optional): Your password
Returns:
string: Apikey, args are required if auth is enabled
Makes and saves the apikey if it does not exist.
"""
apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
if plexpy.CONFIG.HTTP_USERNAME and plexpy.CONFIG.HTTP_PASSWORD:
if username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
if plexpy.CONFIG.API_KEY:
self.data = plexpy.CONFIG.API_KEY
else:
self.data = apikey
plexpy.CONFIG.API_KEY = apikey
plexpy.CONFIG.write()
else:
self.msg = 'Authentication is enabled, please add the correct username and password to the parameters'
else:
if plexpy.CONFIG.API_KEY:
self.data = plexpy.CONFIG.API_KEY
else:
# Make an apikey if one doesn't exist
self.data = apikey
plexpy.CONFIG.API_KEY = apikey
plexpy.CONFIG.write()
return self.data
def _api_responds(self, result_type='success', data=None, msg=''):
""" Formats the result to a predefined dict so we can change it to
the desired output by _api_out_as """
if data is None:
data = {}
return {"response": {"result": result_type, "message": msg, "data": data}}
def _api_out_as(self, out):
""" Formats the response to the desired output """
if self._api_cmd == 'docs_md':
return out['response']['data']
if self._api_out_type == 'json':
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
try:
if self._api_debug:
out = json.dumps(out, indent=4, sort_keys=True)
else:
out = json.dumps(out)
if self._api_callback is not None:
cherrypy.response.headers['Content-Type'] = 'application/javascript'
# wrap with JSONP call if requested
out = self._api_callback + '(' + out + ');'
# if we fail to generate the output fake an error
except Exception as e:
logger.info(u'PlexPy APIv2 :: ' + traceback.format_exc())
out['message'] = traceback.format_exc()
out['result'] = 'error'
elif self._api_out_type == 'xml':
cherrypy.response.headers['Content-Type'] = 'application/xml'
try:
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.error(u'PlexPy APIv2 :: Failed to parse xml result')
try:
out['message'] = e
out['result'] = 'error'
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.error(u'PlexPy APIv2 :: Failed to parse xml result error message %s' % e)
out = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<message>%s</message>
<data></data>
<result>error</result>
</response>
''' % e
return out
def _api_run(self, *args, **kwargs):
""" handles the stuff from the handler """
result = {}
logger.debug(u'PlexPy APIv2 :: Original kwargs was %s' % kwargs)
self._api_validate(**kwargs)
if self._api_cmd and self._api_authenticated:
call = getattr(self, self._api_cmd)
# Profile is written to console.
if self._api_profileme:
from profilehooks import profile
call = profile(call, immediate=True)
# We allow this to fail so we get a
# traceback in the browser
if self._api_debug:
result = call(**self._api_kwargs)
else:
try:
result = call(**self._api_kwargs)
except Exception as e:
logger.error(u'PlexPy APIv2 :: Failed to run %s %s %s' % (self._api_cmd, self._api_kwargs, e))
ret = None
# The api decorated function can return different result types.
# convert it to a list/dict before we change it to the user's
# wanted output
try:
if isinstance(result, (dict, list)):
ret = result
else:
raise
except:
try:
ret = json.loads(result)
except (ValueError, TypeError):
try:
ret = xmltodict.parse(result, attr_prefix='')
except:
pass
# Fallback if we can't "parse the response"
if ret is None:
ret = result
if ret or self._api_result_type == 'success':
# To allow override for restart etc
# if the call returns some data we are gonna assume its a success
self._api_result_type = 'success'
else:
self._api_result_type = 'error'
return self._api_out_as(self._api_responds(result_type=self._api_result_type, msg=self._api_msg, data=ret))
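For orientation, a hypothetical client-side call against this handler, following the URL layout described in the generated API.md above; the host, port and apikey value are placeholders.
import json
import urllib2

apikey = '0123456789abcdef0123456789abcdef'   # placeholder; fetch the real one with cmd=get_apikey
url = 'http://localhost:8181/api?apikey=%s&cmd=get_logs' % apikey
reply = json.loads(urllib2.urlopen(url).read())
print reply['response']['result']             # 'success' or 'error'
for entry in reply['response']['data']:       # the entries produced by get_logs()
    print entry['time'], entry['loglevel'], entry['msg']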

View File

@@ -57,4 +57,15 @@ MEDIA_FLAGS_AUDIO = {'ac.?3': 'dolby_digital',
MEDIA_FLAGS_VIDEO = {'avc1': 'h264',
'wmv(1|2)': 'wmv',
'wmv3': 'wmvhd'
}
}
SCHEDULER_LIST = ['Check GitHub for updates',
'Check for active sessions',
'Check for recently added items',
'Check for Plex remote access',
'Refresh users list',
'Refresh libraries list',
'Refresh Plex server URLs',
'Refresh Plex server name',
'Backup PlexPy database'
]

View File

@@ -25,13 +25,15 @@ _CONFIG_DEFINITIONS = {
'PMS_NAME': (unicode, 'PMS', ''),
'PMS_PORT': (int, 'PMS', 32400),
'PMS_TOKEN': (str, 'PMS', ''),
'PMS_SSL': (int, 'General', 0),
'PMS_SSL': (int, 'PMS', 0),
'PMS_URL': (str, 'PMS', ''),
'PMS_USE_BIF': (int, 'PMS', 0),
'PMS_UUID': (str, 'PMS', ''),
'TIME_FORMAT': (str, 'General', 'HH:mm'),
'ANON_REDIRECT': (str, 'General', 'http://dereferer.org/?'),
'API_ENABLED': (int, 'General', 0),
'API_KEY': (str, 'General', ''),
'API_SQL': (int, 'General', 0),
'BOXCAR_ENABLED': (int, 'Boxcar', 0),
'BOXCAR_TOKEN': (str, 'Boxcar', ''),
'BOXCAR_SOUND': (str, 'Boxcar', ''),
@@ -48,6 +50,7 @@ _CONFIG_DEFINITIONS = {
'BOXCAR_ON_INTUP': (int, 'Boxcar', 0),
'BUFFER_THRESHOLD': (int, 'Monitoring', 3),
'BUFFER_WAIT': (int, 'Monitoring', 900),
'BACKUP_DIR': (str, 'General', ''),
'CACHE_DIR': (str, 'General', ''),
'CACHE_SIZEMB': (int, 'Advanced', 32),
'CHECK_GITHUB': (int, 'General', 1),
@@ -85,6 +88,8 @@ _CONFIG_DEFINITIONS = {
'FACEBOOK_APP_SECRET': (str, 'Facebook', ''),
'FACEBOOK_TOKEN': (str, 'Facebook', ''),
'FACEBOOK_GROUP': (str, 'Facebook', ''),
'FACEBOOK_INCL_POSTER': (int, 'Facebook', 1),
'FACEBOOK_INCL_SUBJECT': (int, 'Facebook', 1),
'FACEBOOK_ON_PLAY': (int, 'Facebook', 0),
'FACEBOOK_ON_STOP': (int, 'Facebook', 0),
'FACEBOOK_ON_PAUSE': (int, 'Facebook', 0),
@@ -127,8 +132,11 @@ _CONFIG_DEFINITIONS = {
'HOME_STATS_COUNT': (int, 'General', 5),
'HOME_STATS_CARDS': (list, 'General', ['top_tv', 'popular_tv', 'top_movies', 'popular_movies', 'top_music', \
'popular_music', 'last_watched', 'top_users', 'top_platforms', 'most_concurrent']),
'HTTPS_CREATE_CERT': (int, 'General', 1),
'HTTPS_CERT': (str, 'General', ''),
'HTTPS_KEY': (str, 'General', ''),
'HTTPS_DOMAIN': (str, 'General', 'localhost'),
'HTTPS_IP': (str, 'General', '127.0.0.1'),
'HTTP_HOST': (str, 'General', '0.0.0.0'),
'HTTP_PASSWORD': (str, 'General', ''),
'HTTP_PORT': (int, 'General', 8181),
@@ -183,6 +191,7 @@ _CONFIG_DEFINITIONS = {
'NMA_ON_EXTUP': (int, 'NMA', 0),
'NMA_ON_INTUP': (int, 'NMA', 0),
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
'NOTIFY_UPLOAD_POSTERS': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_DELAY': (int, 'Monitoring', 60),
@@ -282,6 +291,7 @@ _CONFIG_DEFINITIONS = {
'PUSHBULLET_ON_INTUP': (int, 'PushBullet', 0),
'PUSHOVER_APITOKEN': (str, 'Pushover', ''),
'PUSHOVER_ENABLED': (int, 'Pushover', 0),
'PUSHOVER_HTML_SUPPORT': (int, 'Pushover', 1),
'PUSHOVER_KEYS': (str, 'Pushover', ''),
'PUSHOVER_PRIORITY': (int, 'Pushover', 0),
'PUSHOVER_SOUND': (str, 'Pushover', ''),
@@ -304,6 +314,7 @@ _CONFIG_DEFINITIONS = {
'SLACK_HOOK': (str, 'Slack', ''),
'SLACK_CHANNEL': (str, 'Slack', ''),
'SLACK_ICON_EMOJI': (str, 'Slack', ''),
'SLACK_INCL_SUBJECT': (int, 'Slack', 1),
'SLACK_USERNAME': (str, 'Slack', ''),
'SLACK_ON_PLAY': (int, 'Slack', 0),
'SLACK_ON_STOP': (int, 'Slack', 0),
@@ -343,6 +354,7 @@ _CONFIG_DEFINITIONS = {
'TELEGRAM_BOT_TOKEN': (str, 'Telegram', ''),
'TELEGRAM_ENABLED': (int, 'Telegram', 0),
'TELEGRAM_CHAT_ID': (str, 'Telegram', ''),
'TELEGRAM_INCL_SUBJECT': (int, 'Telegram', 1),
'TELEGRAM_ON_PLAY': (int, 'Telegram', 0),
'TELEGRAM_ON_STOP': (int, 'Telegram', 0),
'TELEGRAM_ON_PAUSE': (int, 'Telegram', 0),
@@ -360,9 +372,11 @@ _CONFIG_DEFINITIONS = {
'TV_NOTIFY_ON_STOP': (int, 'Monitoring', 0),
'TV_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
'TWITTER_ENABLED': (int, 'Twitter', 0),
'TWITTER_PASSWORD': (str, 'Twitter', ''),
'TWITTER_PREFIX': (str, 'Twitter', 'PlexPy'),
'TWITTER_USERNAME': (str, 'Twitter', ''),
'TWITTER_ACCESS_TOKEN': (str, 'Twitter', ''),
'TWITTER_ACCESS_TOKEN_SECRET': (str, 'Twitter', ''),
'TWITTER_CONSUMER_KEY': (str, 'Twitter', ''),
'TWITTER_CONSUMER_SECRET': (str, 'Twitter', ''),
'TWITTER_INCL_SUBJECT': (int, 'Twitter', 1),
'TWITTER_ON_PLAY': (int, 'Twitter', 0),
'TWITTER_ON_STOP': (int, 'Twitter', 0),
'TWITTER_ON_PAUSE': (int, 'Twitter', 0),
@@ -511,6 +525,7 @@ class Config(object):
self.MOVIE_LOGGING_ENABLE = 0
self.TV_LOGGING_ENABLE = 0
self.CONFIG_VERSION = '1'
if self.CONFIG_VERSION == '1':
# Change home_stats_cards to list
if self.HOME_STATS_CARDS:
@@ -524,4 +539,20 @@ class Config(object):
if 'library_statistics' in home_library_cards:
home_library_cards.remove('library_statistics')
self.HOME_LIBRARY_CARDS = home_library_cards
self.CONFIG_VERSION = '2'
self.CONFIG_VERSION = '2'
if self.CONFIG_VERSION == '2':
self.NOTIFY_ON_START_SUBJECT_TEXT = self.NOTIFY_ON_START_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_START_BODY_TEXT = self.NOTIFY_ON_START_BODY_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_STOP_SUBJECT_TEXT = self.NOTIFY_ON_STOP_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_STOP_BODY_TEXT = self.NOTIFY_ON_STOP_BODY_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_PAUSE_SUBJECT_TEXT = self.NOTIFY_ON_PAUSE_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_PAUSE_BODY_TEXT = self.NOTIFY_ON_PAUSE_BODY_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_RESUME_SUBJECT_TEXT = self.NOTIFY_ON_RESUME_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_RESUME_BODY_TEXT = self.NOTIFY_ON_RESUME_BODY_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_BUFFER_SUBJECT_TEXT = self.NOTIFY_ON_BUFFER_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_BUFFER_BODY_TEXT = self.NOTIFY_ON_BUFFER_BODY_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_WATCHED_SUBJECT_TEXT = self.NOTIFY_ON_WATCHED_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_ON_WATCHED_BODY_TEXT = self.NOTIFY_ON_WATCHED_BODY_TEXT.replace('{progress}','{progress_duration}')
self.NOTIFY_SCRIPTS_ARGS_TEXT = self.NOTIFY_SCRIPTS_ARGS_TEXT.replace('{progress}','{progress_duration}')
self.CONFIG_VERSION = '3'

View File

@@ -13,20 +13,24 @@
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger
import sqlite3
import arrow
import os
import plexpy
import time
import sqlite3
import shutil
import threading
import logger
import plexpy
db_lock = threading.Lock()
def drop_session_db():
monitor_db = MonitorDatabase()
monitor_db.action('DROP TABLE sessions')
def clear_history_tables():
logger.debug(u"PlexPy Database :: Deleting all session_history records... No turning back now bub.")
monitor_db = MonitorDatabase()
@@ -35,10 +39,52 @@ def clear_history_tables():
monitor_db.action('DELETE FROM session_history_metadata')
monitor_db.action('VACUUM;')
def db_filename(filename="plexpy.db"):
""" Returns the filepath to the db """
return os.path.join(plexpy.DATA_DIR, filename)
def make_backup(cleanup=False, scheduler=False):
""" Makes a backup of db, removes all but the last 5 backups """
if scheduler:
backup_file = 'plexpy.backup-%s.sched.db' % arrow.now().format('YYYYMMDDHHmmss')
else:
backup_file = 'plexpy.backup-%s.db' % arrow.now().format('YYYYMMDDHHmmss')
backup_folder = plexpy.CONFIG.BACKUP_DIR
backup_file_fp = os.path.join(backup_folder, backup_file)
# In case the user has deleted it manually
if not os.path.exists(backup_folder):
os.makedirs(backup_folder)
db = MonitorDatabase()
db.connection.execute('begin immediate')
shutil.copyfile(db_filename(), backup_file_fp)
db.connection.rollback()
if cleanup:
# Delete all scheduled backup files except for the last 5.
for root, dirs, files in os.walk(backup_folder):
db_files = [os.path.join(root, f) for f in files if f.endswith('.sched.db')]
if len(db_files) > 5:
backups_sorted_on_age = sorted(db_files, key=os.path.getctime, reverse=True)
for file_ in backups_sorted_on_age[5:]:
try:
os.remove(file_)
except OSError as e:
logger.error(u"PlexPy Database :: Failed to delete %s from the backup folder: %s" % (file_, e))
if backup_file in os.listdir(backup_folder):
logger.debug(u"PlexPy Database :: Successfully backed up %s to %s" % (db_filename(), backup_file))
return True
else:
logger.warn(u"PlexPy Database :: Failed to backup %s to %s" % (db_filename(), backup_file))
return False
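The backup routine above takes an immediate SQLite lock, copies the database file, and then prunes everything but the five newest *.sched.db files. A rough standalone sketch of that rotation, assuming only the standard library (paths and naming here are illustrative):

import os
import shutil
import sqlite3
import time

def backup_sqlite(db_path, backup_dir, keep=5):
    """Copy an SQLite file under an immediate transaction and keep only the newest backups."""
    if not os.path.exists(backup_dir):
        os.makedirs(backup_dir)
    backup_file = os.path.join(backup_dir, 'backup-%s.sched.db' % time.strftime('%Y%m%d%H%M%S'))
    conn = sqlite3.connect(db_path)
    try:
        conn.execute('begin immediate')   # block writers while the file is copied
        shutil.copyfile(db_path, backup_file)
        conn.rollback()
    finally:
        conn.close()
    # Prune: keep only the `keep` newest scheduled backups.
    backups = sorted((f for f in os.listdir(backup_dir) if f.endswith('.sched.db')),
                     key=lambda f: os.path.getctime(os.path.join(backup_dir, f)),
                     reverse=True)
    for old in backups[keep:]:
        os.remove(os.path.join(backup_dir, old))
    return os.path.exists(backup_file)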
def get_cache_size():
# This will protect against typecasting problems produced by empty string and None settings
if not plexpy.CONFIG.CACHE_SIZEMB:
@@ -46,6 +92,7 @@ def get_cache_size():
return 0
return int(plexpy.CONFIG.CACHE_SIZEMB)
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
@@ -87,15 +134,15 @@ class MonitorDatabase(object):
except sqlite3.OperationalError, e:
if "unable to open database file" in e.message or "database is locked" in e.message:
logger.warn('Database Error: %s', e)
logger.warn(u"PlexPy Database :: Database Error: %s", e)
attempts += 1
time.sleep(1)
else:
logger.error('Database error: %s', e)
logger.error(u"PlexPy Database :: Database error: %s", e)
raise
except sqlite3.DatabaseError, e:
logger.error('Fatal Error executing %s :: %s', query, e)
logger.error(u"PlexPy Database :: Fatal Error executing %s :: %s", query, e)
raise
return sql_result
@@ -139,7 +186,7 @@ class MonitorDatabase(object):
try:
self.action(insert_query, value_dict.values() + key_dict.values())
except sqlite3.IntegrityError:
logger.info('Queries failed: %s and %s', update_query, insert_query)
logger.info(u"PlexPy Database :: Queries failed: %s and %s", update_query, insert_query)
# We want to know if it was an update or insert
return trans_type
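The upsert helper shown in this hunk first attempts an UPDATE on the key columns and falls back to an INSERT when nothing matched, which is how notify_log and the other tables are written. A condensed sketch of that pattern with plain sqlite3 (table and column names here are examples, not the full PlexPy schema):

import sqlite3

def upsert(conn, table, key_dict, value_dict):
    """UPDATE the row matching key_dict; INSERT key_dict + value_dict if nothing was updated."""
    where = ' AND '.join('%s = ?' % k for k in key_dict)
    assignments = ', '.join('%s = ?' % k for k in value_dict)
    cur = conn.execute('UPDATE %s SET %s WHERE %s' % (table, assignments, where),
                       list(value_dict.values()) + list(key_dict.values()))
    if cur.rowcount == 0:
        columns = list(key_dict) + list(value_dict)
        placeholders = ', '.join('?' * len(columns))
        conn.execute('INSERT INTO %s (%s) VALUES (%s)' % (table, ', '.join(columns), placeholders),
                     list(key_dict.values()) + list(value_dict.values()))
    conn.commit()

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE notify_log (session_key INTEGER, agent_id INTEGER, subject_text TEXT)')
upsert(conn, 'notify_log', {'session_key': 1, 'agent_id': 2}, {'subject_text': 'PlexPy (Test)'})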

View File

@@ -58,7 +58,7 @@ class DataFactory(object):
'session_history_metadata.thumb',
'session_history_metadata.parent_thumb',
'session_history_metadata.grandparent_thumb',
'((CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) / \
'MAX((CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) / \
(CASE WHEN session_history_metadata.duration IS NULL THEN 1.0 \
ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete',
'session_history_media_info.video_decision',
@@ -664,7 +664,8 @@ class DataFactory(object):
for id in library_cards:
if id.isdigit():
try:
query = 'SELECT section_id, section_name, section_type, thumb, count, parent_count, child_count ' \
query = 'SELECT section_id, section_name, section_type, thumb AS library_thumb, ' \
'custom_thumb_url AS custom_thumb, count, parent_count, child_count ' \
'FROM library_sections ' \
'WHERE section_id = %s ' % id
result = monitor_db.select(query)
@@ -673,10 +674,17 @@ class DataFactory(object):
return None
for item in result:
if item['custom_thumb'] and item['custom_thumb'] != item['library_thumb']:
library_thumb = item['custom_thumb']
elif item['library_thumb']:
library_thumb = item['library_thumb']
else:
library_thumb = common.DEFAULT_COVER_THUMB
library = {'section_id': item['section_id'],
'section_name': item['section_name'],
'section_type': item['section_type'],
'thumb': item['thumb'],
'thumb': library_thumb,
'count': item['count'],
'parent_count': item['parent_count'],
'child_count': item['child_count']
@@ -835,19 +843,45 @@ class DataFactory(object):
def get_session_ip(self, session_key=''):
monitor_db = database.MonitorDatabase()
if session_key:
query = 'SELECT ip_address FROM sessions WHERE session_key = %d' % int(session_key)
result = monitor_db.select(query)
else:
return None
ip_address = 'N/A'
if session_key:
try:
query = 'SELECT ip_address FROM sessions WHERE session_key = %d' % int(session_key)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_session_ip: %s." % e)
return ip_address
else:
return ip_address
for item in result:
ip_address = item['ip_address']
return ip_address
def get_poster_url(self, rating_key=''):
monitor_db = database.MonitorDatabase()
poster_url = ''
if rating_key:
try:
query = 'SELECT id, poster_url FROM notify_log ' \
'WHERE rating_key = %d OR parent_rating_key = %d OR grandparent_rating_key = %d ' \
'ORDER BY id DESC LIMIT 1' % (int(rating_key), int(rating_key), int(rating_key))
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_poster_url: %s." % e)
return poster_url
else:
return poster_url
for item in result:
poster_url = item['poster_url']
return poster_url
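get_session_ip and get_poster_url both interpolate the key straight into the SQL string after an isdigit-style guard. The same poster lookup can be written with parameter binding; a small self-contained sketch (schema trimmed to the columns the query needs):

import sqlite3

def get_poster_url(conn, rating_key):
    """Return the most recent poster_url recorded for a rating key, or '' if none."""
    if not str(rating_key).isdigit():
        return ''
    row = conn.execute('SELECT poster_url FROM notify_log '
                       'WHERE rating_key = ? OR parent_rating_key = ? OR grandparent_rating_key = ? '
                       'ORDER BY id DESC LIMIT 1',
                       (int(rating_key),) * 3).fetchone()
    return row[0] if row and row[0] else ''

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE notify_log (id INTEGER PRIMARY KEY, rating_key INTEGER, '
             'parent_rating_key INTEGER, grandparent_rating_key INTEGER, poster_url TEXT)')
conn.execute('INSERT INTO notify_log (rating_key, poster_url) VALUES (?, ?)',
             (1234, 'https://i.imgur.com/example.jpg'))
print(get_poster_url(conn, 1234))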
def get_search_query(self, rating_key=''):
monitor_db = database.MonitorDatabase()
@@ -1086,4 +1120,72 @@ class DataFactory(object):
metadata['studio'],
old_rating_key]
monitor_db.action(query=query, args=args)
def get_notification_log(self, kwargs=None):
data_tables = datatables.DataTables()
columns = ['notify_log.id',
'notify_log.timestamp',
'notify_log.session_key',
'notify_log.rating_key',
'notify_log.user_id',
'notify_log.user',
'notify_log.agent_id',
'notify_log.agent_name',
'notify_log.notify_action',
'notify_log.subject_text',
'notify_log.body_text',
'notify_log.script_args',
'notify_log.poster_url',
]
try:
query = data_tables.ssp_query(table_name='notify_log',
columns=columns,
custom_where=[],
group_by=[],
join_types=[],
join_tables=[],
join_evals=[],
kwargs=kwargs)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_notification_log: %s." % e)
return {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
'data': 'null',
'error': 'Unable to execute database query.'}
notifications = query['result']
rows = []
for item in notifications:
if item['body_text']:
body_text = item['body_text'].replace('\r\n', '<br />').replace('\n', '<br />')
else:
body_text = ''
row = {'id': item['id'],
'timestamp': item['timestamp'],
'session_key': item['session_key'],
'rating_key': item['rating_key'],
'user_id': item['user_id'],
'user': item['user'],
'agent_id': item['agent_id'],
'agent_name': item['agent_name'],
'notify_action': item['notify_action'],
'subject_text': item['subject_text'],
'body_text': body_text,
'script_args': item['script_args'],
'poster_url': item['poster_url']
}
rows.append(row)
dict = {'recordsFiltered': query['filteredCount'],
'recordsTotal': query['totalCount'],
'data': rows,
'draw': query['draw']
}
return dict
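get_notification_log returns the standard DataTables server-side-processing envelope: draw, recordsTotal, recordsFiltered and the row data, with an error key on failure. A tiny sketch of just that envelope, independent of PlexPy's ssp_query helper:

def datatables_response(draw, total, filtered, rows, error=None):
    """Build the envelope expected by a DataTables server-side-processing client."""
    response = {'draw': draw,
                'recordsTotal': total,
                'recordsFiltered': filtered,
                'data': rows}
    if error:
        response['error'] = error
    return response

print(datatables_response(1, 120, 7,
                          [{'id': 1, 'notify_action': 'play', 'subject_text': 'PlexPy (Media)'}]))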

View File

@@ -14,9 +14,9 @@
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger, database, helpers, common
import plexpy
import datetime
import locale
class Graphs(object):
@@ -321,7 +321,7 @@ class Graphs(object):
dt = datetime.datetime(*month_item[:6])
date_string = dt.strftime('%Y-%m')
categories.append(dt.strftime('%b %Y').decode(locale.getlocale()[1]))
categories.append(dt.strftime('%b %Y').decode(plexpy.SYS_ENCODING, 'replace'))
series_1_value = 0
series_2_value = 0
series_3_value = 0
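The change above stops trusting locale.getlocale() for decoding the strftime('%b %Y') label and uses PlexPy's detected system encoding with 'replace' instead, which avoids a crash when the locale is unset. A small sketch of the same guard; locale.getpreferredencoding() stands in for plexpy.SYS_ENCODING here, and on Python 3 strftime already returns text so the decode branch is skipped:

import datetime
import locale

def month_label(dt, encoding=None):
    """Return a text month label, decoding strftime output when it comes back as bytes (Python 2)."""
    label = dt.strftime('%b %Y')
    if isinstance(label, bytes):
        # Python 2: strftime returns a locale-encoded byte string; decode defensively.
        label = label.decode(encoding or locale.getpreferredencoding() or 'utf-8', 'replace')
    return label

print(month_label(datetime.datetime(2016, 2, 21)))  # e.g. 'Feb 2016'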

View File

@@ -13,22 +13,61 @@
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from operator import itemgetter
from xml.dom import minidom
import unicodedata
import plexpy
import base64
import datetime
import fnmatch
import shutil
import time
import sys
import re
import os
from functools import wraps
from IPy import IP
import json
import xmltodict
import math
from operator import itemgetter
import os
import re
import shutil
import socket
import sys
import time
import unicodedata
import urllib, urllib2
from xml.dom import minidom
import xmltodict
import plexpy
from api2 import API2
def addtoapi(*dargs, **dkwargs):
""" Helper decorator that adds function to the API class.
is used to reuse as much code as possible
args:
dargs: (string, optional) Used to rename a function
Example:
@addtoapi("i_was_renamed", "im_a_second_alias")
@addtoapi()
"""
def rd(function):
@wraps(function)
def wrapper(*args, **kwargs):
return function(*args, **kwargs)
if dargs:
# To rename the function if it sucks.. and
# allow compat with old api.
for n in dargs:
if function.__doc__ and len(function.__doc__):
function.__doc__ = function.__doc__.strip()
setattr(API2, n, function)
return wrapper
if function.__doc__ and len(function.__doc__):
function.__doc__ = function.__doc__.strip()
setattr(API2, function.__name__, function)
return wrapper
return rd
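addtoapi registers the wrapped function (optionally under one or more alias names) as an attribute of the API2 class, so existing helpers can be exposed through the v2 API without duplication. A self-contained sketch of that registration trick with a stand-in API class (staticmethod is used so the sketch works when the function is called on the class directly):

from functools import wraps

class API(object):
    """Stand-in for PlexPy's API2 class."""
    pass

def addtoapi(*aliases):
    """Register the decorated function on the API class, optionally under alias names."""
    def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            return function(*args, **kwargs)
        names = aliases or (function.__name__,)
        for name in names:
            setattr(API, name, staticmethod(function))
        return wrapper
    return decorator

@addtoapi('get_apikey', 'getApiKey')
def fetch_api_key():
    return 'example-key'

print(API.get_apikey())   # -> 'example-key'
print(API.getApiKey())    # -> 'example-key'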
def multikeysort(items, columns):
comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
@@ -135,6 +174,15 @@ def convert_seconds(s):
return minutes
def convert_seconds_to_minutes(s):
if str(s).isdigit():
minutes = round(float(s) / 60, 0)
return math.trunc(minutes)
return 0
def today():
today = datetime.date.today()
@@ -164,7 +212,7 @@ def human_duration(s, sig='dhms'):
if sig >= 'dh' and h > 0:
h = h + 1 if sig == 'dh' and m >= 30 else h
hd_list.append(str(h) + ' hrs')
if sig >= 'dhm' and m > 0:
m = m + 1 if sig == 'dhm' and s >= 30 else m
hd_list.append(str(m) + ' mins')
@@ -332,7 +380,7 @@ def split_string(mystring, splitvar=','):
def create_https_certificates(ssl_cert, ssl_key):
"""
Create a pair of self-signed HTTPS certificares and store in them in
Create a self-signed HTTPS certificate and store it in
'ssl_cert' and 'ssl_key'. Method assumes pyOpenSSL is installed.
This code is stolen from SickBeard (http://github.com/midgetspy/Sick-Beard).
@@ -341,24 +389,24 @@ def create_https_certificates(ssl_cert, ssl_key):
from plexpy import logger
from OpenSSL import crypto
from certgen import createKeyPair, createCertRequest, createCertificate, \
TYPE_RSA, serial
from certgen import createKeyPair, createSelfSignedCertificate, TYPE_RSA
# Create the CA Certificate
cakey = createKeyPair(TYPE_RSA, 2048)
careq = createCertRequest(cakey, CN="Certificate Authority")
cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years
serial = int(time.time())
domains = ['DNS:' + d.strip() for d in plexpy.CONFIG.HTTPS_DOMAIN.split(',') if d]
ips = ['IP:' + d.strip() for d in plexpy.CONFIG.HTTPS_IP.split(',') if d]
altNames = ','.join(domains + ips)
# Create the self-signed PlexPy certificate
logger.debug(u"Generating self-signed SSL certificate.")
pkey = createKeyPair(TYPE_RSA, 2048)
req = createCertRequest(pkey, CN="PlexPy")
cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years
cert = createSelfSignedCertificate(("PlexPy", pkey), serial, (0, 60 * 60 * 24 * 365 * 10), altNames) # ten years
# Save the key and certificate to disk
try:
with open(ssl_key, "w") as fp:
fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
with open(ssl_cert, "w") as fp:
fp.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
with open(ssl_key, "w") as fp:
fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
except IOError as e:
logger.error("Error creating SSL key and certificate: %s", e)
return False
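The rewritten helper drops the throwaway CA and issues one self-signed certificate whose subjectAltName covers the configured HTTPS domains and IPs. createSelfSignedCertificate comes from PlexPy's bundled certgen module; a rough equivalent written directly against pyOpenSSL (an illustration, not the certgen implementation) could look like:

import time
from OpenSSL import crypto

def make_self_signed_cert(common_name, alt_names, days=3650, bits=2048):
    """Create an RSA key and a self-signed certificate with the given subjectAltName entries."""
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, bits)

    cert = crypto.X509()
    cert.set_version(2)                          # X509 v3, so the extension below is valid
    cert.get_subject().CN = common_name
    cert.set_serial_number(int(time.time()))
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(days * 24 * 60 * 60)
    cert.set_issuer(cert.get_subject())          # self-signed: issuer == subject
    cert.set_pubkey(key)
    if alt_names:
        cert.add_extensions([crypto.X509Extension(b'subjectAltName', False,
                                                  ', '.join(alt_names).encode('ascii'))])
    cert.sign(key, 'sha256')
    return key, cert

key, cert = make_self_signed_cert('PlexPy', ['DNS:localhost', 'IP:127.0.0.1'])
key_pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)
cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)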
@@ -369,14 +417,14 @@ def create_https_certificates(ssl_cert, ssl_key):
def cast_to_int(s):
try:
return int(s)
except ValueError:
return -1
except (ValueError, TypeError):
return 0
def cast_to_float(s):
try:
return float(s)
except ValueError:
return -1
except (ValueError, TypeError):
return 0
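cast_to_int and cast_to_float now swallow TypeError as well as ValueError and return 0 instead of -1, so callers such as the uptime and stream-duration math never see a negative sentinel or an exception for None. For example:

def cast_to_int(s):
    try:
        return int(s)
    except (ValueError, TypeError):
        return 0

print(cast_to_int('42'))   # 42
print(cast_to_int(None))   # 0 (previously a TypeError would have escaped)
print(cast_to_int('n/a'))  # 0 (previously -1)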
def convert_xml_to_json(xml):
o = xmltodict.parse(xml)
@@ -448,83 +496,69 @@ def sanitize(string):
else:
return ''
def parse_js_date(date):
"""
Taken from moment library.
def is_ip_public(host):
ip_address = get_ip(host)
ip = IP(ip_address)
if ip.iptype() == 'PUBLIC':
return True
Translate the easy-to-use JavaScript format strings to Python's cumbersome
strftime format. Also, this is some ugly code -- and it's completely
order-dependent.
"""
# AM/PM
if 'A' in date:
date = date.replace('A', '%p')
elif 'a' in date:
date = date.replace('a', '%P')
# 24 hours
if 'HH' in date:
date = date.replace('HH', '%H')
elif 'H' in date:
date = date.replace('H', '%k')
# 12 hours
elif 'hh' in date:
date = date.replace('hh', '%I')
elif 'h' in date:
date = date.replace('h', '%l')
# Minutes
if 'mm' in date:
date = date.replace('mm', '%min')
elif 'm' in date:
date = date.replace('m', '%min')
# Seconds
if 'ss' in date:
date = date.replace('ss', '%S')
elif 's' in date:
date = date.replace('s', '%S')
# Milliseconds
if 'SSS' in date:
date = date.replace('SSS', '%3')
# Years
if 'YYYY' in date:
date = date.replace('YYYY', '%Y')
elif 'YY' in date:
date = date.replace('YY', '%y')
# Months
if 'MMMM' in date:
date = date.replace('MMMM', '%B')
elif 'MMM' in date:
date = date.replace('MMM', '%b')
elif 'MM' in date:
date = date.replace('MM', '%m')
elif 'M' in date:
date = date.replace('M', '%m')
# Days of the week
if 'dddd' in date:
date = date.replace('dddd', '%A')
elif 'ddd' in date:
date = date.replace('ddd', '%a')
elif 'dd' in date:
date = date.replace('dd', '%w')
elif 'd' in date:
date = date.replace('d', '%u')
# Days of the year
if 'DDDD' in date:
date = date.replace('DDDD', '%j')
elif 'DDD' in date:
date = date.replace('DDD', '%j')
# Days of the month
elif 'DD' in date:
date = date.replace('DD', '%d')
# 'Do' not valid python time format
elif 'Do' in date:
date = date.replace('Do', '')
elif 'D' in date:
date = date.replace('D', '%d')
# Timezone
if 'zz' in date:
date = date.replace('zz', '%Z')
# A necessary evil right now...
if '%min' in date:
date = date.replace('%min', '%M')
    return date
def is_ip_public(host):
    ip_address = get_ip(host)
    ip = IP(ip_address)
    if ip.iptype() == 'PUBLIC':
        return True
    return False
def get_ip(host):
from plexpy import logger
ip_address = ''
try:
socket.inet_aton(host)
ip_address = host
except socket.error:
try:
ip_address = socket.gethostbyname(host)
logger.debug(u"IP Checker :: Resolved %s to %s." % (host, ip_address))
except:
logger.error(u"IP Checker :: Bad IP or hostname provided.")
return ip_address
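Putting the two new helpers together: get_ip falls back to DNS resolution when the host is not already an IPv4 literal, and is_ip_public classifies the result with IPy. A combined, runnable sketch (assuming the IPy package is installed):

import socket
from IPy import IP

def is_ip_public(host):
    """Resolve host to an address and report whether it is publicly routable."""
    try:
        socket.inet_aton(host)        # already an IPv4 address?
        ip_address = host
    except socket.error:
        try:
            ip_address = socket.gethostbyname(host)
        except socket.error:
            return False
    return IP(ip_address).iptype() == 'PUBLIC'

print(is_ip_public('192.168.1.10'))  # False (private range)
print(is_ip_public('8.8.8.8'))       # True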
# Taken from SickRage
def anon_url(*url):
"""
Return a URL string consisting of the Anonymous redirect URL and an arbitrary number of values appended.
"""
return '' if None in url else '%s%s' % (plexpy.CONFIG.ANON_REDIRECT, ''.join(str(s) for s in url))
def uploadToImgur(imgPath, imgTitle=''):
from plexpy import logger
client_id = '743b1a443ccd2b0'
img_url = ''
try:
with open(imgPath, 'rb') as imgFile:
img = imgFile.read()
except IOError as e:
logger.error(u"PlexPy Helpers :: Unable to read image file for Imgur: %s" % e)
return img_url
headers = {'Authorization': 'Client-ID %s' % client_id}
data = {'type': 'base64',
'image': base64.b64encode(img)}
if imgTitle:
data['title'] = imgTitle.encode('utf-8')
data['name'] = imgTitle.encode('utf-8') + '.jpg'
try:
request = urllib2.Request('https://api.imgur.com/3/image', headers=headers, data=urllib.urlencode(data))
response = urllib2.urlopen(request)
response = json.loads(response.read())
if response.get('status') == 200:
logger.debug(u"PlexPy Helpers :: Image uploaded to Imgur.")
img_url = response.get('data').get('link', '')
elif response.get('status') >= 400 and response.get('status') < 500:
logger.warn(u"PlexPy Helpers :: Unable to upload image to Imgur: %s" % response.reason)
else:
logger.warn(u"PlexPy Helpers :: Unable to upload image to Imgur.")
except urllib2.HTTPError as e:
logger.warn(u"PlexPy Helpers :: Unable to upload image to Imgur: %s" % e)
return img_url
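uploadToImgur base64-encodes the cached poster and POSTs it to Imgur's anonymous /3/image endpoint with a Client-ID header, returning the hosted link on success. A hedged sketch of the same call using the requests library instead of urllib2 (requests is not what PlexPy uses here, and the client ID below is a placeholder):

import base64
import requests  # assumption: requests is available; PlexPy itself uses urllib2 for this call

def upload_to_imgur(img_path, client_id, title=''):
    """Anonymously upload an image to Imgur and return the hosted link, or '' on failure."""
    with open(img_path, 'rb') as fp:
        payload = {'type': 'base64', 'image': base64.b64encode(fp.read())}
    if title:
        payload['title'] = title
    r = requests.post('https://api.imgur.com/3/image',
                      headers={'Authorization': 'Client-ID %s' % client_id},
                      data=payload,
                      timeout=30)
    body = r.json()
    if r.status_code == 200 and body.get('success'):
        return body['data']['link']
    return ''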

View File

@@ -16,10 +16,10 @@
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger, helpers
from httplib import HTTPSConnection
from httplib import HTTPConnection
import ssl
from plexpy import logger, helpers
class HTTPHandler(object):
@@ -44,7 +44,8 @@ class HTTPHandler(object):
headers=None,
output_format='raw',
return_type=False,
no_token=False):
no_token=False,
timeout=20):
valid_request_types = ['GET', 'POST', 'PUT', 'DELETE']
@@ -56,12 +57,12 @@ class HTTPHandler(object):
if proto.upper() == 'HTTPS':
if not self.ssl_verify and hasattr(ssl, '_create_unverified_context'):
context = ssl._create_unverified_context()
handler = HTTPSConnection(host=self.host, port=self.port, timeout=20, context=context)
handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout, context=context)
logger.warn(u"PlexPy HTTP Handler :: Unverified HTTPS request made. This connection is not secure.")
else:
handler = HTTPSConnection(host=self.host, port=self.port, timeout=20)
handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout)
else:
handler = HTTPConnection(host=self.host, port=self.port, timeout=20)
handler = HTTPConnection(host=self.host, port=self.port, timeout=timeout)
token_string = ''
if not no_token:

View File

@@ -18,62 +18,86 @@ import plexpy
def update_section_ids():
from plexpy import pmsconnect, activity_pinger
import threading
#import threading
plexpy.CONFIG.UPDATE_SECTION_IDS = -1
logger.info(u"PlexPy Libraries :: Updating section_id's in database.")
logger.debug(u"PlexPy Libraries :: Disabling monitoring while update in progress.")
plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
hours=0, minutes=0, seconds=0)
plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items',
hours=0, minutes=0, seconds=0)
plexpy.schedule_job(activity_pinger.check_server_response, 'Check for server response',
hours=0, minutes=0, seconds=0)
#logger.debug(u"PlexPy Libraries :: Disabling monitoring while update in progress.")
#plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
# hours=0, minutes=0, seconds=0)
#plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items',
# hours=0, minutes=0, seconds=0)
#plexpy.schedule_job(activity_pinger.check_server_response, 'Check for server response',
# hours=0, minutes=0, seconds=0)
monitor_db = database.MonitorDatabase()
try:
query = 'SELECT id, rating_key FROM session_history_metadata WHERE section_id IS NULL'
result = monitor_db.select(query=query)
query = 'SELECT id, rating_key, grandparent_rating_key, media_type ' \
'FROM session_history_metadata WHERE section_id IS NULL'
history_results = monitor_db.select(query=query)
query = 'SELECT section_id, section_type FROM library_sections'
library_results = monitor_db.select(query=query)
except Exception as e:
logger.warn(u"PlexPy Libraries :: Unable to execute database query for update_section_ids: %s." % e)
logger.warn(u"PlexPy Libraries :: Unable to update section_id's in database.")
plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 1)
plexpy.CONFIG.UPDATE_SECTION_IDS = 1
plexpy.CONFIG.write()
logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
plexpy.initialize_scheduler()
#logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
#plexpy.initialize_scheduler()
return None
if not history_results:
plexpy.CONFIG.UPDATE_SECTION_IDS = 0
plexpy.CONFIG.write()
return None
logger.info(u"PlexPy Libraries :: Updating section_id's in database.")
# Add thread filter to the logger
logger.debug(u"PlexPy Libraries :: Disabling logging in the current thread while update in progress.")
thread_filter = logger.NoThreadFilter(threading.current_thread().name)
for handler in logger.logger.handlers:
handler.addFilter(thread_filter)
#logger.debug(u"PlexPy Libraries :: Disabling logging in the current thread while update in progress.")
#thread_filter = logger.NoThreadFilter(threading.current_thread().name)
#for handler in logger.logger.handlers:
# handler.addFilter(thread_filter)
# Get rating_key: section_id mapping pairs
key_mappings = {}
pms_connect = pmsconnect.PmsConnect()
for library in library_results:
section_id = library['section_id']
section_type = library['section_type']
if section_type != 'photo':
library_children = pms_connect.get_library_children_details(section_id=section_id,
section_type=section_type)
if library_children:
children_list = library_children['childern_list']
key_mappings.update({child['rating_key']:child['section_id'] for child in children_list})
else:
logger.warn(u"PlexPy Libraries :: Unable to get a list of library items for section_id %s." % section_id)
error_keys = set()
for item in result:
id = item['id']
rating_key = item['rating_key']
metadata = pms_connect.get_metadata_details(rating_key=rating_key)
if metadata:
metadata = metadata['metadata']
section_keys = {'id': id}
section_values = {'section_id': metadata['section_id']}
monitor_db.upsert('session_history_metadata', key_dict=section_keys, value_dict=section_values)
for item in history_results:
rating_key = item['grandparent_rating_key'] if item['media_type'] != 'movie' else item['rating_key']
section_id = key_mappings.get(str(rating_key), None)
if section_id:
try:
section_keys = {'id': item['id']}
section_values = {'section_id': section_id}
monitor_db.upsert('session_history_metadata', key_dict=section_keys, value_dict=section_values)
except:
error_keys.add(item['rating_key'])
else:
error_keys.add(rating_key)
error_keys.add(item['rating_key'])
# Remove thread filter from the logger
for handler in logger.logger.handlers:
handler.removeFilter(thread_filter)
logger.debug(u"PlexPy Libraries :: Re-enabling logging in the current thread.")
#for handler in logger.logger.handlers:
# handler.removeFilter(thread_filter)
#logger.debug(u"PlexPy Libraries :: Re-enabling logging in the current thread.")
if error_keys:
logger.info(u"PlexPy Libraries :: Updated all section_id's in database except for rating_keys: %s." %
@@ -81,11 +105,11 @@ def update_section_ids():
else:
logger.info(u"PlexPy Libraries :: Updated all section_id's in database.")
plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 0)
plexpy.CONFIG.UPDATE_SECTION_IDS = 0
plexpy.CONFIG.write()
logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
plexpy.initialize_scheduler()
#logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
#plexpy.initialize_scheduler()
return True
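Instead of one metadata lookup per history row, the reworked update_section_ids builds a single rating_key to section_id map from each library's children and then upserts the section_id into session_history_metadata, collecting any keys it could not resolve. A reduced sketch of the mapping step (the 'childern_list' key follows the code above; the PMS call is faked):

def build_key_mappings(libraries, get_children):
    """Map child rating_key -> section_id for every non-photo library.

    `libraries` is a list of {'section_id': ..., 'section_type': ...} dicts and
    `get_children` is any callable returning {'childern_list': [...]} in the shape
    of PmsConnect.get_library_children_details.
    """
    key_mappings = {}
    for library in libraries:
        if library['section_type'] == 'photo':
            continue
        children = get_children(section_id=library['section_id'],
                                section_type=library['section_type'])
        if children:
            key_mappings.update({child['rating_key']: child['section_id']
                                 for child in children['childern_list']})
    return key_mappings

def fake_children(section_id, section_type):
    """Hypothetical stand-in for PmsConnect.get_library_children_details."""
    return {'childern_list': [{'rating_key': '101', 'section_id': section_id}]}

print(build_key_mappings([{'section_id': '1', 'section_type': 'movie'}], fake_children))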
@@ -109,14 +133,21 @@ class Libraries(object):
'library_sections.custom_thumb_url AS custom_thumb',
'library_sections.art',
'COUNT(session_history.id) AS plays',
'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
session_history.paused_counter END) AS duration',
'MAX(session_history.started) AS last_accessed',
'MAX(session_history.id) AS id',
'session_history_metadata.full_title AS last_watched',
'session_history_metadata.full_title AS last_played',
'session_history.rating_key',
'session_history_metadata.media_type',
'session_history_metadata.thumb',
'session_history_metadata.parent_thumb',
'session_history_metadata.grandparent_thumb',
'session_history_metadata.media_type',
'session_history.rating_key',
'session_history_metadata.parent_title',
'session_history_metadata.year',
'session_history_metadata.media_index',
'session_history_metadata.parent_media_index',
'session_history_media_info.video_decision',
'library_sections.do_notify',
'library_sections.do_notify_created',
@@ -172,13 +203,17 @@ class Libraries(object):
'library_thumb': library_thumb,
'library_art': item['art'],
'plays': item['plays'],
'duration': item['duration'],
'last_accessed': item['last_accessed'],
'id': item['id'],
'last_watched': item['last_watched'],
'thumb': thumb,
'media_type': item['media_type'],
'last_played': item['last_played'],
'rating_key': item['rating_key'],
'video_decision': item['video_decision'],
'media_type': item['media_type'],
'thumb': thumb,
'parent_title': item['parent_title'],
'year': item['year'],
'media_index': item['media_index'],
'parent_media_index': item['parent_media_index'],
'do_notify': helpers.checked(item['do_notify']),
'do_notify_created': helpers.checked(item['do_notify_created']),
'keep_history': helpers.checked(item['keep_history'])
@@ -236,7 +271,7 @@ class Libraries(object):
group_by = 'rating_key'
try:
query = 'SELECT MAX(session_history.started) AS last_watched, COUNT(DISTINCT session_history.%s) AS play_count, ' \
query = 'SELECT MAX(session_history.started) AS last_played, COUNT(DISTINCT session_history.%s) AS play_count, ' \
'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \
'FROM session_history ' \
'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \
@@ -249,7 +284,7 @@ class Libraries(object):
watched_list = {}
for item in result:
watched_list[str(item[group_by])] = {'last_watched': item['last_watched'],
watched_list[str(item[group_by])] = {'last_played': item['last_played'],
'play_count': item['play_count']}
rows = []
@@ -344,14 +379,14 @@ class Libraries(object):
except IOError as e:
logger.debug(u"PlexPy Libraries :: Unable to create cache file for section_id %s." % section_id)
# Update the last_watched and play_count
# Update the last_played and play_count
for item in rows:
watched_item = watched_list.get(item['rating_key'], None)
if watched_item:
item['last_watched'] = watched_item['last_watched']
item['last_played'] = watched_item['last_played']
item['play_count'] = watched_item['play_count']
else:
item['last_watched'] = None
item['last_played'] = None
item['play_count'] = None
results = []
@@ -505,52 +540,27 @@ class Libraries(object):
def get_details(self, section_id=None):
from plexpy import pmsconnect
monitor_db = database.MonitorDatabase()
default_return = {'section_id': None,
'section_name': 'Local',
'section_type': '',
'library_thumb': common.DEFAULT_COVER_THUMB,
'library_art': '',
'count': 0,
'parent_count': 0,
'child_count': 0,
'do_notify': 0,
'do_notify_created': 0,
'keep_history': 0
}
try:
if section_id:
query = 'SELECT section_id, section_name, section_type, count, parent_count, child_count, ' \
'thumb AS library_thumb, custom_thumb_url AS custom_thumb, art, ' \
'do_notify, do_notify_created, keep_history ' \
'FROM library_sections ' \
'WHERE section_id = ? '
result = monitor_db.select(query, args=[section_id])
else:
result = []
except Exception as e:
logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_details: %s." % e)
result = []
if not section_id:
return default_return
if result:
library_details = {}
for item in result:
if item['custom_thumb'] and item['custom_thumb'] != item['library_thumb']:
library_thumb = item['custom_thumb']
elif item['library_thumb']:
library_thumb = item['library_thumb']
else:
library_thumb = common.DEFAULT_COVER_THUMB
def get_library_details(section_id=section_id):
monitor_db = database.MonitorDatabase()
library_details = {'section_id': item['section_id'],
'section_name': item['section_name'],
'section_type': item['section_type'],
'library_thumb': library_thumb,
'library_art': item['art'],
'count': item['count'],
'parent_count': item['parent_count'],
'child_count': item['child_count'],
'do_notify': item['do_notify'],
'do_notify_created': item['do_notify_created'],
'keep_history': item['keep_history']
}
return library_details
else:
logger.warn(u"PlexPy Libraries :: Unable to retrieve library from local database. Requesting library list refresh.")
# Let's first refresh the user list to make sure the user isn't newly added and not in the db yet
try:
if section_id:
# Refresh libraries
pmsconnect.refresh_libraries()
if str(section_id).isdigit():
query = 'SELECT section_id, section_name, section_type, count, parent_count, child_count, ' \
'thumb AS library_thumb, custom_thumb_url AS custom_thumb, art, ' \
'do_notify, do_notify_created, keep_history ' \
@@ -559,12 +569,12 @@ class Libraries(object):
result = monitor_db.select(query, args=[section_id])
else:
result = []
except:
except Exception as e:
logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_details: %s." % e)
result = []
library_details = {}
if result:
library_details = {}
for item in result:
if item['custom_thumb'] and item['custom_thumb'] != item['library_thumb']:
library_thumb = item['custom_thumb']
@@ -585,22 +595,28 @@ class Libraries(object):
'do_notify_created': item['do_notify_created'],
'keep_history': item['keep_history']
}
return library_details
library_details = get_library_details(section_id=section_id)
if library_details:
return library_details
else:
logger.warn(u"PlexPy Libraries :: Unable to retrieve library from local database. Requesting library list refresh.")
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
pmsconnect.refresh_libraries()
library_details = get_library_details(section_id=section_id)
if library_details:
return library_details
else:
logger.warn(u"PlexPy Users :: Unable to retrieve user from local database. Returning 'Local' library.")
# If there is no library data we must return something
# Use "Local" user to retain compatibility with PlexWatch database value
return {'section_id': None,
'section_name': 'Local',
'section_type': '',
'library_thumb': common.DEFAULT_COVER_THUMB,
'library_art': '',
'count': 0,
'parent_count': 0,
'child_count': 0,
'do_notify': 0,
'do_notify_created': 0,
'keep_history': 0
}
# Use "Local" library to retain compatibility with PlexWatch database value
return default_return
def get_watch_time_stats(self, section_id=None):
monitor_db = database.MonitorDatabase()
@@ -849,3 +865,21 @@ class Libraries(object):
return 'Unable to delete media info table cache, section_id not valid.'
except Exception as e:
logger.warn(u"PlexPy Libraries :: Unable to delete media info table cache: %s." % e)
def delete_duplicate_libraries(self):
from plexpy import plextv
monitor_db = database.MonitorDatabase()
# Refresh the PMS_URL to make sure the server_id is updated
plextv.get_real_pms_url()
server_id = plexpy.CONFIG.PMS_IDENTIFIER
try:
logger.debug(u"PlexPy Libraries :: Deleting libraries where server_id does not match %s." % server_id)
monitor_db.action('DELETE FROM library_sections WHERE server_id != ?', [server_id])
return 'Deleted duplicate libraries from the database.'
except Exception as e:
logger.warn(u"PlexPy Libraries :: Unable to delete duplicate libraries: %s." % e)

View File

@@ -18,10 +18,14 @@ import re
import os
import plexpy
def get_log_tail(window=20, parsed=True):
def get_log_tail(window=20, parsed=True, log_type="server"):
if plexpy.CONFIG.PMS_LOGS_FOLDER:
log_file = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Server.log')
log_file = ""
if log_type == "server":
log_file = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Server.log')
elif log_type == "scanner":
log_file = os.path.join(plexpy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Scanner.log')
else:
return []
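The log viewer now chooses between 'Plex Media Server.log' and 'Plex Media Scanner.log' from a log_type argument before tailing the file. A small sketch of that selection with a simplified tail (the real get_log_tail also supports parsed output):

import os

LOG_FILES = {'server': 'Plex Media Server.log',
             'scanner': 'Plex Media Scanner.log'}

def get_log_tail(logs_folder, window=20, log_type='server'):
    """Return the last `window` lines of the selected Plex log, or [] if unavailable."""
    filename = LOG_FILES.get(log_type)
    if not logs_folder or not filename:
        return []
    log_file = os.path.join(logs_folder, filename)
    if not os.path.isfile(log_file):
        return []
    with open(log_file, 'r', errors='replace') as fp:
        return fp.readlines()[-window:]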

View File

@@ -14,10 +14,13 @@
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
import arrow
import os
import re
import time
import urllib
from plexpy import logger, config, notifiers, database, helpers, plextv, pmsconnect
from plexpy import logger, config, notifiers, database, helpers, plextv, pmsconnect, datafactory
import plexpy
@@ -47,157 +50,201 @@ def notify(stream_data=None, notify_action=None):
for agent in notifiers.available_notification_agents():
if agent['on_play'] and notify_action == 'play':
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_stop'] and notify_action == 'stop' \
and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < plexpy.CONFIG.NOTIFY_WATCHED_PERCENT):
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
# Set the notification state in the db
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_pause'] and notify_action == 'pause' \
and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
# Set the notification state in the db
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_resume'] and notify_action == 'resume' \
and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
# Set the notification state in the db
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_buffer'] and notify_action == 'buffer':
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
# Set the notification state in the db
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_watched'] and notify_action == 'watched':
# Get the current states for notifications from our db
notify_states = get_notify_state(session=stream_data)
# If there is nothing in the notify_log for our agent id but it is enabled we should notify
if not any(d['agent_id'] == agent['id'] for d in notify_states):
if not any(d['agent_id'] == agent['id'] and d['notify_action'] == notify_action for d in notify_states):
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
else:
# Check in our notify log if the notification has already been sent
for notify_state in notify_states:
if not notify_state['on_watched'] and (notify_state['agent_id'] == agent['id']):
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
notify_action=notify_action,
script_args=notify_strings[2])
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif (stream_data['media_type'] == 'track' and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE):
for agent in notifiers.available_notification_agents():
if agent['on_play'] and notify_action == 'play':
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_stop'] and notify_action == 'stop':
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_pause'] and notify_action == 'pause':
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_resume'] and notify_action == 'resume':
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif agent['on_buffer'] and notify_action == 'buffer':
# Build and send notification
notify_strings = build_notify_text(session=stream_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings, metadata = build_notify_text(session=stream_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
script_args=notify_strings[2])
metadata=metadata)
# Set the notification state in the db
set_notify_state(session=stream_data, state=notify_action, agent_info=agent)
set_notify_state(session=stream_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif stream_data['media_type'] == 'clip':
pass
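Every branch above repeats the same three steps: build the notify strings and metadata, send them through the agent, then record the result with set_notify_state. Purely as an illustration of that repeated shape (PlexPy keeps the branches inline), a generic helper would look like:

def send_and_record(agent, notify_action, notify_strings, metadata,
                    send_notification, set_notify_state, session=None):
    """Send a built notification through one agent and log it to notify_log."""
    send_notification(agent_id=agent['id'],
                      subject=notify_strings[0],
                      body=notify_strings[1],
                      script_args=notify_strings[2],
                      notify_action=notify_action,
                      metadata=metadata)
    set_notify_state(session=session or {},
                     notify_action=notify_action,
                     agent_info=agent,
                     notify_strings=notify_strings,
                     metadata=metadata)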
@@ -210,141 +257,150 @@ def notify(stream_data=None, notify_action=None):
def notify_timeline(timeline_data=None, notify_action=None):
if timeline_data and notify_action:
if (timeline_data['media_type'] == 'movie' and plexpy.CONFIG.MOVIE_NOTIFY_ENABLE) \
or ((timeline_data['media_type'] == 'show' or timeline_data['media_type'] == 'episode') \
and plexpy.CONFIG.TV_NOTIFY_ENABLE) \
or ((timeline_data['media_type'] == 'artist' or timeline_data['media_type'] == 'track') \
and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE):
for agent in notifiers.available_notification_agents():
if agent['on_created'] and notify_action == 'created':
# Build and send notification
notify_strings, metadata = build_notify_text(timeline=timeline_data, notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
script_args=notify_strings[2],
notify_action=notify_action,
metadata=metadata)
for agent in notifiers.available_notification_agents():
if agent['on_created'] and notify_action == 'created':
# Build and send notification
notify_strings = build_notify_text(timeline=timeline_data, state=notify_action)
notifiers.send_notification(config_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
notify_action=notify_action,
script_args=notify_strings[2])
# Set the notification state in the db
set_notify_state(session=timeline_data, state=notify_action, agent_info=agent)
# Set the notification state in the db
set_notify_state(session=timeline_data,
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata=metadata)
elif not timeline_data and notify_action:
for agent in notifiers.available_notification_agents():
if agent['on_extdown'] and notify_action == 'extdown':
# Build and send notification
notify_strings = build_server_notify_text(state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings = build_server_notify_text(notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
notify_action=notify_action,
script_args=notify_strings[2])
script_args=notify_strings[2],
notify_action=notify_action)
# Set the notification state in the db
set_notify_state(session={},
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata={})
if agent['on_intdown'] and notify_action == 'intdown':
# Build and send notification
notify_strings = build_server_notify_text(state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings = build_server_notify_text(notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
notify_action=notify_action,
script_args=notify_strings[2])
script_args=notify_strings[2],
notify_action=notify_action)
# Set the notification state in the db
set_notify_state(session={},
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata={})
if agent['on_extup'] and notify_action == 'extup':
# Build and send notification
notify_strings = build_server_notify_text(state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings = build_server_notify_text(notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
notify_action=notify_action,
script_args=notify_strings[2])
script_args=notify_strings[2],
notify_action=notify_action)
# Set the notification state in the db
set_notify_state(session={},
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata={})
if agent['on_intup'] and notify_action == 'intup':
# Build and send notification
notify_strings = build_server_notify_text(state=notify_action)
notifiers.send_notification(config_id=agent['id'],
notify_strings = build_server_notify_text(notify_action=notify_action)
notifiers.send_notification(agent_id=agent['id'],
subject=notify_strings[0],
body=notify_strings[1],
notify_action=notify_action,
script_args=notify_strings[2])
script_args=notify_strings[2],
notify_action=notify_action)
# Set the notification state in the db
set_notify_state(session={},
notify_action=notify_action,
agent_info=agent,
notify_strings=notify_strings,
metadata={})
else:
logger.debug(u"PlexPy NotificationHandler :: Notify timeline called but incomplete data received.")
def get_notify_state(session):
monitor_db = database.MonitorDatabase()
result = monitor_db.select('SELECT on_play, on_stop, on_pause, on_resume, on_buffer, on_watched, agent_id '
result = monitor_db.select('SELECT timestamp, notify_action, agent_id '
'FROM notify_log '
'WHERE session_key = ? '
'AND rating_key = ? '
'AND user = ? '
'AND user_id = ? '
'ORDER BY id DESC',
args=[session['session_key'], session['rating_key'], session['user']])
args=[session['session_key'], session['rating_key'], session['user_id']])
notify_states = []
for item in result:
notify_state = {'on_play': item['on_play'],
'on_stop': item['on_stop'],
'on_pause': item['on_pause'],
'on_resume': item['on_resume'],
'on_buffer': item['on_buffer'],
'on_watched': item['on_watched'],
notify_state = {'timestamp': item['timestamp'],
'notify_action': item['notify_action'],
'agent_id': item['agent_id']}
notify_states.append(notify_state)
return notify_states
def get_notify_state_timeline(timeline):
monitor_db = database.MonitorDatabase()
result = monitor_db.select('SELECT on_created, agent_id '
'FROM notify_log '
'WHERE rating_key = ? '
'ORDER BY id DESC',
args=[timeline['rating_key']])
notify_states = []
for item in result:
notify_state = {'on_created': item['on_created'],
'agent_id': item['agent_id']}
notify_states.append(notify_state)
def set_notify_state(session, notify_action, agent_info, notify_strings, metadata):
return notify_states
def set_notify_state(session, state, agent_info):
if session and state and agent_info:
if notify_action and agent_info:
monitor_db = database.MonitorDatabase()
if state == 'play':
values = {'on_play': int(time.time())}
elif state == 'stop':
values = {'on_stop': int(time.time())}
elif state == 'pause':
values = {'on_pause': int(time.time())}
elif state == 'resume':
values = {'on_resume': int(time.time())}
elif state == 'buffer':
values = {'on_buffer': int(time.time())}
elif state == 'watched':
values = {'on_watched': int(time.time())}
elif state == 'created':
values = {'on_created': int(time.time())}
if notify_strings[2]:
script_args = '[' + ', '.join(notify_strings[2]) + ']'
else:
return
script_args = None
if state == 'created':
keys = {'rating_key': session['rating_key'],
'agent_id': agent_info['id'],
'agent_name': agent_info['name']}
else:
keys = {'session_key': session['session_key'],
'rating_key': session['rating_key'],
'user_id': session['user_id'],
'user': session['user'],
'agent_id': agent_info['id'],
'agent_name': agent_info['name']}
keys = {'timestamp': int(time.time()),
'session_key': session.get('session_key', None),
'rating_key': session.get('rating_key', None),
'user_id': session.get('user_id', None),
'agent_id': agent_info['id'],
'notify_action': notify_action}
values = {'parent_rating_key': session.get('parent_rating_key', None),
'grandparent_rating_key': session.get('grandparent_rating_key', None),
'user': session.get('user', None),
'agent_name': agent_info['name'],
'subject_text': notify_strings[0],
'body_text': notify_strings[1],
'script_args': script_args,
'poster_url': metadata.get('poster_url', None)}
monitor_db.upsert(table_name='notify_log', key_dict=keys, value_dict=values)
else:
logger.error(u"PlexPy NotificationHandler :: Unable to set notify state.")
def build_notify_text(session=None, timeline=None, state=None):
def build_notify_text(session=None, timeline=None, notify_action=None):
# Get time formats
date_format = plexpy.CONFIG.DATE_FORMAT.replace('Do','').replace('zz','')
time_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','').replace('zz','')
duration_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','').replace('zz','').replace('a','').replace('A','')
# Get the server name
server_name = plexpy.CONFIG.PMS_NAME
@@ -354,7 +410,7 @@ def build_notify_text(session=None, timeline=None, state=None):
if server_times:
updated_at = server_times[0]['updated_at']
server_uptime = helpers.human_duration(int(time.time() - helpers.cast_to_float(updated_at)))
server_uptime = helpers.human_duration(int(time.time() - helpers.cast_to_int(updated_at)))
else:
logger.error(u"PlexPy NotificationHandler :: Unable to retrieve server uptime.")
server_uptime = 'N/A'
@@ -433,81 +489,93 @@ def build_notify_text(session=None, timeline=None, state=None):
else:
full_title = metadata['title']
duration = helpers.convert_milliseconds_to_minutes(metadata['duration'])
# Default values
user = ''
platform = ''
player = ''
ip_address = 'N/A'
stream_duration = 0
view_offset = 0
container = ''
video_codec = ''
video_bitrate = ''
video_width = ''
video_height = ''
video_resolution = ''
video_framerate = ''
aspect_ratio = ''
audio_codec = ''
audio_channels = ''
transcode_decision = ''
video_decision = ''
audio_decision = ''
transcode_container = ''
transcode_video_codec = ''
transcode_video_width = ''
transcode_video_height = ''
transcode_audio_codec = ''
transcode_audio_channels = ''
# Session values
if session:
# Generate a combined transcode decision value
video_decision = session['video_decision'].title()
audio_decision = session['audio_decision'].title()
if session is None:
session = {}
if session['video_decision'] == 'transcode' or session['audio_decision'] == 'transcode':
transcode_decision = 'Transcode'
elif session['video_decision'] == 'copy' or session['audio_decision'] == 'copy':
transcode_decision = 'Direct Stream'
else:
transcode_decision = 'Direct Play'
if state != 'play':
if session['paused_counter']:
stream_duration = int((time.time() - helpers.cast_to_float(session['started']) -
helpers.cast_to_float(session['paused_counter'])) / 60)
else:
stream_duration = int((time.time() - helpers.cast_to_float(session['started'])) / 60)
view_offset = helpers.convert_milliseconds_to_minutes(session['view_offset'])
user = session['friendly_name']
platform = session['platform']
player = session['player']
ip_address = session['ip_address'] if session['ip_address'] else 'N/A'
container = session['container']
video_codec = session['video_codec']
video_bitrate = session['bitrate']
video_width = session['width']
video_height = session['height']
video_resolution = session['video_resolution']
video_framerate = session['video_framerate']
aspect_ratio = session['aspect_ratio']
audio_codec = session['audio_codec']
audio_channels = session['audio_channels']
transcode_container = session['transcode_container']
transcode_video_codec = session['transcode_video_codec']
transcode_video_width = session['transcode_width']
transcode_video_height = session['transcode_height']
transcode_audio_codec = session['transcode_audio_codec']
transcode_audio_channels = session['transcode_audio_channels']
# Generate a combined transcode decision value
if session.get('video_decision','') == 'transcode' or session.get('audio_decision','') == 'transcode':
transcode_decision = 'Transcode'
elif session.get('video_decision','') == 'copy' or session.get('audio_decision','') == 'copy':
transcode_decision = 'Direct Stream'
else:
transcode_decision = 'Direct Play'
if notify_action != 'play':
stream_duration = int((time.time() -
helpers.cast_to_int(session.get('started', 0)) -
helpers.cast_to_int(session.get('paused_counter', 0))) / 60)
else:
stream_duration = 0
view_offset = helpers.convert_milliseconds_to_minutes(session.get('view_offset', 0))
duration = helpers.convert_milliseconds_to_minutes(metadata['duration'])
progress_percent = helpers.get_percent(view_offset, duration)
remaining_duration = duration - view_offset
# Get media IDs from guid and build URLs
if 'imdb://' in metadata['guid']:
metadata['imdb_id'] = metadata['guid'].split('imdb://')[1].split('?')[0]
metadata['imdb_url'] = 'https://www.imdb.com/title/' + metadata['imdb_id']
metadata['trakt_url'] = 'https://trakt.tv/search/imdb/' + metadata['imdb_id']
if 'thetvdb://' in metadata['guid']:
metadata['thetvdb_id'] = metadata['guid'].split('thetvdb://')[1].split('/')[0]
metadata['thetvdb_url'] = 'https://thetvdb.com/?tab=series&id=' + metadata['thetvdb_id']
metadata['trakt_url'] = 'https://trakt.tv/search/tvdb/' + metadata['thetvdb_id'] + '?id_type=show'
elif 'thetvdbdvdorder://' in metadata['guid']:
metadata['thetvdb_id'] = metadata['guid'].split('thetvdbdvdorder://')[1].split('/')[0]
metadata['thetvdb_url'] = 'https://thetvdb.com/?tab=series&id=' + metadata['thetvdb_id']
metadata['trakt_url'] = 'https://trakt.tv/search/tvdb/' + metadata['thetvdb_id'] + '?id_type=show'
if 'themoviedb://' in metadata['guid']:
if metadata['media_type'] == 'movie':
metadata['themoviedb_id'] = metadata['guid'].split('themoviedb://')[1].split('?')[0]
metadata['themoviedb_url'] = 'https://www.themoviedb.org/movie/' + metadata['themoviedb_id']
metadata['trakt_url'] = 'https://trakt.tv/search/tmdb/' + metadata['themoviedb_id'] + '?id_type=movie'
elif metadata['media_type'] == 'show' or metadata['media_type'] == 'episode':
metadata['themoviedb_id'] = metadata['guid'].split('themoviedb://')[1].split('/')[0]
metadata['themoviedb_url'] = 'https://www.themoviedb.org/tv/' + metadata['themoviedb_id']
metadata['trakt_url'] = 'https://trakt.tv/search/tmdb/' + metadata['themoviedb_id'] + '?id_type=show'
if 'lastfm://' in metadata['guid']:
metadata['lastfm_id'] = metadata['guid'].split('lastfm://')[1].rsplit('/', 1)[0]
metadata['lastfm_url'] = 'https://www.last.fm/music/' + metadata['lastfm_id']
if metadata['media_type'] == 'movie' or metadata['media_type'] == 'show' or metadata['media_type'] == 'artist':
thumb = metadata['thumb']
poster_key = metadata['rating_key']
poster_title = metadata['title']
elif metadata['media_type'] == 'episode':
thumb = metadata['grandparent_thumb']
poster_key = metadata['grandparent_rating_key']
poster_title = metadata['grandparent_title']
elif metadata['media_type'] == 'track':
thumb = metadata['parent_thumb']
poster_key = metadata['parent_rating_key']
poster_title = metadata['parent_title']
else:
thumb = None
if thumb:
# Try to retrieve a poster_url from the database
data_factory = datafactory.DataFactory()
poster_url = data_factory.get_poster_url(rating_key=poster_key)
# If no previous poster_url
if not poster_url and plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS:
# Retrieve the poster from Plex and cache to file
urllib.urlretrieve(plexpy.CONFIG.PMS_URL + thumb + '?X-Plex-Token=' + plexpy.CONFIG.PMS_TOKEN,
os.path.join(plexpy.CONFIG.CACHE_DIR, 'cache-poster.jpg'))
# Upload thumb to Imgur and get link
poster_url = helpers.uploadToImgur(os.path.join(plexpy.CONFIG.CACHE_DIR, 'cache-poster.jpg'), poster_title)
metadata['poster_url'] = poster_url
# Fix metadata params for notify recently added grandparent
if state == 'created' and plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
if notify_action == 'created' and plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
show_name = metadata['title']
episode_name = ''
artist_name = metadata['title']
@@ -520,40 +588,49 @@ def build_notify_text(session=None, timeline=None, state=None):
album_name = metadata['parent_title']
track_name = metadata['title']
available_params = {'server_name': server_name,
available_params = {# Global parameters
'server_name': server_name,
'server_uptime': server_uptime,
'action': notify_action.title(),
'datestamp': arrow.now().format(date_format),
'timestamp': arrow.now().format(time_format),
# Stream parameters
'streams': stream_count,
'action': state,
'datestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.DATE_FORMAT)),
'timestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.TIME_FORMAT)),
'user': user,
'platform': platform,
'player': player,
'ip_address': ip_address,
'media_type': metadata['media_type'],
'user': session.get('friendly_name',''),
'platform': session.get('platform',''),
'player': session.get('player',''),
'ip_address': session.get('ip_address','N/A'),
'stream_duration': stream_duration,
'remaining_duration': duration - view_offset,
'progress': view_offset,
'stream_time': arrow.get(stream_duration * 60).format(duration_format),
'remaining_duration': remaining_duration,
'remaining_time': arrow.get(remaining_duration * 60).format(duration_format),
'progress_duration': view_offset,
'progress_time': arrow.get(view_offset * 60).format(duration_format),
'progress_percent': progress_percent,
'container': container,
'video_codec': video_codec,
'video_bitrate': video_bitrate,
'video_width': video_width,
'video_height': video_height,
'video_resolution': video_resolution,
'video_framerate': video_framerate,
'aspect_ratio': aspect_ratio,
'audio_codec': audio_codec,
'audio_channels': audio_channels,
'container': session.get('container',''),
'video_codec': session.get('video_codec',''),
'video_bitrate': session.get('bitrate',''),
'video_width': session.get('width',''),
'video_height': session.get('height',''),
'video_resolution': session.get('video_resolution',''),
'video_framerate': session.get('video_framerate',''),
'aspect_ratio': session.get('aspect_ratio',''),
'audio_codec': session.get('audio_codec',''),
'audio_channels': session.get('audio_channels',''),
'transcode_decision': transcode_decision,
'video_decision': video_decision,
'audio_decision': audio_decision,
'transcode_container': transcode_container,
'transcode_video_codec': transcode_video_codec,
'transcode_video_width': transcode_video_width,
'transcode_video_height': transcode_video_height,
'transcode_audio_codec': transcode_audio_codec,
'transcode_audio_channels': transcode_audio_channels,
'video_decision': session.get('video_decision','').title(),
'audio_decision': session.get('audio_decision','').title(),
'transcode_container': session.get('transcode_container',''),
'transcode_video_codec': session.get('transcode_video_codec',''),
'transcode_video_width': session.get('transcode_width',''),
'transcode_video_height': session.get('transcode_height',''),
'transcode_audio_codec': session.get('transcode_audio_codec',''),
'transcode_audio_channels': session.get('transcode_audio_channels',''),
'session_key': session.get('session_key',''),
'user_id': session.get('user_id',''),
'machine_id': session.get('machine_id',''),
# Metadata parameters
'media_type': metadata['media_type'],
'title': full_title,
'library_name': metadata['library_name'],
'show_name': show_name,
@@ -565,6 +642,8 @@ def build_notify_text(session=None, timeline=None, state=None):
'season_num00': metadata['parent_media_index'].zfill(2),
'episode_num': metadata['media_index'].zfill(1),
'episode_num00': metadata['media_index'].zfill(2),
'track_num': metadata['media_index'].zfill(1),
'track_num00': metadata['media_index'].zfill(2),
'year': metadata['year'],
'studio': metadata['studio'],
'content_rating': metadata['content_rating'],
@@ -575,7 +654,20 @@ def build_notify_text(session=None, timeline=None, state=None):
'summary': metadata['summary'],
'tagline': metadata['tagline'],
'rating': metadata['rating'],
'duration': duration
'duration': duration,
'poster_url': metadata.get('poster_url',''),
'imdb_id': metadata.get('imdb_id',''),
'imdb_url': metadata.get('imdb_url',''),
'thetvdb_id': metadata.get('thetvdb_id',''),
'thetvdb_url': metadata.get('thetvdb_url',''),
'themoviedb_id': metadata.get('themoviedb_id',''),
'themoviedb_url': metadata.get('themoviedb_url',''),
'lastfm_url': metadata.get('lastfm_url',''),
'trakt_url': metadata.get('trakt_url',''),
'section_id': metadata['section_id'],
'rating_key': metadata['rating_key'],
'parent_rating_key': metadata['parent_rating_key'],
'grandparent_rating_key': metadata['grandparent_rating_key']
}
# Default subject text
@@ -584,10 +676,6 @@ def build_notify_text(session=None, timeline=None, state=None):
# Default scripts args
script_args = []
# Regex to match {param} but not "{param}"
params_to_quote = re.compile(r'(?<!\")([\{][^}]+[\}])(?!\"\})')
script_args_text = re.sub(params_to_quote, r'"\g<0>"', script_args_text)
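# Sketch: what the params_to_quote substitution above does to a script arguments
# template (the template is illustrative). Bare {param} tokens get wrapped in
# double quotes so values containing spaces stay together after str.format();
# tokens that are already quoted are left alone.
import re
params_to_quote = re.compile(r'(?<!\")([\{][^}]+[\}])(?!\"\})')
example_args = '-t {title} -u "{user}"'
print(re.sub(params_to_quote, r'"\g<0>"', example_args))  # -t "{title}" -u "{user}"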
if script_args_text:
try:
script_args = [unicode(arg).format(**available_params) for arg in script_args_text.split()]
@@ -596,11 +684,11 @@ def build_notify_text(session=None, timeline=None, state=None):
except Exception as e:
logger.error(u"PlexPy Notifier :: Unable to parse custom script arguments %s. Using fallback." % e)
if state == 'play':
if notify_action == 'play':
# Default body text
body_text = '%s (%s) is watching %s' % (session['friendly_name'],
session['player'],
full_title)
body_text = '%s (%s) started playing %s' % (session['friendly_name'],
session['player'],
full_title)
if on_start_subject and on_start_body:
try:
@@ -617,10 +705,10 @@ def build_notify_text(session=None, timeline=None, state=None):
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args]
elif state == 'stop':
return [subject_text, body_text, script_args], metadata
elif notify_action == 'stop':
# Default body text
body_text = '%s (%s) has stopped %s' % (session['friendly_name'],
session['player'],
@@ -641,10 +729,10 @@ def build_notify_text(session=None, timeline=None, state=None):
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args]
elif state == 'pause':
return [subject_text, body_text, script_args], metadata
elif notify_action == 'pause':
# Default body text
body_text = '%s (%s) has paused %s' % (session['friendly_name'],
session['player'],
@@ -665,10 +753,10 @@ def build_notify_text(session=None, timeline=None, state=None):
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args]
elif state == 'resume':
return [subject_text, body_text, script_args], metadata
elif notify_action == 'resume':
# Default body text
body_text = '%s (%s) has resumed %s' % (session['friendly_name'],
session['player'],
@@ -689,10 +777,10 @@ def build_notify_text(session=None, timeline=None, state=None):
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args]
elif state == 'buffer':
return [subject_text, body_text, script_args], metadata
elif notify_action == 'buffer':
# Default body text
body_text = '%s (%s) is buffering %s' % (session['friendly_name'],
session['player'],
@@ -713,10 +801,10 @@ def build_notify_text(session=None, timeline=None, state=None):
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args]
elif state == 'watched':
return [subject_text, body_text, script_args], metadata
elif notify_action == 'watched':
# Default body text
body_text = '%s (%s) has watched %s' % (session['friendly_name'],
session['player'],
@@ -737,10 +825,10 @@ def build_notify_text(session=None, timeline=None, state=None):
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args]
elif state == 'created':
return [subject_text, body_text, script_args], metadata
elif notify_action == 'created':
# Default body text
body_text = '%s was recently added to Plex.' % full_title
@@ -759,14 +847,18 @@ def build_notify_text(session=None, timeline=None, state=None):
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args]
return [subject_text, body_text, script_args], metadata
else:
return None
def build_server_notify_text(state=None):
def build_server_notify_text(notify_action=None):
# Get time formats
date_format = plexpy.CONFIG.DATE_FORMAT.replace('Do','').replace('zz','')
time_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','').replace('zz','')
# Get the server name
server_name = plexpy.CONFIG.PMS_NAME
@@ -776,11 +868,13 @@ def build_server_notify_text(state=None):
if server_times:
updated_at = server_times[0]['updated_at']
server_uptime = helpers.human_duration(int(time.time() - helpers.cast_to_float(updated_at)))
server_uptime = helpers.human_duration(int(time.time() - helpers.cast_to_int(updated_at)))
else:
logger.error(u"PlexPy NotificationHandler :: Unable to retrieve server uptime.")
server_uptime = 'N/A'
pattern = re.compile('\n*<tv>[^>]+.</tv>\n*|\n*<movie>[^>]+.</movie>\n*|\n*?<music>[^>]+.</music>\n*', re.IGNORECASE | re.DOTALL)
on_extdown_subject = plexpy.CONFIG.NOTIFY_ON_EXTDOWN_SUBJECT_TEXT
on_extdown_body = plexpy.CONFIG.NOTIFY_ON_EXTDOWN_BODY_TEXT
on_intdown_subject = plexpy.CONFIG.NOTIFY_ON_INTDOWN_SUBJECT_TEXT
@@ -789,13 +883,14 @@ def build_server_notify_text(state=None):
on_extup_body = plexpy.CONFIG.NOTIFY_ON_EXTUP_BODY_TEXT
on_intup_subject = plexpy.CONFIG.NOTIFY_ON_INTUP_SUBJECT_TEXT
on_intup_body = plexpy.CONFIG.NOTIFY_ON_INTUP_BODY_TEXT
script_args_text = plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT
script_args_text = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT))
available_params = {'server_name': server_name,
available_params = {# Global parameters
'server_name': server_name,
'server_uptime': server_uptime,
'action': state,
'datestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.DATE_FORMAT)),
'timestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.TIME_FORMAT))}
'action': notify_action.title(),
'datestamp': arrow.now().format(date_format),
'timestamp': arrow.now().format(time_format)}
# Default text
subject_text = 'PlexPy (%s)' % server_name
@@ -803,10 +898,6 @@ def build_server_notify_text(state=None):
# Default scripts args
script_args = []
# Regex to match {param} but not "{param}"
params_to_quote = re.compile(r'(?<!\")([\{][^}]+[\}])(?!\"\})')
script_args_text = re.sub(params_to_quote, r'"\g<0>"', script_args_text)
if script_args_text:
try:
script_args = [unicode(arg).format(**available_params) for arg in script_args_text.split()]
@@ -815,7 +906,7 @@ def build_server_notify_text(state=None):
except Exception as e:
logger.error(u"PlexPy Notifier :: Unable to parse custom script arguments %s. Using fallback." % e)
if state == 'extdown':
if notify_action == 'extdown':
# Default body text
body_text = 'The Plex Media Server remote access is down.'
@@ -838,7 +929,7 @@ def build_server_notify_text(state=None):
else:
return [subject_text, body_text, script_args]
elif state == 'intdown':
elif notify_action == 'intdown':
# Default body text
body_text = 'The Plex Media Server is down.'
@@ -860,7 +951,7 @@ def build_server_notify_text(state=None):
return [subject_text, body_text, script_args]
else:
return [subject_text, body_text, script_args]
if state == 'extup':
if notify_action == 'extup':
# Default body text
body_text = 'The Plex Media Server remote access is back up.'
@@ -882,7 +973,7 @@ def build_server_notify_text(state=None):
return [subject_text, body_text, script_args]
else:
return [subject_text, body_text, script_args]
elif state == 'intup':
elif notify_action == 'intup':
# Default body text
body_text = 'The Plex Media Server is back up.'
@@ -911,4 +1002,4 @@ def build_server_notify_text(state=None):
def strip_tag(data):
p = re.compile(r'<.*?>')
return p.sub('', data)
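# Sketch: how the media-type tag handling above works for server notifications.
# The <tv>/<movie>/<music> blocks are stripped out first with the pattern above,
# then strip_tag() drops any remaining <...> markers. The template is illustrative.
import re
pattern = re.compile('\n*<tv>[^>]+.</tv>\n*|\n*<movie>[^>]+.</movie>\n*|\n*?<music>[^>]+.</music>\n*',
                     re.IGNORECASE | re.DOTALL)
def strip_tag(data):
    return re.compile(r'<.*?>').sub('', data)
template = '{server_name}: <movie>{title} ({year})</movie><tv>{show_name} S{season_num00}</tv>'
print(strip_tag(re.sub(pattern, '', template)))  # -> '{server_name}: '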


@@ -34,7 +34,7 @@ from pynma import pynma
import gntp.notifier
import oauth2 as oauth
import pythontwitter as twitter
import pythonfacebook as facebook
import plexpy
from plexpy import logger, helpers, request
@@ -58,7 +58,7 @@ AGENT_IDS = {"Growl": 0,
"Scripts": 15,
"Facebook": 16}
def available_notification_agents():
agents = [{'name': 'Growl',
'id': AGENT_IDS['Growl'],
@@ -358,59 +358,59 @@ def available_notification_agents():
return agents
def get_notification_agent_config(config_id):
if str(config_id).isdigit():
config_id = int(config_id)
def get_notification_agent_config(agent_id):
if str(agent_id).isdigit():
agent_id = int(agent_id)
if config_id == 0:
if agent_id == 0:
growl = GROWL()
return growl.return_config_options()
elif config_id == 1:
elif agent_id == 1:
prowl = PROWL()
return prowl.return_config_options()
elif config_id == 2:
elif agent_id == 2:
xbmc = XBMC()
return xbmc.return_config_options()
elif config_id == 3:
elif agent_id == 3:
plex = Plex()
return plex.return_config_options()
elif config_id == 4:
elif agent_id == 4:
nma = NMA()
return nma.return_config_options()
elif config_id == 5:
elif agent_id == 5:
pushalot = PUSHALOT()
return pushalot.return_config_options()
elif config_id == 6:
elif agent_id == 6:
pushbullet = PUSHBULLET()
return pushbullet.return_config_options()
elif config_id == 7:
elif agent_id == 7:
pushover = PUSHOVER()
return pushover.return_config_options()
elif config_id == 8:
elif agent_id == 8:
osx_notify = OSX_NOTIFY()
return osx_notify.return_config_options()
elif config_id == 9:
elif agent_id == 9:
boxcar = BOXCAR()
return boxcar.return_config_options()
elif config_id == 10:
elif agent_id == 10:
email = Email()
return email.return_config_options()
elif config_id == 11:
elif agent_id == 11:
tweet = TwitterNotifier()
return tweet.return_config_options()
elif config_id == 12:
elif agent_id == 12:
iftttClient = IFTTT()
return iftttClient.return_config_options()
elif config_id == 13:
elif agent_id == 13:
telegramClient = TELEGRAM()
return telegramClient.return_config_options()
elif config_id == 14:
elif agent_id == 14:
slackClient = SLACK()
return slackClient.return_config_options()
elif config_id == 15:
elif agent_id == 15:
script = Scripts()
return script.return_config_options()
elif config_id == 16:
elif agent_id == 16:
facebook = FacebookNotifier()
return facebook.return_config_options()
else:
@@ -419,61 +419,61 @@ def get_notification_agent_config(config_id):
return []
def send_notification(config_id, subject, body, **kwargs):
if str(config_id).isdigit():
config_id = int(config_id)
def send_notification(agent_id, subject, body, **kwargs):
if str(agent_id).isdigit():
agent_id = int(agent_id)
if config_id == 0:
if agent_id == 0:
growl = GROWL()
growl.notify(message=body, event=subject)
elif config_id == 1:
elif agent_id == 1:
prowl = PROWL()
prowl.notify(message=body, event=subject)
elif config_id == 2:
elif agent_id == 2:
xbmc = XBMC()
xbmc.notify(subject=subject, message=body)
elif config_id == 3:
elif agent_id == 3:
plex = Plex()
plex.notify(subject=subject, message=body)
elif config_id == 4:
elif agent_id == 4:
nma = NMA()
nma.notify(subject=subject, message=body)
elif config_id == 5:
elif agent_id == 5:
pushalot = PUSHALOT()
pushalot.notify(message=body, event=subject)
elif config_id == 6:
elif agent_id == 6:
pushbullet = PUSHBULLET()
pushbullet.notify(message=body, subject=subject)
elif config_id == 7:
elif agent_id == 7:
pushover = PUSHOVER()
pushover.notify(message=body, event=subject)
elif config_id == 8:
elif agent_id == 8:
osx_notify = OSX_NOTIFY()
osx_notify.notify(title=subject, text=body)
elif config_id == 9:
elif agent_id == 9:
boxcar = BOXCAR()
boxcar.notify(title=subject, message=body)
elif config_id == 10:
elif agent_id == 10:
email = Email()
email.notify(subject=subject, message=body)
elif config_id == 11:
elif agent_id == 11:
tweet = TwitterNotifier()
tweet.notify(subject=subject, message=body)
elif config_id == 12:
elif agent_id == 12:
iftttClient = IFTTT()
iftttClient.notify(subject=subject, message=body)
elif config_id == 13:
elif agent_id == 13:
telegramClient = TELEGRAM()
telegramClient.notify(message=body, event=subject)
elif config_id == 14:
elif agent_id == 14:
slackClient = SLACK()
slackClient.notify(message=body, event=subject)
elif config_id == 15:
elif agent_id == 15:
scripts = Scripts()
scripts.notify(message=body, subject=subject, **kwargs)
elif config_id == 16:
elif agent_id == 16:
facebook = FacebookNotifier()
facebook.notify(subject=subject, message=body)
facebook.notify(subject=subject, message=body, **kwargs)
else:
logger.debug(u"PlexPy Notifiers :: Unknown agent id received.")
else:
@@ -1054,6 +1054,7 @@ class PUSHOVER(object):
self.keys = plexpy.CONFIG.PUSHOVER_KEYS
self.priority = plexpy.CONFIG.PUSHOVER_PRIORITY
self.sound = plexpy.CONFIG.PUSHOVER_SOUND
self.html_support = plexpy.CONFIG.PUSHOVER_HTML_SUPPORT
def conf(self, options):
return cherrypy.config['config'].get('Pushover', options)
@@ -1069,6 +1070,7 @@ class PUSHOVER(object):
'title': event.encode("utf-8"),
'message': message.encode("utf-8"),
'sound': plexpy.CONFIG.PUSHOVER_SOUND,
'html': plexpy.CONFIG.PUSHOVER_HTML_SUPPORT,
'priority': plexpy.CONFIG.PUSHOVER_PRIORITY}
http_handler.request("POST",
@@ -1095,11 +1097,12 @@ class PUSHOVER(object):
# For uniformity reasons not removed
return
def test(self, keys, priority, sound):
def test(self, keys, priority, sound, html_support):
self.enabled = True
self.keys = keys
self.priority = priority
self.sound = sound
self.html_support = html_support
self.notify('Main Screen Activate', 'Test Message')
@@ -1151,6 +1154,12 @@ class PUSHOVER(object):
'description': 'Set the notification sound. Leave blank for the default sound.',
'input_type': 'select',
'select_options': self.get_sounds()
},
{'label': 'Enable HTML Support',
'value': self.html_support,
'name': 'pushover_html_support',
'description': 'Style your messages using these HTML Tags: b, i, u, a[href], font[color]',
'input_type': 'checkbox'
}
]
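# Sketch: the Pushover request behind the notifier above, reduced to its essentials.
# Endpoint and field names follow the public Pushover API; the token and user key
# are placeholders. Setting 'html': 1 enables the tags listed in the help text.
from httplib import HTTPSConnection
from urllib import urlencode
data = {'token': 'APP_TOKEN',   # placeholder application token
        'user': 'USER_KEY',     # placeholder user key
        'title': 'PlexPy (MyServer)',
        'message': '<b>Movie Title</b> was recently added to Plex.',
        'sound': 'pushover',
        'html': 1,
        'priority': 0}
conn = HTTPSConnection('api.pushover.net')
conn.request('POST', '/1/messages.json', urlencode(data),
             {'Content-type': 'application/x-www-form-urlencoded'})
print(conn.getresponse().status)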
@@ -1165,14 +1174,20 @@ class TwitterNotifier(object):
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
def __init__(self):
self.consumer_key = "2LdJKXHDUwJtjYBsdwJisIOsh"
self.consumer_secret = "QWbUcZzAIiL4zbDCIhy2EdUkV8yEEav3qMdo5y3FugxCFelWrA"
self.access_token = plexpy.CONFIG.TWITTER_ACCESS_TOKEN
self.access_token_secret = plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET
self.consumer_key = plexpy.CONFIG.TWITTER_CONSUMER_KEY
self.consumer_secret = plexpy.CONFIG.TWITTER_CONSUMER_SECRET
self.incl_subject = plexpy.CONFIG.TWITTER_INCL_SUBJECT
def notify(self, subject, message):
if not subject or not message:
return
else:
self._send_tweet(subject + ': ' + message)
if self.incl_subject:
self._send_tweet(subject + ': ' + message)
else:
self._send_tweet(message)
def test_notify(self):
return self._send_tweet("This is a test notification from PlexPy at " + helpers.now())
@@ -1191,16 +1206,16 @@ class TwitterNotifier(object):
else:
request_token = dict(parse_qsl(content))
plexpy.CONFIG.TWITTER_USERNAME = request_token['oauth_token']
plexpy.CONFIG.TWITTER_PASSWORD = request_token['oauth_token_secret']
plexpy.CONFIG.TWITTER_ACCESS_TOKEN = request_token['oauth_token']
plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET = request_token['oauth_token_secret']
return self.AUTHORIZATION_URL + "?oauth_token=" + request_token['oauth_token']
def _get_credentials(self, key):
request_token = {}
request_token['oauth_token'] = plexpy.CONFIG.TWITTER_USERNAME
request_token['oauth_token_secret'] = plexpy.CONFIG.TWITTER_PASSWORD
request_token['oauth_token'] = plexpy.CONFIG.TWITTER_ACCESS_TOKEN
request_token['oauth_token_secret'] = plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET
request_token['oauth_callback_confirmed'] = 'true'
token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
@@ -1225,20 +1240,20 @@ class TwitterNotifier(object):
else:
# logger.info(u"PlexPy Notifiers :: Your Twitter Access Token key: %s" % access_token['oauth_token'])
# logger.info(u"PlexPy Notifiers :: Access Token secret: %s" % access_token['oauth_token_secret'])
plexpy.CONFIG.TWITTER_USERNAME = access_token['oauth_token']
plexpy.CONFIG.TWITTER_PASSWORD = access_token['oauth_token_secret']
plexpy.CONFIG.TWITTER_ACCESS_TOKEN = access_token['oauth_token']
plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET = access_token['oauth_token_secret']
plexpy.CONFIG.write()
return True
def _send_tweet(self, message=None):
username = self.consumer_key
password = self.consumer_secret
access_token_key = plexpy.CONFIG.TWITTER_USERNAME
access_token_secret = plexpy.CONFIG.TWITTER_PASSWORD
consumer_key = self.consumer_key
consumer_secret = self.consumer_secret
access_token = self.access_token
access_token_secret = self.access_token_secret
# logger.info(u"PlexPy Notifiers :: Sending tweet: " + message)
api = twitter.Api(username, password, access_token_key, access_token_secret)
api = twitter.Api(consumer_key, consumer_secret, access_token, access_token_secret)
try:
api.PostUpdate(message)
@@ -1251,30 +1266,43 @@ class TwitterNotifier(object):
def return_config_options(self):
config_option = [{'label': 'Instructions',
'description': 'Step 1: Click the <strong>Request Authorization</strong> button below.<br>\
Step 2: Input the <strong>Authorization Key</strong> you received from Step 1 below.<br>\
Step 3: Click the <strong>Verify Key</strong> button below.',
'description': 'Step 1: Visit <a href="https://apps.twitter.com/" target="_blank"> \
Twitter Apps</a> to <strong>Create New App</strong>. A valid "Website" is not required.<br>\
Step 2: Go to <strong>Keys and Access Tokens</strong> and click \
<strong>Create my access token</strong>.<br>\
Step 3: Fill in the <strong>Consumer Key</strong>, <strong>Consumer Secret</strong>, \
<strong>Access Token</strong>, and <strong>Access Token Secret</strong> below.',
'input_type': 'help'
},
{'label': 'Request Authorization',
'value': 'Request Authorization',
'name': 'twitterStep1',
'description': 'Request Twitter authorization. (Ensure you allow the browser pop-up).',
'input_type': 'button'
},
{'label': 'Authorization Key',
'value': '',
'name': 'twitter_key',
'description': 'Your Twitter authorization key.',
{'label': 'Twitter Consumer Key',
'value': self.consumer_key,
'name': 'twitter_consumer_key',
'description': 'Your Twitter consumer key.',
'input_type': 'text'
},
{'label': 'Verify Key',
'value': 'Verify Key',
'name': 'twitterStep2',
'description': 'Verify your Twitter authorization key.',
'input_type': 'button'
{'label': 'Twitter Consumer Secret',
'value': self.consumer_secret,
'name': 'twitter_consumer_secret',
'description': 'Your Twitter consumer secret.',
'input_type': 'text'
},
{'input_type': 'nosave'
{'label': 'Twitter Access Token',
'value': self.access_token,
'name': 'twitter_access_token',
'description': 'Your Twitter access token.',
'input_type': 'text'
},
{'label': 'Twitter Access Token Secret',
'value': self.access_token_secret,
'name': 'twitter_access_token_secret',
'description': 'Your Twitter access token secret.',
'input_type': 'text'
},
{'label': 'Include Subject Line',
'value': self.incl_subject,
'name': 'twitter_incl_subject',
'description': 'Include the subject line in the notifications.',
'input_type': 'checkbox'
}
]
@@ -1619,6 +1647,7 @@ class TELEGRAM(object):
self.enabled = plexpy.CONFIG.TELEGRAM_ENABLED
self.bot_token = plexpy.CONFIG.TELEGRAM_BOT_TOKEN
self.chat_id = plexpy.CONFIG.TELEGRAM_CHAT_ID
self.incl_subject = plexpy.CONFIG.TELEGRAM_INCL_SUBJECT
def conf(self, options):
return cherrypy.config['config'].get('Telegram', options)
@@ -1629,8 +1658,13 @@ class TELEGRAM(object):
http_handler = HTTPSConnection("api.telegram.org")
if self.incl_subject:
text = event.encode('utf-8') + ': ' + message.encode("utf-8")
else:
text = message.encode("utf-8")
data = {'chat_id': self.chat_id,
'text': event.encode('utf-8') + ': ' + message.encode("utf-8")}
'text': text}
http_handler.request("POST",
"/bot%s/%s" % (self.bot_token, "sendMessage"),
@@ -1668,11 +1702,17 @@ class TELEGRAM(object):
'description': 'Your Telegram bot token. Contact <a href="http://telegram.me/BotFather" target="_blank">@BotFather</a> on Telegram to get one.',
'input_type': 'text'
},
{'label': 'Telegram Chat ID',
{'label': 'Telegram Chat ID, Group ID, or Channel Username',
'value': self.chat_id,
'name': 'telegram_chat_id',
'description': 'Your Telegram Chat ID, Group ID, or channel username. Contact <a href="http://telegram.me/myidbot" target="_blank">@myidbot</a> on Telegram to get an ID.',
'description': 'Your Telegram Chat ID, Group ID, or @channelusername. Contact <a href="http://telegram.me/myidbot" target="_blank">@myidbot</a> on Telegram to get an ID.',
'input_type': 'text'
},
{'label': 'Include Subject Line',
'value': self.incl_subject,
'name': 'telegram_incl_subject',
'description': 'Include the subject line in the notifications.',
'input_type': 'checkbox'
}
]
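# Sketch: the Telegram call behind the notifier above is a single sendMessage POST
# to the Bot API. The bot token and chat ID are placeholders.
from httplib import HTTPSConnection
from urllib import urlencode
bot_token = '123456:ABC-DEF'      # placeholder token from @BotFather
data = {'chat_id': '@mychannel',  # chat ID, group ID, or @channelusername
        'text': 'PlexPy (MyServer): Movie Title was recently added to Plex.'}
conn = HTTPSConnection('api.telegram.org')
conn.request('POST', '/bot%s/sendMessage' % bot_token, urlencode(data),
             {'Content-type': 'application/x-www-form-urlencoded'})
print(conn.getresponse().status)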
@@ -1689,6 +1729,7 @@ class SLACK(object):
self.channel = plexpy.CONFIG.SLACK_CHANNEL
self.username = plexpy.CONFIG.SLACK_USERNAME
self.icon_emoji = plexpy.CONFIG.SLACK_ICON_EMOJI
self.incl_subject = plexpy.CONFIG.SLACK_INCL_SUBJECT
def conf(self, options):
return cherrypy.config['config'].get('Slack', options)
@@ -1698,7 +1739,12 @@ class SLACK(object):
return
http_handler = HTTPSConnection("hooks.slack.com")
data = {'text': event.encode('utf-8') + ': ' + message.encode("utf-8")}
if self.incl_subject:
text = event.encode('utf-8') + ': ' + message.encode("utf-8")
else:
text = message.encode("utf-8")
data = {'text': text}
if self.channel != '': data['channel'] = self.channel
if self.username != '': data['username'] = self.username
if self.icon_emoji != '':
@@ -1736,10 +1782,10 @@ class SLACK(object):
return self.notify('Main Screen Activate', 'Test Message')
def return_config_options(self):
config_option = [{'label': 'Slack Hook',
config_option = [{'label': 'Slack Webhook URL',
'value': self.slack_hook,
'name': 'slack_hook',
'description': 'Your Slack incoming webhook.',
'description': 'Your Slack incoming webhook URL.',
'input_type': 'text'
},
{'label': 'Slack Channel',
@@ -1759,6 +1805,12 @@ class SLACK(object):
'description': 'The icon you wish to show, use Slack emoji or image url. Leave blank for webhook integration default.',
'name': 'slack_icon_emoji',
'input_type': 'text'
},
{'label': 'Include Subject Line',
'value': self.incl_subject,
'name': 'slack_incl_subject',
'description': 'Include the subject line in the notifications.',
'input_type': 'checkbox'
}
]
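# Sketch: a Slack incoming-webhook post matching the options above. The webhook
# path is a placeholder; channel, username, and icon_emoji are optional overrides.
import json
from httplib import HTTPSConnection
payload = {'text': 'PlexPy (MyServer): Movie Title was recently added to Plex.',
           'channel': '#plex',
           'username': 'PlexPy',
           'icon_emoji': ':tv:'}
conn = HTTPSConnection('hooks.slack.com')
conn.request('POST', '/services/T000/B000/XXXX',  # placeholder webhook path
             json.dumps(payload), {'Content-type': 'application/json'})
print(conn.getresponse().status)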
@@ -1768,7 +1820,7 @@ class SLACK(object):
class Scripts(object):
def __init__(self, **kwargs):
self.script_exts = ('.bat', '.cmd', '.exe', '.php', '.pl', '.py', '.pyw', '.rb', '.sh')
self.script_exts = ('.bat', '.cmd', '.exe', '.php', '.pl', '.ps1', '.py', '.pyw', '.rb', '.sh')
def conf(self, options):
return cherrypy.config['config'].get('Scripts', options)
@@ -1798,7 +1850,7 @@ class Scripts(object):
return scripts
def notify(self, subject='', message='', notify_action='', script_args=[], *args, **kwargs):
def notify(self, subject='', message='', notify_action='', script_args=None, *args, **kwargs):
"""
Args:
subject(string, optional): Head text,
@@ -1808,7 +1860,10 @@ class Scripts(object):
"""
logger.debug(u"PlexPy Notifiers :: Trying to run notify script, action: %s, arguments: %s" %
(notify_action if notify_action else None, script_args if script_args else None))
if script_args is None:
script_args = []
if not plexpy.CONFIG.SCRIPTS_FOLDER:
return
@@ -1860,14 +1915,16 @@ class Scripts(object):
name, ext = os.path.splitext(script)
if ext == '.py':
prefix = 'python'
elif ext == '.pyw':
prefix = 'pythonw'
elif ext == '.php':
if ext == '.php':
prefix = 'php'
elif ext == '.pl':
prefix = 'perl'
elif ext == '.ps1':
prefix = 'powershell -executionPolicy bypass -file'
elif ext == '.py':
prefix = 'python'
elif ext == '.pyw':
prefix = 'pythonw'
elif ext == '.rb':
prefix = 'ruby'
else:
@@ -1877,7 +1934,7 @@ class Scripts(object):
script = script.encode(plexpy.SYS_ENCODING, 'ignore')
if prefix:
script = [prefix, script]
script = prefix.split() + [script]
else:
script = [script]
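# Sketch: how the interpreter prefix above becomes a subprocess command list,
# including the new PowerShell case. The script path and argument are illustrative.
import os
PREFIXES = {'.php': 'php', '.pl': 'perl',
            '.ps1': 'powershell -executionPolicy bypass -file',
            '.py': 'python', '.pyw': 'pythonw', '.rb': 'ruby'}
def build_command(script_path, script_args=None):
    name, ext = os.path.splitext(script_path)
    prefix = PREFIXES.get(ext, '')
    # prefix.split() keeps multi-word prefixes as separate argv items
    command = prefix.split() + [script_path] if prefix else [script_path]
    return command + (script_args or [])
print(build_command('on_play.ps1', ['{title}']))
# ['powershell', '-executionPolicy', 'bypass', '-file', 'on_play.ps1', '{title}']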
@@ -2016,20 +2073,26 @@ class Scripts(object):
return config_option
class FacebookNotifier(object):
def __init__(self):
self.redirect_uri = plexpy.CONFIG.FACEBOOK_REDIRECT_URI
self.access_token = plexpy.CONFIG.FACEBOOK_TOKEN
self.app_id = plexpy.CONFIG.FACEBOOK_APP_ID
self.app_secret = plexpy.CONFIG.FACEBOOK_APP_SECRET
self.group_id = plexpy.CONFIG.FACEBOOK_GROUP
self.incl_poster = plexpy.CONFIG.FACEBOOK_INCL_POSTER
self.incl_subject = plexpy.CONFIG.FACEBOOK_INCL_SUBJECT
def notify(self, subject, message):
def notify(self, subject, message, **kwargs):
if not subject or not message:
return
else:
self._post_facebook(subject + ': ' + message)
if self.incl_subject:
self._post_facebook(subject + ': ' + message, **kwargs)
else:
self._post_facebook(message, **kwargs)
def test_notify(self):
return self._post_facebook(u"PlexPy Notifiers :: This is a test notification from PlexPy at " + helpers.now())
@@ -2041,7 +2104,7 @@ class FacebookNotifier(object):
def _get_credentials(self, code):
logger.info(u"PlexPy Notifiers :: Requesting access token from Facebook")
try:
# Request user access token
api = facebook.GraphAPI(version='2.5')
@@ -2050,30 +2113,66 @@ class FacebookNotifier(object):
app_id=self.app_id,
app_secret=self.app_secret)
access_token = response['access_token']
# Request extended user access token
api = facebook.GraphAPI(access_token=access_token, version='2.5')
response = api.extend_access_token(app_id=self.app_id,
app_secret=self.app_secret)
access_token = response['access_token']
plexpy.CONFIG.FACEBOOK_TOKEN = access_token
plexpy.CONFIG.write()
except Exception as e:
logger.error(u"PlexPy Notifiers :: Error requesting Facebook access token: %s" % e)
return False
return True
def _post_facebook(self, message=None):
access_token = plexpy.CONFIG.FACEBOOK_TOKEN
group_id = plexpy.CONFIG.FACEBOOK_GROUP
def _post_facebook(self, message=None, **kwargs):
if self.group_id:
api = facebook.GraphAPI(access_token=self.access_token, version='2.5')
if group_id:
api = facebook.GraphAPI(access_token=access_token, version='2.5')
attachment = {}
if self.incl_poster and 'metadata' in kwargs:
metadata = kwargs['metadata']
poster_url = metadata.get('poster_url','')
if poster_url:
if metadata['media_type'] == 'movie' or metadata['media_type'] == 'show':
title = metadata['title']
subtitle = metadata['year']
rating_key = metadata['rating_key']
elif metadata['media_type'] == 'episode':
title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
subtitle = 'S%s %s E%s' % (metadata['parent_media_index'],
'\xc2\xb7'.decode('utf8'),
metadata['media_index'])
rating_key = metadata['rating_key']
elif metadata['media_type'] == 'artist':
title = metadata['title']
subtitle = ''
rating_key = metadata['rating_key']
elif metadata['media_type'] == 'track':
title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
subtitle = metadata['parent_title']
rating_key = metadata['parent_rating_key']
caption = 'View in Plex Web.'
# Build Facebook post attachment
attachment['link'] = 'http://app.plex.tv/web/app#!/server/' + plexpy.CONFIG.PMS_IDENTIFIER + \
'/details/%2Flibrary%2Fmetadata%2F' + rating_key
attachment['picture'] = poster_url
attachment['name'] = title
attachment['description'] = subtitle
attachment['caption'] = caption
try:
api.put_wall_post(profile_id=group_id, message=message)
api.put_wall_post(profile_id=self.group_id, message=message, attachment=attachment)
logger.info(u"PlexPy Notifiers :: Facebook notification sent.")
except Exception as e:
logger.warn(u"PlexPy Notifiers :: Error sending Facebook post: %s" % e)
@@ -2088,12 +2187,16 @@ class FacebookNotifier(object):
config_option = [{'label': 'Instructions',
'description': '<strong>Facebook notifications are currently experimental!</strong><br><br> \
Step 1: Visit <a href="https://developers.facebook.com/apps/" target="_blank"> \
Facebook Developers</a> to create a new app using <strong>advanced setup</strong>.<br>\
Step 2: Go to <strong>Settings > Advanced</strong> and fill in \
Facebook Developers</a> to add a new app using <strong>basic setup</strong>.<br>\
Step 2: Go to <strong>Settings > Basic</strong> and fill in a \
<strong>Contact Email</strong>.<br>\
Step 3: Go to <strong>Settings > Advanced</strong> and fill in \
<strong>Valid OAuth redirect URIs</strong> with your PlexPy URL (i.e. http://localhost:8181).<br>\
Step 3: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 2.<br>\
Step 4: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
Step 5: Click the <strong>Request Authorization</strong> button below.',
Step 4: Go to <strong>App Review</strong> and toggle public to <strong>Yes</strong>.<br>\
Step 5: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 3.<br>\
Step 6: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
Step 7: Click the <strong>Request Authorization</strong> button below.<br> \
Step 8: Fill in the <strong>Group ID</strong> below.',
'input_type': 'help'
},
{'label': 'PlexPy URL',
@@ -2125,6 +2228,18 @@ class FacebookNotifier(object):
'name': 'facebook_group',
'description': 'Your Facebook Group ID.',
'input_type': 'text'
},
{'label': 'Include Poster Image',
'value': self.incl_poster,
'name': 'facebook_incl_poster',
'description': 'Include a poster and link in the notifications.',
'input_type': 'checkbox'
},
{'label': 'Include Subject Line',
'value': self.incl_subject,
'name': 'facebook_incl_subject',
'description': 'Include the subject line in the notifications.',
'input_type': 'checkbox'
}
]


@@ -86,7 +86,7 @@ def get_real_pms_url():
plexpy.CONFIG.__setattr__('PMS_URL', item['uri'])
plexpy.CONFIG.write()
logger.info(u"PlexPy PlexTV :: Server URL retrieved.")
if not plexpy.CONFIG.PMS_IS_REMOTE and item['local'] == '1':
if not plexpy.CONFIG.PMS_IS_REMOTE and item['local'] == '1' and 'plex.direct' in item['uri']:
plexpy.CONFIG.__setattr__('PMS_URL', item['uri'])
plexpy.CONFIG.write()
logger.info(u"PlexPy PlexTV :: Server URL retrieved.")
@@ -383,7 +383,6 @@ class PlexTV(object):
return []
plextv_resources = self.get_plextv_resources(include_https=include_https)
server_urls = []
try:
xml_parse = minidom.parseString(plextv_resources)
@@ -400,36 +399,51 @@ class PlexTV(object):
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
return []
# Function to get all connections for a device
def get_connections(device):
conn = []
connections = device.getElementsByTagName('Connection')
for c in connections:
server_details = {"protocol": helpers.get_xml_attr(c, 'protocol'),
"address": helpers.get_xml_attr(c, 'address'),
"port": helpers.get_xml_attr(c, 'port'),
"uri": helpers.get_xml_attr(c, 'uri'),
"local": helpers.get_xml_attr(c, 'local')
}
conn.append(server_details)
return conn
server_urls = []
# Try to match the device
for a in xml_head:
if helpers.get_xml_attr(a, 'clientIdentifier') == server_id:
connections = a.getElementsByTagName('Connection')
for connection in connections:
server_details = {"protocol": helpers.get_xml_attr(connection, 'protocol'),
"address": helpers.get_xml_attr(connection, 'address'),
"port": helpers.get_xml_attr(connection, 'port'),
"uri": helpers.get_xml_attr(connection, 'uri'),
"local": helpers.get_xml_attr(connection, 'local')
}
server_urls = get_connections(a)
break
# Else no device match found
if not server_urls:
# Try to match the PMS_IP and PMS_PORT
for a in xml_head:
if helpers.get_xml_attr(a, 'provides') == 'server':
connections = a.getElementsByTagName('Connection')
server_urls.append(server_details)
# Else try to match the PMS_IP and PMS_PORT
else:
connections = a.getElementsByTagName('Connection')
for connection in connections:
if helpers.get_xml_attr(connection, 'address') == plexpy.CONFIG.PMS_IP and \
int(helpers.get_xml_attr(connection, 'port')) == plexpy.CONFIG.PMS_PORT:
for connection in connections:
if helpers.get_xml_attr(connection, 'address') == plexpy.CONFIG.PMS_IP and \
int(helpers.get_xml_attr(connection, 'port')) == plexpy.CONFIG.PMS_PORT:
plexpy.CONFIG.PMS_IDENTIFIER = helpers.get_xml_attr(a, 'clientIdentifier')
plexpy.CONFIG.write()
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s." % \
(server_id, plexpy.CONFIG.PMS_IDENTIFIER))
server_urls = get_connections(a)
break
plexpy.CONFIG.PMS_IDENTIFIER = helpers.get_xml_attr(a, 'clientIdentifier')
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s." % \
(server_id, plexpy.CONFIG.PMS_IDENTIFIER))
server_details = {"protocol": helpers.get_xml_attr(connection, 'protocol'),
"address": helpers.get_xml_attr(connection, 'address'),
"port": helpers.get_xml_attr(connection, 'port'),
"uri": helpers.get_xml_attr(connection, 'uri'),
"local": helpers.get_xml_attr(connection, 'local')
}
if server_urls:
break
return server_urls
@@ -479,6 +493,16 @@ class PlexTV(object):
connections = d.getElementsByTagName('Connection')
for c in connections:
# If this is a remote server don't show any local IPs.
if helpers.get_xml_attr(d, 'publicAddressMatches') == '0' and \
helpers.get_xml_attr(c, 'local') == '1':
continue
# If this is a local server don't show any remote IPs.
if helpers.get_xml_attr(d, 'publicAddressMatches') == '1' and \
helpers.get_xml_attr(c, 'local') == '0':
continue
server = {'httpsRequired': helpers.get_xml_attr(d, 'httpsRequired'),
'clientIdentifier': helpers.get_xml_attr(d, 'clientIdentifier'),
'label': helpers.get_xml_attr(d, 'name'),


@@ -19,10 +19,11 @@ from urlparse import urlparse
import plexpy
import urllib2
def get_server_friendly_name():
logger.info(u"PlexPy Pmsconnect :: Requesting name from server...")
server_name = PmsConnect().get_server_pref(pref='FriendlyName')
# If friendly name is blank
if not server_name:
servers_info = PmsConnect().get_servers_info()
@@ -30,7 +31,7 @@ def get_server_friendly_name():
if server['machine_identifier'] == plexpy.CONFIG.PMS_IDENTIFIER:
server_name = server['name']
break
if server_name and server_name != plexpy.CONFIG.PMS_NAME:
plexpy.CONFIG.__setattr__('PMS_NAME', server_name)
plexpy.CONFIG.write()
@@ -38,17 +39,22 @@ def get_server_friendly_name():
return server_name
def refresh_libraries():
logger.info(u"PlexPy Pmsconnect :: Requesting libraries list refresh...")
library_sections = PmsConnect().get_library_details()
server_id = plexpy.CONFIG.PMS_IDENTIFIER
if not server_id:
logger.error(u"PlexPy Pmsconnect :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
return
library_keys = []
library_sections = PmsConnect().get_library_details()
if library_sections:
monitor_db = database.MonitorDatabase()
library_keys = []
for section in library_sections:
section_keys = {'server_id': server_id,
'section_id': section['section_id']}
@@ -67,15 +73,15 @@ def refresh_libraries():
library_keys.append(section['section_id'])
if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
plexpy.CONFIG.write()
if plexpy.CONFIG.UPDATE_SECTION_IDS == 1:
if plexpy.CONFIG.UPDATE_SECTION_IDS == 1 or plexpy.CONFIG.UPDATE_SECTION_IDS == -1:
from plexpy import libraries
import threading
# Start library section_id update on its own thread
threading.Thread(target=libraries.update_section_ids).start()
logger.info(u"PlexPy Pmsconnect :: Libraries list refreshed.")
@@ -201,7 +207,7 @@ class PmsConnect(object):
proto=self.protocol,
request_type='GET',
output_format=output_format)
return request
def get_childrens_list(self, rating_key='', output_format=''):
@@ -218,7 +224,7 @@ class PmsConnect(object):
proto=self.protocol,
request_type='GET',
output_format=output_format)
return request
def get_server_list(self, output_format=''):
@@ -295,7 +301,7 @@ class PmsConnect(object):
"""
count = '&X-Plex-Container-Size=' + count if count else ''
uri = '/library/sections/' + section_id + '/' + list_type +'?X-Plex-Container-Start=0' + count + sort_type
uri = '/library/sections/' + section_id + '/' + list_type + '?X-Plex-Container-Start=0' + count + sort_type
request = self.request_handler.make_request(uri=uri,
proto=self.protocol,
request_type='GET',
@@ -830,7 +836,7 @@ class PmsConnect(object):
metadata = self.get_metadata_details(str(child_rating_key), get_media_info)
if metadata:
metadata_list.append(metadata['metadata'])
elif get_children and a.getElementsByTagName('Directory'):
dir_main = a.getElementsByTagName('Directory')
metadata_main = [d for d in dir_main if helpers.get_xml_attr(d, 'ratingKey')]
@@ -839,7 +845,7 @@ class PmsConnect(object):
metadata = self.get_metadata_children_details(str(child_rating_key), get_children, get_media_info)
if metadata:
metadata_list.extend(metadata['metadata'])
output = {'metadata': metadata_list}
return output
@@ -887,7 +893,7 @@ class PmsConnect(object):
metadata['section_type'] = 'track'
metadata_list = {'metadata': metadata}
return metadata_list
def get_current_activity(self):
@@ -990,7 +996,7 @@ class PmsConnect(object):
machine_id = helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier')
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@@ -1112,7 +1118,7 @@ class PmsConnect(object):
if helpers.get_xml_attr(session, 'type') == 'episode':
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@@ -1170,7 +1176,7 @@ class PmsConnect(object):
elif helpers.get_xml_attr(session, 'type') == 'movie':
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@@ -1228,7 +1234,7 @@ class PmsConnect(object):
elif helpers.get_xml_attr(session, 'type') == 'clip':
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@@ -1319,7 +1325,7 @@ class PmsConnect(object):
machine_id = helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier')
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@@ -1404,7 +1410,7 @@ class PmsConnect(object):
children_list = {'children_count': '0',
'children_list': []
}
return parent_list
return children_list
result_data = []
@@ -1551,7 +1557,7 @@ class PmsConnect(object):
'title': helpers.get_xml_attr(xml_head[0], 'title1'),
'libraries_list': libraries_list
}
return output
def get_library_children_details(self, section_id='', section_type='', list_type='all', count='', rating_key='', get_media_info=False):
@@ -1588,9 +1594,9 @@ class PmsConnect(object):
sort_type = ''
if str(section_id).isdigit():
library_data = self.get_library_list(section_id, list_type, count, sort_type, output_format='xml')
library_data = self.get_library_list(str(section_id), list_type, count, sort_type, output_format='xml')
elif str(rating_key).isdigit():
library_data = self.get_children_list(rating_key, output_format='xml')
library_data = self.get_children_list(str(rating_key), output_format='xml')
else:
logger.warn(u"PlexPy Pmsconnect :: get_library_children called by invalid section_id or rating_key provided.")
return []
@@ -1608,15 +1614,15 @@ class PmsConnect(object):
if a.getAttribute('size') == '0':
logger.debug(u"PlexPy Pmsconnect :: No library data.")
childern_list = {'library_count': '0',
'childern_list': []
}
return childern_list
if rating_key:
library_count = helpers.get_xml_attr(xml_head[0], 'size')
else:
library_count = helpers.get_xml_attr(xml_head[0], 'totalSize')
# Get show/season info from xml_head
item_main = []
@@ -1668,7 +1674,7 @@ class PmsConnect(object):
output = {'library_count': library_count,
'childern_list': childern_list
}
return output
def get_library_details(self):
@@ -1681,7 +1687,7 @@ class PmsConnect(object):
server_library_stats = []
if server_libraries['libraries_count'] != '0':
if server_libraries and server_libraries['libraries_count'] != '0':
libraries_list = server_libraries['libraries_list']
for library in libraries_list:
@@ -1689,8 +1695,8 @@ class PmsConnect(object):
section_id = library['section_id']
children_list = self.get_library_children_details(section_id=section_id, section_type=section_type, count='1')
if children_list and children_list['library_count'] != '0':
library_stats = {'section_id': library['section_id'],
if children_list:
library_stats = {'section_id': section_id,
'section_name': library['section_name'],
'section_type': section_type,
'thumb': library['thumb'],
@@ -1783,7 +1789,7 @@ class PmsConnect(object):
except Exception as e:
logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_search_result_details: %s." % e)
return []
search_results_count = 0
search_results_list = {'movie': [],
'show': [],
@@ -1801,8 +1807,8 @@ class PmsConnect(object):
if totalSize == 0:
logger.debug(u"PlexPy Pmsconnect :: No search results.")
search_results_list = {'results_count': search_results_count,
'results_list': []
}
return search_results_list
for a in xml_head:
@@ -1907,7 +1913,7 @@ class PmsConnect(object):
if a.getAttribute('size'):
if a.getAttribute('size') == '0':
return {}
title = helpers.get_xml_attr(a, 'title2')
if a.getElementsByTagName('Directory'):
@@ -1952,34 +1958,33 @@ class PmsConnect(object):
if child_rating_key:
key = int(child_index)
children.update({key: {'rating_key': int(child_rating_key)}})
key = int(parent_index) if match_type == 'index' else parent_title
parents.update({key:
{'rating_key': int(parent_rating_key),
'children': children}
})
key = 0 if match_type == 'index' else title
key_list = {key:
{'rating_key': int(rating_key),
'children': parents },
'section_id': section_id,
'library_name': library_name
}
key_list = {key: {'rating_key': int(rating_key),
'children': parents},
'section_id': section_id,
'library_name': library_name
}
return key_list
def get_server_response(self):
# Refresh Plex remote access port mapping first
self.put_refresh_reachability()
account_data = self.get_account(output_format='xml')
try:
xml_head = account_data.getElementsByTagName('MyPlex')
except Exception as e:
logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_server_response: %s." % e)
return None
server_response = {}
for a in xml_head:
@@ -1988,5 +1993,5 @@ class PmsConnect(object):
'public_address': helpers.get_xml_attr(a, 'publicAddress'),
'public_port': helpers.get_xml_attr(a, 'publicPort')
}
return server_response

View File

@@ -32,17 +32,24 @@ class Users(object):
'users.thumb AS user_thumb',
'users.custom_avatar_url AS custom_thumb',
'COUNT(session_history.id) AS plays',
'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
session_history.paused_counter END) AS duration',
'MAX(session_history.started) AS last_seen',
'MAX(session_history.id) AS id',
'session_history_metadata.full_title AS last_watched',
'session_history_metadata.full_title AS last_played',
'session_history.ip_address',
'session_history.platform',
'session_history.player',
'session_history.rating_key',
'session_history_metadata.media_type',
'session_history_metadata.thumb',
'session_history_metadata.parent_thumb',
'session_history_metadata.grandparent_thumb',
'session_history_metadata.media_type',
'session_history.rating_key',
'session_history_metadata.parent_title',
'session_history_metadata.year',
'session_history_metadata.media_index',
'session_history_metadata.parent_media_index',
'session_history_media_info.video_decision',
'session_history_media_info.audio_decision',
'users.do_notify as do_notify',
@@ -96,15 +103,20 @@ class Users(object):
'friendly_name': item['friendly_name'],
'user_thumb': user_thumb,
'plays': item['plays'],
'duration': item['duration'],
'last_seen': item['last_seen'],
'last_watched': item['last_watched'],
'last_played': item['last_played'],
'id': item['id'],
'ip_address': item['ip_address'],
'platform': platform,
'player': item['player'],
'thumb': thumb,
'media_type': item['media_type'],
'rating_key': item['rating_key'],
'media_type': item['media_type'],
'thumb': thumb,
'parent_title': item['parent_title'],
'year': item['year'],
'media_index': item['media_index'],
'parent_media_index': item['parent_media_index'],
'video_decision': item['video_decision'],
'audio_decision': item['audio_decision'],
'do_notify': helpers.checked(item['do_notify']),
@@ -132,13 +144,18 @@ class Users(object):
'COUNT(session_history.id) AS play_count',
'session_history.platform',
'session_history.player',
'session_history_metadata.full_title AS last_watched',
'session_history.rating_key',
'session_history_metadata.full_title AS last_played',
'session_history_metadata.thumb',
'session_history_metadata.parent_thumb',
'session_history_metadata.grandparent_thumb',
'session_history_metadata.media_type',
'session_history.rating_key',
'session_history_metadata.parent_title',
'session_history_metadata.year',
'session_history_metadata.media_index',
'session_history_metadata.parent_media_index',
'session_history_media_info.video_decision',
'session_history_media_info.audio_decision',
'session_history.user',
'session_history.user_id as custom_user_id',
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE \
@@ -188,11 +205,16 @@ class Users(object):
'play_count': item['play_count'],
'platform': platform,
'player': item['player'],
'last_watched': item['last_watched'],
'last_played': item['last_played'],
'rating_key': item['rating_key'],
'thumb': thumb,
'media_type': item['media_type'],
'rating_key': item['rating_key'],
'parent_title': item['parent_title'],
'year': item['year'],
'media_index': item['media_index'],
'parent_media_index': item['parent_media_index'],
'video_decision': item['video_decision'],
'audio_decision': item['audio_decision'],
'friendly_name': item['friendly_name']
}
@@ -223,61 +245,26 @@ class Users(object):
def get_details(self, user_id=None, user=None):
from plexpy import plextv
monitor_db = database.MonitorDatabase()
try:
if str(user_id).isdigit():
query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \
'FROM users ' \
'WHERE user_id = ? '
result = monitor_db.select(query, args=[user_id])
elif user:
query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \
'FROM users ' \
'WHERE username = ? '
result = monitor_db.select(query, args=[user])
else:
result = []
except Exception as e:
logger.warn(u"PlexPy Users :: Unable to execute database query for get_details: %s." % e)
result = []
default_return = {'user_id': None,
'username': 'Local',
'friendly_name': 'Local',
'user_thumb': common.DEFAULT_USER_THUMB,
'email': '',
'is_home_user': 0,
'is_allow_sync': 0,
'is_restricted': 0,
'do_notify': 0,
'keep_history': 0
}
if result:
user_details = {}
for item in result:
if item['friendly_name']:
friendly_name = item['friendly_name']
else:
friendly_name = item['username']
if not user_id and not user:
return default_return
if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']:
user_thumb = item['custom_thumb']
elif item['user_thumb']:
user_thumb = item['user_thumb']
else:
user_thumb = common.DEFAULT_USER_THUMB
def get_user_details(user_id=user_id, user=user):
monitor_db = database.MonitorDatabase()
user_details = {'user_id': item['user_id'],
'username': item['username'],
'friendly_name': friendly_name,
'user_thumb': user_thumb,
'email': item['email'],
'is_home_user': item['is_home_user'],
'is_allow_sync': item['is_allow_sync'],
'is_restricted': item['is_restricted'],
'do_notify': item['do_notify'],
'keep_history': item['keep_history']
}
return user_details
else:
logger.warn(u"PlexPy Users :: Unable to retrieve user from local database. Requesting user list refresh.")
# Let's first refresh the user list to make sure the user isn't newly added and not in the db yet
try:
if str(user_id).isdigit():
# Refresh users
plextv.refresh_users()
query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \
'FROM users ' \
@@ -295,8 +282,8 @@ class Users(object):
logger.warn(u"PlexPy Users :: Unable to execute database query for get_details: %s." % e)
result = []
user_details = {}
if result:
user_details = {}
for item in result:
if item['friendly_name']:
friendly_name = item['friendly_name']
@@ -321,21 +308,28 @@ class Users(object):
'do_notify': item['do_notify'],
'keep_history': item['keep_history']
}
return user_details
user_details = get_user_details(user_id=user_id, user=user)
if user_details:
return user_details
else:
logger.warn(u"PlexPy Users :: Unable to retrieve user from local database. Requesting user list refresh.")
# Let's first refresh the user list to make sure the user isn't newly added and not in the db yet
plextv.refresh_users()
user_details = get_user_details(user_id=user_id, user=user)
if user_details:
return user_details
else:
logger.warn(u"PlexPy Users :: Unable to retrieve user from local database. Returning 'Local' user.")
# If there is no user data we must return something
# Use "Local" user to retain compatibility with PlexWatch database value
return {'user_id': None,
'username': 'Local',
'friendly_name': 'Local',
'user_thumb': common.DEFAULT_USER_THUMB,
'email': '',
'is_home_user': 0,
'is_allow_sync': 0,
'is_restricted': 0,
'do_notify': 0,
'keep_history': 0
}
return default_return
def get_watch_time_stats(self, user_id=None):
monitor_db = database.MonitorDatabase()


@@ -1,2 +1,2 @@
PLEXPY_VERSION = "master"
PLEXPY_RELEASE_VERSION = "1.3.0"
PLEXPY_RELEASE_VERSION = "1.3.9"


@@ -37,10 +37,15 @@ def start_thread():
def run():
from websocket import create_connection
uri = 'ws://%s:%s/:/websockets/notifications' % (
plexpy.CONFIG.PMS_IP,
plexpy.CONFIG.PMS_PORT
)
if plexpy.CONFIG.PMS_SSL and plexpy.CONFIG.PMS_URL[:5] == 'https':
uri = plexpy.CONFIG.PMS_URL.replace('https://', 'wss://') + '/:/websockets/notifications'
secure = ' secure'
else:
uri = 'ws://%s:%s/:/websockets/notifications' % (
plexpy.CONFIG.PMS_IP,
plexpy.CONFIG.PMS_PORT
)
secure = ''
# Set authentication token (if one is available)
if plexpy.CONFIG.PMS_TOKEN:
@@ -52,7 +57,7 @@ def run():
# Try and open the websocket connection - if it fails after 15 retries, fall back to polling
while not ws_connected and reconnects <= 15:
try:
logger.info(u'PlexPy WebSocket :: Opening websocket, connection attempt %s.' % str(reconnects + 1))
logger.info(u'PlexPy WebSocket :: Opening%s websocket, connection attempt %s.' % (secure, str(reconnects + 1)))
ws = create_connection(uri)
reconnects = 0
ws_connected = True

File diff suppressed because it is too large.


@@ -15,12 +15,13 @@
import os
import sys
import cherrypy
import plexpy
from plexpy import logger
from plexpy.helpers import create_https_certificates
from plexpy.webserve import WebInterface
def initialize(options):
@@ -31,17 +32,15 @@ def initialize(options):
https_key = options['https_key']
if enable_https:
# If either the HTTPS certificate or key do not exist, try to make
# self-signed ones.
if not (https_cert and os.path.exists(https_cert)) or not (https_key and os.path.exists(https_key)):
# If either the HTTPS certificate or key do not exist, try to make self-signed ones.
if plexpy.CONFIG.HTTPS_CREATE_CERT and \
(not (https_cert and os.path.exists(https_cert)) or not (https_key and os.path.exists(https_key))):
if not create_https_certificates(https_cert, https_key):
logger.warn("Unable to create certificate and key. Disabling " \
"HTTPS")
logger.warn("Unable to create certificate and key. Disabling HTTPS")
enable_https = False
if not (os.path.exists(https_cert) and os.path.exists(https_key)):
logger.warn("Disabled HTTPS because of missing certificate and " \
"key.")
logger.warn("Disabled HTTPS because of missing certificate and key.")
enable_https = False
options_dict = {
@@ -63,13 +62,17 @@ def initialize(options):
protocol = "http"
logger.info("Starting PlexPy web server on %s://%s:%d/", protocol,
options['http_host'], options['http_port'])
cherrypy.config.update(options_dict)
conf = {
'/': {
'tools.staticdir.root': os.path.join(plexpy.PROG_DIR, 'data'),
'tools.proxy.on': options['http_proxy'] # pay attention to X-Forwarded-Proto header
'tools.proxy.on': options['http_proxy'], # pay attention to X-Forwarded-Proto header
'tools.gzip.on': True,
'tools.gzip.mime_types': ['text/html', 'text/plain', 'text/css',
'text/javascript', 'application/json',
'application/javascript']
},
'/interfaces': {
'tools.staticdir.on': True,
@@ -87,15 +90,15 @@ def initialize(options):
'tools.staticdir.on': True,
'tools.staticdir.dir': "js"
},
'/favicon.ico': {
'tools.staticfile.on': True,
'tools.staticfile.filename': os.path.join(os.path.abspath(
os.curdir), "images" + os.sep + "favicon.ico")
},
'/cache': {
'tools.staticdir.on': True,
'tools.staticdir.dir': plexpy.CONFIG.CACHE_DIR
},
'/favicon.ico': {
'tools.staticfile.on': True,
'tools.staticfile.filename': os.path.abspath(os.path.join(plexpy.PROG_DIR, 'data/interfaces/default/images/favicon.ico'))
}
}
if options['http_password']: