Compare commits
159 Commits
v2.1.0-beta ... v2.1.11-beta
| SHA1 |
|---|
| c19cc858bd |
| 668913fd60 |
| 50c5407a46 |
| 939755d3b7 |
| 54f4696713 |
| c85af521fe |
| 917d19db85 |
| 7292f25eb9 |
| 22a2ad4bc7 |
| 95e56f5ea5 |
| ed24232a0a |
| 15225faee7 |
| 041a35a35a |
| 6d365c174a |
| b5f2f55972 |
| ac207260c8 |
| e93808381c |
| 7acb8f7dc5 |
| ba9f4a1f9e |
| 8502c28e25 |
| 10add90451 |
| ddb7fa04ca |
| e21a13b7ff |
| 1245b4fbd3 |
| 94b00c75c2 |
| 2edcf26110 |
| a9fdf73e8b |
| 4884cee309 |
| b3c7256bcf |
| 2c9a7ced13 |
| aa365eb6a3 |
| 2366a8811b |
| 53aafbd19e |
| d5bffc374c |
| 5cd5c36d8c |
| 7f9e8f6211 |
| f743a817ba |
| 8e4aba7ed4 |
| 8c0ef75d4c |
| 76c4b3bb71 |
| 112b1c7984 |
| c22a2513e3 |
| f336782fc1 |
| c19afa06de |
| e003850d31 |
| 23cf790079 |
| e7f930bd0f |
| 348707b6b9 |
| 7ad78b4536 |
| a408a62234 |
| a1e9e7e87f |
| fa99f6e684 |
| 11e9bd2d54 |
| 50165af4b7 |
| 5dd22c23f2 |
| 79b45c1c46 |
| af917c4915 |
| c3238b5a83 |
| 908dbc3243 |
| 14b6df8c25 |
| d3e53cb97f |
| 445eea5c1e |
| c5918d7d6c |
| b8e025193e |
| 85772cdd83 |
| 7f2bab3082 |
| 8185cc1c40 |
| 63bfe96124 |
| 88b640f5e2 |
| 26b2342956 |
| 883280be09 |
| 7c76b0678a |
| e91ba46265 |
| 62104c95e3 |
| 178bd89e7c |
| 365260401c |
| 04029bd4d3 |
| 9cf1128712 |
| 2eebce9f6c |
| b08e071b81 |
| 7778d84728 |
| 8e3fe7bfa2 |
| 6f22c823be |
| 34d7c67813 |
| 862ed5ce9f |
| 84406e6797 |
| 19cf567366 |
| 8af697a157 |
| 76122bea5d |
| 1a12422908 |
| 2df9f0b48b |
| 8540b80e57 |
| 8ad565a444 |
| f91b6481b3 |
| 826db082c9 |
| f3d64a7886 |
| 031d078bc2 |
| 04fcd78102 |
| 53d1e0f541 |
| 9719f0b25b |
| 6d1d5bc822 |
| 0d7bbe044d |
| b1dc5816a4 |
| 476011a783 |
| e038c57c4c |
| a989a53750 |
| d8cfdea704 |
| ed4722c4ce |
| 17ab5f05ed |
| 71ab2248d7 |
| 4fb4410552 |
| a915d2333f |
| aaf5a18251 |
| b90026801b |
| e9676e3651 |
| c16d3288d8 |
| 0d7ade8ca4 |
| 87b1118e98 |
| 9f6422cc8d |
| df1a42a4ee |
| 6554136a8f |
| 81e04269fd |
| b6c6590a12 |
| 136260a822 |
| 5710bcb43c |
| 30bc3f8a66 |
| e0e7d68df2 |
| cf73639281 |
| 008e04d5cf |
| 5f7991665c |
| 5e000162c6 |
| ea1aba2c87 |
| f321bb869c |
| abe496668a |
| 9cefc7f701 |
| 1d3cd431eb |
| 8f8318da6d |
| 36ce751875 |
| 858ea33680 |
| eee759d0d0 |
| dbe3b492fd |
| 4e4fde2e9a |
| 5283126608 |
| df72ecebf5 |
| d316aa34e2 |
| 405aec8bb8 |
| 4a62f8c395 |
| eabea2deeb |
| 3742021dcc |
| 9c4219b42e |
| f624908302 |
| ab9132cdd4 |
| 0186363753 |
| 653ad36f17 |
| 5073f82d53 |
| 833937eced |
| 32df79bb83 |
| fabced9942 |
| 8aa34321c9 |
API.md

@@ -32,6 +32,21 @@ General optional parameters:

## API methods

### add_newsletter_config
Add a new newsletter agent.

```
Required parameters:
    agent_id (int): The newsletter type to add

Optional parameters:
    None

Returns:
    None
```
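For context, newsletter commands like this one are called over Tautulli's HTTP API. A minimal sketch, assuming the standard `/api/v2` endpoint with `apikey` and `cmd` query parameters (the base URL and API key below are placeholders):

```python
import requests

# Placeholders: point this at your own Tautulli instance and API key.
TAUTULLI_URL = "http://localhost:8181/api/v2"
API_KEY = "YOUR_API_KEY"

# agent_id selects which newsletter type to add (0 = Recently Added).
resp = requests.get(TAUTULLI_URL, params={
    "apikey": API_KEY,
    "cmd": "add_newsletter_config",
    "agent_id": 0,
})
print(resp.json())
```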
### add_notifier_config
Add a new notification agent.

@@ -93,27 +108,30 @@ Returns:

Delete and recreate the cache directory.


### delete_image_cache
Delete and recreate the image cache directory.


### delete_imgur_poster
Delete the Imgur poster.
### delete_hosted_images
Delete the images uploaded to image hosting services.

```
Required parameters:
    None

Optional parameters:
    rating_key (int): 1234
        (Note: Must be the movie, show, season, artist, or album rating key)
Optional parameters:
    None
    service (str): 'imgur' or 'cloudinary'
    delete_all (bool): 'true' to delete all images from the service

Returns:
    json:
        {"result": "success",
         "message": "Deleted Imgur poster."}
         "message": "Deleted hosted images from Imgur."}
```
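A sketch of calling the renamed command with its new optional parameters (same placeholder base URL and API key, `/api/v2` endpoint assumed):

```python
import requests

# Delete the hosted images for one item; drop rating_key and send
# delete_all='true' instead to clear everything uploaded to the service.
resp = requests.get("http://localhost:8181/api/v2", params={
    "apikey": "YOUR_API_KEY",
    "cmd": "delete_hosted_images",
    "rating_key": 1234,
    "service": "imgur",
})
# The documented result/message pair is returned in the JSON body.
print(resp.json())
```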
### delete_image_cache
Delete and recreate the image cache directory.


### delete_library
Delete a library section from Tautulli. Also erases all history for the library.

@@ -191,6 +209,36 @@ Returns:
```


### delete_newsletter
Remove a newsletter from the database.

```
Required parameters:
    newsletter_id (int): The newsletter to delete

Optional parameters:
    None

Returns:
    None
```
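A small helper makes these one-off calls less repetitive; a sketch under the same endpoint assumption (and relying on the proper HTTP status codes added in v2.1.8-beta):

```python
import requests

def tautulli_cmd(cmd, **params):
    """Call a Tautulli API command and return the parsed JSON body.

    Assumes the standard /api/v2 endpoint; URL and API key are placeholders.
    """
    params.update({"apikey": "YOUR_API_KEY", "cmd": cmd})
    resp = requests.get("http://localhost:8181/api/v2", params=params)
    resp.raise_for_status()  # errors surface as HTTP status codes (see v2.1.8-beta notes)
    return resp.json()

# Remove newsletter ID 1 from the database.
tautulli_cmd("delete_newsletter", newsletter_id=1)
```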
### delete_newsletter_log
Delete the Tautulli newsletter logs.

```
Required parameters:
    None

Optional parameters:
    None

Returns:
    None
```


### delete_notification_log
Delete the Tautulli notification logs.

@@ -386,6 +434,7 @@ Returns:
         "optimized_version_profile": "",
         "optimized_version_title": "",
         "originally_available_at": "2016-04-24",
         "original_title": "",
         "parent_media_index": "6",
         "parent_rating_key": "153036",
         "parent_thumb": "/library/metadata/153036/thumb/1503889210",

@@ -630,6 +679,7 @@ Returns:
         "full_title": "Game of Thrones - The Red Woman",
         "grandparent_rating_key": 351,
         "grandparent_title": "Game of Thrones",
         "original_title": "",
         "group_count": 1,
         "group_ids": "1124",
         "id": 1124,

@@ -916,9 +966,9 @@ Optional parameters:

Returns:
    json:
        [{"section_id": 1, "section_name": "Movies"},
         {"section_id": 7, "section_name": "Music"},
         {"section_id": 2, "section_name": "TV Shows"},
        [{"section_id": 1, "section_name": "Movies", "section_type": "movie"},
         {"section_id": 7, "section_name": "Music", "section_type": "artist"},
         {"section_id": 2, "section_name": "TV Shows", "section_type": "show"},
         {...}
         ]
```

@@ -1124,6 +1174,7 @@ Returns:
             }
         ],
         "media_type": "episode",
         "original_title": "",
         "originally_available_at": "2016-04-24",
         "parent_media_index": "6",
         "parent_rating_key": "153036",

@@ -1166,6 +1217,109 @@ Returns:
```


### get_newsletter_config
Get the configuration for an existing newsletter agent.

```
Required parameters:
    newsletter_id (int): The newsletter config to retrieve

Optional parameters:
    None

Returns:
    json:
        {"id": 1,
         "agent_id": 0,
         "agent_name": "recently_added",
         "agent_label": "Recently Added",
         "friendly_name": "",
         "id_name": "",
         "cron": "0 0 * * 1",
         "active": 1,
         "subject": "Recently Added to {server_name}! ({end_date})",
         "body": "View the newsletter here: {newsletter_url}",
         "message": "",
         "config": {"custom_cron": 0,
                    "filename": "newsletter_{newsletter_uuid}.html",
                    "formatted": 1,
                    "incl_libraries": ["1", "2"],
                    "notifier_id": 1,
                    "save_only": 0,
                    "time_frame": 7,
                    "time_frame_units": "days"
                    },
         "email_config": {...},
         "config_options": [{...}, ...],
         "email_config_options": [{...}, ...]
         }
```
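To inspect a newsletter's schedule or included libraries programmatically, the config can be fetched like this (a sketch; the `response`/`data` envelope is the usual Tautulli API wrapper and is an assumption here):

```python
import requests

resp = requests.get("http://localhost:8181/api/v2", params={  # placeholder URL/key
    "apikey": "YOUR_API_KEY",
    "cmd": "get_newsletter_config",
    "newsletter_id": 1,
})
# The documented fields are nested under the usual response envelope (assumed).
config = resp.json()["response"]["data"]
print(config["agent_label"], config["cron"], config["config"]["incl_libraries"])
```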
### get_newsletter_log
Get the data on the Tautulli newsletter logs table.

```
Required parameters:
    None

Optional parameters:
    order_column (str): "timestamp", "newsletter_id", "agent_name", "notify_action",
                        "subject_text", "start_date", "end_date", "uuid"
    order_dir (str): "desc" or "asc"
    start (int): Row to start from, 0
    length (int): Number of items to return, 25
    search (str): A string to search for, "Telegram"

Returns:
    json:
        {"draw": 1,
         "recordsTotal": 1039,
         "recordsFiltered": 163,
         "data":
            [{"agent_id": 0,
              "agent_name": "recently_added",
              "end_date": "2018-03-18",
              "id": 7,
              "newsletter_id": 1,
              "notify_action": "on_cron",
              "start_date": "2018-03-05",
              "subject_text": "Recently Added to Plex (Winterfell-Server)! (2018-03-18)",
              "success": 1,
              "timestamp": 1462253821,
              "uuid": "7fe4g65i"
              },
             {...},
             {...}
             ]
         }
```
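The log table takes the same DataTables-style paging parameters as the web UI; a sketch (placeholder URL and key):

```python
import requests

resp = requests.get("http://localhost:8181/api/v2", params={
    "apikey": "YOUR_API_KEY",
    "cmd": "get_newsletter_log",
    "order_column": "timestamp",
    "order_dir": "desc",
    "start": 0,    # first row to return
    "length": 25,  # page size
    "search": "Recently Added",
})
print(resp.json())
```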
### get_newsletters
Get a list of configured newsletters.

```
Required parameters:
    None

Optional parameters:
    None

Returns:
    json:
        [{"id": 1,
          "agent_id": 0,
          "agent_name": "recently_added",
          "agent_label": "Recently Added",
          "friendly_name": "",
          "cron": "0 0 * * 1",
          "active": 1
          }
         ]
```
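Listing the configured newsletters is enough to drive a simple status check; a sketch (placeholder URL and key, usual response envelope assumed):

```python
import requests

resp = requests.get("http://localhost:8181/api/v2", params={
    "apikey": "YOUR_API_KEY",
    "cmd": "get_newsletters",
})
for newsletter in resp.json()["response"]["data"]:
    state = "active" if newsletter["active"] else "inactive"
    print(newsletter["id"], newsletter["agent_label"], newsletter["cron"], state)
```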
### get_notification_log
Get the data on the Tautulli notification logs table.

@@ -1174,8 +1328,8 @@ Required parameters:
    None

Optional parameters:
    order_column (str): "timestamp", "agent_name", "notify_action",
                        "subject_text", "body_text", "script_args"
    order_column (str): "timestamp", "notifier_id", "agent_name", "notify_action",
                        "subject_text", "body_text",
    order_dir (str): "desc" or "asc"
    start (int): Row to start from, 0
    length (int): Number of items to return, 25

@@ -1188,15 +1342,14 @@ Returns:
         "recordsFiltered": 163,
         "data":
            [{"agent_id": 13,
              "agent_name": "Telegram",
              "body_text": "Game of Thrones - S06E01 - The Red Woman [Transcode].",
              "agent_name": "telegram",
              "body_text": "DanyKhaleesi69 started playing The Red Woman.",
              "id": 1000,
              "notify_action": "play",
              "poster_url": "http://i.imgur.com/ZSqS8Ri.jpg",
              "notify_action": "on_play",
              "rating_key": 153037,
              "script_args": "[]",
              "session_key": 147,
              "subject_text": "Tautulli (Winterfell-Server)",
              "success": 1,
              "timestamp": 1462253821,
              "user": "DanyKhaleesi69",
              "user_id": 8008135

@@ -1629,6 +1782,7 @@ Returns:
         "library_name": "",
         "media_index": "1",
         "media_type": "episode",
         "original_title": "",
         "parent_media_index": "6",
         "parent_rating_key": "153036",
         "parent_thumb": "/library/metadata/153036/thumb/1462175062",

@@ -1777,6 +1931,69 @@ Returns:
```


### get_stream_data
Get the stream details from history or current stream.

```
Required parameters:
    row_id (int): The row ID number for the history item, OR
    session_key (int): The session key of the current stream

Optional parameters:
    None

Returns:
    json:
        {"aspect_ratio": "2.35",
         "audio_bitrate": 231,
         "audio_channels": 6,
         "audio_codec": "aac",
         "audio_decision": "transcode",
         "bitrate": 2731,
         "container": "mp4",
         "current_session": "",
         "grandparent_title": "",
         "media_type": "movie",
         "optimized_version": "",
         "optimized_version_profile": "",
         "optimized_version_title": "",
         "original_title": "",
         "pre_tautulli": "",
         "quality_profile": "1.5 Mbps 480p",
         "stream_audio_bitrate": 203,
         "stream_audio_channels": 2,
         "stream_audio_codec": "aac",
         "stream_audio_decision": "transcode",
         "stream_bitrate": 730,
         "stream_container": "mkv",
         "stream_container_decision": "transcode",
         "stream_subtitle_codec": "",
         "stream_subtitle_decision": "",
         "stream_video_bitrate": 527,
         "stream_video_codec": "h264",
         "stream_video_decision": "transcode",
         "stream_video_framerate": "24p",
         "stream_video_height": 306,
         "stream_video_resolution": "SD",
         "stream_video_width": 720,
         "subtitle_codec": "",
         "subtitles": "",
         "synced_version": "",
         "synced_version_profile": "",
         "title": "Frozen",
         "transcode_hw_decoding": "",
         "transcode_hw_encoding": "",
         "video_bitrate": 2500,
         "video_codec": "h264",
         "video_decision": "transcode",
         "video_framerate": "24p",
         "video_height": 816,
         "video_resolution": "1080",
         "video_width": 1920
         }
```
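Either a history `row_id` or a live `session_key` selects the stream; a sketch that compares the source and stream properties from the returned data (placeholder URL and key, response envelope assumed):

```python
import requests

resp = requests.get("http://localhost:8181/api/v2", params={
    "apikey": "YOUR_API_KEY",
    "cmd": "get_stream_data",
    "row_id": 1024,  # or: "session_key": 147 for a current stream
})
stream = resp.json()["response"]["data"]
print("container:", stream["container"], "->", stream["stream_container"])
print("video:", stream["video_decision"], "audio:", stream["audio_decision"])
```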
### get_stream_type_by_top_10_platforms
Get graph data by stream type by top 10 platforms.

@@ -2215,6 +2432,23 @@ Returns:
```


### notify_newsletter
Send a newsletter using Tautulli.

```
Required parameters:
    newsletter_id (int): The ID number of the newsletter agent

Optional parameters:
    subject (str): The subject of the newsletter
    body (str): The body of the newsletter
    message (str): The message of the newsletter

Returns:
    None
```
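A newsletter can therefore be triggered outside its cron schedule, optionally overriding the subject and body; a sketch (placeholder URL and key):

```python
import requests

requests.get("http://localhost:8181/api/v2", params={
    "apikey": "YOUR_API_KEY",
    "cmd": "notify_newsletter",
    "newsletter_id": 1,
    "subject": "Recently Added to {server_name}! ({end_date})",
    "body": "View the newsletter here: {newsletter_url}",
})
```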
### notify_recently_added
Send a recently added notification using Tautulli.

@@ -2244,8 +2478,12 @@ Required parameters:
    rating_key (str): 54321

Optional parameters:
    width (str): 150
    height (str): 255
    width (str): 300
    height (str): 450
    opacity (str): 25
    background (str): 282828
    blur (str): 3
    img_format (str): png
    fallback (str): "poster", "cover", "art"
    refresh (bool): True or False whether to refresh the image cache
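These look like the poster options for the image proxy command (`pms_image_proxy`; the command name is not visible in this hunk), with the default size raised from 150x255 to 300x450. A hedged sketch of fetching a blurred background image with the new defaults (placeholder URL and key; the command is assumed to return raw image bytes rather than JSON):

```python
import requests

resp = requests.get("http://localhost:8181/api/v2", params={
    "apikey": "YOUR_API_KEY",
    "cmd": "pms_image_proxy",  # assumed command name for these parameters
    "rating_key": "54321",
    "width": "300",
    "height": "450",
    "opacity": "25",
    "background": "282828",
    "blur": "3",
    "img_format": "png",
    "fallback": "poster",
    "refresh": "true",
})
with open("poster.png", "wb") as f:
    f.write(resp.content)  # image bytes, not JSON (assumption)
```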
@@ -2312,7 +2550,7 @@ Returns:


### set_mobile_device_config
Configure an exisitng notificaiton agent.
Configure an existing notification agent.

```
Required parameters:

@@ -2326,8 +2564,24 @@ Returns:
```


### set_newsletter_config
Configure an existing newsletter agent.

```
Required parameters:
    newsletter_id (int): The newsletter config to update
    agent_id (int): The newsletter type of the newsletter

Optional parameters:
    Pass all the config options for the agent with the 'newsletter_config_' and 'newsletter_email_' prefix.

Returns:
    None
```
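Agent-specific options are passed flat on the query string using the prefixes described above; a sketch using option names taken from the get_newsletter_config example (placeholder URL and key):

```python
import requests

requests.get("http://localhost:8181/api/v2", params={
    "apikey": "YOUR_API_KEY",
    "cmd": "set_newsletter_config",
    "newsletter_id": 1,
    "agent_id": 0,
    # Prefixed config options; names mirror the "config" keys shown for
    # get_newsletter_config and are illustrative.
    "newsletter_config_time_frame": 7,
    "newsletter_config_time_frame_units": "days",
    "newsletter_config_save_only": 0,
})
```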
### set_notifier_config
Configure an exisitng notificaiton agent.
Configure an existing notification agent.

```
Required parameters:

CHANGELOG.md

@@ -1,5 +1,133 @@

# Changelog

## v2.1.11-beta (2018-06-02)

* Monitoring:
  * Fix: Activity progress bar not updating in some cases.
  * Fix: Monitor Remote Access setting disabled due to Plex Media Server API changes.
  * Change: Improved logic for grouping history items without being successive plays.
* Notifications:
  * New: Added filename to notification parameters.
* Other:
  * Fix: Update metadata failing for tracks without track numbers.


## v2.1.10-beta (2018-05-28)

* Monitoring:
  * Fix: Improved monitoring of live tv sessions.
  * Change: Use track artist instead of album artist.
* Notifications:
  * New: Added timestamp to Discord notification embeds. (Thanks @samwiseg00)
  * New: Enable notifications for "clip" media types.
  * Fix: Actually add the "live" notification parameter.
  * Change: Update Twitter for 280 characters.
  * Change: Use HTTPS url for Cloudinary images.
* Newsletters:
  * Fix: Artist summaries not showing up on newsletter cards.
  * Change: Do not send the newsletter if the template fails to render.


## v2.1.9 (2018-05-21)

* Notifications:
  * New: Added "live" to notification parameters.


## v2.1.8-beta (2018-05-19)

* Newsletters:
  * New: Added authentication options for self-hosted newsletters.
  * Change: Check if the Tautulli footer has been removed in custom newsletter templates.
* Notifications:
  * Fix: Cloudinary images not working for Twitter notifications.
* API:
  * Fix: Return proper HTTP status codes for errors.


## v2.1.7-beta (2018-05-13)

* Newsletters:
  * New: Option to toggle between inline or internal CSS style templates.
  * New: Button to delete all uploaded images from Imgur/Cloudinary.
  * Fix: Long titles overflowing the newsletter cards.
  * Change: Self-hosted images on newsletters to use the /image endpoint instead of proxying through /newsletter/image.
  * Change: Strip whitespace from newsletter for smaller file size before sending to email.
* API:
  * New: Added get_stream_data command to API.
  * New: Added newsletter API commands to documentation.


## v2.1.6-beta (2018-05-09)

* Newsletters:
  * Change: Setting to specify static URL ID name instead of using the newsletter ID number.
  * Change: Reorganize newsletter config options.


## v2.1.5-beta (2018-05-07)

* Newsletters:
  * New: Added setting for a custom newsletter template folder.
  * New: Added option to enable static newsletter URLs to retrieve the last sent scheduled newsletter.
  * New: Added ability to change the newsletter output directory and filenames.
  * New: Added option to save the newsletter file without sending it to a notification agent.
  * Fix: Check for disabled image hosting setting.
  * Fix: Cache newsletter images when refreshing the page.
  * Fix: Refresh image from the Plex server when uploading to image hosting.
  * Change: Allow all image hosting options with self-hosted newsletters.
* UI:
  * Change: Don't retrieve recently added on the homepage if the Plex Cloud server is sleeping.
* Other:
  * Fix: Imgur database upgrade migration.


## v2.1.4 (2018-05-05)

* Newsletters:
  * Fix: Newsletter URL without an HTTP root.


## v2.1.3-beta (2018-05-04)

* Newsletters:
  * Fix: HTTP root doubled in newsletter URL.
  * Fix: Configuration would not open with failed hostname resolution.
  * Fix: Schedule one day off when using weekday names in cron.
  * Fix: Images not refreshing when changed in Plex.
  * Fix: Cloudinary upload with non-ASCII image titles.
* Other:
  * Fix: Potential XSS vulnerability in search.


## v2.1.2-beta (2018-05-01)

* Newsletters:
  * New: Added Cloudinary option for image hosting.
* Notifications:
  * New: Added Message-ID to Email header (Thanks @Dam64)
  * Fix: Posters not showing up on Twitter with self-hosted images.
  * Fix: Incorrect action parameter for new device notifications.
  * Change: Hardcode Pushover sound list instead of fetching the list every time.
* API:
  * Fix: Success result for empty response data.
  * Change: Do not send notification when checking for Tautulli updates via the API.


## v2.1.1-beta (2018-04-11)

* Monitoring:
  * Fix: Live TV transcoding showing incorrectly as direct play.
* Newsletters:
  * New: Added week number as parameter. (Thanks @samip5)
  * Fix: Fallback to cover art on the newsletter cards.
  * Change: Option to set newsletter time frame by calendar days or hours.
* Notifications:
  * New: Added week number as parameter. (Thanks @samip5)
* Other:
  * New: Added plexapi library for custom scripts.


## v2.1.0-beta (2018-04-07)

* Newsletters:

@@ -4,12 +4,12 @@

If you think you can contribute code to the Tautulli repository, do not hesitate to submit a pull request.

### Branches
All pull requests should be based on the `dev` branch, to minimize cross merges. When you want to develop a new feature, clone the repository with `git clone origin/dev -b FEATURE_NAME`. Use meaningful commit messages.
All pull requests should be based on the `nightly` branch, to minimize cross merges. When you want to develop a new feature, clone the repository with `git clone origin/nightly -b FEATURE_NAME`. Use meaningful commit messages.
### Python Code

#### Compatibility
The code should work with Python 2.7. Note that Tautulli runs on different platforms, including Network Attached Storage devices such as Synology.
The code should work with Python 2.7. Note that Tautulli runs on many different platforms.

Re-use existing code. Do not hesitate to add logging in your code. You can use the logger module `plexpy.logger.*` for this. Web requests are invoked via `plexpy.request.*` and derived ones. Use these methods to automatically add proper and meaningful error handling.

@@ -29,13 +29,10 @@ Although Tautulli did not adapt a code convention in the past, we try to follow

#### Documentation
Document your code. Use docstrings. See [PEP-257](https://www.python.org/dev/peps/pep-0257/) for more information.

#### Continuous Integration
Tautulli has a configuration file for [travis-ci](https://travis-ci.org/). You can add your forked repo to Travis to have it check your code against PEP8, PyLint, and PyFlakes for you. Your pull request will show a green check mark or a red cross on each tested commit, depending on whether linting passes.

### HTML/Template code

#### Compatibility
HTML5 compatible browsers are targetted. There is no specific mobile version of Tautulli yet.
HTML5 compatible browsers are targeted.

#### Conventions
* 4 space indentation

@@ -27,9 +27,9 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp

## Preview

* [Full preview gallery available on our website](http://tautulli.com)
* [Full preview gallery available on our website](https://tautulli.com)



## Installation and Support

@@ -292,6 +292,7 @@ ${next.modalIncludes()}
<script src="${http_root}js/pnotify.custom.min.js"></script>
<script src="${http_root}js/script.js${cache_param}"></script>
<script src="${http_root}js/jquery.qrcode.min.js"></script>
<script src="${http_root}js/jquery.tripleclick.min.js"></script>
% if _session['user_group'] == 'admin' and BROWSER_NOTIFIERS:
<script src="${http_root}js/ajaxNotifications.js"></script>
% endif
@@ -69,7 +69,7 @@ DOCUMENTATION :: END
% endif
<tr>
    <td>Platform:</td>
    <td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
    <td>${common.PLATFORM} ${common.PLATFORM_RELEASE} (${common.PLATFORM_VERSION + (' - {}'.format(common.PLATFORM_LINUX_DISTRO) if common.PLATFORM_LINUX_DISTRO else '')})</td>
</tr>
<tr>
    <td>Python Version:</td>
@@ -78,7 +78,7 @@ DOCUMENTATION :: END
<tr>
    <td class="top-line">Resources:</td>
    <td class="top-line">
        <a class="no-highlight" href="${anon_url('http://tautulli.com')}" target="_blank">Tautulli Website</a> |
        <a class="no-highlight" href="${anon_url('https://tautulli.com')}" target="_blank">Tautulli Website</a> |
        <a class="no-highlight" href="${anon_url('https://github.com/%s/%s' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO))}" target="_blank">GitHub Source</a> |
        <a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/%s/%s-Issues' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO))}" data-id="issue">GitHub Issues</a> |
        <a class="no-highlight" href="${anon_url('https://github.com/%s/%s-Wiki' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO))}" target="_blank">GitHub Wiki</a> |
@@ -70,6 +70,7 @@ div.form-control .selectize-input {
    background-color: #555;
    border-radius: 3px;
    transition: background-color .3s;
    height: 32px !important;
}
.react-selectize.root-node .react-selectize-control,
.selectize-control.form-control .selectize-input {
@@ -2934,6 +2935,7 @@ a .home-platforms-list-cover-face:hover
}
.stacked-configs > li > span > a.toggle-left,
.stacked-configs > li > span > span.toggle-left {
    float: left;
    color: #444;
    padding-right: 8px;
}
@@ -2944,16 +2946,6 @@ a .home-platforms-list-cover-face:hover
.stacked-configs > li > span > span.active {
    color: #f9be03;
}
.stacked-configs > li.new-notification-agent,
.stacked-configs > li.notification-agent,
.stacked-configs > li.add-notification-agent,
.stacked-configs > li.new-newsletter-agent,
.stacked-configs > li.newsletter-agent,
.stacked-configs > li.add-newsletter-agent,
.stacked-configs > li.mobile-device,
.stacked-configs > li.add-mobile-device {
    cursor: pointer;
}
.stacked-configs > li.mobile-device > span > a.toggle-left,
.stacked-configs > li.mobile-device > span > span.toggle-left {
    color: #999;
@@ -3524,8 +3516,7 @@ a.no-highlight:hover {
}
.login-logo {
    margin: 0 auto 50px auto;
    width: 340px;
    height: 100px;
    text-align: center;
}
.login-container .form-group {
    margin-bottom: 20px;
@@ -4046,6 +4037,19 @@ a:hover .overlay-refresh-image:hover {
    -webkit-appearance: none;
    margin: 0;
}
.newsletter-time_frame .input-group-addon {
    height: 32px;
    width: 52px;
    margin-top: 5px;
    line-height: 1.42857143;
}
.newsletter-time_frame input.form-control {
    width: calc(50% - 37px);
}
.newsletter-time_frame select.form-control {
    width: calc(50% - 15px);
    height: 32px;
}
.newsletter-loader-container {
    font-family: 'Open Sans', Arial, sans-serif;
    position: absolute;
@@ -4079,4 +4083,16 @@ a:hover .overlay-refresh-image:hover {
}
a[data-tab-destination] {
    cursor: pointer;
}
}
.modal-config-section {
    margin-top: 10px !important;
    padding-top: 10px;
    border-top: 1px solid #444;
}
.newsletter-logo {
    margin: 0 auto 50px auto;
    text-align: center;
}
.pointer {
    cursor: pointer;
}
@@ -387,8 +387,8 @@ DOCUMENTATION :: END
<a href="${grandparent_href}" title="${data['grandparent_title']}">${data['grandparent_title']}</a>
- <a href="${href}" title="${data['title']}">${data['title']}</a>
% elif data['media_type'] == 'track':
<a id="metadata-grandparent_title-${sk}" href="${grandparent_href}" title="${data['grandparent_title']}">${data['grandparent_title']}</a>
- <a id="metadata-title-${sk}" href="${href}" title="${data['title']}">${data['title']}</a>
<a id="metadata-title-${sk}" href="${href}" title="${data['title']}">${data['title']}</a>
- <a id="metadata-grandparent_title-${sk}" href="${grandparent_href}" title="${data['original_title'] or data['grandparent_title']}">${data['original_title'] or data['grandparent_title']}</a>
% elif data['media_type'] == 'photo':
<span title="${data['parent_title']}">${data['parent_title']}</span>
% elif data['media_type'] == 'clip':
@@ -5,13 +5,14 @@
|
||||
</%def>
|
||||
|
||||
<%def name="body()">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
<div class="container-fluid">
|
||||
% for section in config['home_sections']:
|
||||
% if section == 'current_activity':
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div class="padded-header" id="current-activity-header">
|
||||
<h3><span id="sessions-shortcut">Activity</span>
|
||||
<h3><span id="sessions-xml">Activity</span>
|
||||
<small>
|
||||
<span id="currentActivityHeader" style="display: none;">
|
||||
Streams: <span id="currentActivityHeader-streams"></span> |
|
||||
@@ -22,9 +23,10 @@
|
||||
</h3>
|
||||
</div>
|
||||
<div id="currentActivity">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted" id="dashboard-checking-activity"><i class="fa fa-refresh fa-spin"></i> Checking for activity...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div id="dashboard-no-activity" class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% else:
|
||||
<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
@@ -133,7 +135,17 @@
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div id="recentlyAdded" style="margin-right: -15px;">
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% else:
|
||||
<div class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
Check the <a href="logs">logs</a> and verify your server connection in the <a href="settings#tab_tabs-plex_media_server">settings</a>.
|
||||
% endif
|
||||
</div>
|
||||
% endif
|
||||
<br>
|
||||
</div>
|
||||
</div>
|
||||
@@ -220,10 +232,10 @@
|
||||
</%def>
|
||||
|
||||
<%def name="javascriptIncludes()">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
<script src="${http_root}js/moment-with-locale.js"></script>
|
||||
<script src="${http_root}js/jquery.scrollbar.min.js"></script>
|
||||
<script src="${http_root}js/jquery.mousewheel.min.js"></script>
|
||||
<script src="${http_root}js/jquery.tripleclick.min.js"></script>
|
||||
<script>
|
||||
var date_format = 'YYYY-MM-DD';
|
||||
var time_format = 'hh:mm a';
|
||||
@@ -252,7 +264,6 @@
|
||||
});
|
||||
}
|
||||
</script>
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
% if 'current_activity' in config['home_sections'] and PLEX_SERVER_UP:
|
||||
<script>
|
||||
var defaultHandler = {
|
||||
@@ -378,8 +389,8 @@
|
||||
$('#background-' + key).css('background-image', 'url(pms_image_proxy?img=' + s.art + '&width=500&height=280&opacity=40&background=282828&blur=3&fallback=art&refresh=true)');
|
||||
$('#metadata-grandparent_title-' + key)
|
||||
.attr('href', 'info?rating_key=' + s.grandparent_rating_key)
|
||||
.attr('title', s.grandparent_title)
|
||||
.text(s.grandparent_title);
|
||||
.attr('title', s.original_title || s.grandparent_title)
|
||||
.text(s.original_title || s.grandparent_title);
|
||||
}
|
||||
// Update cover if album changed
|
||||
if (s.parent_rating_key !== instance.data('parent_rating_key')) {
|
||||
@@ -394,7 +405,11 @@
|
||||
.text(s.parent_title);
|
||||
}
|
||||
// Update cover if track changed
|
||||
if (s.parent_rating_key !== instance.data('parent_rating_key')) {
|
||||
if (s.rating_key !== instance.data('rating_key')) {
|
||||
$('#metadata-grandparent_title-' + key)
|
||||
.attr('href', 'info?rating_key=' + s.grandparent_rating_key)
|
||||
.attr('title', s.original_title || s.grandparent_title)
|
||||
.text(s.original_title || s.grandparent_title);
|
||||
$('#metadata-title-' + key)
|
||||
.attr('href', 'info?rating_key=' + s.rating_key)
|
||||
.attr('title', s.title)
|
||||
@@ -530,7 +545,7 @@
|
||||
.attr('data-original-title', 'Transcoder Progress ' + s.transcode_progress + '%');
|
||||
var progress_bar = $('#progress-bar-' + key);
|
||||
progress_bar.data('state', s.state);
|
||||
if (progress_bar.data('last_view_offset') && progress_bar.data('last_view_offset') !== s.view_offset) {
|
||||
if (progress_bar.data('last_view_offset') !== s.view_offset) {
|
||||
progress_bar.data('last_view_offset', s.view_offset).data('view_offset', s.view_offset);
|
||||
}
|
||||
|
||||
@@ -678,10 +693,8 @@
|
||||
});
|
||||
});
|
||||
|
||||
$('#sessions-shortcut').on('tripleclick', function () {
|
||||
$.getJSON('return_sessions_url', function(sessions_url) {
|
||||
window.open(sessions_url, '_blank');
|
||||
});
|
||||
$('#sessions-xml').on('tripleclick', function () {
|
||||
openPlexXML('/status/sessions');
|
||||
});
|
||||
% endif
|
||||
</script>
|
||||
@@ -744,7 +757,7 @@
|
||||
getLibraryStats();
|
||||
</script>
|
||||
% endif
|
||||
% if 'recently_added' in config['home_sections']:
|
||||
% if 'recently_added' in config['home_sections'] and PLEX_SERVER_UP:
|
||||
<script>
|
||||
function recentlyAdded(recently_added_count, recently_added_type) {
|
||||
showMsg("Loading recently added items...", true, false, 0);
|
||||
|
||||
@@ -83,31 +83,31 @@ DOCUMENTATION :: END
|
||||
<ul class="list-unstyled breadcrumb">
|
||||
% if data['media_type'] in ('movie', 'collection'):
|
||||
<li><a href="library?section_id=${data['section_id']}">${data['library_name']}</a></li>
|
||||
<li class="active">${data['title']}</li>
|
||||
<li class="active metadata-xml">${data['title']}</li>
|
||||
% elif data['media_type'] == 'show':
|
||||
<li><a href="library?section_id=${data['section_id']}">${data['library_name']}</a></li>
|
||||
<li class="active">${data['title']}</li>
|
||||
<li class="active metadata-xml">${data['title']}</li>
|
||||
% elif data['media_type'] == 'season':
|
||||
<li class="hidden-xs hidden-sm"><a href="library?section_id=${data['section_id']}">${data['library_name']}</a></li>
|
||||
<li><a href="info?rating_key=${data['parent_rating_key']}">${data['parent_title']}</a></li>
|
||||
<li class="active">Season ${data['media_index']}</li>
|
||||
<li class="active metadata-xml">Season ${data['media_index']}</li>
|
||||
% elif data['media_type'] == 'episode':
|
||||
<li class="hidden-xs hidden-sm"><a href="library?section_id=${data['section_id']}">${data['library_name']}</a></li>
|
||||
<li class="hidden-xs hidden-sm"><a href="info?rating_key=${data['grandparent_rating_key']}">${data['grandparent_title']}</a></li>
|
||||
<li><a href="info?rating_key=${data['parent_rating_key']}">Season ${data['parent_media_index']}</a></li>
|
||||
<li class="active">Episode ${data['media_index']} - ${data['title']}</li>
|
||||
<li class="active metadata-xml">Episode ${data['media_index']} - ${data['title']}</li>
|
||||
% elif data['media_type'] == 'artist':
|
||||
<li><a href="library?section_id=${data['section_id']}">${data['library_name']}</a></li>
|
||||
<li class="active">${data['title']}</li>
|
||||
<li class="active metadata-xml">${data['title']}</li>
|
||||
% elif data['media_type'] == 'album':
|
||||
<li class="hidden-xs hidden-sm"><a href="library?section_id=${data['section_id']}">${data['library_name']}</a></li>
|
||||
<li><a href="info?rating_key=${data['parent_rating_key']}">${data['parent_title']}</a></li>
|
||||
<li class="active">${data['title']}</li>
|
||||
<li class="active metadata-xml">${data['title']}</li>
|
||||
% elif data['media_type'] == 'track':
|
||||
<li class="hidden-xs hidden-sm"><a href="library?section_id=${data['section_id']}">${data['library_name']}</a></li>
|
||||
<li class="hidden-xs hidden-sm"><a href="info?rating_key=${data['grandparent_rating_key']}">${data['grandparent_title']}</a></li>
|
||||
<li><a href="info?rating_key=${data['parent_rating_key']}">${data['parent_title']}</a></li>
|
||||
<li class="active">Track ${data['media_index']} - ${data['title']}</li>
|
||||
<li class="active metadata-xml">Track ${data['media_index']} - ${data['title']}</li>
|
||||
% endif
|
||||
</ul>
|
||||
</div>
|
||||
@@ -165,7 +165,7 @@ DOCUMENTATION :: END
|
||||
<h1><a href="info?rating_key=${data['parent_rating_key']}">${data['parent_title']}</a></h1>
|
||||
<h2>${data['title']}</h2>
|
||||
% elif data['media_type'] == 'track':
|
||||
<h1><a href="info?rating_key=${data['grandparent_rating_key']}">${data['grandparent_title']}</a></h1>
|
||||
<h1><a href="info?rating_key=${data['grandparent_rating_key']}">${data['original_title'] or data['grandparent_title']}</a></h1>
|
||||
<h2><a href="info?rating_key=${data['parent_rating_key']}">${data['parent_title']}</a> - ${data['title']}</h2>
|
||||
<h3 class="hidden-xs">T${data['media_index']}</h3>
|
||||
% endif
|
||||
@@ -371,7 +371,11 @@ DOCUMENTATION :: END
|
||||
<div class="col-md-12">
|
||||
<div class="table-card-header">
|
||||
<div class="header-bar">
|
||||
% if data['media_type'] in ('artist', 'album', 'track'):
|
||||
<span>Play History for <strong>${data['title']}</strong></span>
|
||||
% else:
|
||||
<span>Watch History for <strong>${data['title']}</strong></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="button-bar">
|
||||
% if _session['user_group'] == 'admin':
|
||||
@@ -400,14 +404,14 @@ DOCUMENTATION :: END
|
||||
% if data.get('poster_url'):
|
||||
<div class="btn-group">
|
||||
% if data['media_type'] == 'artist' or data['media_type'] == 'album' or data['media_type'] == 'track':
|
||||
<span class="imgur-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="80" data-width="80" style="display: inline-flex;">
|
||||
<span class="hosted-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="80" data-width="80" style="display: inline-flex;">
|
||||
% else:
|
||||
<span class="imgur-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="120" data-width="80" style="display: inline-flex;">
|
||||
<span class="hosted-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="120" data-width="80" style="display: inline-flex;">
|
||||
% endif
|
||||
<button class="btn btn-danger btn-edit" data-toggle="modal" aria-pressed="false" autocomplete="off" id="delete-imgur-poster"
|
||||
<button class="btn btn-danger btn-edit" data-toggle="modal" aria-pressed="false" autocomplete="off" id="delete-hosted-poster"
|
||||
data-id="${data['parent_rating_key'] if data['media_type'] in ('episode', 'track') else data['rating_key']}"
|
||||
data-title="${data["poster_title"]}">
|
||||
<i class="fa fa-picture-o"></i> Delete Imgur Poster
|
||||
<i class="fa fa-picture-o"></i> Delete ${data['img_service']} Poster
|
||||
</button>
|
||||
</span>
|
||||
</div>
|
||||
@@ -502,7 +506,7 @@ DOCUMENTATION :: END
|
||||
% elif data['media_type'] == 'album':
|
||||
${data['parent_title']}<br />${data['title']}
|
||||
% elif data['media_type'] == 'track':
|
||||
${data['grandparent_title']}<br />${data['title']}<br />${data['parent_title']}
|
||||
${data['original_title'] or data['grandparent_title']}<br />${data['title']}<br />${data['parent_title']}
|
||||
% endif
|
||||
</strong>
|
||||
</p>
|
||||
@@ -699,13 +703,17 @@ DOCUMENTATION :: END
|
||||
</script>
|
||||
% endif
|
||||
<script>
|
||||
$('.metadata-xml').on('tripleclick', function () {
|
||||
openPlexXML("/library/metadata/${data['rating_key']}");
|
||||
});
|
||||
|
||||
$("#airdate").html(moment($("#airdate").text()).format('MMM DD, YYYY'));
|
||||
$("#runtime").html(millisecondsToMinutes($("#runtime").text(), true));
|
||||
$('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });
|
||||
</script>
|
||||
% if data.get('poster_url'):
|
||||
<script>
|
||||
$('.imgur-poster-tooltip').popover({
|
||||
$('.hosted-poster-tooltip').popover({
|
||||
html: true,
|
||||
container: 'body',
|
||||
trigger: 'hover',
|
||||
@@ -716,14 +724,14 @@ DOCUMENTATION :: END
|
||||
}
|
||||
});
|
||||
|
||||
$('#delete-imgur-poster').on('click', function () {
|
||||
var msg = 'Are you sure you want to delete the Imgur poster for <strong>' + $(this).data('title') + '</strong>?<br><br>' +
|
||||
$('#delete-hosted-poster').on('click', function () {
|
||||
var msg = 'Are you sure you want to delete the ${data['img_service']} poster for <strong>' + $(this).data('title') + '</strong>?<br><br>' +
|
||||
'All previous links to this image will no longer work.';
|
||||
var url = 'delete_imgur_poster';
|
||||
var url = 'delete_hosted_images';
|
||||
var data = { rating_key: $(this).data('id') };
|
||||
var callback = function () {
|
||||
$('.imgur-poster-tooltip').popover('destroy');
|
||||
$('#delete-imgur-poster').closest('.btn-group').remove();
|
||||
$('.hosted-poster-tooltip').popover('destroy');
|
||||
$('#delete-hosted-poster').closest('.btn-group').remove();
|
||||
};
|
||||
confirmAjaxCall(url, msg, data, false, callback);
|
||||
});
|
||||
|
||||
@@ -91,7 +91,7 @@ DOCUMENTATION :: END
|
||||
<div class="item-children-poster-face episode-item" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=500&height=250&fallback=art);">
|
||||
<div class="item-children-card-overlay">
|
||||
<div class="item-children-overlay-text">
|
||||
Episode ${child['media_index']}
|
||||
Episode ${child['media_index'] or child['originally_available_at']}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -122,16 +122,24 @@ DOCUMENTATION :: END
|
||||
% elif data['children_type'] == 'track':
|
||||
% if loop.index % 2 == 0:
|
||||
<div class="item-children-list-item-even">
|
||||
<span class="item-children-list-item-index">${child['media_index']}</span>
|
||||
<span class="item-children-list-item-title"><a href="info?rating_key=${child['rating_key']}" title="${child['title']}">${child['title']}</a></span>
|
||||
<span class="item-children-list-item-index"> ${child['media_index']}</span>
|
||||
<span class="item-children-list-item-title"><a href="info?rating_key=${child['rating_key']}" title="${child['title']}">${child['title']}</a>
|
||||
% if child['original_title']:
|
||||
<span class="text-muted"> - ${child['original_title']}</span>
|
||||
% endif
|
||||
</span>
|
||||
<span class="item-children-list-item-duration" id="item-children-list-item-duration-${loop.index + 1}">
|
||||
<script>$('#item-children-list-item-duration-${loop.index + 1}').text(moment.utc(${child['duration']}).format("m:ss"));</script>
|
||||
</span>
|
||||
</div>
|
||||
% else:
|
||||
<div class="item-children-list-item-odd">
|
||||
<span class="item-children-list-item-index">${child['media_index']}</span>
|
||||
<span class="item-children-list-item-title"><a href="info?rating_key=${child['rating_key']}" title="${child['title']}">${child['title']}</a></span>
|
||||
<span class="item-children-list-item-index"> ${child['media_index']}</span>
|
||||
<span class="item-children-list-item-title"><a href="info?rating_key=${child['rating_key']}" title="${child['title']}">${child['title']}</a>
|
||||
% if child['original_title']:
|
||||
<span class="text-muted"> - ${child['original_title']}</span>
|
||||
% endif
|
||||
</span>
|
||||
<span class="item-children-list-item-duration" id="item-children-list-item-duration-${loop.index + 1}">
|
||||
<script>$('#item-children-list-item-duration-${loop.index + 1}').text(moment.utc(${child['duration']}).format("m:ss"));</script>
|
||||
</span>
|
||||
|
||||
@@ -251,7 +251,7 @@ DOCUMENTATION :: END
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
|
||||
<h3 title="${child['original_title'] or child['grandparent_title']}">${child['original_title'] or child['grandparent_title']}</h3>
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
<h3 title="${child['parent_title']}" class="text-muted">${child['parent_title']}</h3>
|
||||
</div>
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
function initConfigCheckbox(elem) {
|
||||
var config = $(elem).closest('div').next();
|
||||
function initConfigCheckbox(elem, toggleElem, reverse) {
|
||||
toggleElem = (toggleElem === undefined) ? null : toggleElem;
|
||||
reverse = (reverse === undefined) ? false : reverse;
|
||||
var config = toggleElem ? $(toggleElem) : $(elem).closest('div').next();
|
||||
config.css('overflow', 'hidden');
|
||||
if ($(elem).is(":checked")) {
|
||||
config.show();
|
||||
config.toggle(!reverse);
|
||||
} else {
|
||||
config.hide();
|
||||
config.toggle(reverse);
|
||||
}
|
||||
$(elem).click(function () {
|
||||
var config = $(this).closest('div').next();
|
||||
var config = toggleElem ? $(toggleElem) : $(this).closest('div').next();
|
||||
if ($(this).is(":checked")) {
|
||||
config.slideDown();
|
||||
config.slideToggleBool(!reverse);
|
||||
} else {
|
||||
config.slideUp();
|
||||
config.slideToggleBool(reverse);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -36,7 +38,7 @@ function showMsg(msg, loader, timeout, ms, error) {
|
||||
var message = $("<div class='msg'>" + msg + "</div>");
|
||||
if (loader) {
|
||||
message = $("<i class='fa fa-refresh fa-spin'></i> " + msg + "</div>");
|
||||
feedback.css("padding", "14px 10px")
|
||||
feedback.css("padding", "14px 10px");
|
||||
}
|
||||
if (error) {
|
||||
feedback.css("background-color", "rgba(255,0,0,0.5)");
|
||||
@@ -59,7 +61,7 @@ function confirmAjaxCall(url, msg, data, loader_msg, callback) {
|
||||
$('#confirm-modal').modal();
|
||||
$('#confirm-modal').one('click', '#confirm-button', function () {
|
||||
if (loader_msg) {
|
||||
showMsg(loader_msg, true, false)
|
||||
showMsg(loader_msg, true, false);
|
||||
}
|
||||
$.ajax({
|
||||
url: url,
|
||||
@@ -71,9 +73,9 @@ function confirmAjaxCall(url, msg, data, loader_msg, callback) {
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000);
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true);
|
||||
}
|
||||
if (typeof callback === "function") {
|
||||
callback(result);
|
||||
@@ -85,8 +87,8 @@ function confirmAjaxCall(url, msg, data, loader_msg, callback) {
|
||||
|
||||
function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
// Set Message
|
||||
feedback = (showMsg) ? $("#ajaxMsg") : $();
|
||||
update = $("#updatebar");
|
||||
var feedback = (showMsg) ? $("#ajaxMsg") : $();
|
||||
var update = $("#updatebar");
|
||||
if (update.is(":visible")) {
|
||||
var height = update.height() + 35;
|
||||
feedback.css("bottom", height + "px");
|
||||
@@ -96,8 +98,9 @@ function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
feedback.fadeIn();
|
||||
// Get Form data
|
||||
var formID = "#" + url;
|
||||
if (form == true) {
|
||||
var dataString = $(formID).serialize();
|
||||
var dataString;
|
||||
if (form === true) {
|
||||
dataString = $(formID).serialize();
|
||||
}
|
||||
// Loader Image
|
||||
var loader = $("<i class='fa fa-refresh fa-spin'></i>");
|
||||
@@ -105,13 +108,13 @@ function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
var dataSucces = $(elem).data('success');
|
||||
if (typeof dataSucces === "undefined") {
|
||||
// Standard Message when variable is not set
|
||||
var dataSucces = "Success!";
|
||||
dataSucces = "Success!";
|
||||
}
|
||||
// Data Errror Message
|
||||
var dataError = $(elem).data('error');
|
||||
if (typeof dataError === "undefined") {
|
||||
// Standard Message when variable is not set
|
||||
var dataError = "There was an error";
|
||||
dataError = "There was an error";
|
||||
}
|
||||
// Get Success & Error message from inline data, else use standard message
|
||||
var succesMsg = $("<div class='msg'><i class='fa fa-check'></i> " + dataSucces + "</div>");
|
||||
@@ -120,7 +123,7 @@ function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
if (form) {
|
||||
if ($('td#select input[type=checkbox]').length > 0 && !$('td#select input[type=checkbox]').is(':checked') ||
|
||||
$('#importLastFM #username:visible').length > 0 && $("#importLastFM #username").val().length === 0) {
|
||||
feedback.addClass('error')
|
||||
feedback.addClass('error');
|
||||
$(feedback).prepend(errorMsg);
|
||||
setTimeout(function () {
|
||||
errorMsg.fadeOut(function () {
|
||||
@@ -128,7 +131,7 @@ function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
feedback.fadeOut(function () {
|
||||
feedback.removeClass('error');
|
||||
});
|
||||
})
|
||||
});
|
||||
$(formID + " select").children('option[disabled=disabled]').attr('selected', 'selected');
|
||||
}, 2000);
|
||||
return false;
|
||||
@@ -144,33 +147,33 @@ function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
feedback.prepend(loader);
|
||||
},
|
||||
error: function (jqXHR, textStatus, errorThrown) {
|
||||
feedback.addClass('error')
|
||||
feedback.addClass('error');
|
||||
feedback.prepend(errorMsg);
|
||||
setTimeout(function () {
|
||||
errorMsg.fadeOut(function () {
|
||||
$(this).remove();
|
||||
feedback.fadeOut(function () {
|
||||
feedback.removeClass('error')
|
||||
feedback.removeClass('error');
|
||||
});
|
||||
})
|
||||
});
|
||||
}, 2000);
|
||||
},
|
||||
success: function (data, jqXHR) {
|
||||
feedback.prepend(succesMsg);
|
||||
feedback.addClass('success')
|
||||
feedback.addClass('success');
|
||||
setTimeout(function (e) {
|
||||
succesMsg.fadeOut(function () {
|
||||
$(this).remove();
|
||||
feedback.fadeOut(function () {
|
||||
feedback.removeClass('success');
|
||||
});
|
||||
if (reload == true) refreshSubmenu();
|
||||
if (reload == "table") {
|
||||
if (reload === true) refreshSubmenu();
|
||||
if (reload === "table") {
|
||||
refreshTable();
|
||||
}
|
||||
if (reload == "tabs") refreshTab();
|
||||
if (reload == "page") location.reload();
|
||||
if (reload == "submenu&table") {
|
||||
if (reload === "tabs") refreshTab();
|
||||
if (reload === "page") location.reload();
|
||||
if (reload === "submenu&table") {
|
||||
refreshSubmenu();
|
||||
refreshTable();
|
||||
}
|
||||
@@ -179,7 +182,7 @@ function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
$(formID + " select").children('option[disabled=disabled]').attr(
|
||||
'selected', 'selected');
|
||||
}
|
||||
})
|
||||
});
|
||||
}, 2000);
|
||||
},
|
||||
complete: function (jqXHR, textStatus) {
|
||||
@@ -215,19 +218,20 @@ function isPrivateIP(ip_address) {
|
||||
|
||||
$.cachedScript('js/ipaddr.min.js').done(function () {
|
||||
if (ipaddr.isValid(ip_address)) {
|
||||
var addr = ipaddr.process(ip_address)
|
||||
var addr = ipaddr.process(ip_address);
|
||||
|
||||
var rangeList = [];
|
||||
if (addr.kind() === 'ipv4') {
|
||||
var rangeList = [
|
||||
rangeList = [
|
||||
ipaddr.parseCIDR('127.0.0.0/8'),
|
||||
ipaddr.parseCIDR('10.0.0.0/8'),
|
||||
ipaddr.parseCIDR('172.16.0.0/12'),
|
||||
ipaddr.parseCIDR('192.168.0.0/16')
|
||||
]
|
||||
];
|
||||
} else {
|
||||
var rangeList = [
|
||||
rangeList = [
|
||||
ipaddr.parseCIDR('fd00::/8')
|
||||
]
|
||||
];
|
||||
}
|
||||
|
||||
if (ipaddr.subnetMatch(addr, rangeList, -1) >= 0) {
|
||||
@@ -238,12 +242,13 @@ function isPrivateIP(ip_address) {
|
||||
} else {
|
||||
defer.resolve('n/a');
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
return defer.promise();
|
||||
}
|
||||
|
||||
function humanTime(seconds) {
|
||||
var text;
|
||||
if (seconds >= 86400) {
|
||||
text = '<h3>' + Math.floor(moment.duration(seconds, 'seconds').asDays()) + '</h3><p> days</p>' + '<h3>' +
|
||||
Math.floor(moment.duration((seconds % 86400), 'seconds').asHours()) + '</h3><p> hrs</p>' + '<h3>' +
|
||||
@@ -265,6 +270,7 @@ function humanTime(seconds) {
|
||||
}
|
||||
|
||||
function humanTimeClean(seconds) {
|
||||
var text;
|
||||
if (seconds >= 86400) {
|
||||
text = Math.floor(moment.duration(seconds, 'seconds').asDays()) + ' days ' + Math.floor(moment.duration((
|
||||
seconds % 86400), 'seconds').asHours()) + ' hrs ' + Math.floor(moment.duration(
|
||||
@@ -341,7 +347,7 @@ function getCookie(cname) {
|
||||
for (var i = 0; i < ca.length; i++) {
|
||||
var c = ca[i];
|
||||
while (c.charAt(0) == ' ') c = c.substring(1);
|
||||
if (c.indexOf(name) == 0) return c.substring(name.length, c.length);
|
||||
if (c.indexOf(name) === 0) return c.substring(name.length, c.length);
|
||||
}
|
||||
return "";
|
||||
}
|
||||
@@ -354,24 +360,24 @@ var Accordion = function (el, multiple) {
|
||||
links.on('click', {
|
||||
el: this.el,
|
||||
multiple: this.multiple
|
||||
}, this.dropdown)
|
||||
}
|
||||
}, this.dropdown);
|
||||
};
|
||||
Accordion.prototype.dropdown = function (e) {
|
||||
var $el = e.data.el;
|
||||
$this = $(this),
|
||||
$next = $this.next();
|
||||
$this = $(this);
|
||||
$next = $this.next();
|
||||
$next.slideToggle();
|
||||
$this.parent().toggleClass('open');
|
||||
if (!e.data.multiple) {
|
||||
$el.find('.submenu').not($next).slideUp().parent().removeClass('open');
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function clearSearchButton(tableName, table) {
|
||||
$('#' + tableName + '_filter').find('input[type=search]').wrap(
|
||||
'<div class="input-group" role="group" aria-label="Search"></div>').after(
|
||||
'<span class="input-group-btn"><button class="btn btn-form" data-toggle="button" aria-pressed="false" autocomplete="off" id="clear-search-' +
|
||||
tableName + '"><i class="fa fa-remove"></i></button></span>')
|
||||
tableName + '"><i class="fa fa-remove"></i></button></span>');
|
||||
$('#clear-search-' + tableName).click(function () {
|
||||
table.search('').draw();
|
||||
});
|
||||
@@ -401,7 +407,6 @@ $('*').on('click', '.refresh_pms_image', function (e) {
|
||||
} else {
|
||||
if (pms_proxy_url.indexOf('refresh=true') > -1) {
|
||||
pms_proxy_url = pms_proxy_url.replace("&refresh=true", "");
|
||||
console.log(pms_proxy_url)
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + ')');
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + '&refresh=true)');
|
||||
} else {
|
||||
@@ -416,8 +421,7 @@ function humanFileSize(bytes, si) {
|
||||
if (Math.abs(bytes) < thresh) {
|
||||
return bytes + ' B';
|
||||
}
|
||||
var units = si
|
||||
? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
|
||||
var units = si ? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
|
||||
: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
|
||||
var u = -1;
|
||||
do {
|
||||
@@ -436,10 +440,10 @@ function forceMinMax(elem) {
|
||||
if (isNaN(val)) {
|
||||
elem.val(default_val);
|
||||
}
|
||||
else if (min != undefined && val < min) {
|
||||
else if (min !== undefined && val < min) {
|
||||
elem.val(min);
|
||||
}
|
||||
else if (max != undefined && val > max) {
|
||||
else if (max !== undefined && val > max) {
|
||||
elem.val(max);
|
||||
}
|
||||
else {
|
||||
@@ -449,4 +453,15 @@ function forceMinMax(elem) {
|
||||
|
||||
function capitalizeFirstLetter(string) {
|
||||
return string.charAt(0).toUpperCase() + string.slice(1);
|
||||
}
|
||||
}
|
||||
|
||||
$.fn.slideToggleBool = function(bool, options) {
|
||||
return bool ? $(this).slideDown(options) : $(this).slideUp(options);
|
||||
};
|
||||
|
||||
function openPlexXML(endpoint, plextv, params) {
|
||||
var data = $.extend({endpoint: endpoint, plextv: plextv}, params);
|
||||
$.getJSON('return_plex_xml_url', data, function(xml_url) {
|
||||
window.open(xml_url, '_blank');
|
||||
});
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@
<div class='container-fluid'>
<div class='table-card-header'>
<div class="header-bar">
<span><i class="fa fa-book"></i> All Libraries</span>
<span id="libraries-xml"><i class="fa fa-book"></i> All Libraries</span>
</div>
<div class="button-bar">
% if _session['user_group'] == 'admin':
@@ -198,5 +198,9 @@
});
});
% endif

$('#libraries-xml').on('tripleclick', function () {
    openPlexXML('/library/sections/all');
});
</script>
</%def>
@@ -11,12 +11,11 @@ DOCUMENTATION :: END

<ul class="stacked-configs list-unstyled">
% for device in sorted(devices_list, key=lambda k: k['device_name']):
<li class="mobile-device" data-id="${device['id']}" data-name="${device['device_name']}">
<li class="mobile-device pointer" data-id="${device['id']}" data-name="${device['device_name']}">
<span>
<!--<span class="toggle-right mobile-device-tooltip edit-mobile-device" data-toggle="tooltip" data-placement="top" title="Edit Device"><i class="fa fa-lg fa-pencil"></i></span>-->
<span class="toggle-left"><i class="fa fa-lg fa-mobile"></i></span>
<span class="toggle-left"><i class="fa fa-lg fa-fw fa-mobile"></i></span>
${device['friendly_name'] or device['device_name']} <span class="friendly_name">(${device['id']})</span>
<span class="toggle-right"><i class="fa fa-lg fa-cog"></i></span>
<span class="toggle-right"><i class="fa fa-lg fa-fw fa-cog"></i></span>
<span class="toggle-right friendly_name" id="device-last_seen-${device['id']}">
% if device['last_seen']:
<script>
@@ -26,14 +25,13 @@ DOCUMENTATION :: END
never
% endif
</span>
<!--<span class="toggle-right delete-mobile-device" data-toggle="tooltip" data-placement="top" title="Remove Device"><i class="fa fa-lg fa-times"></i></span>-->
</span>
</li>
% endfor
<li class="add-mobile-device" id="register-mobile-device" data-target="#api-qr-modal" data-toggle="modal">
<li class="add-mobile-device pointer" id="register-mobile-device" data-target="#api-qr-modal" data-toggle="modal">
<span>
<span class="toggle-left"><i class="fa fa-lg fa-mobile"></i></span> Register a new device
<span class="toggle-right"><i class="fa fa-lg fa-plus"></i></span>
<span class="toggle-left"><i class="fa fa-lg fa-fw fa-mobile"></i></span> Register a new device
<span class="toggle-right"><i class="fa fa-lg fa-fw fa-plus"></i></span>
</span>
</li>
</ul>
43 data/interfaces/default/newsletter_auth.html Normal file
@@ -0,0 +1,43 @@
<%
import urllib
%>
<!doctype html>

<html lang="en">
<head>
<meta charset="utf-8">
<title>Tautulli - ${title}</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="${http_root}css/bootstrap3/bootstrap.css" rel="stylesheet">
<link href="${http_root}css/tautulli.css${cache_param}" rel="stylesheet">
<link href="${http_root}css/opensans.min.css" rel="stylesheet">
<link href="${http_root}css/font-awesome.min.css" rel="stylesheet">
</head>

<body>
<div class="body-container">
<div class="container-fluid">
<div class="row">
<div class="login-container">
<div class="newsletter-logo">
<img src="${http_root}images/newsletter/newsletter-header.png" height="100" alt="PlexPy">
</div>
<div class="row">
<div class="col-sm-6 col-sm-offset-3">
<form action="${uri}" method="post" id="newsletter-form">
<div class="form-group">
<label for="password" class="control-label">
Password
</label>
<input type="password" id="key" name="key" class="form-control" autofocus>
</div>
<button id="enter" type="submit" class="btn btn-bright login-button"><i class="fa fa-sign-in"></i> Enter</button>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>
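The new `newsletter_auth.html` page is a minimal password prompt for self-hosted newsletters: the form posts a single `key` field back to the newsletter's own URL. A hypothetical sketch of the equivalent request; the newsletter id and password are examples, and the server-side handling is not part of this diff:

```
// Example only: submit the newsletter password the same way #newsletter-form does.
$.post('newsletter/id/weekly-newsletter', { key: 'my-newsletter-password' });
```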
@@ -20,7 +20,7 @@
|
||||
<div class="row">
|
||||
<ul class="nav nav-tabs list-unstyled" role="tablist">
|
||||
<li role="presentation" class="active"><a href="#tabs-newsletter_config" aria-controls="tabs-newsletter_config" role="tab" data-toggle="tab">Configuration</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_agent" aria-controls="tabs-newsletter_agent" role="tab" data-toggle="tab">Notification Agent</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_saving_sending" aria-controls="tabs-newsletter_saving_sending" role="tab" data-toggle="tab">Saving & Sending</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_text" aria-controls="tabs-newsletter_text" role="tab" data-toggle="tab">Newsletter Text</a></li>
|
||||
<li role="presentation"><a href="#tabs-test_newsletter" aria-controls="tabs-test_newsletter" role="tab" data-toggle="tab">Test Newsletter</a></li>
|
||||
</ul>
|
||||
@@ -53,8 +53,24 @@
|
||||
<span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank">custom crontab</a>. Only standard cron values are valid.</span>
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="time_frame">Time Frame</label>
|
||||
<div class="row">
|
||||
<div class="col-md-4">
|
||||
<div class="input-group newsletter-time_frame">
|
||||
<span class="input-group-addon form-control btn-dark inactive">Last</span>
|
||||
<input type="number" class="form-control" id="newsletter_config_time_frame" name="newsletter_config_time_frame" value="${newsletter['config']['time_frame']}">
|
||||
<select class="form-control" id="newsletter_config_time_frame_units" name="newsletter_config_time_frame_units">
|
||||
<option value="days" ${'selected' if newsletter['config']['time_frame_units'] == 'days' else ''}>days</option>
|
||||
<option value="hours" ${'selected' if newsletter['config']['time_frame_units'] == 'hours' else ''}>hours</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Set the time frame to include in the newsletter. Note: Days uses calendar days (i.e. since midnight).</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-12" style="padding-top: 10px; border-top: 1px solid #444;">
|
||||
<div class="col-md-12 modal-config-section">
|
||||
<input type="hidden" id="newsletter_id" name="newsletter_id" value="${newsletter['id']}" />
|
||||
<input type="hidden" id="agent_id" name="agent_id" value="${newsletter['agent_id']}" />
|
||||
% for item in newsletter['config_options']:
|
||||
@@ -149,7 +165,16 @@
|
||||
% endif
|
||||
% endfor
|
||||
</div>
|
||||
<div class="col-md-12" style="margin-top: 10px; padding-top: 10px; border-top: 1px solid #444;">
|
||||
<div class="col-md-12 modal-config-section">
|
||||
<div class="form-group">
|
||||
<label for="id_name">Unique ID Name</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="text" class="form-control" id="id_name" name="id_name" value="${newsletter['id_name']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter a unique ID name to create a static URL to the last sent scheduled newsletter at <span class="inline-pre">${http_root}newsletter/id/<id_name></span>. Only letters (a-z), numbers (0-9), underscores (_) and hyphens (-) are allowed. Leave blank to disable.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="friendly_name">Description</label>
|
||||
<div class="row">
|
||||
@@ -162,12 +187,32 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_agent">
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_saving_sending">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<label>Saving</label>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_config_formatted_checkbox" data-id="newsletter_config_formatted" class="checkboxes" value="1" ${checked(newsletter['config']['formatted'])}> Send newsletter as an HTML formatted Email
|
||||
<input type="checkbox" id="newsletter_config_save_only_checkbox" data-id="newsletter_config_save_only" class="checkboxes" value="1" ${checked(newsletter['config']['save_only'])}> Save HTML File Only
|
||||
</label>
|
||||
<p class="help-block">Enable to save the newsletter HTML file without sending it to any notification agent.</p>
|
||||
<input type="hidden" id="newsletter_config_save_only" name="newsletter_config_save_only" value="${newsletter['config']['save_only']}">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="newsletter_config_filename">HTML File Name</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="text" class="form-control" id="newsletter_config_filename" name="newsletter_config_filename" value="${newsletter['config']['filename']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter the file name to use when saving the newsletter (ending with <span class="inline-pre">.html</span>). You may use any of the <a href="#newsletter-text-sub-modal" data-toggle="modal">newsletter text parameters</a>. Leave blank for default.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-12 modal-config-section" id="newsletter_agent_options">
|
||||
<label>Sending</label>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_config_formatted_checkbox" data-id="newsletter_config_formatted" class="checkboxes" value="1" ${checked(newsletter['config']['formatted'])}> Send Newsletter as an HTML Formatted Email
|
||||
</label>
|
||||
<p class="help-block">Enable to send the newsletter as an HTML formatted Email. Disable to only send a subject and body message to a different notification agent.</p>
|
||||
<input type="hidden" id="newsletter_config_formatted" name="newsletter_config_formatted" value="${newsletter['config']['formatted']}">
|
||||
@@ -218,100 +263,100 @@
|
||||
Note: Self-hosted newsletters must be enabled under <a data-tab-destination="tabs-notifications" data-dismiss="modal" data-target="#newsletter_self_hosted">Newsletters</a> to include a link to the newsletter.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div id="newsletter-email-config" class="col-md-12" style="padding-top: 10px; border-top: 1px solid #444;">
|
||||
% for item in newsletter['email_config_options']:
|
||||
% if item['input_type'] == 'help':
|
||||
<div class="form-group">
|
||||
<label>${item['label']}</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'text' or item['input_type'] == 'password':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
</div>
|
||||
<div id="newsletter-email-config">
|
||||
% for item in newsletter['email_config_options']:
|
||||
% if item['input_type'] == 'help':
|
||||
<div class="form-group">
|
||||
<label>${item['label']}</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'number':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-3">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
|
||||
% elif item['input_type'] == 'text' or item['input_type'] == 'password':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'button':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
% elif item['input_type'] == 'number':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-3">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'checkbox' and item['name'] != 'newsletter_email_html_support':
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
|
||||
</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
% elif item['input_type'] == 'select':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
% for key, value in sorted(item['select_options'].iteritems()):
|
||||
% if key == item['value']:
|
||||
<option value="${key}" selected>${value}</option>
|
||||
% else:
|
||||
<option value="${key}">${value}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
% elif item['input_type'] == 'button':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'selectize':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
<option value="select-all">Select All</option>
|
||||
<option value="remove-all">Remove All</option>
|
||||
% if isinstance(item['select_options'], dict):
|
||||
% for section, options in item['select_options'].iteritems():
|
||||
<optgroup label="${section}">
|
||||
% for option in sorted(options, key=lambda x: x['text'].lower()):
|
||||
% elif item['input_type'] == 'checkbox' and item['name'] != 'newsletter_email_html_support':
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
|
||||
</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
% elif item['input_type'] == 'select':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
% for key, value in sorted(item['select_options'].iteritems()):
|
||||
% if key == item['value']:
|
||||
<option value="${key}" selected>${value}</option>
|
||||
% else:
|
||||
<option value="${key}">${value}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'selectize':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
<option value="select-all">Select All</option>
|
||||
<option value="remove-all">Remove All</option>
|
||||
% if isinstance(item['select_options'], dict):
|
||||
% for section, options in item['select_options'].iteritems():
|
||||
<optgroup label="${section}">
|
||||
% for option in sorted(options, key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
</optgroup>
|
||||
% endfor
|
||||
% else:
|
||||
<option value="border-all"></option>
|
||||
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
</optgroup>
|
||||
% endfor
|
||||
% else:
|
||||
<option value="border-all"></option>
|
||||
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
% endif
|
||||
</select>
|
||||
% endif
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
% endif
|
||||
% endfor
|
||||
<input type="hidden" id="newsletter_email_html_support" name="newsletter_email_html_support" value="1">
|
||||
</div>
|
||||
% endif
|
||||
% endfor
|
||||
<input type="hidden" id="newsletter_email_html_support" name="newsletter_email_html_support" value="1">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -442,6 +487,26 @@
toggleCustomCron();
});

function validateFilename() {
var filename = $('#newsletter_config_filename').val();
if (filename !== '' && !(filename.endsWith('.html'))) {
showMsg('<i class="fa fa-times"></i> Failed to save newsletter. Invalid file name.', false, true, 5000, true);
return false;
} else {
return true;
}
}

function validateIDName() {
var id_name = $('#id_name').val();
if (/^[a-zA-Z0-9_-]*$/.test(id_name)) {
return true;
} else {
showMsg('<i class="fa fa-times"></i> Failed to save newsletter. Invalid unique ID name.', false, true, 5000, true);
return false;
}
}

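The two validators above gate saving: the file name must end in `.html` (or be left blank), and the unique ID may only contain letters, digits, underscores and hyphens. How the ID pattern behaves, with hypothetical sample names:

```
var idPattern = /^[a-zA-Z0-9_-]*$/;
idPattern.test('weekly-newsletter_1');  // true, letters, digits, '_' and '-' are allowed
idPattern.test('weekly newsletter!');   // false, spaces and punctuation are rejected
idPattern.test('');                     // true, blank is allowed and disables the static URL
```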
var $incl_libraries = $('#newsletter_config_incl_libraries').selectize({
|
||||
plugins: ['remove_button'],
|
||||
maxItems: null,
|
||||
@@ -469,6 +534,8 @@
|
||||
var incl_libraries = $incl_libraries[0].selectize;
|
||||
incl_libraries.setValue(${json.dumps(next((c['value'] for c in newsletter['config_options'] if c['name'] == 'newsletter_config_incl_libraries'), [])) | n});
|
||||
|
||||
initConfigCheckbox('#newsletter_config_save_only_checkbox', '#newsletter_agent_options', true);
|
||||
|
||||
function toggleEmailSelect () {
|
||||
if ($('#newsletter_config_formatted_checkbox').is(':checked')) {
|
||||
$('#newsletter_body').hide();
|
||||
@@ -627,7 +694,9 @@
|
||||
if ($('#custom_cron').val() === '0'){
|
||||
$("#cron_value").val(cron_widget.cron('value'));
|
||||
}
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, true, saveCallback);
|
||||
if (validateFilename() && validateIDName()){
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, true, saveCallback);
|
||||
}
|
||||
}
|
||||
|
||||
$('#delete-newsletter-item').click(function () {
|
||||
|
||||
@@ -32,7 +32,7 @@
<script>
$(document).ready(function () {
var frame = $('<iframe></iframe>', {
src: '${http_root}real_newsletter?${urllib.urlencode(kwargs) | n}',
src: 'real_newsletter?${urllib.urlencode(kwargs) | n}',
frameborder: '0',
style: 'display: none; height: 100vh; width: 100vw;'
});

@@ -12,15 +12,15 @@ DOCUMENTATION :: END
|
||||
<% from plexpy.newsletter_handler import NEWSLETTER_SCHED %>
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for newsletter in sorted(newsletters_list, key=lambda k: (k['agent_label'], k['friendly_name'], k['id'])):
|
||||
<li class="newsletter-agent" data-id="${newsletter['id']}">
|
||||
<li class="newsletter-agent pointer" data-id="${newsletter['id']}">
|
||||
<span>
|
||||
<span class="toggle-left trigger-tooltip ${'active' if newsletter['active'] else ''}" data-toggle="tooltip" data-placement="top" title="Newsletter ${'active' if newsletter['active'] else 'inactive'}"><i class="fa fa-lg fa-newspaper-o"></i></span>
|
||||
<span class="toggle-left trigger-tooltip ${'active' if newsletter['active'] else ''}" data-toggle="tooltip" data-placement="top" title="Newsletter ${'active' if newsletter['active'] else 'inactive'}"><i class="fa fa-lg fa-fw fa-newspaper-o"></i></span>
|
||||
% if newsletter['friendly_name']:
|
||||
${newsletter['agent_label']} <span class="friendly_name">(${newsletter['id']} - ${newsletter['friendly_name']})</span>
|
||||
% else:
|
||||
${newsletter['agent_label']} <span class="friendly_name">(${newsletter['id']})</span>
|
||||
% endif
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-cog"></i></span>
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-fw fa-cog"></i></span>
|
||||
<span class="toggle-right friendly_name" id="newsletter-next_run-${newsletter['id']}">
|
||||
% if NEWSLETTER_SCHED.get_job('newsletter-{}'.format(newsletter['id'])):
|
||||
<% job = NEWSLETTER_SCHED.get_job('newsletter-{}'.format(newsletter['id'])) %>
|
||||
@@ -32,10 +32,10 @@ DOCUMENTATION :: END
|
||||
</span>
|
||||
</li>
|
||||
% endfor
|
||||
<li class="add-newsletter-agent" id="add-newsletter-agent" data-target="#add-newsletter-modal" data-toggle="modal">
|
||||
<li class="add-newsletter-agent pointer" id="add-newsletter-agent" data-target="#add-newsletter-modal" data-toggle="modal">
|
||||
<span>
|
||||
<span class="toggle-left"><i class="fa fa-lg fa-newspaper-o"></i></span> Add a new newsletter agent
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-plus"></i></span>
|
||||
<span class="toggle-left"><i class="fa fa-lg fa-fw fa-newspaper-o"></i></span> Add a new newsletter agent
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-fw fa-plus"></i></span>
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
% if notifier:
|
||||
<%!
|
||||
import json
|
||||
from plexpy import helpers, notifiers, users
|
||||
from plexpy import notifiers, users
|
||||
from plexpy.helpers import checked
|
||||
available_notification_actions = notifiers.available_notification_actions()
|
||||
|
||||
user_emails = [{'user': u['friendly_name'] or u['username'], 'email': u['email']} for u in users.Users().get_users() if u['email']]
|
||||
@@ -70,7 +71,7 @@
|
||||
% elif item['input_type'] == 'checkbox':
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${helpers.checked(item['value'])}> ${item['label']}
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
|
||||
</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
@@ -123,7 +124,7 @@
|
||||
% endif
|
||||
% endfor
|
||||
</div>
|
||||
<div class="col-md-12" style="margin-top: 10px; padding-top: 10px; border-top: 1px solid #444;">
|
||||
<div class="col-md-12 modal-config-section">
|
||||
<div class="form-group">
|
||||
<label for="friendly_name">Description</label>
|
||||
<div class="row">
|
||||
@@ -146,7 +147,7 @@
|
||||
% for action in available_notification_actions:
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${action['name']}" class="checkboxes" value="1" ${helpers.checked(notifier['actions'][action['name']])}> ${action['label']}
|
||||
<input type="checkbox" data-id="${action['name']}" class="checkboxes" value="1" ${checked(notifier['actions'][action['name']])}> ${action['label']}
|
||||
</label>
|
||||
<p class="help-block">${action['description'] | n}</p>
|
||||
<input type="hidden" id="${action['name']}" name="${action['name']}" value="${notifier['actions'][action['name']]}">
|
||||
|
||||
@@ -11,22 +11,22 @@ DOCUMENTATION :: END
|
||||
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for notifier in sorted(notifiers_list, key=lambda k: (k['agent_label'].lower(), k['friendly_name'], k['id'])):
|
||||
<li class="notification-agent" data-id="${notifier['id']}">
|
||||
<li class="notification-agent pointer" data-id="${notifier['id']}">
|
||||
<span>
|
||||
<span class="toggle-left trigger-tooltip ${'active' if notifier['active'] else ''}" data-toggle="tooltip" data-placement="top" title="Triggers ${'active' if notifier['active'] else 'inactive'}"><i class="fa fa-lg fa-bell"></i></span>
|
||||
<span class="toggle-left trigger-tooltip ${'active' if notifier['active'] else ''}" data-toggle="tooltip" data-placement="top" title="Triggers ${'active' if notifier['active'] else 'inactive'}"><i class="fa fa-lg fa-fw fa-bell"></i></span>
|
||||
% if notifier['friendly_name']:
|
||||
${notifier['agent_label']} <span class="friendly_name">(${notifier['id']} - ${notifier['friendly_name']})</span>
|
||||
% else:
|
||||
${notifier['agent_label']} <span class="friendly_name">(${notifier['id']})</span>
|
||||
% endif
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-cog"></i></span>
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-fw fa-cog"></i></span>
|
||||
</span>
|
||||
</li>
|
||||
% endfor
|
||||
<li class="add-notification-agent" id="add-notification-agent" data-target="#add-notifier-modal" data-toggle="modal">
|
||||
<li class="add-notification-agent pointer" id="add-notification-agent" data-target="#add-notifier-modal" data-toggle="modal">
|
||||
<span>
|
||||
<span class="toggle-left"><i class="fa fa-lg fa-bell"></i></span> Add a new notification agent
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-plus"></i></span>
|
||||
<span class="toggle-left"><i class="fa fa-lg fa-fw fa-bell"></i></span> Add a new notification agent
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-fw fa-plus"></i></span>
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
@@ -28,15 +28,17 @@

<%def name="javascriptIncludes()">
<script>
var query_string = "${query.replace('"','\\"').replace('/','\\/') | n}";

$('#search_button').removeClass('btn-inactive');
$('#query').val("${query.replace('"','\\"') | n}").css({ right: '0', width: '250px' }).addClass('active');
$('#query').val(query_string).css({ right: '0', width: '250px' }).addClass('active');

$.ajax({
url: 'get_search_results_children',
type: "GET",
type: "POST",
async: true,
data: {
query: "${query.replace('"','\\"') | n}",
query: query_string,
limit: 30
},
complete: function (xhr, status) {

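Besides switching the children search to POST, the hunk above consolidates the escaped query into one `query_string` variable. Escaping `/` as `\/` matters because the value is embedded in an inline `<script>` block; a sketch of the idea, using a hypothetical search query:

```
// A search such as:  foo "bar" </script>
// is rendered into the inline script, after the Mako replacements, as:
var query_string = "foo \"bar\" <\/script>";
// '\/' is just '/' to JavaScript, so the value is unchanged:
"<\/script>" === "</script>";  // true
// but the literal sequence '</script>' never appears in the page source,
// so the HTML parser cannot prematurely terminate the <script> element.
```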
@@ -115,9 +115,9 @@
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" id="group_history_tables" name="group_history_tables" value="1" ${config['group_history_tables']}> Group Successive Play History
|
||||
<input type="checkbox" id="group_history_tables" name="group_history_tables" value="1" ${config['group_history_tables']}> Group Play History
|
||||
</label>
|
||||
<p class="help-block">Group successive play history by the same user as a single entry in the watch statistics, tables, and graphs.</p>
|
||||
<p class="help-block">Group play history for the same item and user as a single entry when progress is less than the watched percent.</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
@@ -274,7 +274,7 @@
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="home_refresh_interval">Activty Refresh Interval</label>
|
||||
<label for="home_refresh_interval">Activity Refresh Interval</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="home_refresh_interval" name="home_refresh_interval" value="${config['home_refresh_interval']}" size="5" data-parsley-min="2" data-parsley-trigger="change" data-parsley-errors-container="#home_refresh_interval_error" required>
|
||||
@@ -646,11 +646,11 @@
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-plex_media_server">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Plex Media Server <small style="color: #fff;">Version <span id="pms_version">${config['pms_version']}</span></small></h3>
|
||||
<h3 id="resources-xml">Plex Media Server <small style="color: #fff;">Version <span id="pms_version">${config['pms_version']}</span></small></h3>
|
||||
</div>
|
||||
|
||||
<div class="form-group has-feedback" id="pms_ip_group">
|
||||
<label for="pms_ip">Plex IP or Hostname</label>
|
||||
<label for="pms_ip">Plex IP Address or Hostname</label>
|
||||
<div class="row">
|
||||
<div class="col-md-9" id="selectize-pms-ip-container">
|
||||
<div class="input-group">
|
||||
@@ -702,6 +702,17 @@
|
||||
The server URL that Tautulli will use to connect to your Plex server. Retrieved automatically.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="pms_url">Plex Server Identifier</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}" size="30" readonly>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
The unique identifier for your Plex server. Retrieved automatically.
|
||||
</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" class="pms-settings" id="pms_url_manual" name="pms_url_manual" value="1" ${config['pms_url_manual']}> Manual Connection
|
||||
@@ -728,7 +739,6 @@
|
||||
</div>
|
||||
|
||||
<input type="hidden" id="pms_is_cloud" name="pms_is_cloud" value="${config['pms_is_cloud']}">
|
||||
<input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
<input type="checkbox" name="server_changed" id="server_changed" value="1" style="display: none;">
|
||||
|
||||
<div class="form-group advanced-setting">
|
||||
@@ -955,8 +965,63 @@
|
||||
<p class="help-block">Enable to host newsletters on your own domain. This will generate a link to an HTML page where you can view the newsletter.</p>
|
||||
</div>
|
||||
<div id="self_host_newsletter_options" style="overlfow: hidden; display: ${'block' if config['newsletter_self_hosted'] == 'checked' else 'none'}">
|
||||
<p class="help-block" id="self_host_newsletter_message">Note: The <span class="inline-pre">${http_root}newsletter</span> endpoint on your domain must be publicly accessible from the internet.</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
<div class="form-group">
|
||||
<p class="help-block" id="self_host_newsletter_message">
|
||||
Note: The <span class="inline-pre">${http_root}newsletter</span> endpoint on your domain must be publicly accessible from the internet.
|
||||
</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="newsletter_auth">Newsletter Authentication</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<select class="form-control" id="newsletter_auth" name="newsletter_auth">
|
||||
<option value="0" ${'selected' if config['newsletter_auth'] == 0 else ''}>Disabled</option>
|
||||
<option value="1" ${'selected' if config['newsletter_auth'] == 1 else ''}>Password</option>
|
||||
<option value="2" ${'selected' if config['newsletter_auth'] == 2 else ''}>Tautulli Guest Access</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Select the authentication method to use for self-hosted newsletters.</p>
|
||||
<p class="help-block settings-warning newsletter-guest-access-warning">Warning: Guest Access is not enabled under <a data-tab-destination="tabs-web_interface" data-target="#allow_guest_access">Web Interface</a>.</p>
|
||||
</div>
|
||||
<div class="form-group" id="newsletter_password_option">
|
||||
<label for="newsletter_password">Newsletter Password</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_password" name="newsletter_password" value="${config['newsletter_password']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Enter the password that will be required to view self-hosted newsletters.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_inline_styles" name="newsletter_inline_styles" value="1" ${config['newsletter_inline_styles']}> Use Inline Styles Template
|
||||
</label>
|
||||
<p class="help-block">
|
||||
Enable to use newsletter templates with inline CSS styles. Inline styles render better in email clients, but are larger in size which may cause long newsletters to be clipped.<br>
|
||||
Note: This setting does not affect custom templates. CSS styles will depend on your own template.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Custom Newsletter Templates Folder</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_custom_dir" name="newsletter_custom_dir" value="${config['newsletter_custom_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter the full path to your custom newsletter templates folder. Leave blank for default.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Newsletter Output Directory</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_dir" name="newsletter_dir" value="${config['newsletter_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Enter the full path to where newsletter files will be saved.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
@@ -967,16 +1032,30 @@
|
||||
<label for="notify_upload_posters">Image Hosting</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<select class="form-control" id="notify_upload_posters" name="notify_upload_posters">
|
||||
<option value="0" ${'selected' if config['notify_upload_posters'] == 0 else ''}>Disabled</option>
|
||||
<option value="1" ${'selected' if config['notify_upload_posters'] == 1 else ''}>Imgur</option>
|
||||
<option value="2" ${'selected' if config['notify_upload_posters'] == 2 else ''}>Self-hosted on public Tautulli domain</option>
|
||||
</select>
|
||||
<div class="${'input-group' if config['notify_upload_posters'] in (1, 3) else ''}">
|
||||
<select class="form-control" id="notify_upload_posters" name="notify_upload_posters">
|
||||
<option value="0" ${'selected' if config['notify_upload_posters'] == 0 else ''}>Disabled</option>
|
||||
<option value="1" ${'selected' if config['notify_upload_posters'] == 1 else ''}>Imgur</option>
|
||||
<option value="3" ${'selected' if config['notify_upload_posters'] == 3 else ''}>Cloudinary</option>
|
||||
<option value="2" ${'selected' if config['notify_upload_posters'] == 2 else ''}>Self-hosted on public domain</option>
|
||||
</select>
|
||||
% if config['notify_upload_posters'] in (1, 3):
|
||||
<span class="input-group-btn" id="delete_all_uploads_container">
|
||||
<button class="btn btn-form" type="button" id="delete_all_uploads">Delete All Uploads</button>
|
||||
</span>
|
||||
% endif
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Select where to host Plex images for notifications and newsletters.</p>
|
||||
</div>
|
||||
<div id="imgur_upload_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 1 else 'block'}">
|
||||
<div class="form-group">
|
||||
<p class="help-block" id="imgur_upload_message">
|
||||
You can register a new Imgur application <a href="${anon_url('https://api.imgur.com/oauth2/addclient')}" target="_blank">here</a>.<br>
|
||||
Warning: Imgur uploads are rate-limited and newsletters may exceed the limit. Please use Cloudinary for newsletters instead.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="imgur_client_id">Imgur Client ID</label>
|
||||
<div class="row">
|
||||
@@ -984,15 +1063,54 @@
|
||||
<input type="text" class="form-control" id="imgur_client_id" name="imgur_client_id" value="${config['imgur_client_id']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Imgur API client ID in order to upload posters.
|
||||
You can register a new application <a href="${anon_url('https://api.imgur.com/oauth2/addclient')}" target="_blank">here</a>.
|
||||
</p>
|
||||
<p class="help-block">Enter your Imgur API Client ID.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div id="self_host_image_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 2 else 'block'}">
|
||||
<p class="help-block" id="self_host_image_message">Note: The <span class="inline-pre">${http_root}image</span> endpoint on your domain must be publicly accessible from the internet.</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
<div class="form-group">
|
||||
<p class="help-block" id="self_host_image_message">Note: The <span class="inline-pre">${http_root}image</span> endpoint on your domain must be publicly accessible from the internet.</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div id="cloudinary_upload_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 3 else 'block'}">
|
||||
<div class="form-group">
|
||||
<p class="help-block" id="imgur_upload_message">
|
||||
You can sign up for Cloudinary <a href="${anon_url('https://cloudinary.com')}" target="_blank">here</a>.<br>
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_cloud_name">Cloudinary Cloud Name</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_cloud_name" name="cloudinary_cloud_name" value="${config['cloudinary_cloud_name']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary Cloud Name.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_api_key">Cloudinary API Key</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_api_key" name="cloudinary_api_key" value="${config['cloudinary_api_key']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary API Key.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_api_secret">Cloudinary API Secret</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_api_secret" name="cloudinary_api_secret" value="${config['cloudinary_api_secret']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary API Secret.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -1041,7 +1159,7 @@
|
||||
Add a new newsletter agent, or configure an existing newsletter agent by clicking the settings icon on the right.
|
||||
</p>
|
||||
<p class="help-block settings-warning" id="newsletter_upload_warning">
|
||||
Note: Either <a data-tab-destination="tabs-notifications" data-target="#notify_upload_posters">Image Hosting</a> on Imgur or <a data-tab-destination="tabs-notifications" data-target="#newsletter_self_hosted">Self-Hosted Newsletters</a> must be enabled.</span>
|
||||
Warning: The <a data-tab-destination="tabs-notifications" data-target="#notify_upload_posters">Image Hosting</a> setting must be enabled for images to display on the newsletter.</span>
|
||||
</p>
|
||||
<br/>
|
||||
<div id="plexpy-newsletters-table">
|
||||
@@ -1130,14 +1248,6 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="newsletter_dir">Newsletter Directory</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control directory-settings" id="newsletter_dir" name="newsletter_dir" value="${config['newsletter_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
|
||||
|
||||
@@ -1158,7 +1268,7 @@
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
<p class="form-group">
|
||||
<div class="form-group">
|
||||
<label>Registered Devices</label>
|
||||
<p class="help-block">Register a new device using a QR code, or configure an existing device by clicking the settings icon on the right.</p>
|
||||
<p id="app_api_msg" style="color: #eb8600;">The API must be enabled under <a data-tab-destination="tabs-web_interface" data-target="#api_enabled">Web Interface</a> to use the app.</p>
|
||||
@@ -1313,7 +1423,7 @@
|
||||
<div class="col-md-12">
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for agent in sorted(available_notification_agents, key=lambda k: k['label'].lower()):
|
||||
<li class="new-notification-agent" data-id="${agent['id']}">
|
||||
<li class="new-notification-agent pointer" data-id="${agent['id']}">
|
||||
<span>${agent['label']}</span>
|
||||
</li>
|
||||
% endfor
|
||||
@@ -1341,7 +1451,7 @@
|
||||
<div class="col-md-12">
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for agent in available_newsletter_agents:
|
||||
<li class="new-newsletter-agent" data-id="${agent['id']}">
|
||||
<li class="new-newsletter-agent pointer" data-id="${agent['id']}">
|
||||
<span>${agent['label']}</span>
|
||||
</li>
|
||||
% endfor
|
||||
@@ -1683,6 +1793,7 @@
|
||||
}
|
||||
|
||||
function loadNotifierConfig(notifier_id) {
|
||||
showMsg('<i class="fa fa-refresh fa-spin"></i> Loading Configuration', false);
|
||||
$.ajax({
|
||||
url: 'get_notifier_config_modal',
|
||||
data: { notifier_id: notifier_id },
|
||||
@@ -1690,6 +1801,7 @@
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
$("#notifier-config-modal").html(xhr.responseText).modal('show');
|
||||
showMsg('<i class="fa fa-check"></i> Configuration Loaded', false, true, 2000);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1706,6 +1818,7 @@
|
||||
}
|
||||
|
||||
function loadNewsletterConfig(newsletter_id) {
|
||||
showMsg('<i class="fa fa-refresh fa-spin"></i> Loading Configuration', false);
|
||||
$.ajax({
|
||||
url: 'get_newsletter_config_modal',
|
||||
data: { newsletter_id: newsletter_id },
|
||||
@@ -1713,6 +1826,7 @@
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
$("#newsletter-config-modal").html(xhr.responseText).modal('show');
|
||||
showMsg('<i class="fa fa-check"></i> Configuration Loaded', false, true, 2000);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1729,6 +1843,7 @@
|
||||
}
|
||||
|
||||
function loadMobileDeviceConfig(mobile_device_id) {
|
||||
showMsg('<i class="fa fa-refresh fa-spin"></i> Loading Configuration', false);
|
||||
$.ajax({
|
||||
url: 'get_mobile_device_config_modal',
|
||||
data: { mobile_device_id: mobile_device_id },
|
||||
@@ -1736,6 +1851,7 @@
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
$("#mobile-device-config-modal").html(xhr.responseText).modal('show');
|
||||
showMsg('<i class="fa fa-check"></i> Configuration Loaded', false, true, 2000);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -2242,7 +2358,7 @@ $(document).ready(function() {
|
||||
data: { pref: 'PublishServerOnPlexOnlineKey' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
if (data !== 'true') {
|
||||
if (data === 'false' || data === '0') {
|
||||
$("#remoteAccessCheck").html("Remote access must be enabled on your Plex Server. <a target='_blank' href='${anon_url('https://support.plex.tv/hc/en-us/articles/200484543-Enabling-Remote-Access-for-a-Server')}'>Click here</a> for help.");
|
||||
$("#monitor_remote_access").attr("checked", false).attr("disabled", true);
|
||||
}
|
||||
@@ -2377,6 +2493,7 @@ $(document).ready(function() {
|
||||
$("#allow_guest_access").attr("disabled", false);
|
||||
$("#allowGuestCheck").html("");
|
||||
}
|
||||
newsletterPasswordEnabled();
|
||||
}
|
||||
allowGuestAccessCheck();
|
||||
|
||||
@@ -2480,7 +2597,7 @@ $(document).ready(function() {
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
$('#add-notifier-modal').modal('hide');
|
||||
if (result.result == 'success') {
|
||||
if (result.result === 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000);
|
||||
loadNotifierConfig(result.notifier_id);
|
||||
} else {
|
||||
@@ -2502,7 +2619,7 @@ $(document).ready(function() {
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
$('#add-newsletter-modal').modal('hide');
|
||||
if (result.result == 'success') {
|
||||
if (result.result === 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000);
|
||||
loadNewsletterConfig(result.newsletter_id);
|
||||
} else {
|
||||
@@ -2538,11 +2655,42 @@ $(document).ready(function() {
|
||||
} else {
|
||||
$('#self_host_image_options').slideUp();
|
||||
}
|
||||
if (upload_val === '3') {
|
||||
$('#cloudinary_upload_options').slideDown();
|
||||
} else {
|
||||
$('#cloudinary_upload_options').slideUp();
|
||||
}
|
||||
var parent;
|
||||
if (upload_val === '1' || upload_val === '3') {
|
||||
parent = $('#notify_upload_posters').parent();
|
||||
if ($('#delete_all_uploads_container').length === 0){
|
||||
parent.addClass('input-group');
|
||||
parent.append(
|
||||
'<span class="input-group-btn" id="delete_all_uploads_container">' +
|
||||
'<button class="btn btn-form" type="button" id="delete_all_uploads">Delete All Uploads</button>' +
|
||||
'</span>');
|
||||
}
|
||||
} else {
|
||||
parent = $('#notify_upload_posters').parent();
|
||||
parent.removeClass('input-group');
|
||||
$('#delete_all_uploads_container').remove();
|
||||
}
|
||||
}
|
||||
$('#notify_upload_posters').change(function () {
|
||||
imageUpload();
|
||||
});
|
||||
|
||||
$('body').on('click', '#delete_all_uploads', function () {
|
||||
var image_hosting_option = $('#notify_upload_posters').find(':selected');
|
||||
var name = image_hosting_option.text();
|
||||
|
||||
var msg = 'Are you sure you want to delete all uploaded images on <strong>' + name + '</strong>?' +
|
||||
'<br />All previous links to the images will no longer work. This cannot be undone!';
|
||||
var url = 'delete_hosted_images';
|
||||
var data = { service: name, delete_all: true };
|
||||
confirmAjaxCall(url, msg, data, false);
|
||||
});
|
||||
|
||||
function baseURLSet() {
|
||||
if ($('#http_base_url').val()) {
|
||||
$('.base-url-warning').hide();
|
||||
@@ -2557,10 +2705,10 @@ $(document).ready(function() {
|
||||
});
|
||||
|
||||
function newsletterUploadEnabled() {
|
||||
if ($('#notify_upload_posters').val() === '1' || $('#newsletter_self_hosted').is(':checked')) {
|
||||
$('#newsletter_upload_warning').hide();
|
||||
} else {
|
||||
if ($('#notify_upload_posters').val() === '0') {
|
||||
$('#newsletter_upload_warning').show();
|
||||
} else {
|
||||
$('#newsletter_upload_warning').hide();
|
||||
}
|
||||
}
|
||||
newsletterUploadEnabled();
|
||||
@@ -2570,6 +2718,28 @@ $(document).ready(function() {
|
||||
newsletterUploadEnabled();
|
||||
});
|
||||
|
||||
function newsletterPasswordEnabled() {
|
||||
if ($('#newsletter_auth').val() === '1') {
|
||||
$('#newsletter_password_option').slideDown();
|
||||
} else {
|
||||
$('#newsletter_password_option').slideUp();
|
||||
}
|
||||
if ($('#newsletter_auth').val() === '2' && !($('#allow_guest_access').is(':checked'))) {
|
||||
$('.newsletter-guest-access-warning').show();
|
||||
} else {
|
||||
$('.newsletter-guest-access-warning').hide();
|
||||
}
|
||||
}
|
||||
newsletterPasswordEnabled();
|
||||
|
||||
$('#newsletter_auth').change(function () {
|
||||
newsletterPasswordEnabled();
|
||||
});
|
||||
|
||||
$('#allow_guest_access').click(function () {
|
||||
newsletterPasswordEnabled();
|
||||
})
|
||||
|
||||
$('body').on('click', 'a[data-tab-destination]', function () {
|
||||
var tab = $(this).data('tab-destination');
|
||||
$("a[href=#" + tab + "]").click();
|
||||
@@ -2583,6 +2753,10 @@ $(document).ready(function() {
|
||||
body_container.animate({scrollTop: scroll_pos});
|
||||
}
|
||||
});
|
||||
|
||||
$('#resources-xml').on('tripleclick', function () {
|
||||
openPlexXML('/api/resources', true, {includeHttps: 1});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</%def>
|
||||
|
||||
@@ -46,8 +46,10 @@ DOCUMENTATION :: END
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">×</span></button>
|
||||
<h4 class="modal-title" id="info-modal-title">
|
||||
% if data['media_type'] == 'episode' or data['media_type'] == 'track':
|
||||
% if data['media_type'] == 'episode':
|
||||
Stream Info: <strong>${data['grandparent_title']} - ${data['title']} (${user})</strong>
|
||||
% elif data['media_type'] == 'track':
|
||||
Stream Info: <strong>${data['original_title'] or data['grandparent_title']} - ${data['title']} (${user})</strong>
|
||||
% else:
|
||||
Stream Info: <strong>${data['title']} (${user})</strong>
|
||||
% endif
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
<div class='container-fluid'>
|
||||
<div class='table-card-header'>
|
||||
<div class="header-bar">
|
||||
<span><i class="fa fa-cloud-download"></i> Synced Items</span>
|
||||
<span id="sync-xml"><i class="fa fa-cloud-download"></i> Synced Items</span>
|
||||
</div>
|
||||
<div class="button-bar">
|
||||
% if _session['user_group'] == 'admin':
|
||||
@@ -185,5 +185,9 @@
|
||||
$("#refresh-syncs-list").click(function() {
|
||||
sync_table.ajax.reload();
|
||||
});
|
||||
|
||||
$('#sync-xml').on('tripleclick', function () {
|
||||
openPlexXML('/servers/{machine_id}/sync_lists', true);
|
||||
});
|
||||
</script>
|
||||
</%def>
|
||||
|
||||
@@ -96,7 +96,7 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
<div class='table-card-back'>
|
||||
<div id="search-results-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
|
||||
<div id="search-results-list" class="children-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -188,7 +188,7 @@ DOCUMENTATION :: END
|
||||
},
|
||||
complete: function (xhr, status) {
|
||||
$('#search-results-list').html(xhr.responseText);
|
||||
$('#update_query_title').html(query_string)
|
||||
$('#update_query_title').text(query_string)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -108,8 +108,8 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
<h3 title="${item['grandparent_title']}">
|
||||
<a href="info?rating_key=${item['grandparent_rating_key']}" title="${item['grandparent_title']}">${item['grandparent_title']}</a>
|
||||
<h3 title="${item['original_title'] or item['grandparent_title']}">
|
||||
<a href="info?rating_key=${item['grandparent_rating_key']}" title="${item['original_title'] or item['grandparent_title']}">${item['original_title'] or item['grandparent_title']}</a>
|
||||
</h3>
|
||||
<h3 class="text-muted" title="${item['title']}">
|
||||
<a href="info?source=history&rating_key=${item['rating_key']}" title="${item['title']}">${item['title']}</a>
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
<div class='container-fluid'>
|
||||
<div class='table-card-header'>
|
||||
<div class="header-bar">
|
||||
<span><i class="fa fa-group"></i> All Users</span>
|
||||
<span id="users-xml"><i class="fa fa-group"></i> All Users</span>
|
||||
</div>
|
||||
<div class="button-bar">
|
||||
% if _session['user_group'] == 'admin':
|
||||
@@ -202,5 +202,9 @@
|
||||
});
|
||||
});
|
||||
% endif
|
||||
|
||||
$('#users-xml').on('tripleclick', function () {
|
||||
openPlexXML('/api/users', true);
|
||||
});
|
||||
</script>
|
||||
</%def>
|
||||
@@ -1,17 +1,24 @@
% if data:
<%
import plexpy
from plexpy.helpers import grouper
from plexpy.helpers import grouper, get_img_service

recently_added = data['recently_added']
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
base_url_image = base_url + 'image/'
elif preview:
base_url = 'newsletter/'
base_url_image = base_url + 'image/'
else:
base_url = base_url_image = ''
base_url = ''

service = get_img_service(include_self=True)
if service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
base_url_image = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'image/'
elif preview and service and service != 'self-hosted':
base_url_image = 'image/'
else:
base_url_image = ''

%>
<!doctype html>
|
||||
<html>
|
||||
@@ -61,17 +68,14 @@
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
/* Set a max-width, and make it display as block so it will automatically stretch to that width, but will also shrink down on a phone or something */
|
||||
.container {
|
||||
display: block;
|
||||
margin: 0 auto !important;
|
||||
/* makes it centered */
|
||||
max-width: 1042px;
|
||||
padding: 10px;
|
||||
width: 1042px;
|
||||
}
|
||||
|
||||
/* This should also be a block element, so that it will fill 100% of the .container */
|
||||
.content {
|
||||
box-sizing: border-box;
|
||||
display: block;
|
||||
@@ -83,6 +87,14 @@
|
||||
/* -------------------------------------
|
||||
HEADER, FOOTER, MAIN
|
||||
------------------------------------- */
|
||||
.local-preview-note {
|
||||
text-align: center;
|
||||
padding-top: 10px;
|
||||
}
|
||||
.local-preview-note p {
|
||||
color: #282A2D;
|
||||
font-size: 12px;
|
||||
}
|
||||
.main {
|
||||
background: #282A2D;
|
||||
border-radius: 3px;
|
||||
@@ -157,88 +169,9 @@
text-decoration: underline;
}

/* -------------------------------------
BUTTONS
------------------------------------- */
.btn {
box-sizing: border-box;
width: 100%;
}

.btn > tbody > tr > td {
padding-bottom: 15px;
}

.btn table {
width: auto;
}

.btn table td {
background-color: #ffffff;
border-radius: 5px;
text-align: center;
}

.btn a {
background-color: #ffffff;
border: solid 1px #3498db;
border-radius: 5px;
box-sizing: border-box;
color: #3498db;
cursor: pointer;
display: inline-block;
font-size: 14px;
font-weight: bold;
margin: 0;
padding: 12px 25px;
text-decoration: none;
text-transform: capitalize;
}

.btn-primary table td {
background-color: #3498db;
}

.btn-primary a {
background-color: #3498db;
border-color: #3498db;
color: #ffffff;
}

/* -------------------------------------
OTHER STYLES THAT MIGHT BE USEFUL
------------------------------------- */
.last {
margin-bottom: 0;
}

.first {
margin-top: 0;
}

.align-center {
text-align: center;
}

.align-right {
text-align: right;
}

.align-left {
text-align: left;
}

.clear {
clear: both;
}

.mt0 {
margin-top: 0;
}

.mb0 {
margin-bottom: 0;
}
.mb5 {
margin-bottom: 5px;
}
@@ -248,9 +181,6 @@
text-overflow: ellipsis;
overflow: hidden;
}
.clear {
clear: both;
}

.preheader {
color: transparent;
@@ -309,6 +239,7 @@
margin-left: auto;
margin-right: auto;
}

/* -------------------------------------
MEDIA SECTIONS
------------------------------------- */
@@ -363,6 +294,7 @@
font-size: 20px;
text-transform: uppercase;
}

/* -------------------------------------
MEDIA CARDS
------------------------------------- */
@@ -448,6 +380,7 @@
}
.card-info-body > p {
max-width: 325px;
color: #ffffff;
}
.card-instance.movie .card-info-body,
.card-instance.show .card-info-body {
@@ -464,21 +397,21 @@
padding-left: 5px;
}
.card-info-footer .badge-container {
max-width: 265px;
max-width: 260px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.card-info-footer .star-rating-container {
width: 60px;
vertical-align: bottom;
padding-right: 5px;
width: 65px;
}
.card-info-footer .star-rating {
margin-left: 4px;
font-size: 0.8rem;
line-height: 1rem;
width: 0.5rem;
display: inline-block;
vertical-align: bottom;
}
.star-rating.full {
color: #E5A00D;
@@ -489,6 +422,7 @@
.badge {
display: inline-block;
min-width: 10px;
margin-right: 4px;
padding: 3px 7px;
font-size: 11px;
line-height: 1;
@@ -499,6 +433,7 @@
border-radius: 2px;
text-overflow: ellipsis;
overflow: hidden;
color: #ffffff;
}

/* -------------------------------------
@@ -592,37 +527,27 @@
line-height: inherit !important;
text-decoration: none !important;
}

.btn-primary table td:hover {
background-color: #34495e !important;
}

.btn-primary a:hover {
background-color: #34495e !important;
border-color: #34495e !important;
}
}

</style>
</head>
<body class="" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;-webkit-font-smoothing: antialiased;font-size: 14px;line-height: 1.4;margin: 0;padding: 0;-ms-text-size-adjust: 100%;-webkit-text-size-adjust: 100%;">
|
||||
<body style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;-webkit-font-smoothing: antialiased;font-size: 14px;line-height: 1.4;margin: 0;padding: 0;-ms-text-size-adjust: 100%;-webkit-text-size-adjust: 100%;">
|
||||
% if preview and service and service != 'self-hosted':
|
||||
<div class="local-preview-note" style="text-align: center;padding-top: 10px;"><p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;color: #282A2D;font-size: 12px;">Note: Local preview images only - images will be uploaded to ${service.capitalize()} when the newsletter is sent.</p></div> <!-- IGNORE SAVE -->
|
||||
% elif preview and not service:
|
||||
<div class="local-preview-note" style="text-align: center;padding-top: 10px;"><p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;color: #282A2D;font-size: 12px;">Warning: The Image Hosting setting must be enabled for images to display on the newsletter.</p></div> <!-- IGNORE SAVE -->
|
||||
% endif
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="body" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
<td class="container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;display: block;max-width: 1042px;padding: 10px;width: 1042px;margin: 0 auto !important;">
|
||||
<div class="content" style="box-sizing: border-box;display: block;margin: 0 auto;max-width: 1037px;padding: 10px;">
|
||||
|
||||
<!-- START CENTERED WHITE CONTAINER -->
|
||||
<span class="preheader" style="color: transparent;display: none;height: 0;max-height: 0;max-width: 0;opacity: 0;overflow: hidden;mso-hide: all;visibility: hidden;width: 0;">Tautulli Newsletter - ${subject}</span>
|
||||
|
||||
% if base_url and not preview:
|
||||
<div class="view-full" style="clear: both;color: #282A2D;font-size: 12px;margin-bottom: 10px;text-align: center;width: 100%;"> <!-- IGNORE SAVE -->
|
||||
<a href="${base_url + uuid}" title="View full newsletter" target="_blank" style="text-decoration: underline;color: #282A2D;">Click here to view the full newsletter.</a> <!-- IGNORE SAVE -->
|
||||
</div> <!-- IGNORE SAVE -->
|
||||
% endif
|
||||
|
||||
<table border="0" cellpadding="3" cellspacing="0" class="main" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background: #282A2D;border-radius: 3px;color: #ffffff;">
|
||||
|
||||
<!-- START MAIN CONTENT AREA -->
|
||||
<tr>
|
||||
<td class="wrapper" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;box-sizing: border-box;padding: 5px;overflow: auto;">
|
||||
<div class="header" style="width: 100%;height: 90px;text-align: center;">
|
||||
@@ -632,7 +557,6 @@
|
||||
<div class="dates" style="color: #aaaaaa;font-size: 20px;text-align: center;">${parameters['start_date']} - ${parameters['end_date']}</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
% if message:
|
||||
<tr>
|
||||
<td class="wrapper" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;box-sizing: border-box;padding: 5px;overflow: auto;">
|
||||
@@ -641,7 +565,6 @@
|
||||
</td>
|
||||
</tr>
|
||||
% endif
|
||||
|
||||
% if recently_added.get('movie'):
|
||||
<tr>
|
||||
<td class="wrapper" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;box-sizing: border-box;padding: 5px;overflow: auto;">
|
||||
@@ -657,109 +580,105 @@
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
% for movie_a, movie_b in grouper(recently_added['movie'], 2):
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
% for movie in (movie_a, movie_b):
|
||||
% if movie:
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance movie" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 3px;width: 502px;min-width: 502px;max-width: 502px;height: 233px;">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + movie['art_hash']) if base_url_image else movie['art_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #282828;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tbody>
|
||||
% for movie_a, movie_b in grouper(recently_added['movie'], 2):
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% for movie in (movie_a, movie_b):
|
||||
% if movie:
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance movie" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 3px;width: 502px;min-width: 502px;max-width: 502px;height: 233px;">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + movie['art_hash']) if base_url_image else movie['art_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #282828;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tr>
|
||||
<td class="card-poster-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 152px;min-width: 152px;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + movie['thumb_hash']) if base_url_image else movie['thumb_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #3F4245;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tr>
|
||||
<td class="card-poster-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 152px;min-width: 152px;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + movie['thumb_hash']) if base_url_image else movie['poster_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #3F4245;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank" style="text-decoration: underline;">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225" style="border: none;-ms-interpolation-mode: bicubic;max-width: 100%;display: block;">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank" style="text-decoration: underline;">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225" style="border: none;-ms-interpolation-mode: bicubic;max-width: 100%;display: block;">
|
||||
</a>
|
||||
</td>
|
||||
<td class="card-info-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;padding-left: 4px;text-align: left;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;height: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="card-info-title nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.9rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;border-bottom: 1px solid rgba(255, 255, 255, .1);line-height: 1.2rem;padding: 5px;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank" style="text-decoration: none;color: #ffffff;">${movie['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.75rem;vertical-align: top;padding: 5px;height: 100%;">
|
||||
% if movie['tagline']:
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;">
|
||||
<em>${movie['tagline']}</em>
|
||||
</p>
|
||||
% endif
|
||||
<p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;max-width: 325px;">
|
||||
${movie['summary'][:450] + (movie['summary'][450:] and '...')}
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.6rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;padding-top: 0px;padding-right: 5px;padding-bottom: 5px;padding-left: 5px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="badge-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;max-width: 265px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;">
|
||||
% if movie['year']:
|
||||
<span class="badge" title="${movie['year']}" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${movie['year']}</span>
|
||||
% endif
|
||||
% if movie['duration']:
|
||||
<% duration = int(int(movie['duration'])/60000) %>
|
||||
<span class="badge" title="${duration} mins" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${duration} mins</span>
|
||||
% endif
|
||||
% if movie['genres']:
|
||||
% for genre in movie['genres'][:2]:
|
||||
<span class="badge" title="${genre}" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${genre}</span>
|
||||
% endfor
|
||||
% endif
|
||||
</td>
|
||||
% if movie['rating']:
|
||||
<% rating = int(round(float(movie['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(movie['rating'])/0.1)}%" align="right" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: bottom;width: 60px;padding-right: 5px;">
|
||||
% for _ in range(rating):
|
||||
<span class="star-rating full" style="font-size: 0.8rem;line-height: 1rem;width: 0.5rem;display: inline-block;color: #E5A00D;">★</span>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<span class="star-rating empty" style="font-size: 0.8rem;line-height: 1rem;width: 0.5rem;display: inline-block;color: #aaaaaa;">☆</span>
|
||||
% endfor
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
<td class="card-info-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;padding-left: 4px;text-align: left;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;height: 100%;">
|
||||
<tr>
|
||||
<td class="card-info-title nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.9rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;border-bottom: 1px solid rgba(255, 255, 255, .1);line-height: 1.2rem;padding: 5px;max-width: 320px;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank" style="text-decoration: none;color: #ffffff;">${movie['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.75rem;vertical-align: top;padding: 5px;height: 100%;">
|
||||
% if movie['tagline']:
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;color: #ffffff;">
|
||||
<em>${movie['tagline']}</em>
|
||||
</p>
|
||||
% endif
|
||||
<p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;max-width: 325px;color: #ffffff;">
|
||||
${movie['summary'][:450] + (movie['summary'][450:] and '...')}
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.6rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;padding-top: 0px;padding-right: 5px;padding-bottom: 5px;padding-left: 5px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
<td class="badge-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;max-width: 260px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% if movie['year']:
|
||||
<td class="badge" title="${movie['year']}" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${movie['year']}</td>
|
||||
% endif
|
||||
% if movie['duration']:
|
||||
<% duration = int(int(movie['duration'])/60000) %>
|
||||
<td class="badge" title="${duration} mins" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${duration} mins</td>
|
||||
% endif
|
||||
% if movie['genres']:
|
||||
% for genre in movie['genres'][:]:
|
||||
<td class="badge" title="${genre}" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${genre}</td>
|
||||
% endfor
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if movie['rating']:
|
||||
<% rating = int(round(float(movie['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(movie['rating'])/0.1)}%" align="right" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 65px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% for _ in range(rating):
|
||||
<td class="star-rating full" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.8rem;vertical-align: bottom;margin-left: 4px;line-height: 1rem;width: 0.5rem;display: inline-block;color: #E5A00D;">★</td>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<td class="star-rating empty" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.8rem;vertical-align: bottom;margin-left: 4px;line-height: 1rem;width: 0.5rem;display: inline-block;color: #aaaaaa;">☆</td>
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
@@ -781,142 +700,137 @@
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
% for show_a, show_b in grouper(recently_added['show'], 2):
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
% for show in (show_a, show_b):
|
||||
% if show:
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
<%
|
||||
if show['season_count'] == 1:
|
||||
if show['season'][0]['episode_count'] == 1:
|
||||
link_rating_key = show['season'][0]['episode'][0]['rating_key']
|
||||
else:
|
||||
link_rating_key = show['season'][0]['episode'][0]['parent_rating_key']
|
||||
else:
|
||||
link_rating_key = show['rating_key']
|
||||
%>
|
||||
<td align="center" valign="top" class="card-instance show" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 3px;width: 502px;min-width: 502px;max-width: 502px;height: 233px;">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + show['art_hash']) if base_url_image else show['art_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #282828;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tbody>
|
||||
% for show_a, show_b in grouper(recently_added['show'], 2):
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% for show in (show_a, show_b):
|
||||
% if show:
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
<%
|
||||
if show['season_count'] == 1:
|
||||
if show['season'][0]['episode_count'] == 1:
|
||||
link_rating_key = show['season'][0]['episode'][0]['rating_key']
|
||||
else:
|
||||
link_rating_key = show['season'][0]['episode'][0]['parent_rating_key']
|
||||
else:
|
||||
link_rating_key = show['rating_key']
|
||||
%>
|
||||
<td align="center" valign="top" class="card-instance show" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 3px;width: 502px;min-width: 502px;max-width: 502px;height: 233px;">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + show['art_hash']) if base_url_image else show['art_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #282828;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tr>
|
||||
<td class="card-poster-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 152px;min-width: 152px;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + show['thumb_hash']) if base_url_image else show['thumb_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #3F4245;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tr>
|
||||
<td class="card-poster-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 152px;min-width: 152px;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + show['thumb_hash']) if base_url_image else show['poster_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #3F4245;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank" style="text-decoration: underline;">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225" style="border: none;-ms-interpolation-mode: bicubic;max-width: 100%;display: block;">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank" style="text-decoration: underline;">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225" style="border: none;-ms-interpolation-mode: bicubic;max-width: 100%;display: block;">
|
||||
</a>
|
||||
</td>
|
||||
<td class="card-info-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;padding-left: 4px;text-align: left;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;height: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="card-info-title nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.9rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;border-bottom: 1px solid rgba(255, 255, 255, .1);line-height: 1.2rem;padding: 5px;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank" style="text-decoration: none;color: #ffffff;">${show['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.75rem;vertical-align: top;padding: 5px;height: 100%;">
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;">
|
||||
% if show['season_count'] > 1:
|
||||
<em>${show['season_count']} seasons /</em>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
<td class="card-info-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;padding-left: 4px;text-align: left;height: 227px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;height: 100%;">
|
||||
<tr>
|
||||
<td class="card-info-title nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.9rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;border-bottom: 1px solid rgba(255, 255, 255, .1);line-height: 1.2rem;padding: 5px;max-width: 320px;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank" style="text-decoration: none;color: #ffffff;">${show['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.75rem;vertical-align: top;padding: 5px;height: 100%;">
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;color: #ffffff;">
|
||||
% if show['season_count'] > 1:
|
||||
<em>${show['season_count']} seasons /</em>
|
||||
% endif
|
||||
<% total_show_episodes = sum(s['episode_count'] for s in show['season']) %>
|
||||
<em>${total_show_episodes} episode${'s' if total_show_episodes > 1 else ''}</em>
|
||||
</p>
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;color: #ffffff;">
|
||||
% for i, season in enumerate(show['season'][:8]):
|
||||
% if season['episode_count'] == 1:
|
||||
Season ${season['media_index']} · Episode ${season['episode'][0]['media_index']} - ${season['episode'][0]['title']}
|
||||
% else:
|
||||
Season ${season['media_index']} · Episodes ${season['episode_range']}
|
||||
% endif
|
||||
% if i < min(show['season_count'], 7):
|
||||
<br>
|
||||
% elif i == 7 and show['season_count'] > 8:
|
||||
...plus ${show['season_count'] - 8} more seasons!
|
||||
% endif
|
||||
% endfor
|
||||
</p>
|
||||
<p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;max-width: 325px;color: #ffffff;">
|
||||
% if show['season_count'] == 1 and show['season'][0]['episode_count'] == 1:
|
||||
${show['season'][0]['episode'][0]['summary'][:350] + (show['season'][0]['episode'][0]['summary'][350:] and '...')}
|
||||
% else:
|
||||
<% length = max(0, 350 - 50 * (show['season_count'] - 1)) %>
|
||||
% if length:
|
||||
${show['summary'][:length] + (show['summary'][length:] and '...')}
|
||||
% endif
|
||||
% endif
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.6rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;padding-top: 0px;padding-right: 5px;padding-bottom: 5px;padding-left: 5px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
<td class="badge-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;max-width: 260px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% if show['year']:
|
||||
<td class="badge" title="${show['year']}" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${show['year']}</td>
|
||||
% endif
|
||||
<% total_show_episodes = sum(s['episode_count'] for s in show['season']) %>
|
||||
<em>${total_show_episodes} episode${'s' if total_show_episodes > 1 else ''}</em>
|
||||
</p>
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;">
|
||||
% for i, season in enumerate(show['season'][:8]):
|
||||
Season ${season['media_index']} ·
|
||||
% if season['episode_count'] == 1:
|
||||
Episode ${season['episode'][0]['media_index']} - ${season['episode'][0]['title']}
|
||||
% else:
|
||||
Episodes ${season['episode_range']}
|
||||
% endif
|
||||
% if i < min(show['season_count'], 7):
|
||||
<br>
|
||||
% elif i == 7 and show['season_count'] > 8:
|
||||
...plus ${show['season_count'] - 8} more seasons!
|
||||
% endif
|
||||
% if show['duration']:
|
||||
<% duration = int(int(show['duration'])/60000) %>
|
||||
<td class="badge" title="${duration} mins" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${duration} mins</td>
|
||||
% endif
|
||||
% if show['genres']:
|
||||
% for genre in show['genres'][:2]:
|
||||
<td class="badge" title="${genre}" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${genre}</td>
|
||||
% endfor
|
||||
</p>
|
||||
<p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;max-width: 325px;">
|
||||
% if show['season_count'] == 1 and show['season'][0]['episode_count'] == 1:
|
||||
${show['season'][0]['episode'][0]['summary'][:350] + (show['season'][0]['episode'][0]['summary'][350:] and '...')}
|
||||
% else:
|
||||
<% length = max(0, 350 - 50 * (show['season_count'] - 1)) %>
|
||||
% if length:
|
||||
${show['summary'][:length] + (show['summary'][length:] and '...')}
|
||||
% endif
|
||||
% endif
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.6rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;padding-top: 0px;padding-right: 5px;padding-bottom: 5px;padding-left: 5px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="badge-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;max-width: 265px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;">
|
||||
% if show['year']:
|
||||
<span class="badge" title="${show['year']}" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${show['year']}</span>
|
||||
% endif
|
||||
% if show['duration']:
|
||||
<% duration = int(int(show['duration'])/60000) %>
|
||||
<span class="badge" title="${duration} mins" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${duration} mins</span>
|
||||
% endif
|
||||
% if show['genres']:
|
||||
% for genre in show['genres'][:2]:
|
||||
<span class="badge" title="${genre}" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${genre}</span>
|
||||
% endfor
|
||||
% endif
|
||||
</td>
|
||||
% if show['rating']:
|
||||
<% rating = int(round(float(show['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(show['rating'])/0.1)}%" align="right" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: bottom;width: 60px;padding-right: 5px;">
|
||||
% for _ in range(rating):
|
||||
<span class="star-rating full" style="font-size: 0.8rem;line-height: 1rem;width: 0.5rem;display: inline-block;color: #E5A00D;">★</span>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<span class="star-rating empty" style="font-size: 0.8rem;line-height: 1rem;width: 0.5rem;display: inline-block;color: #aaaaaa;">☆</span>
|
||||
% endfor
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if show['rating']:
|
||||
<% rating = int(round(float(show['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(show['rating'])/0.1)}%" align="right" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 65px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% for _ in range(rating):
|
||||
<td class="star-rating full" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.8rem;vertical-align: bottom;margin-left: 4px;line-height: 1rem;width: 0.5rem;display: inline-block;color: #E5A00D;">★</td>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<td class="star-rating empty" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.8rem;vertical-align: bottom;margin-left: 4px;line-height: 1rem;width: 0.5rem;display: inline-block;color: #aaaaaa;">☆</td>
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
@@ -938,124 +852,114 @@
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
% for album_a, album_b in grouper([a for artist in recently_added['artist'] for a in artist['album']], 2):
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
% for album in (album_a, album_b):
|
||||
% if album:
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance album" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 3px;width: 502px;min-width: 502px;max-width: 502px;height: 158px;">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + album['art_hash']) if base_url_image else album['art_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #282828;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tbody>
|
||||
% for album_a, album_b in grouper([a for artist in recently_added['artist'] for a in artist['album']], 2):
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% for album in (album_a, album_b):
|
||||
% if album:
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance album" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 3px;width: 502px;min-width: 502px;max-width: 502px;height: 158px;">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + album['art_hash']) if base_url_image else album['art_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #282828;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tr>
|
||||
<td class="card-poster-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 152px;min-width: 152px;height: 152px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + album['thumb_hash']) if base_url_image else album['thumb_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #3F4245;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tr>
|
||||
<td class="card-poster-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 152px;min-width: 152px;height: 152px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + album['thumb_hash']) if base_url_image else album['poster_url']});border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;background-color: #3F4245;background-position: center;background-size: cover;background-repeat: no-repeat;background-clip: padding-box;border: 1px solid rgba(255,255,255,.1);">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank" style="text-decoration: underline;">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-cover.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-cover.png'}" width="150" height="150" style="border: none;-ms-interpolation-mode: bicubic;max-width: 100%;display: block;">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<td style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank" style="text-decoration: underline;">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-cover.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-cover.png'}" width="150" height="150" style="border: none;-ms-interpolation-mode: bicubic;max-width: 100%;display: block;">
|
||||
</a>
|
||||
</td>
|
||||
<td class="card-info-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;padding-left: 4px;text-align: left;height: 152px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;height: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="card-info-title nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.9rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;border-bottom: 1px solid rgba(255, 255, 255, .1);line-height: 1.2rem;padding: 5px;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank" style="text-decoration: none;color: #ffffff;">${album['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.75rem;vertical-align: top;padding: 5px;height: 82px;min-height: 82px;">
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;">
|
||||
<em>${album['parent_title']} · ${album['track_count']} track${'s' if album['track_count'] > 1 else ''}</em>
|
||||
</p>
|
||||
% if artist['title'].lower() != 'various artists':
|
||||
<p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;max-width: 325px;">
|
||||
${album['summary'][:200] + (album['summary'][200:] and '...')}
|
||||
</p>
|
||||
% endif
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.6rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;padding-top: 0px;padding-right: 5px;padding-bottom: 5px;padding-left: 5px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="badge-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;max-width: 265px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;">
|
||||
% if album['year']:
|
||||
<span class="badge" title="${album['year']}" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${album['year']}</span>
|
||||
% endif
|
||||
% if album['genres']:
|
||||
% for genre in album['genres'][:2]:
|
||||
<span class="badge" title="${genre}" style="display: inline-block;min-width: 10px;padding: 3px 7px;font-size: 11px;line-height: 1;text-align: center;white-space: nowrap;vertical-align: middle;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;">${genre}</span>
|
||||
% endfor
|
||||
% endif
|
||||
</td>
|
||||
% if album['rating']:
|
||||
<% rating = int(round(float(album['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(album['rating'])/0.1)}%" align="right" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: bottom;width: 60px;padding-right: 5px;">
|
||||
% for _ in range(rating):
|
||||
<span class="star-rating full" style="font-size: 0.8rem;line-height: 1rem;width: 0.5rem;display: inline-block;color: #E5A00D;">★</span>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<span class="star-rating empty" style="font-size: 0.8rem;line-height: 1rem;width: 0.5rem;display: inline-block;color: #aaaaaa;">☆</span>
|
||||
% endfor
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
<td class="card-info-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;padding-left: 4px;text-align: left;height: 152px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;height: 100%;">
|
||||
<tr>
|
||||
<td class="card-info-title nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.9rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;border-bottom: 1px solid rgba(255, 255, 255, .1);line-height: 1.2rem;padding: 5px;max-width: 320px;">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank" style="text-decoration: none;color: #ffffff;">${album['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.75rem;vertical-align: top;padding: 5px;height: 82px;min-height: 82px;">
|
||||
<p class="nowrap mb5" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;margin-bottom: 5px;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;max-width: 325px;color: #ffffff;">
|
||||
<em>${album['parent_title']} · ${album['track_count']} track${'s' if album['track_count'] > 1 else ''}</em>
|
||||
</p>
|
||||
% if album['parent_title'].lower() != 'various artists':
|
||||
<p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;max-width: 325px;color: #ffffff;">
|
||||
${album['summary'][:200] + (album['summary'][200:] and '...')}
|
||||
</p>
|
||||
% endif
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.6rem;vertical-align: top;white-space: nowrap;text-overflow: ellipsis;overflow: hidden;padding-top: 0px;padding-right: 5px;padding-bottom: 5px;padding-left: 5px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
<td class="badge-container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;max-width: 260px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% if album['year']:
|
||||
<td class="badge" title="${album['year']}" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${album['year']}</td>
|
||||
% endif
|
||||
% if album['genres']:
|
||||
% for genre in album['genres'][:2]:
|
||||
<td class="badge" title="${genre}" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 11px;vertical-align: middle;display: inline-block;min-width: 10px;margin-right: 4px;padding: 3px 7px;line-height: 1;text-align: center;white-space: nowrap;background-color: rgba(0, 0, 0, .25);border-radius: 2px;text-overflow: ellipsis;overflow: hidden;color: #ffffff;">${genre}</td>
|
||||
% endfor
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if album['rating']:
|
||||
<% rating = int(round(float(album['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(album['rating'])/0.1)}%" align="right" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;width: 65px;">
|
||||
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
% for _ in range(rating):
|
||||
<td class="star-rating full" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.8rem;vertical-align: bottom;margin-left: 4px;line-height: 1rem;width: 0.5rem;display: inline-block;color: #E5A00D;">★</td>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<td class="star-rating empty" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 0.8rem;vertical-align: bottom;margin-left: 4px;line-height: 1rem;width: 0.5rem;display: inline-block;color: #aaaaaa;">☆</td>
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;overflow: hidden;padding: 0 !important;width: 251px !important;min-width: 251px !important;max-width: 251px !important;"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td class="footer" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 12px;vertical-align: top;clear: both;margin-top: 10px;text-align: center;width: 100%;">
|
||||
<!-- START FOOTER -->
|
||||
<div class="footer-bar" style="margin-left: auto;margin-right: auto;width: 200px;border-top: 1px solid #E5A00D;margin-top: 25px;"></div>
|
||||
<div class="content-block powered-by" style="padding-bottom: 10px;padding-top: 10px;">
|
||||
Newsletter generated by <a href="http://tautulli.com" target="_blank" style="text-decoration: underline;color: #fff;font-size: 12px;text-align: center;">Tautulli</a>.
|
||||
<!-- FOOTER MESSAGE - DO NOT REMOVE -->
|
||||
</div>
|
||||
<!-- END FOOTER -->
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- END MAIN CONTENT AREA -->
|
||||
</table>
|
||||
|
||||
<!-- END CENTERED WHITE CONTAINER -->
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
@@ -1,17 +1,24 @@
% if data:
<%
import plexpy
from plexpy.helpers import grouper
from plexpy.helpers import grouper, get_img_service

recently_added = data['recently_added']
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
    base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
    base_url_image = base_url + 'image/'
elif preview:
    base_url = 'newsletter/'
    base_url_image = base_url + 'image/'
else:
    base_url = base_url_image = ''
    base_url = ''

service = get_img_service(include_self=True)
if service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
    base_url_image = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'image/'
elif preview and service and service != 'self-hosted':
    base_url_image = 'image/'
else:
    base_url_image = ''

%>
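## Note (illustrative, not part of the original template): plexpy.helpers.grouper
## appears to follow the standard itertools "grouper" recipe, collecting items into
## fixed-size groups and padding the last group with None; that is why the card
## loops below check for a missing second item (e.g. "% if not movie_b:") and emit
## an empty padding cell. A minimal sketch of that recipe (zip_longest in Python 3,
## izip_longest in Python 2):
##
##     from itertools import zip_longest
##     def grouper(iterable, n, fillvalue=None):
##         args = [iter(iterable)] * n
##         return zip_longest(*args, fillvalue=fillvalue)
##
##     list(grouper([1, 2, 3], 2))  # -> [(1, 2), (3, None)]
##
## The summary fields below use the idiom summary[:N] + (summary[N:] and '...'),
## which truncates to N characters and appends an ellipsis only when text was
## actually cut off (an empty remainder is falsy, so nothing extra is appended).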
<!doctype html>
<html>
@@ -61,17 +68,14 @@
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
/* Set a max-width, and make it display as block so it will automatically stretch to that width, but will also shrink down on a phone or something */
|
||||
.container {
|
||||
display: block;
|
||||
margin: 0 auto !important;
|
||||
/* makes it centered */
|
||||
max-width: 1042px;
|
||||
padding: 10px;
|
||||
width: 1042px;
|
||||
}
|
||||
|
||||
/* This should also be a block element, so that it will fill 100% of the .container */
|
||||
.content {
|
||||
box-sizing: border-box;
|
||||
display: block;
|
||||
@@ -83,6 +87,14 @@
|
||||
/* -------------------------------------
|
||||
HEADER, FOOTER, MAIN
|
||||
------------------------------------- */
|
||||
.local-preview-note {
|
||||
text-align: center;
|
||||
padding-top: 10px;
|
||||
}
|
||||
.local-preview-note p {
|
||||
color: #282A2D;
|
||||
font-size: 12px;
|
||||
}
|
||||
.main {
|
||||
background: #282A2D;
|
||||
border-radius: 3px;
|
||||
@@ -157,88 +169,9 @@
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
/* -------------------------------------
|
||||
BUTTONS
|
||||
------------------------------------- */
|
||||
.btn {
|
||||
box-sizing: border-box;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.btn > tbody > tr > td {
|
||||
padding-bottom: 15px;
|
||||
}
|
||||
|
||||
.btn table {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.btn table td {
|
||||
background-color: #ffffff;
|
||||
border-radius: 5px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.btn a {
|
||||
background-color: #ffffff;
|
||||
border: solid 1px #3498db;
|
||||
border-radius: 5px;
|
||||
box-sizing: border-box;
|
||||
color: #3498db;
|
||||
cursor: pointer;
|
||||
display: inline-block;
|
||||
font-size: 14px;
|
||||
font-weight: bold;
|
||||
margin: 0;
|
||||
padding: 12px 25px;
|
||||
text-decoration: none;
|
||||
text-transform: capitalize;
|
||||
}
|
||||
|
||||
.btn-primary table td {
|
||||
background-color: #3498db;
|
||||
}
|
||||
|
||||
.btn-primary a {
|
||||
background-color: #3498db;
|
||||
border-color: #3498db;
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
/* -------------------------------------
|
||||
OTHER STYLES THAT MIGHT BE USEFUL
|
||||
------------------------------------- */
|
||||
.last {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.first {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.align-center {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.align-right {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.align-left {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.clear {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.mt0 {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.mb0 {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
.mb5 {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
@@ -248,9 +181,6 @@
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
}
|
||||
.clear {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.preheader {
|
||||
color: transparent;
|
||||
@@ -309,6 +239,7 @@
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
/* -------------------------------------
|
||||
MEDIA SECTIONS
|
||||
------------------------------------- */
|
||||
@@ -363,6 +294,7 @@
|
||||
font-size: 20px;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
/* -------------------------------------
|
||||
MEDIA CARDS
|
||||
------------------------------------- */
|
||||
@@ -436,6 +368,7 @@
|
||||
line-height: 1.2rem;
|
||||
font-size: 0.9rem;
|
||||
padding: 5px;
|
||||
max-width: 320px;
|
||||
}
|
||||
.card-info-title a {
|
||||
text-decoration: none;
|
||||
@@ -448,6 +381,7 @@
|
||||
}
|
||||
.card-info-body > p {
|
||||
max-width: 325px;
|
||||
color: #ffffff;
|
||||
}
|
||||
.card-instance.movie .card-info-body,
|
||||
.card-instance.show .card-info-body {
|
||||
@@ -464,21 +398,21 @@
|
||||
padding-left: 5px;
|
||||
}
|
||||
.card-info-footer .badge-container {
|
||||
max-width: 265px;
|
||||
max-width: 260px;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
.card-info-footer .star-rating-container {
|
||||
width: 60px;
|
||||
vertical-align: bottom;
|
||||
padding-right: 5px;
|
||||
width: 65px;
|
||||
}
|
||||
.card-info-footer .star-rating {
|
||||
margin-left: 4px;
|
||||
font-size: 0.8rem;
|
||||
line-height: 1rem;
|
||||
width: 0.5rem;
|
||||
display: inline-block;
|
||||
vertical-align: bottom;
|
||||
}
|
||||
.star-rating.full {
|
||||
color: #E5A00D;
|
||||
@@ -489,6 +423,7 @@
|
||||
.badge {
|
||||
display: inline-block;
|
||||
min-width: 10px;
|
||||
margin-right: 4px;
|
||||
padding: 3px 7px;
|
||||
font-size: 11px;
|
||||
line-height: 1;
|
||||
@@ -499,6 +434,7 @@
|
||||
border-radius: 2px;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
/* -------------------------------------
|
||||
@@ -592,37 +528,27 @@
|
||||
line-height: inherit !important;
|
||||
text-decoration: none !important;
|
||||
}
|
||||
|
||||
.btn-primary table td:hover {
|
||||
background-color: #34495e !important;
|
||||
}
|
||||
|
||||
.btn-primary a:hover {
|
||||
background-color: #34495e !important;
|
||||
border-color: #34495e !important;
|
||||
}
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body class="">
|
||||
<body>
|
||||
% if preview and service and service != 'self-hosted':
|
||||
<div class="local-preview-note"><p>Note: Local preview images only - images will be uploaded to ${service.capitalize()} when the newsletter is sent.</p></div> <!-- IGNORE SAVE -->
|
||||
% elif preview and not service:
|
||||
<div class="local-preview-note"><p>Warning: The Image Hosting setting must be enabled for images to display on the newsletter.</p></div> <!-- IGNORE SAVE -->
|
||||
% endif
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="body">
|
||||
<tr>
|
||||
<td class="container">
|
||||
<div class="content">
|
||||
|
||||
<!-- START CENTERED WHITE CONTAINER -->
|
||||
<span class="preheader">Tautulli Newsletter - ${subject}</span>
|
||||
|
||||
% if base_url and not preview:
|
||||
<div class="view-full"> <!-- IGNORE SAVE -->
|
||||
<a href="${base_url + uuid}" title="View full newsletter" target="_blank">Click here to view the full newsletter.</a> <!-- IGNORE SAVE -->
|
||||
</div> <!-- IGNORE SAVE -->
|
||||
% endif
|
||||
|
||||
<table border="0" cellpadding="3" cellspacing="0" class="main">
|
||||
|
||||
<!-- START MAIN CONTENT AREA -->
|
||||
<tr>
|
||||
<td class="wrapper">
|
||||
<div class="header">
|
||||
@@ -632,7 +558,6 @@
|
||||
<div class="dates">${parameters['start_date']} - ${parameters['end_date']}</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
% if message:
|
||||
<tr>
|
||||
<td class="wrapper">
|
||||
@@ -641,7 +566,6 @@
|
||||
</td>
|
||||
</tr>
|
||||
% endif
|
||||
|
||||
% if recently_added.get('movie'):
|
||||
<tr>
|
||||
<td class="wrapper">
|
||||
@@ -657,109 +581,105 @@
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tbody>
|
||||
% for movie_a, movie_b in grouper(recently_added['movie'], 2):
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%">
|
||||
<tbody>
|
||||
<tr>
|
||||
% for movie in (movie_a, movie_b):
|
||||
% if movie:
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance movie">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + movie['art_hash']) if base_url_image else movie['art_url']});">
|
||||
<tbody>
|
||||
% for movie_a, movie_b in grouper(recently_added['movie'], 2):
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%">
|
||||
<tr>
|
||||
% for movie in (movie_a, movie_b):
|
||||
% if movie:
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance movie">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + movie['art_hash']) if base_url_image else movie['art_url']});">
|
||||
<tr>
|
||||
<td class="card-poster-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + movie['thumb_hash']) if base_url_image else movie['thumb_url']})">
|
||||
<tr>
|
||||
<td class="card-poster-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + movie['thumb_hash']) if base_url_image else movie['poster_url']})">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<td>
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225">
|
||||
</a>
|
||||
</td>
|
||||
<td class="card-info-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="card-info-title nowrap">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank">${movie['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body">
|
||||
% if movie['tagline']:
|
||||
<p class="nowrap mb5">
|
||||
<em>${movie['tagline']}</em>
|
||||
</p>
|
||||
% endif
|
||||
<p>
|
||||
${movie['summary'][:450] + (movie['summary'][450:] and '...')}
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="badge-container">
|
||||
% if movie['year']:
|
||||
<span class="badge" title="${movie['year']}">${movie['year']}</span>
|
||||
% endif
|
||||
% if movie['duration']:
|
||||
<% duration = int(int(movie['duration'])/60000) %>
|
||||
<span class="badge" title="${duration} mins">${duration} mins</span>
|
||||
% endif
|
||||
% if movie['genres']:
|
||||
% for genre in movie['genres'][:2]:
|
||||
<span class="badge" title="${genre}">${genre}</span>
|
||||
% endfor
|
||||
% endif
|
||||
</td>
|
||||
% if movie['rating']:
|
||||
<% rating = int(round(float(movie['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(movie['rating'])/0.1)}%" align="right">
|
||||
% for _ in range(rating):
|
||||
<span class="star-rating full">★</span>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<span class="star-rating empty">☆</span>
|
||||
% endfor
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
<td class="card-info-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table">
|
||||
<tr>
|
||||
<td class="card-info-title nowrap">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${movie['rating_key']}" title="${movie['title']}" target="_blank">${movie['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body">
|
||||
% if movie['tagline']:
|
||||
<p class="nowrap mb5">
|
||||
<em>${movie['tagline']}</em>
|
||||
</p>
|
||||
% endif
|
||||
<p>
|
||||
${movie['summary'][:450] + (movie['summary'][450:] and '...')}
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
<td class="badge-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
% if movie['year']:
|
||||
<td class="badge" title="${movie['year']}">${movie['year']}</td>
|
||||
% endif
|
||||
% if movie['duration']:
|
||||
<% duration = int(int(movie['duration'])/60000) %>
|
||||
<td class="badge" title="${duration} mins">${duration} mins</td>
|
||||
% endif
|
||||
% if movie['genres']:
|
||||
% for genre in movie['genres'][:]:
|
||||
<td class="badge" title="${genre}">${genre}</td>
|
||||
% endfor
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if movie['rating']:
|
||||
<% rating = int(round(float(movie['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(movie['rating'])/0.1)}%" align="right">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
% for _ in range(rating):
|
||||
<td class="star-rating full">★</td>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<td class="star-rating empty">☆</td>
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if not movie_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
@@ -781,142 +701,137 @@
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tbody>
|
||||
% for show_a, show_b in grouper(recently_added['show'], 2):
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%">
|
||||
<tbody>
|
||||
<tr>
|
||||
% for show in (show_a, show_b):
|
||||
% if show:
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
<%
|
||||
if show['season_count'] == 1:
|
||||
if show['season'][0]['episode_count'] == 1:
|
||||
link_rating_key = show['season'][0]['episode'][0]['rating_key']
|
||||
else:
|
||||
link_rating_key = show['season'][0]['episode'][0]['parent_rating_key']
|
||||
else:
|
||||
link_rating_key = show['rating_key']
|
||||
%>
|
||||
<td align="center" valign="top" class="card-instance show">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + show['art_hash']) if base_url_image else show['art_url']});">
|
||||
<tbody>
|
||||
% for show_a, show_b in grouper(recently_added['show'], 2):
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%">
|
||||
<tr>
|
||||
% for show in (show_a, show_b):
|
||||
% if show:
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
<%
|
||||
if show['season_count'] == 1:
|
||||
if show['season'][0]['episode_count'] == 1:
|
||||
link_rating_key = show['season'][0]['episode'][0]['rating_key']
|
||||
else:
|
||||
link_rating_key = show['season'][0]['episode'][0]['parent_rating_key']
|
||||
else:
|
||||
link_rating_key = show['rating_key']
|
||||
%>
|
||||
<td align="center" valign="top" class="card-instance show">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + show['art_hash']) if base_url_image else show['art_url']});">
|
||||
<tr>
|
||||
<td class="card-poster-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + show['thumb_hash']) if base_url_image else show['thumb_url']})">
|
||||
<tr>
|
||||
<td class="card-poster-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + show['thumb_hash']) if base_url_image else show['poster_url']})">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<td>
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-poster.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-poster.png'}" width="150" height="225">
|
||||
</a>
|
||||
</td>
|
||||
<td class="card-info-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="card-info-title nowrap">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank">${show['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body">
|
||||
<p class="nowrap mb5">
|
||||
% if show['season_count'] > 1:
|
||||
<em>${show['season_count']} seasons /</em>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
<td class="card-info-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table">
|
||||
<tr>
|
||||
<td class="card-info-title nowrap">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${link_rating_key}" title="${show['title']}" target="_blank">${show['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body">
|
||||
<p class="nowrap mb5">
|
||||
% if show['season_count'] > 1:
|
||||
<em>${show['season_count']} seasons /</em>
|
||||
% endif
|
||||
<% total_show_episodes = sum(s['episode_count'] for s in show['season']) %>
|
||||
<em>${total_show_episodes} episode${'s' if total_show_episodes > 1 else ''}</em>
|
||||
</p>
|
||||
<p class="nowrap mb5">
|
||||
% for i, season in enumerate(show['season'][:8]):
|
||||
% if season['episode_count'] == 1:
|
||||
Season ${season['media_index']} · Episode ${season['episode'][0]['media_index']} - ${season['episode'][0]['title']}
|
||||
% else:
|
||||
Season ${season['media_index']} · Episodes ${season['episode_range']}
|
||||
% endif
|
||||
% if i < min(show['season_count'], 7):
|
||||
<br>
|
||||
% elif i == 7 and show['season_count'] > 8:
|
||||
...plus ${show['season_count'] - 8} more seasons!
|
||||
% endif
|
||||
% endfor
|
||||
</p>
|
||||
<p>
|
||||
% if show['season_count'] == 1 and show['season'][0]['episode_count'] == 1:
|
||||
${show['season'][0]['episode'][0]['summary'][:350] + (show['season'][0]['episode'][0]['summary'][350:] and '...')}
|
||||
% else:
|
||||
<% length = max(0, 350 - 50 * (show['season_count'] - 1)) %>
|
||||
% if length:
|
||||
${show['summary'][:length] + (show['summary'][length:] and '...')}
|
||||
% endif
|
||||
% endif
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
<td class="badge-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
% if show['year']:
|
||||
<td class="badge" title="${show['year']}">${show['year']}</td>
|
||||
% endif
|
||||
<% total_show_episodes = sum(s['episode_count'] for s in show['season']) %>
|
||||
<em>${total_show_episodes} episode${'s' if total_show_episodes > 1 else ''}</em>
|
||||
</p>
|
||||
<p class="nowrap mb5">
|
||||
% for i, season in enumerate(show['season'][:8]):
|
||||
Season ${season['media_index']} ·
|
||||
% if season['episode_count'] == 1:
|
||||
Episode ${season['episode'][0]['media_index']} - ${season['episode'][0]['title']}
|
||||
% else:
|
||||
Episodes ${season['episode_range']}
|
||||
% endif
|
||||
% if i < min(show['season_count'], 7):
|
||||
<br>
|
||||
% elif i == 7 and show['season_count'] > 8:
|
||||
...plus ${show['season_count'] - 8} more seasons!
|
||||
% endif
|
||||
% if show['duration']:
|
||||
<% duration = int(int(show['duration'])/60000) %>
|
||||
<td class="badge" title="${duration} mins">${duration} mins</td>
|
||||
% endif
|
||||
% if show['genres']:
|
||||
% for genre in show['genres'][:2]:
|
||||
<td class="badge" title="${genre}">${genre}</td>
|
||||
% endfor
|
||||
</p>
|
||||
<p>
|
||||
% if show['season_count'] == 1 and show['season'][0]['episode_count'] == 1:
|
||||
${show['season'][0]['episode'][0]['summary'][:350] + (show['season'][0]['episode'][0]['summary'][350:] and '...')}
|
||||
% else:
|
||||
<% length = max(0, 350 - 50 * (show['season_count'] - 1)) %>
|
||||
% if length:
|
||||
${show['summary'][:length] + (show['summary'][length:] and '...')}
|
||||
% endif
|
||||
% endif
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="badge-container">
|
||||
% if show['year']:
|
||||
<span class="badge" title="${show['year']}">${show['year']}</span>
|
||||
% endif
|
||||
% if show['duration']:
|
||||
<% duration = int(int(show['duration'])/60000) %>
|
||||
<span class="badge" title="${duration} mins">${duration} mins</span>
|
||||
% endif
|
||||
% if show['genres']:
|
||||
% for genre in show['genres'][:2]:
|
||||
<span class="badge" title="${genre}">${genre}</span>
|
||||
% endfor
|
||||
% endif
|
||||
</td>
|
||||
% if show['rating']:
|
||||
<% rating = int(round(float(show['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(show['rating'])/0.1)}%" align="right">
|
||||
% for _ in range(rating):
|
||||
<span class="star-rating full">★</span>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<span class="star-rating empty">☆</span>
|
||||
% endfor
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if show['rating']:
|
||||
<% rating = int(round(float(show['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(show['rating'])/0.1)}%" align="right">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
% for _ in range(rating):
|
||||
<td class="star-rating full">★</td>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<td class="star-rating empty">☆</td>
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if not show_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
@@ -938,124 +853,114 @@
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tbody>
|
||||
% for album_a, album_b in grouper([a for artist in recently_added['artist'] for a in artist['album']], 2):
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%">
|
||||
<tbody>
|
||||
<tr>
|
||||
% for album in (album_a, album_b):
|
||||
% if album:
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance album">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + album['art_hash']) if base_url_image else album['art_url']});">
|
||||
<tbody>
|
||||
% for album_a, album_b in grouper([a for artist in recently_added['artist'] for a in artist['album']], 2):
|
||||
<tr>
|
||||
<td>
|
||||
<table border="0" cellpadding="0" cellspacing="0" width="100%">
|
||||
<tr>
|
||||
% for album in (album_a, album_b):
|
||||
% if album:
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
<td align="center" valign="top" class="card-instance album">
|
||||
<table border="0" cellpadding="0" cellspacing="3" width="100%" class="card-background" style="background-image: url(${(base_url_image + album['art_hash']) if base_url_image else album['art_url']});">
|
||||
<tr>
|
||||
<td class="card-poster-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + album['thumb_hash']) if base_url_image else album['thumb_url']})">
|
||||
<tr>
|
||||
<td class="card-poster-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-poster" style="background-image: url(${(base_url_image + album['thumb_hash']) if base_url_image else album['poster_url']})">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-cover.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-cover.png'}" width="150" height="150">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<td>
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank">
|
||||
<img class="card-poster-overlay" src="${base_url_image + 'images/newsletter/view-on-plex-cover.png' if base_url_image else 'http://tautulli.com/images/newsletter/view-on-plex-cover.png'}" width="150" height="150">
|
||||
</a>
|
||||
</td>
|
||||
<td class="card-info-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="card-info-title nowrap">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank">${album['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body">
|
||||
<p class="nowrap mb5">
|
||||
<em>${album['parent_title']} · ${album['track_count']} track${'s' if album['track_count'] > 1 else ''}</em>
|
||||
</p>
|
||||
% if artist['title'].lower() != 'various artists':
|
||||
<p>
|
||||
${album['summary'][:200] + (album['summary'][200:] and '...')}
|
||||
</p>
|
||||
% endif
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="badge-container">
|
||||
% if album['year']:
|
||||
<span class="badge" title="${album['year']}">${album['year']}</span>
|
||||
% endif
|
||||
% if album['genres']:
|
||||
% for genre in album['genres'][:2]:
|
||||
<span class="badge" title="${genre}">${genre}</span>
|
||||
% endfor
|
||||
% endif
|
||||
</td>
|
||||
% if album['rating']:
|
||||
<% rating = int(round(float(album['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(album['rating'])/0.1)}%" align="right">
|
||||
% for _ in range(rating):
|
||||
<span class="star-rating full">★</span>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<span class="star-rating empty">☆</span>
|
||||
% endfor
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
<td class="card-info-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="card-info-container-table">
|
||||
<tr>
|
||||
<td class="card-info-title nowrap">
|
||||
<a href="${parameters['pms_web_url']}#!/server/${parameters['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${album['rating_key']}" title="${album['title']}" target="_blank">${album['title']}</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-body">
|
||||
<p class="nowrap mb5">
|
||||
<em>${album['parent_title']} · ${album['track_count']} track${'s' if album['track_count'] > 1 else ''}</em>
|
||||
</p>
|
||||
% if album['parent_title'].lower() != 'various artists':
|
||||
<p>
|
||||
${album['summary'][:200] + (album['summary'][200:] and '...')}
|
||||
</p>
|
||||
% endif
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="card-info-footer nowrap">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
<td class="badge-container">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
% if album['year']:
|
||||
<td class="badge" title="${album['year']}">${album['year']}</td>
|
||||
% endif
|
||||
% if album['genres']:
|
||||
% for genre in album['genres'][:2]:
|
||||
<td class="badge" title="${genre}">${genre}</td>
|
||||
% endfor
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if album['rating']:
|
||||
<% rating = int(round(float(album['rating']) / 2)) %>
|
||||
<td class="star-rating-container" title="${int(float(album['rating'])/0.1)}%" align="right">
|
||||
<table border="0" cellpadding="0" cellspacing="0">
|
||||
<tr>
|
||||
% for _ in range(rating):
|
||||
<td class="star-rating full">★</td>
|
||||
% endfor
|
||||
% for _ in range(5-rating):
|
||||
<td class="star-rating empty">☆</td>
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% endif
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
% if not album_b:
|
||||
<td align="center" valign="top" class="card-instance pad"></td>
|
||||
% endif
|
||||
% endif
|
||||
% endfor
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td class="footer">
|
||||
<!-- START FOOTER -->
|
||||
<div class="footer-bar"></div>
|
||||
<div class="content-block powered-by">
|
||||
Newsletter generated by <a href="http://tautulli.com" target="_blank">Tautulli</a>.
|
||||
<!-- FOOTER MESSAGE - DO NOT REMOVE -->
|
||||
</div>
|
||||
<!-- END FOOTER -->
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- END MAIN CONTENT AREA -->
|
||||
</table>
|
||||
|
||||
<!-- END CENTERED WHITE CONTAINER -->
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
@@ -9,7 +9,7 @@ __all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression',
           'WeekdayPositionExpression', 'LastDayOfMonthExpression')


WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
WEEKDAYS = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']
MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']

lib/cloudinary/__init__.py (new file, 302 lines)
@@ -0,0 +1,302 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("Cloudinary")
|
||||
ch = logging.StreamHandler()
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from six import python_2_unicode_compatible
|
||||
|
||||
from cloudinary import utils
|
||||
from cloudinary.compat import urlparse, parse_qs
|
||||
from cloudinary.search import Search
|
||||
|
||||
CF_SHARED_CDN = "d3jpl91pxevbkh.cloudfront.net"
|
||||
OLD_AKAMAI_SHARED_CDN = "cloudinary-a.akamaihd.net"
|
||||
AKAMAI_SHARED_CDN = "res.cloudinary.com"
|
||||
SHARED_CDN = AKAMAI_SHARED_CDN
|
||||
CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"
|
||||
|
||||
VERSION = "1.11.0"
|
||||
USER_AGENT = "CloudinaryPython/" + VERSION
|
||||
""" :const: USER_AGENT """
|
||||
|
||||
USER_PLATFORM = ""
|
||||
"""
|
||||
Additional information to be passed with the USER_AGENT, e.g. "CloudinaryMagento/1.0.1".
|
||||
This value is set in platform-specific implementations that use cloudinary_php.
|
||||
|
||||
The format of the value should be <ProductName>/Version[ (comment)].
|
||||
@see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.43
|
||||
|
||||
**Do not set this value in application code!**
|
||||
"""
|
||||
|
||||
|
||||
def get_user_agent():
|
||||
"""Provides the `USER_AGENT` string that is passed to the Cloudinary servers.
|
||||
Prepends `USER_PLATFORM` if it is defined.
|
||||
|
||||
:returns: the user agent
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
if USER_PLATFORM == "":
|
||||
return USER_AGENT
|
||||
else:
|
||||
return USER_PLATFORM + " " + USER_AGENT
|
||||
|
||||
|
||||
def import_django_settings():
|
||||
try:
|
||||
import django.conf
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
try:
|
||||
if 'CLOUDINARY' in dir(django.conf.settings):
|
||||
return django.conf.settings.CLOUDINARY
|
||||
else:
|
||||
return None
|
||||
except ImproperlyConfigured:
|
||||
return None
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
|
||||
class Config(object):
|
||||
def __init__(self):
|
||||
django_settings = import_django_settings()
|
||||
if django_settings:
|
||||
self.update(**django_settings)
|
||||
elif os.environ.get("CLOUDINARY_CLOUD_NAME"):
|
||||
self.update(
|
||||
cloud_name=os.environ.get("CLOUDINARY_CLOUD_NAME"),
|
||||
api_key=os.environ.get("CLOUDINARY_API_KEY"),
|
||||
api_secret=os.environ.get("CLOUDINARY_API_SECRET"),
|
||||
secure_distribution=os.environ.get("CLOUDINARY_SECURE_DISTRIBUTION"),
|
||||
private_cdn=os.environ.get("CLOUDINARY_PRIVATE_CDN") == 'true'
|
||||
)
|
||||
elif os.environ.get("CLOUDINARY_URL"):
|
||||
cloudinary_url = os.environ.get("CLOUDINARY_URL")
|
||||
self._parse_cloudinary_url(cloudinary_url)
|
||||
|
||||
def _parse_cloudinary_url(self, cloudinary_url):
|
||||
uri = urlparse(cloudinary_url.replace("cloudinary://", "http://"))
|
||||
for k, v in parse_qs(uri.query).items():
|
||||
if self._is_nested_key(k):
|
||||
self._put_nested_key(k, v)
|
||||
else:
|
||||
self.__dict__[k] = v[0]
|
||||
self.update(
|
||||
cloud_name=uri.hostname,
|
||||
api_key=uri.username,
|
||||
api_secret=uri.password,
|
||||
private_cdn=uri.path != ''
|
||||
)
|
||||
if uri.path != '':
|
||||
self.update(secure_distribution=uri.path[1:])
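
# Illustrative note (not part of the Cloudinary source): CLOUDINARY_URL follows the
# documented form cloudinary://<api_key>:<api_secret>@<cloud_name>[/<distribution>],
# and _parse_cloudinary_url() above maps it onto the config fields roughly like this
# (placeholder credentials; standard-library urlparse shown for Python 3):
#
#     from urllib.parse import urlparse
#     uri = urlparse("cloudinary://123456789012345:abcdefg@demo/private-dist"
#                    .replace("cloudinary://", "http://"))
#     uri.hostname   # 'demo'            -> cloud_name
#     uri.username   # '123456789012345' -> api_key
#     uri.password   # 'abcdefg'         -> api_secret
#     uri.path       # '/private-dist'   -> private_cdn=True, secure_distribution='private-dist'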
|
||||
|
||||
def __getattr__(self, i):
|
||||
if i in self.__dict__:
|
||||
return self.__dict__[i]
|
||||
else:
|
||||
return None
|
||||
|
||||
def update(self, **keywords):
|
||||
for k, v in keywords.items():
|
||||
self.__dict__[k] = v
|
||||
|
||||
def _is_nested_key(self, key):
|
||||
return re.match(r'\w+\[\w+\]', key)
|
||||
|
||||
def _put_nested_key(self, key, value):
|
||||
chain = re.split(r'[\[\]]+', key)
|
||||
chain = [key for key in chain if key]
|
||||
outer = self.__dict__
|
||||
last_key = chain.pop()
|
||||
for inner_key in chain:
|
||||
if inner_key in outer:
|
||||
inner = outer[inner_key]
|
||||
else:
|
||||
inner = dict()
|
||||
outer[inner_key] = inner
|
||||
outer = inner
|
||||
if isinstance(value, list):
|
||||
value = value[0]
|
||||
outer[last_key] = value
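
# Illustrative note (not part of the Cloudinary source): bracketed query-string
# parameters on CLOUDINARY_URL are stored as nested dicts by the two helpers above.
# For example, a hypothetical "?foo[bar]=baz" suffix would be handled as:
#
#     cfg = Config()
#     cfg._put_nested_key("foo[bar]", ["baz"])   # chain ['foo', 'bar']
#     cfg.foo   # {'bar': 'baz'}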
|
||||
|
||||
_config = Config()
|
||||
|
||||
|
||||
def config(**keywords):
|
||||
global _config
|
||||
_config.update(**keywords)
|
||||
return _config
|
||||
|
||||
|
||||
def reset_config():
|
||||
global _config
|
||||
_config = Config()
|
||||
|
||||
|
||||
@python_2_unicode_compatible
|
||||
class CloudinaryResource(object):
|
||||
def __init__(self, public_id=None, format=None, version=None,
|
||||
signature=None, url_options=None, metadata=None, type=None, resource_type=None,
|
||||
default_resource_type=None):
|
||||
self.metadata = metadata
|
||||
metadata = metadata or {}
|
||||
self.public_id = public_id or metadata.get('public_id')
|
||||
self.format = format or metadata.get('format')
|
||||
self.version = version or metadata.get('version')
|
||||
self.signature = signature or metadata.get('signature')
|
||||
self.type = type or metadata.get('type') or "upload"
|
||||
self.resource_type = resource_type or metadata.get('resource_type') or default_resource_type
|
||||
self.url_options = url_options or {}
|
||||
|
||||
def __str__(self):
|
||||
return self.public_id
|
||||
|
||||
def __len__(self):
|
||||
return len(self.public_id) if self.public_id is not None else 0
|
||||
|
||||
def validate(self):
|
||||
return self.signature == self.get_expected_signature()
|
||||
|
||||
def get_prep_value(self):
|
||||
if None in [self.public_id,
|
||||
self.type,
|
||||
self.resource_type]:
|
||||
return None
|
||||
prep = ''
|
||||
prep = prep + self.resource_type + '/' + self.type + '/'
|
||||
if self.version: prep = prep + 'v' + str(self.version) + '/'
|
||||
prep = prep + self.public_id
|
||||
if self.format: prep = prep + '.' + self.format
|
||||
return prep
|
||||
|
||||
def get_presigned(self):
|
||||
return self.get_prep_value() + '#' + self.get_expected_signature()
|
||||
|
||||
def get_expected_signature(self):
|
||||
return utils.api_sign_request({"public_id": self.public_id, "version": self.version}, config().api_secret)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return self.build_url(**self.url_options)
|
||||
|
||||
def __build_url(self, **options):
|
||||
combined_options = dict(format=self.format, version=self.version, type=self.type,
|
||||
resource_type=self.resource_type or "image")
|
||||
combined_options.update(options)
|
||||
public_id = combined_options.get('public_id') or self.public_id
|
||||
return utils.cloudinary_url(public_id, **combined_options)
|
||||
|
||||
def build_url(self, **options):
|
||||
return self.__build_url(**options)[0]
|
||||
|
||||
def default_poster_options(self, options):
|
||||
options["format"] = options.get("format", "jpg")
|
||||
|
||||
def default_source_types(self):
|
||||
return ['webm', 'mp4', 'ogv']
|
||||
|
||||
def image(self, **options):
|
||||
if options.get("resource_type", self.resource_type) == "video":
|
||||
self.default_poster_options(options)
|
||||
src, attrs = self.__build_url(**options)
|
||||
client_hints = attrs.pop("client_hints", config().client_hints)
|
||||
responsive = attrs.pop("responsive", False)
|
||||
hidpi = attrs.pop("hidpi", False)
|
||||
if (responsive or hidpi) and not client_hints:
|
||||
attrs["data-src"] = src
|
||||
classes = "cld-responsive" if responsive else "cld-hidpi"
|
||||
if "class" in attrs: classes += " " + attrs["class"]
|
||||
attrs["class"] = classes
|
||||
src = attrs.pop("responsive_placeholder", config().responsive_placeholder)
|
||||
if src == "blank": src = CL_BLANK
|
||||
|
||||
if src: attrs["src"] = src
|
||||
|
||||
return u"<img {0}/>".format(utils.html_attrs(attrs))
|
||||
|
||||
def video_thumbnail(self, **options):
|
||||
self.default_poster_options(options)
|
||||
return self.build_url(**options)
|
||||
|
||||
# Creates an HTML video tag for the provided +source+
|
||||
#
|
||||
# ==== Options
|
||||
# * <tt>source_types</tt> - Specify which source type the tag should include. defaults to webm, mp4 and ogv.
|
||||
# * <tt>source_transformation</tt> - specific transformations to use for a specific source type.
|
||||
# * <tt>poster</tt> - override default thumbnail:
|
||||
# * url: provide an ad hoc url
|
||||
# * options: with specific poster transformations and/or Cloudinary +:public_id+
|
||||
#
|
||||
# ==== Examples
|
||||
# CloudinaryResource("mymovie.mp4").video()
|
||||
# CloudinaryResource("mymovie.mp4").video(source_types = 'webm')
|
||||
# CloudinaryResource("mymovie.ogv").video(poster = "myspecialplaceholder.jpg")
|
||||
# CloudinaryResource("mymovie.webm").video(source_types = ['webm', 'mp4'], poster = {'effect': 'sepia'})
|
||||
def video(self, **options):
|
||||
public_id = options.get('public_id', self.public_id)
|
||||
source = re.sub(r"\.({0})$".format("|".join(self.default_source_types())), '', public_id)
|
||||
|
||||
source_types = options.pop('source_types', [])
|
||||
source_transformation = options.pop('source_transformation', {})
|
||||
fallback = options.pop('fallback_content', '')
|
||||
options['resource_type'] = options.pop('resource_type', self.resource_type or 'video')
|
||||
|
||||
if not source_types: source_types = self.default_source_types()
|
||||
video_options = options.copy()
|
||||
|
||||
if 'poster' in video_options:
|
||||
poster_options = video_options['poster']
|
||||
if isinstance(poster_options, dict):
|
||||
if 'public_id' in poster_options:
|
||||
video_options['poster'] = utils.cloudinary_url(poster_options['public_id'], **poster_options)[0]
|
||||
else:
|
||||
video_options['poster'] = self.video_thumbnail(public_id=source, **poster_options)
|
||||
else:
|
||||
video_options['poster'] = self.video_thumbnail(public_id=source, **options)
|
||||
|
||||
if not video_options['poster']: del video_options['poster']
|
||||
|
||||
nested_source_types = isinstance(source_types, list) and len(source_types) > 1
|
||||
if not nested_source_types:
|
||||
source = source + '.' + utils.build_array(source_types)[0]
|
||||
|
||||
video_url = utils.cloudinary_url(source, **video_options)
|
||||
video_options = video_url[1]
|
||||
if not nested_source_types:
|
||||
video_options['src'] = video_url[0]
|
||||
if 'html_width' in video_options: video_options['width'] = video_options.pop('html_width')
|
||||
if 'html_height' in video_options: video_options['height'] = video_options.pop('html_height')
|
||||
|
||||
sources = ""
|
||||
if nested_source_types:
|
||||
for source_type in source_types:
|
||||
transformation = options.copy()
|
||||
transformation.update(source_transformation.get(source_type, {}))
|
||||
src = utils.cloudinary_url(source, format=source_type, **transformation)[0]
|
||||
video_type = "ogg" if source_type == 'ogv' else source_type
|
||||
mime_type = "video/" + video_type
|
||||
sources += "<source {attributes}>".format(attributes=utils.html_attrs({'src': src, 'type': mime_type}))
|
||||
|
||||
html = "<video {attributes}>{sources}{fallback}</video>".format(
|
||||
attributes=utils.html_attrs(video_options), sources=sources, fallback=fallback)
|
||||
return html
|
||||
|
||||
|
||||
class CloudinaryImage(CloudinaryResource):
|
||||
def __init__(self, public_id=None, **kwargs):
|
||||
super(CloudinaryImage, self).__init__(public_id=public_id, default_resource_type="image", **kwargs)
|
||||
|
||||
|
||||
class CloudinaryVideo(CloudinaryResource):
|
||||
def __init__(self, public_id=None, **kwargs):
|
||||
super(CloudinaryVideo, self).__init__(public_id=public_id, default_resource_type="video", **kwargs)
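
# Illustrative usage sketch (not part of the Cloudinary source; "demo" and "sample"
# are placeholders, and the exact URL depends on the configured cloud name):
#
#     import cloudinary
#     cloudinary.config(cloud_name="demo")
#     cloudinary.CloudinaryImage("sample").build_url(width=100, crop="scale")
#     # -> something like "http://res.cloudinary.com/demo/image/upload/c_scale,w_100/sample"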
|
||||
lib/cloudinary/api.py (new file, 448 lines)
@@ -0,0 +1,448 @@
|
||||
# Copyright Cloudinary
|
||||
|
||||
import email.utils
|
||||
import json
|
||||
import socket
|
||||
|
||||
import cloudinary
|
||||
from six import string_types
|
||||
|
||||
import urllib3
|
||||
import certifi
|
||||
|
||||
from cloudinary import utils
|
||||
from urllib3.exceptions import HTTPError
|
||||
|
||||
logger = cloudinary.logger
|
||||
|
||||
# intentionally one-liners
|
||||
class Error(Exception): pass
|
||||
class NotFound(Error): pass
|
||||
class NotAllowed(Error): pass
|
||||
class AlreadyExists(Error): pass
|
||||
class RateLimited(Error): pass
|
||||
class BadRequest(Error): pass
|
||||
class GeneralError(Error): pass
|
||||
class AuthorizationRequired(Error): pass
|
||||
|
||||
|
||||
EXCEPTION_CODES = {
|
||||
400: BadRequest,
|
||||
401: AuthorizationRequired,
|
||||
403: NotAllowed,
|
||||
404: NotFound,
|
||||
409: AlreadyExists,
|
||||
420: RateLimited,
|
||||
500: GeneralError
|
||||
}
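
# Illustrative note (not part of the Cloudinary source): this mapping is presumably
# consulted by call_api(), defined further down in this file and not shown in this
# excerpt, to turn an HTTP error status into the matching exception, roughly:
#
#     exception_class = EXCEPTION_CODES.get(response.status, GeneralError)
#     raise exception_class(error_message)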
|
||||
|
||||
|
||||
class Response(dict):
|
||||
def __init__(self, result, response, **kwargs):
|
||||
super(Response, self).__init__(**kwargs)
|
||||
self.update(result)
|
||||
self.rate_limit_allowed = int(response.headers["x-featureratelimit-limit"])
|
||||
self.rate_limit_reset_at = email.utils.parsedate(response.headers["x-featureratelimit-reset"])
|
||||
self.rate_limit_remaining = int(response.headers["x-featureratelimit-remaining"])
|
||||
|
||||
_http = urllib3.PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where()
|
||||
)
|
||||
|
||||
|
||||
def ping(**options):
|
||||
return call_api("get", ["ping"], {}, **options)
|
||||
|
||||
|
||||
def usage(**options):
|
||||
return call_api("get", ["usage"], {}, **options)
|
||||
|
||||
|
||||
def resource_types(**options):
|
||||
return call_api("get", ["resources"], {}, **options)
|
||||
|
||||
|
||||
def resources(**options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", None)
|
||||
uri = ["resources", resource_type]
|
||||
if upload_type: uri.append(upload_type)
|
||||
params = only(options,
|
||||
"next_cursor", "max_results", "prefix", "tags", "context", "moderations", "direction", "start_at")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_tag(tag, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "tags", tag]
|
||||
params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_moderation(kind, status, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "moderations", kind, status]
|
||||
params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_ids(public_ids, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = dict(only(options, "tags", "moderations", "context"), public_ids=public_ids)
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resource(public_id, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type, public_id]
|
||||
params = only(options, "exif", "faces", "colors", "image_metadata", "pages", "phash", "coordinates", "max_results")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def update(public_id, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type, public_id]
|
||||
params = only(options, "moderation_status", "raw_convert",
|
||||
"quality_override", "ocr",
|
||||
"categorization", "detection", "similarity_search",
|
||||
"background_removal", "notification_url")
|
||||
if "tags" in options:
|
||||
params["tags"] = ",".join(utils.build_array(options["tags"]))
|
||||
if "face_coordinates" in options:
|
||||
params["face_coordinates"] = utils.encode_double_array(options.get("face_coordinates"))
|
||||
if "custom_coordinates" in options:
|
||||
params["custom_coordinates"] = utils.encode_double_array(options.get("custom_coordinates"))
|
||||
if "context" in options:
|
||||
params["context"] = utils.encode_context(options.get("context"))
|
||||
if "auto_tagging" in options:
|
||||
params["auto_tagging"] = str(options.get("auto_tagging"))
|
||||
if "access_control" in options:
|
||||
params["access_control"] = utils.json_encode(utils.build_list_of_dicts(options.get("access_control")))
|
||||
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources(public_ids, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = __delete_resource_params(options, public_ids=public_ids)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources_by_prefix(prefix, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = __delete_resource_params(options, prefix=prefix)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_all_resources(**options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = __delete_resource_params(options, all=True)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources_by_tag(tag, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "tags", tag]
|
||||
params = __delete_resource_params(options)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_derived_resources(derived_resource_ids, **options):
|
||||
uri = ["derived_resources"]
|
||||
params = {"derived_resource_ids": derived_resource_ids}
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_derived_by_transformation(public_ids, transformations,
|
||||
resource_type='image', type='upload', invalidate=None,
|
||||
**options):
|
||||
"""
|
||||
Delete derived resources of public ids, identified by transformations
|
||||
|
||||
:param public_ids: the base resources
|
||||
:type public_ids: list of str
|
||||
:param transformations: the transformation of derived resources, optionally including the format
|
||||
:type transformations: list of (dict or str)
|
||||
:param type: The upload type
|
||||
:type type: str
|
||||
:param resource_type: The type of the resource: defaults to "image"
|
||||
:type resource_type: str
|
||||
:param invalidate: (optional) True to invalidate the resources after deletion
|
||||
:type invalidate: bool
|
||||
:return: a list of the public ids for which derived resources were deleted
|
||||
:rtype: dict
|
||||
"""
|
||||
uri = ["resources", resource_type, type]
|
||||
if not isinstance(public_ids, list):
|
||||
public_ids = [public_ids]
|
||||
params = {"public_ids": public_ids,
|
||||
"transformations": utils.build_eager(transformations),
|
||||
"keep_original": True}
|
||||
if invalidate is not None:
|
||||
params['invalidate'] = invalidate
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def tags(**options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["tags", resource_type]
|
||||
return call_api("get", uri, only(options, "next_cursor", "max_results", "prefix"), **options)
|
||||
|
||||
|
||||
def transformations(**options):
|
||||
uri = ["transformations"]
|
||||
return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def transformation(transformation, **options):
|
||||
uri = ["transformations", transformation_string(transformation)]
|
||||
return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def delete_transformation(transformation, **options):
|
||||
uri = ["transformations", transformation_string(transformation)]
|
||||
return call_api("delete", uri, {}, **options)
|
||||
|
||||
|
||||
# updates - currently the only supported updates are the "allowed_for_strict" boolean flag and "unsafe_update"
|
||||
def update_transformation(transformation, **options):
|
||||
uri = ["transformations", transformation_string(transformation)]
|
||||
updates = only(options, "allowed_for_strict")
|
||||
if "unsafe_update" in options:
|
||||
updates["unsafe_update"] = transformation_string(options.get("unsafe_update"))
|
||||
if not updates: raise Exception("No updates given")
|
||||
|
||||
return call_api("put", uri, updates, **options)
|
||||
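For reference, a hedged sketch of calling the update above; the named transformation `small_thumb` is a hypothetical example:

```
# Sketch (hypothetical transformation name): whitelist a named transformation
# for strict mode, then redefine it in place via unsafe_update.
import cloudinary.api

cloudinary.api.update_transformation("small_thumb", allowed_for_strict=True)
cloudinary.api.update_transformation(
    "small_thumb",
    unsafe_update={"width": 150, "height": 150, "crop": "fit"},
)
```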
|
||||
|
||||
def create_transformation(name, definition, **options):
|
||||
uri = ["transformations", name]
|
||||
return call_api("post", uri, {"transformation": transformation_string(definition)}, **options)
|
||||
|
||||
|
||||
def publish_by_ids(public_ids, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "publish_resources"]
|
||||
params = dict(only(options, "type", "overwrite", "invalidate"), public_ids=public_ids)
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def publish_by_prefix(prefix, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "publish_resources"]
|
||||
params = dict(only(options, "type", "overwrite", "invalidate"), prefix=prefix)
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def publish_by_tag(tag, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "publish_resources"]
|
||||
params = dict(only(options, "type", "overwrite", "invalidate"), tag=tag)
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def upload_presets(**options):
|
||||
uri = ["upload_presets"]
|
||||
return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def upload_preset(name, **options):
|
||||
uri = ["upload_presets", name]
|
||||
return call_api("get", uri, only(options, "max_results"), **options)
|
||||
|
||||
|
||||
def delete_upload_preset(name, **options):
|
||||
uri = ["upload_presets", name]
|
||||
return call_api("delete", uri, {}, **options)
|
||||
|
||||
|
||||
def update_upload_preset(name, **options):
|
||||
uri = ["upload_presets", name]
|
||||
params = utils.build_upload_params(**options)
|
||||
params = utils.cleanup_params(params)
|
||||
params.update(only(options, "unsigned", "disallow_public_id"))
|
||||
return call_api("put", uri, params, **options)
|
||||
|
||||
|
||||
def create_upload_preset(**options):
|
||||
uri = ["upload_presets"]
|
||||
params = utils.build_upload_params(**options)
|
||||
params = utils.cleanup_params(params)
|
||||
params.update(only(options, "unsigned", "disallow_public_id", "name"))
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def root_folders(**options):
|
||||
return call_api("get", ["folders"], {}, **options)
|
||||
|
||||
|
||||
def subfolders(of_folder_path, **options):
|
||||
return call_api("get", ["folders", of_folder_path], {}, **options)
|
||||
|
||||
|
||||
def restore(public_ids, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type, "restore"]
|
||||
params = dict(public_ids=public_ids)
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def upload_mappings(**options):
|
||||
uri = ["upload_mappings"]
|
||||
return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def upload_mapping(name, **options):
|
||||
uri = ["upload_mappings"]
|
||||
params = dict(folder=name)
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def delete_upload_mapping(name, **options):
|
||||
uri = ["upload_mappings"]
|
||||
params = dict(folder=name)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def update_upload_mapping(name, **options):
|
||||
uri = ["upload_mappings"]
|
||||
params = dict(folder=name)
|
||||
params.update(only(options, "template"))
|
||||
return call_api("put", uri, params, **options)
|
||||
|
||||
|
||||
def create_upload_mapping(name, **options):
|
||||
uri = ["upload_mappings"]
|
||||
params = dict(folder=name)
|
||||
params.update(only(options, "template"))
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def list_streaming_profiles(**options):
|
||||
uri = ["streaming_profiles"]
|
||||
return call_api('GET', uri, {}, **options)
|
||||
|
||||
|
||||
def get_streaming_profile(name, **options):
|
||||
uri = ["streaming_profiles", name]
|
||||
return call_api('GET', uri, {}, **options)
|
||||
|
||||
|
||||
def delete_streaming_profile(name, **options):
|
||||
uri = ["streaming_profiles", name]
|
||||
return call_api('DELETE', uri, {}, **options)
|
||||
|
||||
|
||||
def create_streaming_profile(name, **options):
|
||||
uri = ["streaming_profiles"]
|
||||
params = __prepare_streaming_profile_params(**options)
|
||||
params["name"] = name
|
||||
return call_api('POST', uri, params, **options)
|
||||
|
||||
|
||||
def update_streaming_profile(name, **options):
|
||||
uri = ["streaming_profiles", name]
|
||||
params = __prepare_streaming_profile_params(**options)
|
||||
return call_api('PUT', uri, params, **options)
|
||||
|
||||
|
||||
def call_json_api(method, uri, jsonBody, **options):
|
||||
logger.debug(jsonBody)
|
||||
data = json.dumps(jsonBody).encode('utf-8')
|
||||
return _call_api(method, uri, body=data, headers={'Content-Type': 'application/json'}, **options)
|
||||
|
||||
|
||||
def call_api(method, uri, params, **options):
|
||||
return _call_api(method, uri, params=params, **options)
|
||||
|
||||
|
||||
def _call_api(method, uri, params=None, body=None, headers=None, **options):
|
||||
prefix = options.pop("upload_prefix",
|
||||
cloudinary.config().upload_prefix) or "https://api.cloudinary.com"
|
||||
cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name)
|
||||
if not cloud_name: raise Exception("Must supply cloud_name")
|
||||
api_key = options.pop("api_key", cloudinary.config().api_key)
|
||||
if not api_key: raise Exception("Must supply api_key")
|
||||
api_secret = options.pop("api_secret", cloudinary.config().api_secret)
|
||||
if not cloud_name: raise Exception("Must supply api_secret")
|
||||
api_url = "/".join([prefix, "v1_1", cloud_name] + uri)
|
||||
|
||||
processed_params = None
|
||||
if isinstance(params, dict):
|
||||
processed_params = {}
|
||||
for key, value in params.items():
|
||||
if isinstance(value, list):
|
||||
value_list = {"{}[{}]".format(key, i): i_value for i, i_value in enumerate(value)}
|
||||
processed_params.update(value_list)
|
||||
elif value:
|
||||
processed_params[key] = value
|
||||
|
||||
# Add authentication
|
||||
req_headers = urllib3.make_headers(
|
||||
basic_auth="{0}:{1}".format(api_key, api_secret),
|
||||
user_agent=cloudinary.get_user_agent()
|
||||
)
|
||||
if headers is not None:
|
||||
req_headers.update(headers)
|
||||
kw = {}
|
||||
if 'timeout' in options:
|
||||
kw['timeout'] = options['timeout']
|
||||
if body is not None:
|
||||
kw['body'] = body
|
||||
try:
|
||||
response = _http.request(method.upper(), api_url, processed_params, req_headers, **kw)
|
||||
body = response.data
|
||||
except HTTPError as e:
|
||||
raise GeneralError("Unexpected error {0}", e.message)
|
||||
except socket.error as e:
|
||||
raise GeneralError("Socket Error: %s" % (str(e)))
|
||||
|
||||
try:
|
||||
result = json.loads(body.decode('utf-8'))
|
||||
except Exception as e:
|
||||
# Error parsing the JSON response from the server
|
||||
raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e))
|
||||
|
||||
if "error" in result:
|
||||
exception_class = EXCEPTION_CODES.get(response.status) or Exception
|
||||
raise exception_class("Error {0} - {1}".format(response.status, result["error"]["message"]))
|
||||
|
||||
return Response(result, response)
|
||||
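To make the parameter handling above concrete, here is a small illustration (not part of the library itself) of how list-valued admin API parameters are flattened into indexed keys before the request is sent:

```
# Illustration only: the same flattening _call_api applies to list parameters.
params = {"public_ids": ["a", "b"], "keep_original": True}
processed = {}
for key, value in params.items():
    if isinstance(value, list):
        processed.update({"{}[{}]".format(key, i): v for i, v in enumerate(value)})
    elif value:
        processed[key] = value

assert processed == {"public_ids[0]": "a", "public_ids[1]": "b", "keep_original": True}
```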
|
||||
|
||||
def only(source, *keys):
|
||||
return {key: source[key] for key in keys if key in source}
|
||||
|
||||
|
||||
def transformation_string(transformation):
|
||||
if isinstance(transformation, string_types):
|
||||
return transformation
|
||||
else:
|
||||
return cloudinary.utils.generate_transformation_string(**transformation)[0]
|
||||
|
||||
|
||||
def __prepare_streaming_profile_params(**options):
|
||||
params = only(options, "display_name")
|
||||
if "representations" in options:
|
||||
representations = [{"transformation": transformation_string(trans)} for trans in options["representations"]]
|
||||
params["representations"] = json.dumps(representations)
|
||||
return params
|
||||
|
||||
def __delete_resource_params(options, **params):
|
||||
p = dict(transformations=utils.build_eager(options.get('transformations')),
|
||||
**only(options, "keep_original", "next_cursor", "invalidate"))
|
||||
p.update(params)
|
||||
return p
|
||||
47
lib/cloudinary/auth_token.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import hashlib
|
||||
import hmac
|
||||
import re
|
||||
import time
|
||||
from binascii import a2b_hex
|
||||
from cloudinary.compat import quote_plus
|
||||
|
||||
AUTH_TOKEN_NAME = "__cld_token__"
|
||||
|
||||
|
||||
|
||||
def generate(url=None, acl=None, start_time=None, duration=None, expiration=None, ip=None, key=None,
|
||||
token_name=AUTH_TOKEN_NAME):
|
||||
|
||||
if expiration is None:
|
||||
if duration is not None:
|
||||
start = start_time if start_time is not None else int(time.mktime(time.gmtime()))
|
||||
expiration = start + duration
|
||||
else:
|
||||
raise Exception("Must provide either expiration or duration")
|
||||
|
||||
token_parts = []
|
||||
if ip is not None: token_parts.append("ip=" + ip)
|
||||
if start_time is not None: token_parts.append("st=%d" % start_time)
|
||||
token_parts.append("exp=%d" % expiration)
|
||||
if acl is not None: token_parts.append("acl=%s" % _escape_to_lower(acl))
|
||||
to_sign = list(token_parts)
|
||||
if url is not None:
|
||||
to_sign.append("url=%s" % _escape_to_lower(url))
|
||||
auth = _digest("~".join(to_sign), key)
|
||||
token_parts.append("hmac=%s" % auth)
|
||||
return "%(token_name)s=%(token)s" % {"token_name": token_name, "token": "~".join(token_parts)}
|
||||
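A minimal usage sketch of `generate` above; the key, ACL, and duration are placeholder values. The result is a `__cld_token__=...` string that can be appended to a delivery URL or set as a cookie:

```
# Sketch with placeholder values: sign an ACL-restricted token valid for one
# hour from now. The key must be the hex-encoded account secret.
from cloudinary import auth_token

token = auth_token.generate(
    acl="/image/authenticated/*",
    duration=3600,  # seconds from now
    key="00112233445566778899aabbccddeeff",  # placeholder hex key
)
# token -> "__cld_token__=exp=...~acl=...~hmac=..."
```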
|
||||
|
||||
def _digest(message, key):
|
||||
bin_key = a2b_hex(key)
|
||||
return hmac.new(bin_key, message.encode('utf-8'), hashlib.sha256).hexdigest()
|
||||
|
||||
|
||||
def _escape_to_lower(url):
|
||||
escaped_url = quote_plus(url)
|
||||
|
||||
def toLowercase(match):
|
||||
return match.group(0).lower()
|
||||
|
||||
escaped_url = re.sub(r'%..', toLowercase, escaped_url)
|
||||
return escaped_url
|
||||
34
lib/cloudinary/compat.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# Copyright Cloudinary
|
||||
import six.moves.urllib.parse
|
||||
urlencode = six.moves.urllib.parse.urlencode
|
||||
unquote = six.moves.urllib.parse.unquote
|
||||
urlparse = six.moves.urllib.parse.urlparse
|
||||
parse_qs = six.moves.urllib.parse.parse_qs
|
||||
parse_qsl = six.moves.urllib.parse.parse_qsl
|
||||
quote_plus = six.moves.urllib.parse.quote_plus
|
||||
httplib = six.moves.http_client
|
||||
from six import PY3, string_types, StringIO, BytesIO
|
||||
urllib2 = six.moves.urllib.request
|
||||
NotConnected = six.moves.http_client.NotConnected
|
||||
|
||||
if PY3:
|
||||
to_bytes = lambda s: s.encode('utf8')
|
||||
to_bytearray = lambda s: bytearray(s, 'utf8')
|
||||
to_string = lambda b: b.decode('utf8')
|
||||
|
||||
else:
|
||||
to_bytes = str
|
||||
to_bytearray = str
|
||||
to_string = str
|
||||
|
||||
try:
|
||||
cldrange = xrange
|
||||
except NameError:
|
||||
def cldrange(*args, **kwargs):
|
||||
return iter(range(*args, **kwargs))
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
134
lib/cloudinary/forms.py
Normal file
@@ -0,0 +1,134 @@
|
||||
from django import forms
|
||||
from cloudinary import CloudinaryResource
|
||||
import cloudinary.uploader
|
||||
import cloudinary.utils
|
||||
import re
|
||||
import json
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
def cl_init_js_callbacks(form, request):
|
||||
for field in form.fields.values():
|
||||
if isinstance(field, CloudinaryJsFileField):
|
||||
field.enable_callback(request)
|
||||
|
||||
|
||||
class CloudinaryInput(forms.TextInput):
|
||||
input_type = 'file'
|
||||
|
||||
def render(self, name, value, attrs=None):
|
||||
attrs = self.build_attrs(attrs)
|
||||
options = attrs.get('options', {})
|
||||
attrs["options"] = ''
|
||||
|
||||
params = cloudinary.utils.build_upload_params(**options)
|
||||
if options.get("unsigned"):
|
||||
params = cloudinary.utils.cleanup_params(params)
|
||||
else:
|
||||
params = cloudinary.utils.sign_request(params, options)
|
||||
|
||||
if 'resource_type' not in options: options['resource_type'] = 'auto'
|
||||
cloudinary_upload_url = cloudinary.utils.cloudinary_api_url("upload", **options)
|
||||
|
||||
attrs["data-url"] = cloudinary_upload_url
|
||||
attrs["data-form-data"] = json.dumps(params)
|
||||
attrs["data-cloudinary-field"] = name
|
||||
chunk_size = options.get("chunk_size", None)
|
||||
if chunk_size: attrs["data-max-chunk-size"] = chunk_size
|
||||
attrs["class"] = " ".join(["cloudinary-fileupload", attrs.get("class", "")])
|
||||
|
||||
widget = super(CloudinaryInput, self).render("file", None, attrs=attrs)
|
||||
if value:
|
||||
if isinstance(value, CloudinaryResource):
|
||||
value_string = value.get_presigned()
|
||||
else:
|
||||
value_string = value
|
||||
widget += forms.HiddenInput().render(name, value_string)
|
||||
return widget
|
||||
|
||||
|
||||
class CloudinaryJsFileField(forms.Field):
|
||||
default_error_messages = {
|
||||
'required': _(u"No file selected!")
|
||||
}
|
||||
|
||||
def __init__(self, attrs=None, options=None, autosave=True, *args, **kwargs):
|
||||
if attrs is None: attrs = {}
|
||||
if options is None: options = {}
|
||||
self.autosave = autosave
|
||||
attrs = attrs.copy()
|
||||
attrs["options"] = options.copy()
|
||||
|
||||
field_options = {'widget': CloudinaryInput(attrs=attrs)}
|
||||
field_options.update(kwargs)
|
||||
super(CloudinaryJsFileField, self).__init__(*args, **field_options)
|
||||
|
||||
def enable_callback(self, request):
|
||||
from django.contrib.staticfiles.storage import staticfiles_storage
|
||||
self.widget.attrs["options"]["callback"] = request.build_absolute_uri(
|
||||
staticfiles_storage.url("html/cloudinary_cors.html"))
|
||||
|
||||
def to_python(self, value):
|
||||
"""Convert to CloudinaryResource"""
|
||||
if not value: return None
|
||||
m = re.search(r'^([^/]+)/([^/]+)/v(\d+)/([^#]+)#([^/]+)$', value)
|
||||
if not m:
|
||||
raise forms.ValidationError("Invalid format")
|
||||
resource_type = m.group(1)
|
||||
upload_type = m.group(2)
|
||||
version = m.group(3)
|
||||
filename = m.group(4)
|
||||
signature = m.group(5)
|
||||
m = re.search(r'(.*)\.(.*)', filename)
|
||||
if not m:
|
||||
raise forms.ValidationError("Invalid file name")
|
||||
public_id = m.group(1)
|
||||
image_format = m.group(2)
|
||||
return CloudinaryResource(public_id,
|
||||
format=image_format,
|
||||
version=version,
|
||||
signature=signature,
|
||||
type=upload_type,
|
||||
resource_type=resource_type)
|
||||
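For clarity, the hidden-field value parsed above is expected to look like `<resource_type>/<type>/v<version>/<public_id>.<format>#<signature>`; the value below is a hypothetical example showing which groups end up where:

```
# Hypothetical widget value and the pieces to_python() extracts from it:
#   "image/upload/v1571218039/folder/sample.jpg#abcdef0123456789"
#     resource_type -> "image"
#     type          -> "upload"
#     version       -> "1571218039"
#     public_id     -> "folder/sample"
#     format        -> "jpg"
#     signature     -> "abcdef0123456789"
```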
|
||||
def validate(self, value):
|
||||
"""Validate the signature"""
|
||||
# Use the parent's handling of required fields, etc.
|
||||
super(CloudinaryJsFileField, self).validate(value)
|
||||
if not value: return
|
||||
if not value.validate():
|
||||
raise forms.ValidationError("Signature mismatch")
|
||||
|
||||
|
||||
class CloudinaryUnsignedJsFileField(CloudinaryJsFileField):
|
||||
def __init__(self, upload_preset, attrs=None, options=None, autosave=True, *args, **kwargs):
|
||||
if attrs is None:
|
||||
attrs = {}
|
||||
if options is None:
|
||||
options = {}
|
||||
options = options.copy()
|
||||
options.update({"unsigned": True, "upload_preset": upload_preset})
|
||||
super(CloudinaryUnsignedJsFileField, self).__init__(attrs, options, autosave, *args, **kwargs)
|
||||
|
||||
|
||||
class CloudinaryFileField(forms.FileField):
|
||||
my_default_error_messages = {
|
||||
'required': _(u"No file selected!")
|
||||
}
|
||||
default_error_messages = forms.FileField.default_error_messages.copy()
|
||||
default_error_messages.update(my_default_error_messages)
|
||||
|
||||
def __init__(self, options=None, autosave=True, *args, **kwargs):
|
||||
self.autosave = autosave
|
||||
self.options = options or {}
|
||||
super(CloudinaryFileField, self).__init__(*args, **kwargs)
|
||||
|
||||
def to_python(self, value):
|
||||
"""Upload and convert to CloudinaryResource"""
|
||||
value = super(CloudinaryFileField, self).to_python(value)
|
||||
if not value:
|
||||
return None
|
||||
if self.autosave:
|
||||
return cloudinary.uploader.upload_image(value, **self.options)
|
||||
else:
|
||||
return value
|
||||
121
lib/cloudinary/models.py
Normal file
@@ -0,0 +1,121 @@
|
||||
import re
|
||||
|
||||
|
||||
from cloudinary import CloudinaryResource, forms, uploader
|
||||
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.db import models
|
||||
|
||||
# Add introspection rules for South, if it's installed.
|
||||
try:
|
||||
from south.modelsinspector import add_introspection_rules
|
||||
add_introspection_rules([], ["^cloudinary.models.CloudinaryField"])
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
CLOUDINARY_FIELD_DB_RE = r'(?:(?P<resource_type>image|raw|video)/(?P<type>upload|private|authenticated)/)?(?:v(?P<version>\d+)/)?(?P<public_id>.*?)(\.(?P<format>[^.]+))?$'
|
||||
|
||||
|
||||
# Taken from six - https://pythonhosted.org/six/
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||
# metaclass for one level of class instantiation that replaces itself with
|
||||
# the actual metaclass.
|
||||
class metaclass(meta):
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
class CloudinaryField(models.Field):
|
||||
description = "A resource stored in Cloudinary"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
options = {'max_length': 255}
|
||||
self.default_form_class = kwargs.pop("default_form_class", forms.CloudinaryFileField)
|
||||
options.update(kwargs)
|
||||
self.type = options.pop("type", "upload")
|
||||
self.resource_type = options.pop("resource_type", "image")
|
||||
self.width_field = options.pop("width_field", None)
|
||||
self.height_field = options.pop("height_field", None)
|
||||
super(CloudinaryField, self).__init__(*args, **options)
|
||||
|
||||
def get_internal_type(self):
|
||||
return 'CharField'
|
||||
|
||||
def value_to_string(self, obj):
|
||||
# We need to support both legacy `_get_val_from_obj` and new `value_from_object` models.Field methods.
|
||||
# It would be cleaner to wrap the call in try/except AttributeError and fall back to the legacy method,
|
||||
# but that could also swallow an AttributeError raised from inside `value_from_object` itself.
|
||||
# Parsing the exception string would be overkill here, which is why we check for attribute existence instead.
|
||||
|
||||
if hasattr(self, 'value_from_object'):
|
||||
value = self.value_from_object(obj)
|
||||
else: # fallback for legacy django versions
|
||||
value = self._get_val_from_obj(obj)
|
||||
|
||||
return self.get_prep_value(value)
|
||||
|
||||
def parse_cloudinary_resource(self, value):
|
||||
m = re.match(CLOUDINARY_FIELD_DB_RE, value)
|
||||
resource_type = m.group('resource_type') or self.resource_type
|
||||
upload_type = m.group('type') or self.type
|
||||
return CloudinaryResource(
|
||||
type=upload_type,
|
||||
resource_type=resource_type,
|
||||
version=m.group('version'),
|
||||
public_id=m.group('public_id'),
|
||||
format=m.group('format')
|
||||
)
|
||||
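As a sketch of how `CLOUDINARY_FIELD_DB_RE` maps stored column values back to resources (the stored strings are hypothetical):

```
# Hypothetical stored values and how parse_cloudinary_resource reads them:
#   "image/upload/v1234/sample.jpg"
#       -> resource_type="image", type="upload", version="1234",
#          public_id="sample", format="jpg"
#   "sample"
#       -> falls back to the field defaults (resource_type="image",
#          type="upload"); public_id="sample", no version or format
```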
|
||||
def from_db_value(self, value, expression, connection, context):
|
||||
if value is None:
|
||||
return value
|
||||
return self.parse_cloudinary_resource(value)
|
||||
|
||||
def to_python(self, value):
|
||||
if isinstance(value, CloudinaryResource):
|
||||
return value
|
||||
elif isinstance(value, UploadedFile):
|
||||
return value
|
||||
elif value is None:
|
||||
return value
|
||||
else:
|
||||
return self.parse_cloudinary_resource(value)
|
||||
|
||||
def upload_options_with_filename(self, model_instance, filename):
|
||||
return self.upload_options(model_instance)
|
||||
|
||||
def upload_options(self, model_instance):
|
||||
return {}
|
||||
|
||||
def pre_save(self, model_instance, add):
|
||||
value = super(CloudinaryField, self).pre_save(model_instance, add)
|
||||
if isinstance(value, UploadedFile):
|
||||
options = {"type": self.type, "resource_type": self.resource_type}
|
||||
options.update(self.upload_options_with_filename(model_instance, value.name))
|
||||
instance_value = uploader.upload_resource(value, **options)
|
||||
setattr(model_instance, self.attname, instance_value)
|
||||
if self.width_field:
|
||||
setattr(model_instance, self.width_field, instance_value.metadata['width'])
|
||||
if self.height_field:
|
||||
setattr(model_instance, self.height_field, instance_value.metadata['height'])
|
||||
return self.get_prep_value(instance_value)
|
||||
else:
|
||||
return value
|
||||
|
||||
def get_prep_value(self, value):
|
||||
if not value:
|
||||
return self.get_default()
|
||||
if isinstance(value, CloudinaryResource):
|
||||
return value.get_prep_value()
|
||||
else:
|
||||
return value
|
||||
|
||||
def formfield(self, **kwargs):
|
||||
options = {"type": self.type, "resource_type": self.resource_type}
|
||||
options.update(kwargs.pop('options', {}))
|
||||
defaults = {'form_class': self.default_form_class, 'options': options, 'autosave': False}
|
||||
defaults.update(kwargs)
|
||||
return super(CloudinaryField, self).formfield(**defaults)
|
||||
34
lib/cloudinary/poster/__init__.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
#
|
||||
# Copyright (c) 2011 Chris AtLee
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
"""poster module
|
||||
|
||||
Support for streaming HTTP uploads, and multipart/form-data encoding
|
||||
|
||||
```poster.version``` is a 3-tuple of integers representing the version number.
|
||||
New releases of poster will always have a version number that compares greater
|
||||
than an older version of poster.
|
||||
New in version 0.6."""
|
||||
|
||||
import cloudinary.poster.streaminghttp
|
||||
import cloudinary.poster.encode
|
||||
|
||||
version = (0, 8, 2) # Thanks JP!
|
||||
447
lib/cloudinary/poster/encode.py
Normal file
@@ -0,0 +1,447 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""multipart/form-data encoding module
|
||||
|
||||
This module provides functions that facilitate encoding name/value pairs
|
||||
as multipart/form-data suitable for an HTTP POST or PUT request.
|
||||
|
||||
multipart/form-data is the standard way to upload files over HTTP"""
|
||||
|
||||
__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam',
|
||||
'encode_string', 'encode_file_header', 'get_body_size', 'get_headers',
|
||||
'multipart_encode']
|
||||
|
||||
try:
|
||||
from io import UnsupportedOperation
|
||||
except ImportError:
|
||||
UnsupportedOperation = None
|
||||
|
||||
try:
|
||||
import uuid
|
||||
def gen_boundary():
|
||||
"""Returns a random string to use as the boundary for a message"""
|
||||
return uuid.uuid4().hex
|
||||
except ImportError:
|
||||
import random, sha
|
||||
def gen_boundary():
|
||||
"""Returns a random string to use as the boundary for a message"""
|
||||
bits = random.getrandbits(160)
|
||||
return sha.new(str(bits)).hexdigest()
|
||||
|
||||
import re, os, mimetypes
|
||||
from cloudinary.compat import (PY3, string_types, to_bytes, to_string,
|
||||
to_bytearray, quote_plus, advance_iterator)
|
||||
try:
|
||||
from email.header import Header
|
||||
except ImportError:
|
||||
# Python 2.4
|
||||
from email.Header import Header
|
||||
|
||||
if PY3:
|
||||
def encode_and_quote(data):
|
||||
if data is None:
|
||||
return None
|
||||
return quote_plus(to_bytes(data))
|
||||
|
||||
else:
|
||||
def encode_and_quote(data):
|
||||
"""If ``data`` is unicode, return quote_plus(data.encode("utf-8")) otherwise return quote_plus(data)"""
|
||||
if data is None:
|
||||
return None
|
||||
|
||||
if isinstance(data, unicode):
|
||||
data = data.encode("utf-8")
|
||||
return quote_plus(data)
|
||||
|
||||
if PY3:
|
||||
def _strify(s):
|
||||
if s is None:
|
||||
return None
|
||||
elif isinstance(s, bytes):
|
||||
return s
|
||||
else:
|
||||
try:
|
||||
return to_bytes(s)
|
||||
except AttributeError:
|
||||
return to_bytes(str(s))
|
||||
else:
|
||||
def _strify(s):
|
||||
"""If s is a unicode string, encode it to UTF-8 and return the results, otherwise return str(s), or None if s is None"""
|
||||
if s is None:
|
||||
return None
|
||||
if isinstance(s, unicode):
|
||||
return s.encode("utf-8")
|
||||
return str(s)
|
||||
|
||||
class MultipartParam(object):
|
||||
"""Represents a single parameter in a multipart/form-data request
|
||||
|
||||
``name`` is the name of this parameter.
|
||||
|
||||
If ``value`` is set, it must be a string or unicode object to use as the
|
||||
data for this parameter.
|
||||
|
||||
If ``filename`` is set, it is reported as this parameter's filename.
|
||||
Note that it does not have to be the actual filename of any local file.
|
||||
|
||||
If ``filetype`` is set, it is used as the Content-Type for this parameter.
|
||||
If unset it defaults to "text/plain; charset=utf8"
|
||||
|
||||
If ``filesize`` is set, it specifies the length of the file ``fileobj``
|
||||
|
||||
If ``fileobj`` is set, it must be a file-like object that supports
|
||||
.read().
|
||||
|
||||
``value`` and ``fileobj`` must not both be set; doing so
|
||||
raises a ValueError.
|
||||
|
||||
If ``fileobj`` is set, and ``filesize`` is not specified, then
|
||||
the file's size will be determined first by stat'ing ``fileobj``'s
|
||||
file descriptor, and if that fails, by seeking to the end of the file,
|
||||
recording the current position as the size, and then by seeking back to the
|
||||
beginning of the file.
|
||||
|
||||
``cb`` is a callable which will be called from iter_encode with (self,
|
||||
current, total), representing the current parameter, current amount
|
||||
transferred, and the total size.
|
||||
"""
|
||||
def __init__(self, name, value=None, filename=None, filetype=None,
|
||||
filesize=None, fileobj=None, cb=None):
|
||||
self.name = Header(name).encode()
|
||||
self.value = _strify(value)
|
||||
if filename is None:
|
||||
self.filename = None
|
||||
else:
|
||||
if PY3:
|
||||
byte_filename = filename.encode("ascii", "xmlcharrefreplace")
|
||||
self.filename = to_string(byte_filename)
|
||||
encoding = 'unicode_escape'
|
||||
else:
|
||||
if isinstance(filename, unicode):
|
||||
# Encode with XML entities
|
||||
self.filename = filename.encode("ascii", "xmlcharrefreplace")
|
||||
else:
|
||||
self.filename = str(filename)
|
||||
encoding = 'string_escape'
|
||||
self.filename = self.filename.encode(encoding).replace(to_bytes('"'), to_bytes('\\"'))
|
||||
self.filetype = _strify(filetype)
|
||||
|
||||
self.filesize = filesize
|
||||
self.fileobj = fileobj
|
||||
self.cb = cb
|
||||
|
||||
if self.value is not None and self.fileobj is not None:
|
||||
raise ValueError("Only one of value or fileobj may be specified")
|
||||
|
||||
if fileobj is not None and filesize is None:
|
||||
# Try and determine the file size
|
||||
try:
|
||||
self.filesize = os.fstat(fileobj.fileno()).st_size
|
||||
except (OSError, AttributeError, UnsupportedOperation):
|
||||
try:
|
||||
fileobj.seek(0, 2)
|
||||
self.filesize = fileobj.tell()
|
||||
fileobj.seek(0)
|
||||
except:
|
||||
raise ValueError("Could not determine filesize")
|
||||
|
||||
def __cmp__(self, other):
|
||||
attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj']
|
||||
myattrs = [getattr(self, a) for a in attrs]
|
||||
oattrs = [getattr(other, a) for a in attrs]
|
||||
return cmp(myattrs, oattrs)
|
||||
|
||||
def reset(self):
|
||||
if self.fileobj is not None:
|
||||
self.fileobj.seek(0)
|
||||
elif self.value is None:
|
||||
raise ValueError("Don't know how to reset this parameter")
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, paramname, filename):
|
||||
"""Returns a new MultipartParam object constructed from the local
|
||||
file at ``filename``.
|
||||
|
||||
``filesize`` is determined by os.path.getsize(``filename``)
|
||||
|
||||
``filetype`` is determined by mimetypes.guess_type(``filename``)[0]
|
||||
|
||||
``filename`` is set to os.path.basename(``filename``)
|
||||
"""
|
||||
|
||||
return cls(paramname, filename=os.path.basename(filename),
|
||||
filetype=mimetypes.guess_type(filename)[0],
|
||||
filesize=os.path.getsize(filename),
|
||||
fileobj=open(filename, "rb"))
|
||||
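A small sketch of the `from_file` helper above; the file path and preset name are placeholders. The resulting parameter can be mixed with plain string fields and passed to `multipart_encode`:

```
# Sketch with placeholder values: build a file parameter and encode a form
# containing it together with a plain string field.
from cloudinary.poster.encode import MultipartParam, multipart_encode

file_param = MultipartParam.from_file("file", "/tmp/sample.jpg")
datagen, headers = multipart_encode([("upload_preset", "my_preset"), file_param])
body = b"".join(datagen)  # or stream the generator block by block
```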
|
||||
@classmethod
|
||||
def from_params(cls, params):
|
||||
"""Returns a list of MultipartParam objects from a sequence of
|
||||
name, value pairs, MultipartParam instances,
|
||||
or from a mapping of names to values
|
||||
|
||||
The values may be strings or file objects, or MultipartParam objects.
|
||||
MultipartParam object names must match the given names in the
|
||||
name,value pairs or mapping, if applicable."""
|
||||
if hasattr(params, 'items'):
|
||||
params = params.items()
|
||||
|
||||
retval = []
|
||||
for item in params:
|
||||
if isinstance(item, cls):
|
||||
retval.append(item)
|
||||
continue
|
||||
name, value = item
|
||||
if isinstance(value, cls):
|
||||
assert value.name == name
|
||||
retval.append(value)
|
||||
continue
|
||||
if hasattr(value, 'read'):
|
||||
# Looks like a file object
|
||||
filename = getattr(value, 'name', None)
|
||||
if filename is not None:
|
||||
filetype = mimetypes.guess_type(filename)[0]
|
||||
else:
|
||||
filetype = None
|
||||
|
||||
retval.append(cls(name=name, filename=filename,
|
||||
filetype=filetype, fileobj=value))
|
||||
else:
|
||||
retval.append(cls(name, value))
|
||||
return retval
|
||||
|
||||
def encode_hdr(self, boundary):
|
||||
"""Returns the header of the encoding of this parameter"""
|
||||
boundary = encode_and_quote(boundary)
|
||||
|
||||
headers = ["--%s" % boundary]
|
||||
|
||||
if self.filename:
|
||||
disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
|
||||
to_string(self.filename))
|
||||
else:
|
||||
disposition = 'form-data; name="%s"' % self.name
|
||||
|
||||
headers.append("Content-Disposition: %s" % disposition)
|
||||
|
||||
if self.filetype:
|
||||
filetype = to_string(self.filetype)
|
||||
else:
|
||||
filetype = "text/plain; charset=utf-8"
|
||||
|
||||
headers.append("Content-Type: %s" % filetype)
|
||||
|
||||
headers.append("")
|
||||
headers.append("")
|
||||
|
||||
return "\r\n".join(headers)
|
||||
|
||||
def encode(self, boundary):
|
||||
"""Returns the string encoding of this parameter"""
|
||||
if self.value is None:
|
||||
value = self.fileobj.read()
|
||||
else:
|
||||
value = self.value
|
||||
|
||||
if re.search(to_bytes("^--%s$" % re.escape(boundary)), value, re.M):
|
||||
raise ValueError("boundary found in encoded string")
|
||||
|
||||
return to_bytes(self.encode_hdr(boundary)) + value + b"\r\n"
|
||||
|
||||
def iter_encode(self, boundary, blocksize=4096):
|
||||
"""Yields the encoding of this parameter
|
||||
If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
|
||||
yielded."""
|
||||
total = self.get_size(boundary)
|
||||
current = 0
|
||||
if self.value is not None:
|
||||
block = self.encode(boundary)
|
||||
current += len(block)
|
||||
yield block
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
else:
|
||||
block = to_bytes(self.encode_hdr(boundary))
|
||||
current += len(block)
|
||||
yield block
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
last_block = to_bytearray("")
|
||||
encoded_boundary = "--%s" % encode_and_quote(boundary)
|
||||
boundary_exp = re.compile(to_bytes("^%s$" % re.escape(encoded_boundary)),
|
||||
re.M)
|
||||
while True:
|
||||
block = self.fileobj.read(blocksize)
|
||||
if not block:
|
||||
current += 2
|
||||
yield to_bytes("\r\n")
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
break
|
||||
last_block += block
|
||||
if boundary_exp.search(last_block):
|
||||
raise ValueError("boundary found in file data")
|
||||
last_block = last_block[-len(to_bytes(encoded_boundary))-2:]
|
||||
current += len(block)
|
||||
yield block
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
|
||||
def get_size(self, boundary):
|
||||
"""Returns the size in bytes that this param will be when encoded
|
||||
with the given boundary."""
|
||||
if self.filesize is not None:
|
||||
valuesize = self.filesize
|
||||
else:
|
||||
valuesize = len(self.value)
|
||||
|
||||
return len(self.encode_hdr(boundary)) + 2 + valuesize
|
||||
|
||||
def encode_string(boundary, name, value):
|
||||
"""Returns ``name`` and ``value`` encoded as a multipart/form-data
|
||||
variable. ``boundary`` is the boundary string used throughout
|
||||
a single request to separate variables."""
|
||||
|
||||
return MultipartParam(name, value).encode(boundary)
|
||||
|
||||
def encode_file_header(boundary, paramname, filesize, filename=None,
|
||||
filetype=None):
|
||||
"""Returns the leading data for a multipart/form-data field that contains
|
||||
file data.
|
||||
|
||||
``boundary`` is the boundary string used throughout a single request to
|
||||
separate variables.
|
||||
|
||||
``paramname`` is the name of the variable in this request.
|
||||
|
||||
``filesize`` is the size of the file data.
|
||||
|
||||
``filename`` if specified is the filename to give to this field. This
|
||||
field is only useful to the server for determining the original filename.
|
||||
|
||||
``filetype`` if specified is the MIME type of this file.
|
||||
|
||||
The actual file data should be sent after this header has been sent.
|
||||
"""
|
||||
|
||||
return MultipartParam(paramname, filesize=filesize, filename=filename,
|
||||
filetype=filetype).encode_hdr(boundary)
|
||||
|
||||
def get_body_size(params, boundary):
|
||||
"""Returns the number of bytes that the multipart/form-data encoding
|
||||
of ``params`` will be."""
|
||||
size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params))
|
||||
return size + len(boundary) + 6
|
||||
|
||||
def get_headers(params, boundary):
|
||||
"""Returns a dictionary with Content-Type and Content-Length headers
|
||||
for the multipart/form-data encoding of ``params``."""
|
||||
headers = {}
|
||||
boundary = quote_plus(boundary)
|
||||
headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary
|
||||
headers['Content-Length'] = str(get_body_size(params, boundary))
|
||||
return headers
|
||||
|
||||
class multipart_yielder:
|
||||
def __init__(self, params, boundary, cb):
|
||||
self.params = params
|
||||
self.boundary = boundary
|
||||
self.cb = cb
|
||||
|
||||
self.i = 0
|
||||
self.p = None
|
||||
self.param_iter = None
|
||||
self.current = 0
|
||||
self.total = get_body_size(params, boundary)
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
return self.next()
|
||||
|
||||
def next(self):
|
||||
"""generator function to yield multipart/form-data representation
|
||||
of parameters"""
|
||||
if self.param_iter is not None:
|
||||
try:
|
||||
block = advance_iterator(self.param_iter)
|
||||
self.current += len(block)
|
||||
if self.cb:
|
||||
self.cb(self.p, self.current, self.total)
|
||||
return block
|
||||
except StopIteration:
|
||||
self.p = None
|
||||
self.param_iter = None
|
||||
|
||||
if self.i is None:
|
||||
raise StopIteration
|
||||
elif self.i >= len(self.params):
|
||||
self.param_iter = None
|
||||
self.p = None
|
||||
self.i = None
|
||||
block = to_bytes("--%s--\r\n" % self.boundary)
|
||||
self.current += len(block)
|
||||
if self.cb:
|
||||
self.cb(self.p, self.current, self.total)
|
||||
return block
|
||||
|
||||
self.p = self.params[self.i]
|
||||
self.param_iter = self.p.iter_encode(self.boundary)
|
||||
self.i += 1
|
||||
return advance_iterator(self)
|
||||
|
||||
def reset(self):
|
||||
self.i = 0
|
||||
self.current = 0
|
||||
for param in self.params:
|
||||
param.reset()
|
||||
|
||||
def multipart_encode(params, boundary=None, cb=None):
|
||||
"""Encode ``params`` as multipart/form-data.
|
||||
|
||||
``params`` should be a sequence of (name, value) pairs or MultipartParam
|
||||
objects, or a mapping of names to values.
|
||||
Values are either strings parameter values, or file-like objects to use as
|
||||
the parameter value. The file-like objects must support .read() and either
|
||||
.fileno() or both .seek() and .tell().
|
||||
|
||||
If ``boundary`` is set, then it is used as the MIME boundary. Otherwise
|
||||
a randomly generated boundary will be used. In either case, if the
|
||||
boundary string appears in the parameter values a ValueError will be
|
||||
raised.
|
||||
|
||||
If ``cb`` is set, it should be a callback which will get called as blocks
|
||||
of data are encoded. It will be called with (param, current, total),
|
||||
indicating the current parameter being encoded, the current amount encoded,
|
||||
and the total amount to encode.
|
||||
|
||||
Returns a tuple of `datagen`, `headers`, where `datagen` is a
|
||||
generator that will yield blocks of data that make up the encoded
|
||||
parameters, and `headers` is a dictionary with the associated
|
||||
Content-Type and Content-Length headers.
|
||||
|
||||
Examples:
|
||||
|
||||
>>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
|
||||
>>> s = "".join(datagen)
|
||||
>>> assert "value2" in s and "value1" in s
|
||||
|
||||
>>> p = MultipartParam("key", "value2")
|
||||
>>> datagen, headers = multipart_encode( [("key", "value1"), p] )
|
||||
>>> s = "".join(datagen)
|
||||
>>> assert "value2" in s and "value1" in s
|
||||
|
||||
>>> datagen, headers = multipart_encode( {"key": "value1"} )
|
||||
>>> s = "".join(datagen)
|
||||
>>> assert "value2" not in s and "value1" in s
|
||||
|
||||
"""
|
||||
if boundary is None:
|
||||
boundary = gen_boundary()
|
||||
else:
|
||||
boundary = quote_plus(boundary)
|
||||
|
||||
headers = get_headers(params, boundary)
|
||||
params = MultipartParam.from_params(params)
|
||||
|
||||
return multipart_yielder(params, boundary, cb), headers
|
||||
201
lib/cloudinary/poster/streaminghttp.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""Streaming HTTP uploads module.
|
||||
|
||||
This module extends the standard httplib and urllib2 objects so that
|
||||
iterable objects can be used in the body of HTTP requests.
|
||||
|
||||
In most cases all one should have to do is call :func:`register_openers()`
|
||||
to register the new streaming http handlers which will take priority over
|
||||
the default handlers, and then you can use iterable objects in the body
|
||||
of HTTP requests.
|
||||
|
||||
**N.B.** You must specify a Content-Length header if using an iterable object
|
||||
since there is no way to determine in advance the total size that will be
|
||||
yielded, and there is no way to reset an iterator.
|
||||
|
||||
Example usage:
|
||||
|
||||
>>> from StringIO import StringIO
|
||||
>>> import urllib2, poster.streaminghttp
|
||||
|
||||
>>> opener = poster.streaminghttp.register_openers()
|
||||
|
||||
>>> s = "Test file data"
|
||||
>>> f = StringIO(s)
|
||||
|
||||
>>> req = urllib2.Request("http://localhost:5000", f,
|
||||
... {'Content-Length': str(len(s))})
|
||||
"""
|
||||
|
||||
import sys, socket
|
||||
from cloudinary.compat import httplib, urllib2, NotConnected
|
||||
|
||||
__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
|
||||
'StreamingHTTPHandler', 'register_openers']
|
||||
|
||||
if hasattr(httplib, 'HTTPS'):
|
||||
__all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
|
||||
|
||||
class _StreamingHTTPMixin:
|
||||
"""Mixin class for HTTP and HTTPS connections that implements a streaming
|
||||
send method."""
|
||||
def send(self, value):
|
||||
"""Send ``value`` to the server.
|
||||
|
||||
``value`` can be a string object, a file-like object that supports
|
||||
a .read() method, or an iterable object that supports a .next()
|
||||
method.
|
||||
"""
|
||||
# Based on python 2.6's httplib.HTTPConnection.send()
|
||||
if self.sock is None:
|
||||
if self.auto_open:
|
||||
self.connect()
|
||||
else:
|
||||
raise NotConnected()
|
||||
|
||||
# send the data to the server. if we get a broken pipe, then close
|
||||
# the socket. we want to reconnect when somebody tries to send again.
|
||||
#
|
||||
# NOTE: we DO propagate the error, though, because we cannot simply
|
||||
# ignore the error... the caller will know if they can retry.
|
||||
if self.debuglevel > 0:
|
||||
print("send:", repr(value))
|
||||
try:
|
||||
blocksize = 8192
|
||||
if hasattr(value, 'read') :
|
||||
if hasattr(value, 'seek'):
|
||||
value.seek(0)
|
||||
if self.debuglevel > 0:
|
||||
print("sendIng a read()able")
|
||||
data = value.read(blocksize)
|
||||
while data:
|
||||
self.sock.sendall(data)
|
||||
data = value.read(blocksize)
|
||||
elif hasattr(value, 'next'):
|
||||
if hasattr(value, 'reset'):
|
||||
value.reset()
|
||||
if self.debuglevel > 0:
|
||||
print("sendIng an iterable")
|
||||
for data in value:
|
||||
self.sock.sendall(data)
|
||||
else:
|
||||
self.sock.sendall(value)
|
||||
except socket.error:
|
||||
e = sys.exc_info()[1]
|
||||
if e[0] == 32: # Broken pipe
|
||||
self.close()
|
||||
raise
|
||||
|
||||
class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
|
||||
"""Subclass of `httplib.HTTPConnection` that overrides the `send()` method
|
||||
to support iterable body objects"""
|
||||
|
||||
class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
|
||||
"""Subclass of `urllib2.HTTPRedirectHandler` that overrides the
|
||||
`redirect_request` method to properly handle redirected POST requests
|
||||
|
||||
This class is required because python 2.5's HTTPRedirectHandler does
|
||||
not remove the Content-Type or Content-Length headers when re-requesting
|
||||
the new resource, even though the body of the original request is not preserved.
|
||||
"""
|
||||
|
||||
handler_order = urllib2.HTTPRedirectHandler.handler_order - 1
|
||||
|
||||
# From python2.6 urllib2's HTTPRedirectHandler
|
||||
def redirect_request(self, req, fp, code, msg, headers, newurl):
|
||||
"""Return a Request or None in response to a redirect.
|
||||
|
||||
This is called by the http_error_30x methods when a
|
||||
redirection response is received. If a redirection should
|
||||
take place, return a new Request to allow http_error_30x to
|
||||
perform the redirect. Otherwise, raise HTTPError if no-one
|
||||
else should try to handle this url. Return None if you can't
|
||||
but another Handler might.
|
||||
"""
|
||||
m = req.get_method()
|
||||
if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
|
||||
or code in (301, 302, 303) and m == "POST"):
|
||||
# Strictly (according to RFC 2616), 301 or 302 in response
|
||||
# to a POST MUST NOT cause a redirection without confirmation
|
||||
# from the user (of urllib2, in this case). In practice,
|
||||
# essentially all clients do redirect in this case, so we
|
||||
# do the same.
|
||||
# be conciliant with URIs containing a space
|
||||
newurl = newurl.replace(' ', '%20')
|
||||
newheaders = dict((k, v) for k, v in req.headers.items()
|
||||
if k.lower() not in (
|
||||
"content-length", "content-type")
|
||||
)
|
||||
return urllib2.Request(newurl,
|
||||
headers=newheaders,
|
||||
origin_req_host=req.get_origin_req_host(),
|
||||
unverifiable=True)
|
||||
else:
|
||||
raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
|
||||
|
||||
class StreamingHTTPHandler(urllib2.HTTPHandler):
|
||||
"""Subclass of `urllib2.HTTPHandler` that uses
|
||||
StreamingHTTPConnection as its http connection class."""
|
||||
|
||||
handler_order = urllib2.HTTPHandler.handler_order - 1
|
||||
|
||||
def http_open(self, req):
|
||||
"""Open a StreamingHTTPConnection for the given request"""
|
||||
return self.do_open(StreamingHTTPConnection, req)
|
||||
|
||||
def http_request(self, req):
|
||||
"""Handle a HTTP request. Make sure that Content-Length is specified
|
||||
if we're using an interable value"""
|
||||
# Make sure that if we're using an iterable object as the request
|
||||
# body, that we've also specified Content-Length
|
||||
if req.has_data():
|
||||
data = req.get_data()
|
||||
if hasattr(data, 'read') or hasattr(data, 'next'):
|
||||
if not req.has_header('Content-length'):
|
||||
raise ValueError(
|
||||
"No Content-Length specified for iterable body")
|
||||
return urllib2.HTTPHandler.do_request_(self, req)
|
||||
|
||||
if hasattr(httplib, 'HTTPS'):
|
||||
class StreamingHTTPSConnection(_StreamingHTTPMixin,
|
||||
httplib.HTTPSConnection):
|
||||
"""Subclass of `httplib.HTTSConnection` that overrides the `send()`
|
||||
method to support iterable body objects"""
|
||||
|
||||
class StreamingHTTPSHandler(urllib2.HTTPSHandler):
|
||||
"""Subclass of `urllib2.HTTPSHandler` that uses
|
||||
StreamingHTTPSConnection as its http connection class."""
|
||||
|
||||
handler_order = urllib2.HTTPSHandler.handler_order - 1
|
||||
|
||||
def https_open(self, req):
|
||||
return self.do_open(StreamingHTTPSConnection, req)
|
||||
|
||||
def https_request(self, req):
|
||||
# Make sure that if we're using an iterable object as the request
|
||||
# body, that we've also specified Content-Length
|
||||
if req.has_data():
|
||||
data = req.get_data()
|
||||
if hasattr(data, 'read') or hasattr(data, 'next'):
|
||||
if not req.has_header('Content-length'):
|
||||
raise ValueError(
|
||||
"No Content-Length specified for iterable body")
|
||||
return urllib2.HTTPSHandler.do_request_(self, req)
|
||||
|
||||
|
||||
def get_handlers():
|
||||
handlers = [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
|
||||
if hasattr(httplib, "HTTPS"):
|
||||
handlers.append(StreamingHTTPSHandler)
|
||||
return handlers
|
||||
|
||||
def register_openers():
|
||||
"""Register the streaming http handlers in the global urllib2 default
|
||||
opener object.
|
||||
|
||||
Returns the created OpenerDirector object."""
|
||||
opener = urllib2.build_opener(*get_handlers())
|
||||
|
||||
urllib2.install_opener(opener)
|
||||
|
||||
return opener
|
||||
59
lib/cloudinary/search.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import json
|
||||
from copy import deepcopy
|
||||
from . import api
|
||||
|
||||
|
||||
class Search:
|
||||
"""Build and execute a search query."""
|
||||
def __init__(self):
|
||||
self.query = {}
|
||||
|
||||
def expression(self, value):
|
||||
"""Specify the search query expression."""
|
||||
self.query["expression"] = value
|
||||
return self
|
||||
|
||||
def max_results(self, value):
|
||||
"""Set the max results to return"""
|
||||
self.query["max_results"] = value
|
||||
return self
|
||||
|
||||
def next_cursor(self, value):
|
||||
"""Get next page in the query using the ``next_cursor`` value from a previous invocation."""
|
||||
self.query["next_cursor"] = value
|
||||
return self
|
||||
|
||||
def sort_by(self, field_name, direction=None):
|
||||
"""Add a field to sort results by. If not provided, direction is ``desc``."""
|
||||
if direction is None:
|
||||
direction = 'desc'
|
||||
self._add("sort_by", {field_name: direction})
|
||||
return self
|
||||
|
||||
def aggregate(self, value):
|
||||
"""Aggregate field."""
|
||||
self._add("aggregate", value)
|
||||
return self
|
||||
|
||||
def with_field(self, value):
|
||||
"""Request an additional field in the result set."""
|
||||
self._add("with_field", value)
|
||||
return self
|
||||
|
||||
def to_json(self):
|
||||
return json.dumps(self.query)
|
||||
|
||||
def execute(self, **options):
|
||||
"""Execute the search and return results."""
|
||||
options["content_type"] = 'application/json'
|
||||
uri = ['resources','search']
|
||||
return api.call_json_api('post', uri, self.as_dict(), **options)
|
||||
|
||||
def _add(self, name, value):
|
||||
if name not in self.query:
|
||||
self.query[name] = []
|
||||
self.query[name].append(value)
|
||||
return self
|
||||
|
||||
def as_dict(self):
|
||||
return deepcopy(self.query)
|
||||
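A chained-usage sketch of the `Search` builder above; the expression and field names are illustrative, and `execute()` assumes Cloudinary credentials are already configured:

```
# Sketch: recent cat images, newest first, with the context field included.
from cloudinary.search import Search

result = Search() \
    .expression("resource_type:image AND tags=cat") \
    .sort_by("created_at", "desc") \
    .max_results(10) \
    .with_field("context") \
    .execute()
```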
43
lib/cloudinary/static/html/cloudinary_cors.html
Normal file
@@ -0,0 +1,43 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
/*
|
||||
json2.js
|
||||
2011-10-19
|
||||
|
||||
Public Domain.
|
||||
|
||||
NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
||||
|
||||
See http://www.JSON.org/js.html
|
||||
|
||||
This code should be minified before deployment.
|
||||
See http://javascript.crockford.com/jsmin.html
|
||||
|
||||
USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
|
||||
NOT CONTROL.
|
||||
|
||||
*/
|
||||
var JSON;if(!JSON){JSON={}}(function(){function str(a,b){var c,d,e,f,g=gap,h,i=b[a];if(i&&typeof i==="object"&&typeof i.toJSON==="function"){i=i.toJSON(a)}if(typeof rep==="function"){i=rep.call(b,a,i)}switch(typeof i){case"string":return quote(i);case"number":return isFinite(i)?String(i):"null";case"boolean":case"null":return String(i);case"object":if(!i){return"null"}gap+=indent;h=[];if(Object.prototype.toString.apply(i)==="[object Array]"){f=i.length;for(c=0;c<f;c+=1){h[c]=str(c,i)||"null"}e=h.length===0?"[]":gap?"[\n"+gap+h.join(",\n"+gap)+"\n"+g+"]":"["+h.join(",")+"]";gap=g;return e}if(rep&&typeof rep==="object"){f=rep.length;for(c=0;c<f;c+=1){if(typeof rep[c]==="string"){d=rep[c];e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}else{for(d in i){if(Object.prototype.hasOwnProperty.call(i,d)){e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}e=h.length===0?"{}":gap?"{\n"+gap+h.join(",\n"+gap)+"\n"+g+"}":"{"+h.join(",")+"}";gap=g;return e}}function quote(a){escapable.lastIndex=0;return escapable.test(a)?'"'+a.replace(escapable,function(a){var b=meta[a];return typeof b==="string"?b:"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})+'"':'"'+a+'"'}function f(a){return a<10?"0"+a:a}"use strict";if(typeof Date.prototype.toJSON!=="function"){Date.prototype.toJSON=function(a){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null};String.prototype.toJSON=Number.prototype.toJSON=Boolean.prototype.toJSON=function(a){return this.valueOf()}}var cx=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,escapable=/[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,gap,indent,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},rep;if(typeof JSON.stringify!=="function"){JSON.stringify=function(a,b,c){var d;gap="";indent="";if(typeof c==="number"){for(d=0;d<c;d+=1){indent+=" "}}else if(typeof c==="string"){indent=c}rep=b;if(b&&typeof b!=="function"&&(typeof b!=="object"||typeof b.length!=="number")){throw new Error("JSON.stringify")}return str("",{"":a})}}if(typeof JSON.parse!=="function"){JSON.parse=function(text,reviver){function walk(a,b){var c,d,e=a[b];if(e&&typeof e==="object"){for(c in e){if(Object.prototype.hasOwnProperty.call(e,c)){d=walk(e,c);if(d!==undefined){e[c]=d}else{delete e[c]}}}}return reviver.call(a,b,e)}var j;text=String(text);cx.lastIndex=0;if(cx.test(text)){text=text.replace(cx,function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})}if(/^[\],:{}\s]*$/.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,""))){j=eval("("+text+")");return typeof reviver==="function"?walk({"":j},""):j}throw new SyntaxError("JSON.parse")}}})()
|
||||
/* end of json2.js */
|
||||
;
|
||||
function parse(query) {
|
||||
var result = {};
|
||||
var params = query.split("&");
|
||||
for (var i = 0; i < params.length; i++) {
|
||||
var param = params[i].split("=");
|
||||
result[param[0]] = decodeURIComponent(param[1]);
|
||||
}
|
||||
return JSON.stringify(result);
|
||||
}
|
||||
|
||||
document.body.textContent = document.body.innerText = parse(window.location.search.slice(1));
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
2
lib/cloudinary/static/js/canvas-to-blob.min.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
!function(t){"use strict";var e=t.HTMLCanvasElement&&t.HTMLCanvasElement.prototype,o=t.Blob&&function(){try{return Boolean(new Blob)}catch(t){return!1}}(),n=o&&t.Uint8Array&&function(){try{return 100===new Blob([new Uint8Array(100)]).size}catch(t){return!1}}(),r=t.BlobBuilder||t.WebKitBlobBuilder||t.MozBlobBuilder||t.MSBlobBuilder,a=/^data:((.*?)(;charset=.*?)?)(;base64)?,/,i=(o||r)&&t.atob&&t.ArrayBuffer&&t.Uint8Array&&function(t){var e,i,l,u,c,f,b,d,B;if(!(e=t.match(a)))throw new Error("invalid data URI");for(i=e[2]?e[1]:"text/plain"+(e[3]||";charset=US-ASCII"),l=!!e[4],u=t.slice(e[0].length),c=l?atob(u):decodeURIComponent(u),f=new ArrayBuffer(c.length),b=new Uint8Array(f),d=0;d<c.length;d+=1)b[d]=c.charCodeAt(d);return o?new Blob([n?b:f],{type:i}):((B=new r).append(f),B.getBlob(i))};t.HTMLCanvasElement&&!e.toBlob&&(e.mozGetAsFile?e.toBlob=function(t,o,n){var r=this;setTimeout(function(){t(n&&e.toDataURL&&i?i(r.toDataURL(o,n)):r.mozGetAsFile("blob",o))})}:e.toDataURL&&i&&(e.toBlob=function(t,e,o){var n=this;setTimeout(function(){t(i(n.toDataURL(e,o)))})})),"function"==typeof define&&define.amd?define(function(){return i}):"object"==typeof module&&module.exports?module.exports=i:t.dataURLtoBlob=i}(window);
|
||||
//# sourceMappingURL=canvas-to-blob.min.js.map
|
||||
4722
lib/cloudinary/static/js/jquery.cloudinary.js
Normal file
File diff suppressed because it is too large
Load Diff
326 lib/cloudinary/static/js/jquery.fileupload-image.js vendored Normal file
@@ -0,0 +1,326 @@
|
||||
/*
|
||||
* jQuery File Upload Image Preview & Resize Plugin
|
||||
* https://github.com/blueimp/jQuery-File-Upload
|
||||
*
|
||||
* Copyright 2013, Sebastian Tschan
|
||||
* https://blueimp.net
|
||||
*
|
||||
* Licensed under the MIT license:
|
||||
* https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
/* jshint nomen:false */
|
||||
/* global define, require, window, Blob */
|
||||
|
||||
;(function (factory) {
|
||||
'use strict';
|
||||
if (typeof define === 'function' && define.amd) {
|
||||
// Register as an anonymous AMD module:
|
||||
define([
|
||||
'jquery',
|
||||
'load-image',
|
||||
'load-image-meta',
|
||||
'load-image-scale',
|
||||
'load-image-exif',
|
||||
'canvas-to-blob',
|
||||
'./jquery.fileupload-process'
|
||||
], factory);
|
||||
} else if (typeof exports === 'object') {
|
||||
// Node/CommonJS:
|
||||
factory(
|
||||
require('jquery'),
|
||||
require('blueimp-load-image/js/load-image'),
|
||||
require('blueimp-load-image/js/load-image-meta'),
|
||||
require('blueimp-load-image/js/load-image-scale'),
|
||||
require('blueimp-load-image/js/load-image-exif'),
|
||||
require('blueimp-canvas-to-blob'),
|
||||
require('./jquery.fileupload-process')
|
||||
);
|
||||
} else {
|
||||
// Browser globals:
|
||||
factory(
|
||||
window.jQuery,
|
||||
window.loadImage
|
||||
);
|
||||
}
|
||||
}(function ($, loadImage) {
|
||||
'use strict';
|
||||
|
||||
// Prepend to the default processQueue:
|
||||
$.blueimp.fileupload.prototype.options.processQueue.unshift(
|
||||
{
|
||||
action: 'loadImageMetaData',
|
||||
disableImageHead: '@',
|
||||
disableExif: '@',
|
||||
disableExifThumbnail: '@',
|
||||
disableExifSub: '@',
|
||||
disableExifGps: '@',
|
||||
disabled: '@disableImageMetaDataLoad'
|
||||
},
|
||||
{
|
||||
action: 'loadImage',
|
||||
// Use the action as prefix for the "@" options:
|
||||
prefix: true,
|
||||
fileTypes: '@',
|
||||
maxFileSize: '@',
|
||||
noRevoke: '@',
|
||||
disabled: '@disableImageLoad'
|
||||
},
|
||||
{
|
||||
action: 'resizeImage',
|
||||
// Use "image" as prefix for the "@" options:
|
||||
prefix: 'image',
|
||||
maxWidth: '@',
|
||||
maxHeight: '@',
|
||||
minWidth: '@',
|
||||
minHeight: '@',
|
||||
crop: '@',
|
||||
orientation: '@',
|
||||
forceResize: '@',
|
||||
disabled: '@disableImageResize'
|
||||
},
|
||||
{
|
||||
action: 'saveImage',
|
||||
quality: '@imageQuality',
|
||||
type: '@imageType',
|
||||
disabled: '@disableImageResize'
|
||||
},
|
||||
{
|
||||
action: 'saveImageMetaData',
|
||||
disabled: '@disableImageMetaDataSave'
|
||||
},
|
||||
{
|
||||
action: 'resizeImage',
|
||||
// Use "preview" as prefix for the "@" options:
|
||||
prefix: 'preview',
|
||||
maxWidth: '@',
|
||||
maxHeight: '@',
|
||||
minWidth: '@',
|
||||
minHeight: '@',
|
||||
crop: '@',
|
||||
orientation: '@',
|
||||
thumbnail: '@',
|
||||
canvas: '@',
|
||||
disabled: '@disableImagePreview'
|
||||
},
|
||||
{
|
||||
action: 'setImage',
|
||||
name: '@imagePreviewName',
|
||||
disabled: '@disableImagePreview'
|
||||
},
|
||||
{
|
||||
action: 'deleteImageReferences',
|
||||
disabled: '@disableImageReferencesDeletion'
|
||||
}
|
||||
);
|
||||
|
||||
// The File Upload Resize plugin extends the fileupload widget
|
||||
// with image resize functionality:
|
||||
$.widget('blueimp.fileupload', $.blueimp.fileupload, {
|
||||
|
||||
options: {
|
||||
// The regular expression for the types of images to load:
|
||||
// matched against the file type:
|
||||
loadImageFileTypes: /^image\/(gif|jpeg|png|svg\+xml)$/,
|
||||
// The maximum file size of images to load:
|
||||
loadImageMaxFileSize: 10000000, // 10MB
|
||||
// The maximum width of resized images:
|
||||
imageMaxWidth: 1920,
|
||||
// The maximum height of resized images:
|
||||
imageMaxHeight: 1080,
|
||||
// Defines the image orientation (1-8) or takes the orientation
|
||||
// value from Exif data if set to true:
|
||||
imageOrientation: false,
|
||||
// Define if resized images should be cropped or only scaled:
|
||||
imageCrop: false,
|
||||
// Disable the resize image functionality by default:
|
||||
disableImageResize: true,
|
||||
// The maximum width of the preview images:
|
||||
previewMaxWidth: 80,
|
||||
// The maximum height of the preview images:
|
||||
previewMaxHeight: 80,
|
||||
// Defines the preview orientation (1-8) or takes the orientation
|
||||
// value from Exif data if set to true:
|
||||
previewOrientation: true,
|
||||
// Create the preview using the Exif data thumbnail:
|
||||
previewThumbnail: true,
|
||||
// Define if preview images should be cropped or only scaled:
|
||||
previewCrop: false,
|
||||
// Define if preview images should be resized as canvas elements:
|
||||
previewCanvas: true
|
||||
},
|
||||
|
||||
processActions: {
|
||||
|
||||
// Loads the image given via data.files and data.index
|
||||
// as img element, if the browser supports the File API.
|
||||
// Accepts the options fileTypes (regular expression)
|
||||
// and maxFileSize (integer) to limit the files to load:
|
||||
loadImage: function (data, options) {
|
||||
if (options.disabled) {
|
||||
return data;
|
||||
}
|
||||
var that = this,
|
||||
file = data.files[data.index],
|
||||
dfd = $.Deferred();
|
||||
if (($.type(options.maxFileSize) === 'number' &&
|
||||
file.size > options.maxFileSize) ||
|
||||
(options.fileTypes &&
|
||||
!options.fileTypes.test(file.type)) ||
|
||||
!loadImage(
|
||||
file,
|
||||
function (img) {
|
||||
if (img.src) {
|
||||
data.img = img;
|
||||
}
|
||||
dfd.resolveWith(that, [data]);
|
||||
},
|
||||
options
|
||||
)) {
|
||||
return data;
|
||||
}
|
||||
return dfd.promise();
|
||||
},
|
||||
|
||||
// Resizes the image given as data.canvas or data.img
|
||||
// and updates data.canvas or data.img with the resized image.
|
||||
// Also stores the resized image as preview property.
|
||||
// Accepts the options maxWidth, maxHeight, minWidth,
|
||||
// minHeight, canvas and crop:
|
||||
resizeImage: function (data, options) {
|
||||
if (options.disabled || !(data.canvas || data.img)) {
|
||||
return data;
|
||||
}
|
||||
options = $.extend({canvas: true}, options);
|
||||
var that = this,
|
||||
dfd = $.Deferred(),
|
||||
img = (options.canvas && data.canvas) || data.img,
|
||||
resolve = function (newImg) {
|
||||
if (newImg && (newImg.width !== img.width ||
|
||||
newImg.height !== img.height ||
|
||||
options.forceResize)) {
|
||||
data[newImg.getContext ? 'canvas' : 'img'] = newImg;
|
||||
}
|
||||
data.preview = newImg;
|
||||
dfd.resolveWith(that, [data]);
|
||||
},
|
||||
thumbnail;
|
||||
if (data.exif) {
|
||||
if (options.orientation === true) {
|
||||
options.orientation = data.exif.get('Orientation');
|
||||
}
|
||||
if (options.thumbnail) {
|
||||
thumbnail = data.exif.get('Thumbnail');
|
||||
if (thumbnail) {
|
||||
loadImage(thumbnail, resolve, options);
|
||||
return dfd.promise();
|
||||
}
|
||||
}
|
||||
// Prevent orienting the same image twice:
|
||||
if (data.orientation) {
|
||||
delete options.orientation;
|
||||
} else {
|
||||
data.orientation = options.orientation;
|
||||
}
|
||||
}
|
||||
if (img) {
|
||||
resolve(loadImage.scale(img, options));
|
||||
return dfd.promise();
|
||||
}
|
||||
return data;
|
||||
},
|
||||
|
||||
// Saves the processed image given as data.canvas
|
||||
// inplace at data.index of data.files:
|
||||
saveImage: function (data, options) {
|
||||
if (!data.canvas || options.disabled) {
|
||||
return data;
|
||||
}
|
||||
var that = this,
|
||||
file = data.files[data.index],
|
||||
dfd = $.Deferred();
|
||||
if (data.canvas.toBlob) {
|
||||
data.canvas.toBlob(
|
||||
function (blob) {
|
||||
if (!blob.name) {
|
||||
if (file.type === blob.type) {
|
||||
blob.name = file.name;
|
||||
} else if (file.name) {
|
||||
blob.name = file.name.replace(
|
||||
/\.\w+$/,
|
||||
'.' + blob.type.substr(6)
|
||||
);
|
||||
}
|
||||
}
|
||||
// Don't restore invalid meta data:
|
||||
if (file.type !== blob.type) {
|
||||
delete data.imageHead;
|
||||
}
|
||||
// Store the created blob at the position
|
||||
// of the original file in the files list:
|
||||
data.files[data.index] = blob;
|
||||
dfd.resolveWith(that, [data]);
|
||||
},
|
||||
options.type || file.type,
|
||||
options.quality
|
||||
);
|
||||
} else {
|
||||
return data;
|
||||
}
|
||||
return dfd.promise();
|
||||
},
|
||||
|
||||
loadImageMetaData: function (data, options) {
|
||||
if (options.disabled) {
|
||||
return data;
|
||||
}
|
||||
var that = this,
|
||||
dfd = $.Deferred();
|
||||
loadImage.parseMetaData(data.files[data.index], function (result) {
|
||||
$.extend(data, result);
|
||||
dfd.resolveWith(that, [data]);
|
||||
}, options);
|
||||
return dfd.promise();
|
||||
},
|
||||
|
||||
saveImageMetaData: function (data, options) {
|
||||
if (!(data.imageHead && data.canvas &&
|
||||
data.canvas.toBlob && !options.disabled)) {
|
||||
return data;
|
||||
}
|
||||
var file = data.files[data.index],
|
||||
blob = new Blob([
|
||||
data.imageHead,
|
||||
// Resized images always have a head size of 20 bytes,
|
||||
// including the JPEG marker and a minimal JFIF header:
|
||||
this._blobSlice.call(file, 20)
|
||||
], {type: file.type});
|
||||
blob.name = file.name;
|
||||
data.files[data.index] = blob;
|
||||
return data;
|
||||
},
|
||||
|
||||
// Sets the resized version of the image as a property of the
|
||||
// file object, must be called after "saveImage":
|
||||
setImage: function (data, options) {
|
||||
if (data.preview && !options.disabled) {
|
||||
data.files[data.index][options.name || 'preview'] = data.preview;
|
||||
}
|
||||
return data;
|
||||
},
|
||||
|
||||
deleteImageReferences: function (data, options) {
|
||||
if (!options.disabled) {
|
||||
delete data.img;
|
||||
delete data.canvas;
|
||||
delete data.preview;
|
||||
delete data.imageHead;
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
}));
|
||||
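A minimal sketch of overriding the image-processing defaults above when initializing the widget; the #fileupload selector and the sizes chosen here are illustrative, not part of the plugin:

$('#fileupload').fileupload({
    disableImageResize: false,  // client-side resizing is disabled by default (see options above)
    imageMaxWidth: 1280,
    imageMaxHeight: 720,
    previewMaxWidth: 120,
    previewMaxHeight: 120
});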
178 lib/cloudinary/static/js/jquery.fileupload-process.js vendored Normal file
@@ -0,0 +1,178 @@
|
||||
/*
|
||||
* jQuery File Upload Processing Plugin
|
||||
* https://github.com/blueimp/jQuery-File-Upload
|
||||
*
|
||||
* Copyright 2012, Sebastian Tschan
|
||||
* https://blueimp.net
|
||||
*
|
||||
* Licensed under the MIT license:
|
||||
* https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
/* jshint nomen:false */
|
||||
/* global define, require, window */
|
||||
|
||||
;(function (factory) {
|
||||
'use strict';
|
||||
if (typeof define === 'function' && define.amd) {
|
||||
// Register as an anonymous AMD module:
|
||||
define([
|
||||
'jquery',
|
||||
'./jquery.fileupload'
|
||||
], factory);
|
||||
} else if (typeof exports === 'object') {
|
||||
// Node/CommonJS:
|
||||
factory(
|
||||
require('jquery'),
|
||||
require('./jquery.fileupload')
|
||||
);
|
||||
} else {
|
||||
// Browser globals:
|
||||
factory(
|
||||
window.jQuery
|
||||
);
|
||||
}
|
||||
}(function ($) {
|
||||
'use strict';
|
||||
|
||||
var originalAdd = $.blueimp.fileupload.prototype.options.add;
|
||||
|
||||
// The File Upload Processing plugin extends the fileupload widget
|
||||
// with file processing functionality:
|
||||
$.widget('blueimp.fileupload', $.blueimp.fileupload, {
|
||||
|
||||
options: {
|
||||
// The list of processing actions:
|
||||
processQueue: [
|
||||
/*
|
||||
{
|
||||
action: 'log',
|
||||
type: 'debug'
|
||||
}
|
||||
*/
|
||||
],
|
||||
add: function (e, data) {
|
||||
var $this = $(this);
|
||||
data.process(function () {
|
||||
return $this.fileupload('process', data);
|
||||
});
|
||||
originalAdd.call(this, e, data);
|
||||
}
|
||||
},
|
||||
|
||||
processActions: {
|
||||
/*
|
||||
log: function (data, options) {
|
||||
console[options.type](
|
||||
'Processing "' + data.files[data.index].name + '"'
|
||||
);
|
||||
}
|
||||
*/
|
||||
},
|
||||
|
||||
_processFile: function (data, originalData) {
|
||||
var that = this,
|
||||
dfd = $.Deferred().resolveWith(that, [data]),
|
||||
chain = dfd.promise();
|
||||
this._trigger('process', null, data);
|
||||
$.each(data.processQueue, function (i, settings) {
|
||||
var func = function (data) {
|
||||
if (originalData.errorThrown) {
|
||||
return $.Deferred()
|
||||
.rejectWith(that, [originalData]).promise();
|
||||
}
|
||||
return that.processActions[settings.action].call(
|
||||
that,
|
||||
data,
|
||||
settings
|
||||
);
|
||||
};
|
||||
chain = chain.then(func, settings.always && func);
|
||||
});
|
||||
chain
|
||||
.done(function () {
|
||||
that._trigger('processdone', null, data);
|
||||
that._trigger('processalways', null, data);
|
||||
})
|
||||
.fail(function () {
|
||||
that._trigger('processfail', null, data);
|
||||
that._trigger('processalways', null, data);
|
||||
});
|
||||
return chain;
|
||||
},
|
||||
|
||||
// Replaces the settings of each processQueue item that
|
||||
// are strings starting with an "@", using the remaining
|
||||
// substring as key for the option map,
|
||||
// e.g. "@autoUpload" is replaced with options.autoUpload:
|
||||
_transformProcessQueue: function (options) {
|
||||
var processQueue = [];
|
||||
$.each(options.processQueue, function () {
|
||||
var settings = {},
|
||||
action = this.action,
|
||||
prefix = this.prefix === true ? action : this.prefix;
|
||||
$.each(this, function (key, value) {
|
||||
if ($.type(value) === 'string' &&
|
||||
value.charAt(0) === '@') {
|
||||
settings[key] = options[
|
||||
value.slice(1) || (prefix ? prefix +
|
||||
key.charAt(0).toUpperCase() + key.slice(1) : key)
|
||||
];
|
||||
} else {
|
||||
settings[key] = value;
|
||||
}
|
||||
|
||||
});
|
||||
processQueue.push(settings);
|
||||
});
|
||||
options.processQueue = processQueue;
|
||||
},
|
||||
|
||||
// Returns the number of files currently in the processsing queue:
|
||||
processing: function () {
|
||||
return this._processing;
|
||||
},
|
||||
|
||||
// Processes the files given as files property of the data parameter,
|
||||
// returns a Promise object that allows to bind callbacks:
|
||||
process: function (data) {
|
||||
var that = this,
|
||||
options = $.extend({}, this.options, data);
|
||||
if (options.processQueue && options.processQueue.length) {
|
||||
this._transformProcessQueue(options);
|
||||
if (this._processing === 0) {
|
||||
this._trigger('processstart');
|
||||
}
|
||||
$.each(data.files, function (index) {
|
||||
var opts = index ? $.extend({}, options) : options,
|
||||
func = function () {
|
||||
if (data.errorThrown) {
|
||||
return $.Deferred()
|
||||
.rejectWith(that, [data]).promise();
|
||||
}
|
||||
return that._processFile(opts, data);
|
||||
};
|
||||
opts.index = index;
|
||||
that._processing += 1;
|
||||
that._processingQueue = that._processingQueue.then(func, func)
|
||||
.always(function () {
|
||||
that._processing -= 1;
|
||||
if (that._processing === 0) {
|
||||
that._trigger('processstop');
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
return this._processingQueue;
|
||||
},
|
||||
|
||||
_create: function () {
|
||||
this._super();
|
||||
this._processing = 0;
|
||||
this._processingQueue = $.Deferred().resolveWith(this)
|
||||
.promise();
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
}));
|
||||
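As a worked example of the "@" mapping performed by _transformProcessQueue above, take the resizeImage entry that jquery.fileupload-image.js prepends to the queue; with the default options shown earlier it resolves roughly as follows:

// Queue entry as declared:
{action: 'resizeImage', prefix: 'image', maxWidth: '@', disabled: '@disableImageResize'}
// After _transformProcessQueue, using the widget's default options:
{action: 'resizeImage', prefix: 'image', maxWidth: 1920, disabled: true}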
125 lib/cloudinary/static/js/jquery.fileupload-validate.js vendored Normal file
@@ -0,0 +1,125 @@
|
||||
/*
|
||||
* jQuery File Upload Validation Plugin
|
||||
* https://github.com/blueimp/jQuery-File-Upload
|
||||
*
|
||||
* Copyright 2013, Sebastian Tschan
|
||||
* https://blueimp.net
|
||||
*
|
||||
* Licensed under the MIT license:
|
||||
* https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
/* global define, require, window */
|
||||
|
||||
;(function (factory) {
|
||||
'use strict';
|
||||
if (typeof define === 'function' && define.amd) {
|
||||
// Register as an anonymous AMD module:
|
||||
define([
|
||||
'jquery',
|
||||
'./jquery.fileupload-process'
|
||||
], factory);
|
||||
} else if (typeof exports === 'object') {
|
||||
// Node/CommonJS:
|
||||
factory(
|
||||
require('jquery'),
|
||||
require('./jquery.fileupload-process')
|
||||
);
|
||||
} else {
|
||||
// Browser globals:
|
||||
factory(
|
||||
window.jQuery
|
||||
);
|
||||
}
|
||||
}(function ($) {
|
||||
'use strict';
|
||||
|
||||
// Append to the default processQueue:
|
||||
$.blueimp.fileupload.prototype.options.processQueue.push(
|
||||
{
|
||||
action: 'validate',
|
||||
// Always trigger this action,
|
||||
// even if the previous action was rejected:
|
||||
always: true,
|
||||
// Options taken from the global options map:
|
||||
acceptFileTypes: '@',
|
||||
maxFileSize: '@',
|
||||
minFileSize: '@',
|
||||
maxNumberOfFiles: '@',
|
||||
disabled: '@disableValidation'
|
||||
}
|
||||
);
|
||||
|
||||
// The File Upload Validation plugin extends the fileupload widget
|
||||
// with file validation functionality:
|
||||
$.widget('blueimp.fileupload', $.blueimp.fileupload, {
|
||||
|
||||
options: {
|
||||
/*
|
||||
// The regular expression for allowed file types, matches
|
||||
// against either file type or file name:
|
||||
acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,
|
||||
// The maximum allowed file size in bytes:
|
||||
maxFileSize: 10000000, // 10 MB
|
||||
// The minimum allowed file size in bytes:
|
||||
minFileSize: undefined, // No minimal file size
|
||||
// The limit of files to be uploaded:
|
||||
maxNumberOfFiles: 10,
|
||||
*/
|
||||
|
||||
// Function returning the current number of files,
|
||||
// has to be overriden for maxNumberOfFiles validation:
|
||||
getNumberOfFiles: $.noop,
|
||||
|
||||
// Error and info messages:
|
||||
messages: {
|
||||
maxNumberOfFiles: 'Maximum number of files exceeded',
|
||||
acceptFileTypes: 'File type not allowed',
|
||||
maxFileSize: 'File is too large',
|
||||
minFileSize: 'File is too small'
|
||||
}
|
||||
},
|
||||
|
||||
processActions: {
|
||||
|
||||
validate: function (data, options) {
|
||||
if (options.disabled) {
|
||||
return data;
|
||||
}
|
||||
var dfd = $.Deferred(),
|
||||
settings = this.options,
|
||||
file = data.files[data.index],
|
||||
fileSize;
|
||||
if (options.minFileSize || options.maxFileSize) {
|
||||
fileSize = file.size;
|
||||
}
|
||||
if ($.type(options.maxNumberOfFiles) === 'number' &&
|
||||
(settings.getNumberOfFiles() || 0) + data.files.length >
|
||||
options.maxNumberOfFiles) {
|
||||
file.error = settings.i18n('maxNumberOfFiles');
|
||||
} else if (options.acceptFileTypes &&
|
||||
!(options.acceptFileTypes.test(file.type) ||
|
||||
options.acceptFileTypes.test(file.name))) {
|
||||
file.error = settings.i18n('acceptFileTypes');
|
||||
} else if (fileSize > options.maxFileSize) {
|
||||
file.error = settings.i18n('maxFileSize');
|
||||
} else if ($.type(fileSize) === 'number' &&
|
||||
fileSize < options.minFileSize) {
|
||||
file.error = settings.i18n('minFileSize');
|
||||
} else {
|
||||
delete file.error;
|
||||
}
|
||||
if (file.error || data.files.error) {
|
||||
data.files.error = true;
|
||||
dfd.rejectWith(this, [data]);
|
||||
} else {
|
||||
dfd.resolveWith(this, [data]);
|
||||
}
|
||||
return dfd.promise();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
}));
|
||||
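A minimal sketch of enabling the validation options documented above; the selector, limits and getNumberOfFiles implementation are illustrative and must match the host page:

$('#fileupload').fileupload({
    acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,
    maxFileSize: 10000000,  // 10 MB
    maxNumberOfFiles: 10,
    getNumberOfFiles: function () {
        return $('.files .upload-row').length;  // hypothetical markup
    }
});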
1482 lib/cloudinary/static/js/jquery.fileupload.js vendored Normal file
File diff suppressed because it is too large
224 lib/cloudinary/static/js/jquery.iframe-transport.js Normal file
@@ -0,0 +1,224 @@
|
||||
/*
|
||||
* jQuery Iframe Transport Plugin
|
||||
* https://github.com/blueimp/jQuery-File-Upload
|
||||
*
|
||||
* Copyright 2011, Sebastian Tschan
|
||||
* https://blueimp.net
|
||||
*
|
||||
* Licensed under the MIT license:
|
||||
* https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
/* global define, require, window, document, JSON */
|
||||
|
||||
;(function (factory) {
|
||||
'use strict';
|
||||
if (typeof define === 'function' && define.amd) {
|
||||
// Register as an anonymous AMD module:
|
||||
define(['jquery'], factory);
|
||||
} else if (typeof exports === 'object') {
|
||||
// Node/CommonJS:
|
||||
factory(require('jquery'));
|
||||
} else {
|
||||
// Browser globals:
|
||||
factory(window.jQuery);
|
||||
}
|
||||
}(function ($) {
|
||||
'use strict';
|
||||
|
||||
// Helper variable to create unique names for the transport iframes:
|
||||
var counter = 0,
|
||||
jsonAPI = $,
|
||||
jsonParse = 'parseJSON';
|
||||
|
||||
if ('JSON' in window && 'parse' in JSON) {
|
||||
jsonAPI = JSON;
|
||||
jsonParse = 'parse';
|
||||
}
|
||||
|
||||
// The iframe transport accepts four additional options:
|
||||
// options.fileInput: a jQuery collection of file input fields
|
||||
// options.paramName: the parameter name for the file form data,
|
||||
// overrides the name property of the file input field(s),
|
||||
// can be a string or an array of strings.
|
||||
// options.formData: an array of objects with name and value properties,
|
||||
// equivalent to the return data of .serializeArray(), e.g.:
|
||||
// [{name: 'a', value: 1}, {name: 'b', value: 2}]
|
||||
// options.initialIframeSrc: the URL of the initial iframe src,
|
||||
// by default set to "javascript:false;"
|
||||
$.ajaxTransport('iframe', function (options) {
|
||||
if (options.async) {
|
||||
// javascript:false as initial iframe src
|
||||
// prevents warning popups on HTTPS in IE6:
|
||||
/*jshint scripturl: true */
|
||||
var initialIframeSrc = options.initialIframeSrc || 'javascript:false;',
|
||||
/*jshint scripturl: false */
|
||||
form,
|
||||
iframe,
|
||||
addParamChar;
|
||||
return {
|
||||
send: function (_, completeCallback) {
|
||||
form = $('<form style="display:none;"></form>');
|
||||
form.attr('accept-charset', options.formAcceptCharset);
|
||||
addParamChar = /\?/.test(options.url) ? '&' : '?';
|
||||
// XDomainRequest only supports GET and POST:
|
||||
if (options.type === 'DELETE') {
|
||||
options.url = options.url + addParamChar + '_method=DELETE';
|
||||
options.type = 'POST';
|
||||
} else if (options.type === 'PUT') {
|
||||
options.url = options.url + addParamChar + '_method=PUT';
|
||||
options.type = 'POST';
|
||||
} else if (options.type === 'PATCH') {
|
||||
options.url = options.url + addParamChar + '_method=PATCH';
|
||||
options.type = 'POST';
|
||||
}
|
||||
// IE versions below IE8 cannot set the name property of
|
||||
// elements that have already been added to the DOM,
|
||||
// so we set the name along with the iframe HTML markup:
|
||||
counter += 1;
|
||||
iframe = $(
|
||||
'<iframe src="' + initialIframeSrc +
|
||||
'" name="iframe-transport-' + counter + '"></iframe>'
|
||||
).bind('load', function () {
|
||||
var fileInputClones,
|
||||
paramNames = $.isArray(options.paramName) ?
|
||||
options.paramName : [options.paramName];
|
||||
iframe
|
||||
.unbind('load')
|
||||
.bind('load', function () {
|
||||
var response;
|
||||
// Wrap in a try/catch block to catch exceptions thrown
|
||||
// when trying to access cross-domain iframe contents:
|
||||
try {
|
||||
response = iframe.contents();
|
||||
// Google Chrome and Firefox do not throw an
|
||||
// exception when calling iframe.contents() on
|
||||
// cross-domain requests, so we unify the response:
|
||||
if (!response.length || !response[0].firstChild) {
|
||||
throw new Error();
|
||||
}
|
||||
} catch (e) {
|
||||
response = undefined;
|
||||
}
|
||||
// The complete callback returns the
|
||||
// iframe content document as response object:
|
||||
completeCallback(
|
||||
200,
|
||||
'success',
|
||||
{'iframe': response}
|
||||
);
|
||||
// Fix for IE endless progress bar activity bug
|
||||
// (happens on form submits to iframe targets):
|
||||
$('<iframe src="' + initialIframeSrc + '"></iframe>')
|
||||
.appendTo(form);
|
||||
window.setTimeout(function () {
|
||||
// Removing the form in a setTimeout call
|
||||
// allows Chrome's developer tools to display
|
||||
// the response result
|
||||
form.remove();
|
||||
}, 0);
|
||||
});
|
||||
form
|
||||
.prop('target', iframe.prop('name'))
|
||||
.prop('action', options.url)
|
||||
.prop('method', options.type);
|
||||
if (options.formData) {
|
||||
$.each(options.formData, function (index, field) {
|
||||
$('<input type="hidden"/>')
|
||||
.prop('name', field.name)
|
||||
.val(field.value)
|
||||
.appendTo(form);
|
||||
});
|
||||
}
|
||||
if (options.fileInput && options.fileInput.length &&
|
||||
options.type === 'POST') {
|
||||
fileInputClones = options.fileInput.clone();
|
||||
// Insert a clone for each file input field:
|
||||
options.fileInput.after(function (index) {
|
||||
return fileInputClones[index];
|
||||
});
|
||||
if (options.paramName) {
|
||||
options.fileInput.each(function (index) {
|
||||
$(this).prop(
|
||||
'name',
|
||||
paramNames[index] || options.paramName
|
||||
);
|
||||
});
|
||||
}
|
||||
// Appending the file input fields to the hidden form
|
||||
// removes them from their original location:
|
||||
form
|
||||
.append(options.fileInput)
|
||||
.prop('enctype', 'multipart/form-data')
|
||||
// enctype must be set as encoding for IE:
|
||||
.prop('encoding', 'multipart/form-data');
|
||||
// Remove the HTML5 form attribute from the input(s):
|
||||
options.fileInput.removeAttr('form');
|
||||
}
|
||||
form.submit();
|
||||
// Insert the file input fields at their original location
|
||||
// by replacing the clones with the originals:
|
||||
if (fileInputClones && fileInputClones.length) {
|
||||
options.fileInput.each(function (index, input) {
|
||||
var clone = $(fileInputClones[index]);
|
||||
// Restore the original name and form properties:
|
||||
$(input)
|
||||
.prop('name', clone.prop('name'))
|
||||
.attr('form', clone.attr('form'));
|
||||
clone.replaceWith(input);
|
||||
});
|
||||
}
|
||||
});
|
||||
form.append(iframe).appendTo(document.body);
|
||||
},
|
||||
abort: function () {
|
||||
if (iframe) {
|
||||
// javascript:false as iframe src aborts the request
|
||||
// and prevents warning popups on HTTPS in IE6.
|
||||
// concat is used to avoid the "Script URL" JSLint error:
|
||||
iframe
|
||||
.unbind('load')
|
||||
.prop('src', initialIframeSrc);
|
||||
}
|
||||
if (form) {
|
||||
form.remove();
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
// The iframe transport returns the iframe content document as response.
|
||||
// The following adds converters from iframe to text, json, html, xml
|
||||
// and script.
|
||||
// Please note that the Content-Type for JSON responses has to be text/plain
|
||||
// or text/html, if the browser doesn't include application/json in the
|
||||
// Accept header, else IE will show a download dialog.
|
||||
// The Content-Type for XML responses on the other hand has to be always
|
||||
// application/xml or text/xml, so IE properly parses the XML response.
|
||||
// See also
|
||||
// https://github.com/blueimp/jQuery-File-Upload/wiki/Setup#content-type-negotiation
|
||||
$.ajaxSetup({
|
||||
converters: {
|
||||
'iframe text': function (iframe) {
|
||||
return iframe && $(iframe[0].body).text();
|
||||
},
|
||||
'iframe json': function (iframe) {
|
||||
return iframe && jsonAPI[jsonParse]($(iframe[0].body).text());
|
||||
},
|
||||
'iframe html': function (iframe) {
|
||||
return iframe && $(iframe[0].body).html();
|
||||
},
|
||||
'iframe xml': function (iframe) {
|
||||
var xmlDoc = iframe && iframe[0];
|
||||
return xmlDoc && $.isXMLDoc(xmlDoc) ? xmlDoc :
|
||||
$.parseXML((xmlDoc.XMLDocument && xmlDoc.XMLDocument.xml) ||
|
||||
$(xmlDoc.body).html());
|
||||
},
|
||||
'iframe script': function (iframe) {
|
||||
return iframe && $.globalEval($(iframe[0].body).text());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
}));
|
||||
572 lib/cloudinary/static/js/jquery.ui.widget.js vendored Normal file
@@ -0,0 +1,572 @@
|
||||
/*! jQuery UI - v1.11.4+CommonJS - 2015-08-28
|
||||
* http://jqueryui.com
|
||||
* Includes: widget.js
|
||||
* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */
|
||||
|
||||
(function( factory ) {
|
||||
if ( typeof define === "function" && define.amd ) {
|
||||
|
||||
// AMD. Register as an anonymous module.
|
||||
define([ "jquery" ], factory );
|
||||
|
||||
} else if ( typeof exports === "object" ) {
|
||||
|
||||
// Node/CommonJS
|
||||
factory( require( "jquery" ) );
|
||||
|
||||
} else {
|
||||
|
||||
// Browser globals
|
||||
factory( jQuery );
|
||||
}
|
||||
}(function( $ ) {
|
||||
/*!
|
||||
* jQuery UI Widget 1.11.4
|
||||
* http://jqueryui.com
|
||||
*
|
||||
* Copyright jQuery Foundation and other contributors
|
||||
* Released under the MIT license.
|
||||
* http://jquery.org/license
|
||||
*
|
||||
* http://api.jqueryui.com/jQuery.widget/
|
||||
*/
|
||||
|
||||
|
||||
var widget_uuid = 0,
|
||||
widget_slice = Array.prototype.slice;
|
||||
|
||||
$.cleanData = (function( orig ) {
|
||||
return function( elems ) {
|
||||
var events, elem, i;
|
||||
for ( i = 0; (elem = elems[i]) != null; i++ ) {
|
||||
try {
|
||||
|
||||
// Only trigger remove when necessary to save time
|
||||
events = $._data( elem, "events" );
|
||||
if ( events && events.remove ) {
|
||||
$( elem ).triggerHandler( "remove" );
|
||||
}
|
||||
|
||||
// http://bugs.jquery.com/ticket/8235
|
||||
} catch ( e ) {}
|
||||
}
|
||||
orig( elems );
|
||||
};
|
||||
})( $.cleanData );
|
||||
|
||||
$.widget = function( name, base, prototype ) {
|
||||
var fullName, existingConstructor, constructor, basePrototype,
|
||||
// proxiedPrototype allows the provided prototype to remain unmodified
|
||||
// so that it can be used as a mixin for multiple widgets (#8876)
|
||||
proxiedPrototype = {},
|
||||
namespace = name.split( "." )[ 0 ];
|
||||
|
||||
name = name.split( "." )[ 1 ];
|
||||
fullName = namespace + "-" + name;
|
||||
|
||||
if ( !prototype ) {
|
||||
prototype = base;
|
||||
base = $.Widget;
|
||||
}
|
||||
|
||||
// create selector for plugin
|
||||
$.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) {
|
||||
return !!$.data( elem, fullName );
|
||||
};
|
||||
|
||||
$[ namespace ] = $[ namespace ] || {};
|
||||
existingConstructor = $[ namespace ][ name ];
|
||||
constructor = $[ namespace ][ name ] = function( options, element ) {
|
||||
// allow instantiation without "new" keyword
|
||||
if ( !this._createWidget ) {
|
||||
return new constructor( options, element );
|
||||
}
|
||||
|
||||
// allow instantiation without initializing for simple inheritance
|
||||
// must use "new" keyword (the code above always passes args)
|
||||
if ( arguments.length ) {
|
||||
this._createWidget( options, element );
|
||||
}
|
||||
};
|
||||
// extend with the existing constructor to carry over any static properties
|
||||
$.extend( constructor, existingConstructor, {
|
||||
version: prototype.version,
|
||||
// copy the object used to create the prototype in case we need to
|
||||
// redefine the widget later
|
||||
_proto: $.extend( {}, prototype ),
|
||||
// track widgets that inherit from this widget in case this widget is
|
||||
// redefined after a widget inherits from it
|
||||
_childConstructors: []
|
||||
});
|
||||
|
||||
basePrototype = new base();
|
||||
// we need to make the options hash a property directly on the new instance
|
||||
// otherwise we'll modify the options hash on the prototype that we're
|
||||
// inheriting from
|
||||
basePrototype.options = $.widget.extend( {}, basePrototype.options );
|
||||
$.each( prototype, function( prop, value ) {
|
||||
if ( !$.isFunction( value ) ) {
|
||||
proxiedPrototype[ prop ] = value;
|
||||
return;
|
||||
}
|
||||
proxiedPrototype[ prop ] = (function() {
|
||||
var _super = function() {
|
||||
return base.prototype[ prop ].apply( this, arguments );
|
||||
},
|
||||
_superApply = function( args ) {
|
||||
return base.prototype[ prop ].apply( this, args );
|
||||
};
|
||||
return function() {
|
||||
var __super = this._super,
|
||||
__superApply = this._superApply,
|
||||
returnValue;
|
||||
|
||||
this._super = _super;
|
||||
this._superApply = _superApply;
|
||||
|
||||
returnValue = value.apply( this, arguments );
|
||||
|
||||
this._super = __super;
|
||||
this._superApply = __superApply;
|
||||
|
||||
return returnValue;
|
||||
};
|
||||
})();
|
||||
});
|
||||
constructor.prototype = $.widget.extend( basePrototype, {
|
||||
// TODO: remove support for widgetEventPrefix
|
||||
// always use the name + a colon as the prefix, e.g., draggable:start
|
||||
// don't prefix for widgets that aren't DOM-based
|
||||
widgetEventPrefix: existingConstructor ? (basePrototype.widgetEventPrefix || name) : name
|
||||
}, proxiedPrototype, {
|
||||
constructor: constructor,
|
||||
namespace: namespace,
|
||||
widgetName: name,
|
||||
widgetFullName: fullName
|
||||
});
|
||||
|
||||
// If this widget is being redefined then we need to find all widgets that
|
||||
// are inheriting from it and redefine all of them so that they inherit from
|
||||
// the new version of this widget. We're essentially trying to replace one
|
||||
// level in the prototype chain.
|
||||
if ( existingConstructor ) {
|
||||
$.each( existingConstructor._childConstructors, function( i, child ) {
|
||||
var childPrototype = child.prototype;
|
||||
|
||||
// redefine the child widget using the same prototype that was
|
||||
// originally used, but inherit from the new version of the base
|
||||
$.widget( childPrototype.namespace + "." + childPrototype.widgetName, constructor, child._proto );
|
||||
});
|
||||
// remove the list of existing child constructors from the old constructor
|
||||
// so the old child constructors can be garbage collected
|
||||
delete existingConstructor._childConstructors;
|
||||
} else {
|
||||
base._childConstructors.push( constructor );
|
||||
}
|
||||
|
||||
$.widget.bridge( name, constructor );
|
||||
|
||||
return constructor;
|
||||
};
|
||||
|
||||
$.widget.extend = function( target ) {
|
||||
var input = widget_slice.call( arguments, 1 ),
|
||||
inputIndex = 0,
|
||||
inputLength = input.length,
|
||||
key,
|
||||
value;
|
||||
for ( ; inputIndex < inputLength; inputIndex++ ) {
|
||||
for ( key in input[ inputIndex ] ) {
|
||||
value = input[ inputIndex ][ key ];
|
||||
if ( input[ inputIndex ].hasOwnProperty( key ) && value !== undefined ) {
|
||||
// Clone objects
|
||||
if ( $.isPlainObject( value ) ) {
|
||||
target[ key ] = $.isPlainObject( target[ key ] ) ?
|
||||
$.widget.extend( {}, target[ key ], value ) :
|
||||
// Don't extend strings, arrays, etc. with objects
|
||||
$.widget.extend( {}, value );
|
||||
// Copy everything else by reference
|
||||
} else {
|
||||
target[ key ] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return target;
|
||||
};
|
||||
|
||||
$.widget.bridge = function( name, object ) {
|
||||
var fullName = object.prototype.widgetFullName || name;
|
||||
$.fn[ name ] = function( options ) {
|
||||
var isMethodCall = typeof options === "string",
|
||||
args = widget_slice.call( arguments, 1 ),
|
||||
returnValue = this;
|
||||
|
||||
if ( isMethodCall ) {
|
||||
this.each(function() {
|
||||
var methodValue,
|
||||
instance = $.data( this, fullName );
|
||||
if ( options === "instance" ) {
|
||||
returnValue = instance;
|
||||
return false;
|
||||
}
|
||||
if ( !instance ) {
|
||||
return $.error( "cannot call methods on " + name + " prior to initialization; " +
|
||||
"attempted to call method '" + options + "'" );
|
||||
}
|
||||
if ( !$.isFunction( instance[options] ) || options.charAt( 0 ) === "_" ) {
|
||||
return $.error( "no such method '" + options + "' for " + name + " widget instance" );
|
||||
}
|
||||
methodValue = instance[ options ].apply( instance, args );
|
||||
if ( methodValue !== instance && methodValue !== undefined ) {
|
||||
returnValue = methodValue && methodValue.jquery ?
|
||||
returnValue.pushStack( methodValue.get() ) :
|
||||
methodValue;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
|
||||
// Allow multiple hashes to be passed on init
|
||||
if ( args.length ) {
|
||||
options = $.widget.extend.apply( null, [ options ].concat(args) );
|
||||
}
|
||||
|
||||
this.each(function() {
|
||||
var instance = $.data( this, fullName );
|
||||
if ( instance ) {
|
||||
instance.option( options || {} );
|
||||
if ( instance._init ) {
|
||||
instance._init();
|
||||
}
|
||||
} else {
|
||||
$.data( this, fullName, new object( options, this ) );
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return returnValue;
|
||||
};
|
||||
};
|
||||
|
||||
$.Widget = function( /* options, element */ ) {};
|
||||
$.Widget._childConstructors = [];
|
||||
|
||||
$.Widget.prototype = {
|
||||
widgetName: "widget",
|
||||
widgetEventPrefix: "",
|
||||
defaultElement: "<div>",
|
||||
options: {
|
||||
disabled: false,
|
||||
|
||||
// callbacks
|
||||
create: null
|
||||
},
|
||||
_createWidget: function( options, element ) {
|
||||
element = $( element || this.defaultElement || this )[ 0 ];
|
||||
this.element = $( element );
|
||||
this.uuid = widget_uuid++;
|
||||
this.eventNamespace = "." + this.widgetName + this.uuid;
|
||||
|
||||
this.bindings = $();
|
||||
this.hoverable = $();
|
||||
this.focusable = $();
|
||||
|
||||
if ( element !== this ) {
|
||||
$.data( element, this.widgetFullName, this );
|
||||
this._on( true, this.element, {
|
||||
remove: function( event ) {
|
||||
if ( event.target === element ) {
|
||||
this.destroy();
|
||||
}
|
||||
}
|
||||
});
|
||||
this.document = $( element.style ?
|
||||
// element within the document
|
||||
element.ownerDocument :
|
||||
// element is window or document
|
||||
element.document || element );
|
||||
this.window = $( this.document[0].defaultView || this.document[0].parentWindow );
|
||||
}
|
||||
|
||||
this.options = $.widget.extend( {},
|
||||
this.options,
|
||||
this._getCreateOptions(),
|
||||
options );
|
||||
|
||||
this._create();
|
||||
this._trigger( "create", null, this._getCreateEventData() );
|
||||
this._init();
|
||||
},
|
||||
_getCreateOptions: $.noop,
|
||||
_getCreateEventData: $.noop,
|
||||
_create: $.noop,
|
||||
_init: $.noop,
|
||||
|
||||
destroy: function() {
|
||||
this._destroy();
|
||||
// we can probably remove the unbind calls in 2.0
|
||||
// all event bindings should go through this._on()
|
||||
this.element
|
||||
.unbind( this.eventNamespace )
|
||||
.removeData( this.widgetFullName )
|
||||
// support: jquery <1.6.3
|
||||
// http://bugs.jquery.com/ticket/9413
|
||||
.removeData( $.camelCase( this.widgetFullName ) );
|
||||
this.widget()
|
||||
.unbind( this.eventNamespace )
|
||||
.removeAttr( "aria-disabled" )
|
||||
.removeClass(
|
||||
this.widgetFullName + "-disabled " +
|
||||
"ui-state-disabled" );
|
||||
|
||||
// clean up events and states
|
||||
this.bindings.unbind( this.eventNamespace );
|
||||
this.hoverable.removeClass( "ui-state-hover" );
|
||||
this.focusable.removeClass( "ui-state-focus" );
|
||||
},
|
||||
_destroy: $.noop,
|
||||
|
||||
widget: function() {
|
||||
return this.element;
|
||||
},
|
||||
|
||||
option: function( key, value ) {
|
||||
var options = key,
|
||||
parts,
|
||||
curOption,
|
||||
i;
|
||||
|
||||
if ( arguments.length === 0 ) {
|
||||
// don't return a reference to the internal hash
|
||||
return $.widget.extend( {}, this.options );
|
||||
}
|
||||
|
||||
if ( typeof key === "string" ) {
|
||||
// handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } }
|
||||
options = {};
|
||||
parts = key.split( "." );
|
||||
key = parts.shift();
|
||||
if ( parts.length ) {
|
||||
curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] );
|
||||
for ( i = 0; i < parts.length - 1; i++ ) {
|
||||
curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {};
|
||||
curOption = curOption[ parts[ i ] ];
|
||||
}
|
||||
key = parts.pop();
|
||||
if ( arguments.length === 1 ) {
|
||||
return curOption[ key ] === undefined ? null : curOption[ key ];
|
||||
}
|
||||
curOption[ key ] = value;
|
||||
} else {
|
||||
if ( arguments.length === 1 ) {
|
||||
return this.options[ key ] === undefined ? null : this.options[ key ];
|
||||
}
|
||||
options[ key ] = value;
|
||||
}
|
||||
}
|
||||
|
||||
this._setOptions( options );
|
||||
|
||||
return this;
|
||||
},
|
||||
_setOptions: function( options ) {
|
||||
var key;
|
||||
|
||||
for ( key in options ) {
|
||||
this._setOption( key, options[ key ] );
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
_setOption: function( key, value ) {
|
||||
this.options[ key ] = value;
|
||||
|
||||
if ( key === "disabled" ) {
|
||||
this.widget()
|
||||
.toggleClass( this.widgetFullName + "-disabled", !!value );
|
||||
|
||||
// If the widget is becoming disabled, then nothing is interactive
|
||||
if ( value ) {
|
||||
this.hoverable.removeClass( "ui-state-hover" );
|
||||
this.focusable.removeClass( "ui-state-focus" );
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
enable: function() {
|
||||
return this._setOptions({ disabled: false });
|
||||
},
|
||||
disable: function() {
|
||||
return this._setOptions({ disabled: true });
|
||||
},
|
||||
|
||||
_on: function( suppressDisabledCheck, element, handlers ) {
|
||||
var delegateElement,
|
||||
instance = this;
|
||||
|
||||
// no suppressDisabledCheck flag, shuffle arguments
|
||||
if ( typeof suppressDisabledCheck !== "boolean" ) {
|
||||
handlers = element;
|
||||
element = suppressDisabledCheck;
|
||||
suppressDisabledCheck = false;
|
||||
}
|
||||
|
||||
// no element argument, shuffle and use this.element
|
||||
if ( !handlers ) {
|
||||
handlers = element;
|
||||
element = this.element;
|
||||
delegateElement = this.widget();
|
||||
} else {
|
||||
element = delegateElement = $( element );
|
||||
this.bindings = this.bindings.add( element );
|
||||
}
|
||||
|
||||
$.each( handlers, function( event, handler ) {
|
||||
function handlerProxy() {
|
||||
// allow widgets to customize the disabled handling
|
||||
// - disabled as an array instead of boolean
|
||||
// - disabled class as method for disabling individual parts
|
||||
if ( !suppressDisabledCheck &&
|
||||
( instance.options.disabled === true ||
|
||||
$( this ).hasClass( "ui-state-disabled" ) ) ) {
|
||||
return;
|
||||
}
|
||||
return ( typeof handler === "string" ? instance[ handler ] : handler )
|
||||
.apply( instance, arguments );
|
||||
}
|
||||
|
||||
// copy the guid so direct unbinding works
|
||||
if ( typeof handler !== "string" ) {
|
||||
handlerProxy.guid = handler.guid =
|
||||
handler.guid || handlerProxy.guid || $.guid++;
|
||||
}
|
||||
|
||||
var match = event.match( /^([\w:-]*)\s*(.*)$/ ),
|
||||
eventName = match[1] + instance.eventNamespace,
|
||||
selector = match[2];
|
||||
if ( selector ) {
|
||||
delegateElement.delegate( selector, eventName, handlerProxy );
|
||||
} else {
|
||||
element.bind( eventName, handlerProxy );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_off: function( element, eventName ) {
|
||||
eventName = (eventName || "").split( " " ).join( this.eventNamespace + " " ) +
|
||||
this.eventNamespace;
|
||||
element.unbind( eventName ).undelegate( eventName );
|
||||
|
||||
// Clear the stack to avoid memory leaks (#10056)
|
||||
this.bindings = $( this.bindings.not( element ).get() );
|
||||
this.focusable = $( this.focusable.not( element ).get() );
|
||||
this.hoverable = $( this.hoverable.not( element ).get() );
|
||||
},
|
||||
|
||||
_delay: function( handler, delay ) {
|
||||
function handlerProxy() {
|
||||
return ( typeof handler === "string" ? instance[ handler ] : handler )
|
||||
.apply( instance, arguments );
|
||||
}
|
||||
var instance = this;
|
||||
return setTimeout( handlerProxy, delay || 0 );
|
||||
},
|
||||
|
||||
_hoverable: function( element ) {
|
||||
this.hoverable = this.hoverable.add( element );
|
||||
this._on( element, {
|
||||
mouseenter: function( event ) {
|
||||
$( event.currentTarget ).addClass( "ui-state-hover" );
|
||||
},
|
||||
mouseleave: function( event ) {
|
||||
$( event.currentTarget ).removeClass( "ui-state-hover" );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_focusable: function( element ) {
|
||||
this.focusable = this.focusable.add( element );
|
||||
this._on( element, {
|
||||
focusin: function( event ) {
|
||||
$( event.currentTarget ).addClass( "ui-state-focus" );
|
||||
},
|
||||
focusout: function( event ) {
|
||||
$( event.currentTarget ).removeClass( "ui-state-focus" );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_trigger: function( type, event, data ) {
|
||||
var prop, orig,
|
||||
callback = this.options[ type ];
|
||||
|
||||
data = data || {};
|
||||
event = $.Event( event );
|
||||
event.type = ( type === this.widgetEventPrefix ?
|
||||
type :
|
||||
this.widgetEventPrefix + type ).toLowerCase();
|
||||
// the original event may come from any element
|
||||
// so we need to reset the target on the new event
|
||||
event.target = this.element[ 0 ];
|
||||
|
||||
// copy original event properties over to the new event
|
||||
orig = event.originalEvent;
|
||||
if ( orig ) {
|
||||
for ( prop in orig ) {
|
||||
if ( !( prop in event ) ) {
|
||||
event[ prop ] = orig[ prop ];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.element.trigger( event, data );
|
||||
return !( $.isFunction( callback ) &&
|
||||
callback.apply( this.element[0], [ event ].concat( data ) ) === false ||
|
||||
event.isDefaultPrevented() );
|
||||
}
|
||||
};
|
||||
|
||||
$.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) {
|
||||
$.Widget.prototype[ "_" + method ] = function( element, options, callback ) {
|
||||
if ( typeof options === "string" ) {
|
||||
options = { effect: options };
|
||||
}
|
||||
var hasOptions,
|
||||
effectName = !options ?
|
||||
method :
|
||||
options === true || typeof options === "number" ?
|
||||
defaultEffect :
|
||||
options.effect || defaultEffect;
|
||||
options = options || {};
|
||||
if ( typeof options === "number" ) {
|
||||
options = { duration: options };
|
||||
}
|
||||
hasOptions = !$.isEmptyObject( options );
|
||||
options.complete = callback;
|
||||
if ( options.delay ) {
|
||||
element.delay( options.delay );
|
||||
}
|
||||
if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) {
|
||||
element[ method ]( options );
|
||||
} else if ( effectName !== method && element[ effectName ] ) {
|
||||
element[ effectName ]( options.duration, options.easing, callback );
|
||||
} else {
|
||||
element.queue(function( next ) {
|
||||
$( this )[ method ]();
|
||||
if ( callback ) {
|
||||
callback.call( element[ 0 ] );
|
||||
}
|
||||
next();
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
var widget = $.widget;
|
||||
|
||||
|
||||
|
||||
}));
|
||||
2 lib/cloudinary/static/js/load-image.all.min.js vendored Normal file
File diff suppressed because one or more lines are too long
12 lib/cloudinary/templates/cloudinary_direct_upload.html Normal file
@@ -0,0 +1,12 @@
<form action={{url}} method="POST" enctype="multipart/form-data">
{% for name, value in params.items %}
<input type="hidden" name="{{name}}" value="{{value}}"/>
{% endfor %}
{% block extra %} {% endblock %}
{% block file %}
<input type="file" name="file"/>
{% endblock %}
{% block submit %}
<input type="submit"/>
{% endblock %}
</form>
14 lib/cloudinary/templates/cloudinary_includes.html Normal file
@@ -0,0 +1,14 @@
{% load staticfiles %}

<script src="{% static "js/jquery.ui.widget.js" %}" type="text/javascript"></script>
<script src="{% static "js/jquery.iframe-transport.js" %}" type="text/javascript"></script>
<script src="{% static "js/jquery.fileupload.js" %}" type="text/javascript"></script>
<script src="{% static "js/jquery.cloudinary.js" %}" type="text/javascript"></script>

{% if processing %}
<script src="{% static "js/load-image.all.min.js" %}" type="text/javascript"></script>
<script src="{% static "js/canvas-to-blob.min.js" %}" type="text/javascript"></script>
<script src="{% static "js/jquery.fileupload-process.js" %}" type="text/javascript"></script>
<script src="{% static "js/jquery.fileupload-image.js" %}" type="text/javascript"></script>
<script src="{% static "js/jquery.fileupload-validate.js" %}" type="text/javascript"></script>
{% endif %}
3 lib/cloudinary/templates/cloudinary_js_config.html Normal file
@@ -0,0 +1,3 @@
<script type='text/javascript'>
$.cloudinary.config({{ params|safe }});
</script>
1 lib/cloudinary/templatetags/__init__.py Normal file
@@ -0,0 +1 @@
#
85 lib/cloudinary/templatetags/cloudinary.py Normal file
@@ -0,0 +1,85 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
|
||||
from django import template
|
||||
from django.forms import Form
|
||||
from django.utils.safestring import mark_safe
|
||||
|
||||
import cloudinary
|
||||
from cloudinary import CloudinaryResource, utils, uploader
|
||||
from cloudinary.forms import CloudinaryJsFileField, cl_init_js_callbacks
|
||||
from cloudinary.compat import PY3
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def cloudinary_url(context, source, options_dict=None, **options):
|
||||
if options_dict is None:
|
||||
options = dict(**options)
|
||||
else:
|
||||
options = dict(options_dict, **options)
|
||||
try:
|
||||
if context['request'].is_secure() and 'secure' not in options:
|
||||
options['secure'] = True
|
||||
except KeyError:
|
||||
pass
|
||||
if not isinstance(source, CloudinaryResource):
|
||||
source = CloudinaryResource(source)
|
||||
return source.build_url(**options)
|
||||
|
||||
|
||||
@register.simple_tag(name='cloudinary', takes_context=True)
|
||||
def cloudinary_tag(context, image, options_dict=None, **options):
|
||||
if options_dict is None:
|
||||
options = dict(**options)
|
||||
else:
|
||||
options = dict(options_dict, **options)
|
||||
try:
|
||||
if context['request'].is_secure() and 'secure' not in options:
|
||||
options['secure'] = True
|
||||
except KeyError:
|
||||
pass
|
||||
if not isinstance(image, CloudinaryResource):
|
||||
image = CloudinaryResource(image)
|
||||
return mark_safe(image.image(**options))
|
||||
|
||||
|
||||
@register.simple_tag
|
||||
def cloudinary_direct_upload_field(field_name="image", request=None):
|
||||
form = type("OnTheFlyForm", (Form,), {field_name: CloudinaryJsFileField()})()
|
||||
if request:
|
||||
cl_init_js_callbacks(form, request)
|
||||
value = form[field_name]
|
||||
if not PY3:
|
||||
value = unicode(value)
|
||||
return value
|
||||
|
||||
|
||||
"""Deprecated - please use cloudinary_direct_upload_field, or a proper form"""
|
||||
@register.inclusion_tag('cloudinary_direct_upload.html')
|
||||
def cloudinary_direct_upload(callback_url, **options):
|
||||
params = utils.build_upload_params(callback=callback_url, **options)
|
||||
params = utils.sign_request(params, options)
|
||||
|
||||
api_url = utils.cloudinary_api_url("upload", resource_type=options.get("resource_type", "image"),
|
||||
upload_prefix=options.get("upload_prefix"))
|
||||
|
||||
return {"params": params, "url": api_url}
|
||||
|
||||
|
||||
@register.inclusion_tag('cloudinary_includes.html')
|
||||
def cloudinary_includes(processing=False):
|
||||
return {"processing": processing}
|
||||
|
||||
|
||||
CLOUDINARY_JS_CONFIG_PARAMS = ("api_key", "cloud_name", "private_cdn", "secure_distribution", "cdn_subdomain")
|
||||
@register.inclusion_tag('cloudinary_js_config.html')
|
||||
def cloudinary_js_config():
|
||||
config = cloudinary.config()
|
||||
return dict(
|
||||
params=json.dumps(dict(
|
||||
(param, getattr(config, param)) for param in CLOUDINARY_JS_CONFIG_PARAMS if getattr(config, param, None)
|
||||
))
|
||||
)
|
||||
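A minimal sketch of these tags in a Django template; photo.image stands in for a CloudinaryField value and is illustrative:

{% load cloudinary %}
{% cloudinary_js_config %}
<img src="{% cloudinary_url photo.image width=300 crop='fill' %}">
{% cloudinary photo.image width=100 height=100 crop="fill" %}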
325 lib/cloudinary/uploader.py Normal file
@@ -0,0 +1,325 @@
|
||||
# Copyright Cloudinary
|
||||
import json
|
||||
import re
|
||||
import socket
|
||||
from os.path import getsize
|
||||
|
||||
import cloudinary
|
||||
import urllib3
|
||||
import certifi
|
||||
from cloudinary import utils
|
||||
from cloudinary.api import Error
|
||||
from cloudinary.compat import string_types
|
||||
from urllib3.exceptions import HTTPError
|
||||
from urllib3 import PoolManager
|
||||
|
||||
try:
|
||||
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
|
||||
except Exception:
|
||||
def is_appengine_sandbox():
|
||||
return False
|
||||
|
||||
try: # Python 2.7+
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
from urllib3.packages.ordered_dict import OrderedDict
|
||||
|
||||
if is_appengine_sandbox():
|
||||
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
|
||||
_http = AppEngineManager()
|
||||
else:
|
||||
# PoolManager uses a socket-level API behind the scenes
|
||||
_http = PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where()
|
||||
)
|
||||
|
||||
|
||||
def upload(file, **options):
|
||||
params = utils.build_upload_params(**options)
|
||||
return call_api("upload", params, file=file, **options)
|
||||
|
||||
|
||||
def unsigned_upload(file, upload_preset, **options):
|
||||
return upload(file, upload_preset=upload_preset, unsigned=True, **options)
|
||||
|
||||
|
||||
def upload_image(file, **options):
|
||||
result = upload(file, **options)
|
||||
return cloudinary.CloudinaryImage(
|
||||
result["public_id"], version=str(result["version"]),
|
||||
format=result.get("format"), metadata=result)
|
||||
|
||||
|
||||
def upload_resource(file, **options):
|
||||
result = upload(file, **options)
|
||||
return cloudinary.CloudinaryResource(
|
||||
result["public_id"], version=str(result["version"]),
|
||||
format=result.get("format"), type=result["type"], resource_type=result["resource_type"], metadata=result)
|
||||
|
||||
|
||||
def upload_large(file, **options):
|
||||
""" Upload large files. """
|
||||
upload_id = utils.random_public_id()
|
||||
with open(file, 'rb') as file_io:
|
||||
results = None
|
||||
current_loc = 0
|
||||
chunk_size = options.get("chunk_size", 20000000)
|
||||
file_size = getsize(file)
|
||||
chunk = file_io.read(chunk_size)
|
||||
while chunk:
|
||||
range = "bytes {0}-{1}/{2}".format(current_loc, current_loc + len(chunk) - 1, file_size)
|
||||
current_loc += len(chunk)
|
||||
|
||||
results = upload_large_part((file, chunk),
|
||||
http_headers={"Content-Range": range, "X-Unique-Upload-Id": upload_id},
|
||||
**options)
|
||||
options["public_id"] = results.get("public_id")
|
||||
chunk = file_io.read(chunk_size)
|
||||
return results
|
||||
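# A minimal usage sketch of upload_large (path and options are illustrative):
# the file is read in chunk_size pieces and each chunk is posted with a
# Content-Range header under a shared X-Unique-Upload-Id, e.g.
#     result = upload_large("videos/big_clip.mp4", resource_type="video",
#                           chunk_size=20000000)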
|
||||
|
||||
def upload_large_part(file, **options):
|
||||
""" Upload large files. """
|
||||
params = utils.build_upload_params(**options)
|
||||
if 'resource_type' not in options: options['resource_type'] = "raw"
|
||||
return call_api("upload", params, file=file, **options)
|
||||
|
||||
|
||||
def destroy(public_id, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"type": options.get("type"),
|
||||
"invalidate": options.get("invalidate"),
|
||||
"public_id": public_id
|
||||
}
|
||||
return call_api("destroy", params, **options)
|
||||
|
||||
|
||||
def rename(from_public_id, to_public_id, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"type": options.get("type"),
|
||||
"overwrite": options.get("overwrite"),
|
||||
"invalidate": options.get("invalidate"),
|
||||
"from_public_id": from_public_id,
|
||||
"to_public_id": to_public_id
|
||||
}
|
||||
return call_api("rename", params, **options)
|
||||
|
||||
|
||||
def explicit(public_id, **options):
|
||||
params = utils.build_upload_params(**options)
|
||||
params["public_id"] = public_id
|
||||
return call_api("explicit", params, **options)
|
||||
|
||||
|
||||
def create_archive(**options):
|
||||
params = utils.archive_params(**options)
|
||||
if options.get("target_format") is not None:
|
||||
params["target_format"] = options.get("target_format")
|
||||
return call_api("generate_archive", params, **options)
|
||||
|
||||
|
||||
def create_zip(**options):
|
||||
return create_archive(target_format="zip", **options)
|
||||
|
||||
|
||||
def generate_sprite(tag, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"tag": tag,
|
||||
"async": options.get("async"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"transformation": utils.generate_transformation_string(fetch_format=options.get("format"), **options)[0]
|
||||
}
|
||||
return call_api("sprite", params, **options)
|
||||
|
||||
|
||||
def multi(tag, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"tag": tag,
|
||||
"format": options.get("format"),
|
||||
"async": options.get("async"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"transformation": utils.generate_transformation_string(**options)[0]
|
||||
}
|
||||
return call_api("multi", params, **options)
|
||||
|
||||
|
||||
def explode(public_id, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"public_id": public_id,
|
||||
"format": options.get("format"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"transformation": utils.generate_transformation_string(**options)[0]
|
||||
}
|
||||
return call_api("explode", params, **options)
|
||||
|
||||
|
||||
# options may include 'exclusive' (boolean) which causes clearing this tag from all other resources
|
||||
def add_tag(tag, public_ids=None, **options):
|
||||
exclusive = options.pop("exclusive", None)
|
||||
command = "set_exclusive" if exclusive else "add"
|
||||
return call_tags_api(tag, command, public_ids, **options)
|
||||
|
||||
|
||||
def remove_tag(tag, public_ids=None, **options):
|
||||
return call_tags_api(tag, "remove", public_ids, **options)
|
||||
|
||||
|
||||
def replace_tag(tag, public_ids=None, **options):
|
||||
return call_tags_api(tag, "replace", public_ids, **options)
|
||||
|
||||
|
||||
def remove_all_tags(public_ids, **options):
|
||||
"""
|
||||
Remove all tags from the specified public IDs.
|
||||
:param public_ids: the public IDs of the resources to update
|
||||
:param options: additional options passed to the request
|
||||
:return: dictionary with a list of public IDs that were updated
|
||||
"""
|
||||
return call_tags_api(None, "remove_all", public_ids, **options)
|
||||
|
||||
|
||||
def add_context(context, public_ids, **options):
|
||||
"""
|
||||
Add a context keys and values. If a particular key already exists, the value associated with the key is updated.
|
||||
:param context: dictionary of context
|
||||
:param public_ids: the public IDs of the resources to update
|
||||
:param options: additional options passed to the request
|
||||
:return: dictionary with a list of public IDs that were updated
|
||||
"""
|
||||
return call_context_api(context, "add", public_ids, **options)
|
||||
|
||||
|
||||
def remove_all_context(public_ids, **options):
|
||||
"""
|
||||
Remove all custom context from the specified public IDs.
|
||||
:param public_ids: the public IDs of the resources to update
|
||||
:param options: additional options passed to the request
|
||||
:return: dictionary with a list of public IDs that were updated
|
||||
"""
|
||||
return call_context_api(None, "remove_all", public_ids, **options)
|
||||
|
||||
|
||||
def call_tags_api(tag, command, public_ids=None, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"tag": tag,
|
||||
"public_ids": utils.build_array(public_ids),
|
||||
"command": command,
|
||||
"type": options.get("type")
|
||||
}
|
||||
return call_api("tags", params, **options)
|
||||
|
||||
|
||||
def call_context_api(context, command, public_ids=None, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"context": utils.encode_context(context),
|
||||
"public_ids": utils.build_array(public_ids),
|
||||
"command": command,
|
||||
"type": options.get("type")
|
||||
}
|
||||
return call_api("context", params, **options)
|
||||
|
||||
|
||||
TEXT_PARAMS = ["public_id",
|
||||
"font_family",
|
||||
"font_size",
|
||||
"font_color",
|
||||
"text_align",
|
||||
"font_weight",
|
||||
"font_style",
|
||||
"background",
|
||||
"opacity",
|
||||
"text_decoration"
|
||||
]
|
||||
|
||||
|
||||
def text(text, **options):
|
||||
params = {"timestamp": utils.now(), "text": text}
|
||||
for key in TEXT_PARAMS:
|
||||
params[key] = options.get(key)
|
||||
return call_api("text", params, **options)
|
||||
|
||||
|
||||
def call_api(action, params, http_headers=None, return_error=False, unsigned=False, file=None, timeout=None, **options):
|
||||
if http_headers is None:
|
||||
http_headers = {}
|
||||
file_io = None
|
||||
try:
|
||||
if unsigned:
|
||||
params = utils.cleanup_params(params)
|
||||
else:
|
||||
params = utils.sign_request(params, options)
|
||||
|
||||
param_list = OrderedDict()
|
||||
for k, v in params.items():
|
||||
if isinstance(v, list):
|
||||
for i in range(len(v)):
|
||||
param_list["{0}[{1}]".format(k, i)] = v[i]
|
||||
elif v:
|
||||
param_list[k] = v
|
||||
|
||||
api_url = utils.cloudinary_api_url(action, **options)
|
||||
if file:
|
||||
if isinstance(file, string_types):
|
||||
if re.match(r'ftp:|https?:|s3:|data:[^;]*;base64,([a-zA-Z0-9\/+\n=]+)$', file):
|
||||
# URL
|
||||
name = None
|
||||
data = file
|
||||
else:
|
||||
# file path
|
||||
name = file
|
||||
with open(file, "rb") as opened:
|
||||
data = opened.read()
|
||||
elif hasattr(file, 'read') and callable(file.read):
|
||||
# stream
|
||||
data = file.read()
|
||||
name = file.name if hasattr(file, 'name') and isinstance(file.name, str) else "stream"
|
||||
elif isinstance(file, tuple):
|
||||
name = None
|
||||
data = file
|
||||
else:
|
||||
# Not a string, not a stream
|
||||
name = "file"
|
||||
data = file
|
||||
|
||||
param_list["file"] = (name, data) if name else data
|
||||
|
||||
headers = {"User-Agent": cloudinary.get_user_agent()}
|
||||
headers.update(http_headers)
|
||||
|
||||
kw = {}
|
||||
if timeout is not None:
|
||||
kw['timeout'] = timeout
|
||||
|
||||
code = 200
|
||||
try:
|
||||
response = _http.request("POST", api_url, param_list, headers, **kw)
|
||||
except HTTPError as e:
|
||||
raise Error("Unexpected error - {0!r}".format(e))
|
||||
except socket.error as e:
|
||||
raise Error("Socket error: {0!r}".format(e))
|
||||
|
||||
try:
|
||||
result = json.loads(response.data.decode('utf-8'))
|
||||
except Exception as e:
|
||||
            # Error parsing json response
|
||||
raise Error("Error parsing server response (%d) - %s. Got - %s", response.status, response, e)
|
||||
|
||||
if "error" in result:
|
||||
if response.status not in [200, 400, 401, 403, 404, 500]:
|
||||
code = response.status
|
||||
if return_error:
|
||||
result["error"]["http_code"] = code
|
||||
else:
|
||||
raise Error(result["error"]["message"])
|
||||
|
||||
return result
|
||||
finally:
|
||||
if file_io: file_io.close()
|
||||
912
lib/cloudinary/utils.py
Normal file
@@ -0,0 +1,912 @@
|
||||
# Copyright Cloudinary
|
||||
import base64
|
||||
import copy
|
||||
import hashlib
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import struct
|
||||
import time
|
||||
import zlib
|
||||
from collections import OrderedDict
|
||||
from datetime import datetime, date
|
||||
from fractions import Fraction
|
||||
|
||||
import six.moves.urllib.parse
|
||||
from six import iteritems
|
||||
|
||||
import cloudinary
|
||||
from cloudinary import auth_token
|
||||
from cloudinary.compat import PY3, to_bytes, to_bytearray, to_string, string_types, urlparse
|
||||
|
||||
VAR_NAME_RE = r'(\$\([a-zA-Z]\w+\))'
|
||||
|
||||
urlencode = six.moves.urllib.parse.urlencode
|
||||
unquote = six.moves.urllib.parse.unquote
|
||||
|
||||
""" @deprecated: use cloudinary.SHARED_CDN """
|
||||
SHARED_CDN = "res.cloudinary.com"
|
||||
|
||||
DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION = {"width": "auto", "crop": "limit"}
|
||||
|
||||
RANGE_VALUE_RE = r'^(?P<value>(\d+\.)?\d+)(?P<modifier>[%pP])?$'
|
||||
RANGE_RE = r'^(\d+\.)?\d+[%pP]?\.\.(\d+\.)?\d+[%pP]?$'
|
||||
FLOAT_RE = r'^(\d+)\.(\d+)?$'
|
||||
__LAYER_KEYWORD_PARAMS = [("font_weight", "normal"),
|
||||
("font_style", "normal"),
|
||||
("text_decoration", "none"),
|
||||
("text_align", None),
|
||||
("stroke", "none")]
|
||||
|
||||
|
||||
def build_array(arg):
|
||||
if isinstance(arg, list):
|
||||
return arg
|
||||
elif arg is None:
|
||||
return []
|
||||
else:
|
||||
return [arg]
|
||||
|
||||
|
||||
def build_list_of_dicts(val):
|
||||
"""
|
||||
    Converts a value that can be represented as a list of dicts.
|
||||
|
||||
    If the top-level item is not a list, it is wrapped in a list
|
||||
|
||||
Valid values examples:
|
||||
    - Valid dict: {"k": "v", "k2": "v2"}
|
||||
    - List of dict: [{"k": "v"}, {"k2": "v2"}]
|
||||
- JSON decodable string: '{"k": "v"}', or '[{"k": "v"}]'
|
||||
    - List of JSON decodable strings: ['{"k": "v"}', '{"k2": "v2"}']
|
||||
|
||||
Invalid values examples:
|
||||
- ["not", "a", "dict"]
|
||||
- [123, None],
|
||||
- [["another", "list"]]
|
||||
|
||||
:param val: Input value
|
||||
:type val: Union[list, dict, str]
|
||||
|
||||
:return: Converted(or original) list of dict
|
||||
:raises: ValueError in case value cannot be converted to a list of dict
|
||||
"""
|
||||
if val is None:
|
||||
return []
|
||||
|
||||
if isinstance(val, str):
|
||||
# use OrderedDict to preserve order
|
||||
val = json.loads(val, object_pairs_hook=OrderedDict)
|
||||
|
||||
if isinstance(val, dict):
|
||||
val = [val]
|
||||
|
||||
for index, item in enumerate(val):
|
||||
if isinstance(item, str):
|
||||
# use OrderedDict to preserve order
|
||||
val[index] = json.loads(item, object_pairs_hook=OrderedDict)
|
||||
if not isinstance(val[index], dict):
|
||||
raise ValueError("Expected a list of dicts")
|
||||
return val
|
||||
|
||||
|
||||
def encode_double_array(array):
|
||||
array = build_array(array)
|
||||
if len(array) > 0 and isinstance(array[0], list):
|
||||
return "|".join([",".join([str(i) for i in build_array(inner)]) for inner in array])
|
||||
else:
|
||||
return ",".join([str(i) for i in array])
|
||||
|
||||
|
||||
def encode_dict(arg):
|
||||
if isinstance(arg, dict):
|
||||
if PY3:
|
||||
items = arg.items()
|
||||
else:
|
||||
items = arg.iteritems()
|
||||
return "|".join((k + "=" + v) for k, v in items)
|
||||
else:
|
||||
return arg
|
||||
|
||||
|
||||
def encode_context(context):
|
||||
"""
|
||||
:param context: dict of context to be encoded
|
||||
:return: a joined string of all keys and values properly escaped and separated by a pipe character
|
||||
"""
|
||||
|
||||
if not isinstance(context, dict):
|
||||
return context
|
||||
|
||||
return "|".join(("{}={}".format(k, v.replace("=", "\\=").replace("|", "\\|"))) for k, v in iteritems(context))
|
||||
|
||||
|
||||
def json_encode(value):
|
||||
"""
|
||||
Converts value to a json encoded string
|
||||
|
||||
:param value: value to be encoded
|
||||
|
||||
:return: JSON encoded string
|
||||
"""
|
||||
return json.dumps(value, default=__json_serializer, separators=(',', ':'))
|
||||
|
||||
|
||||
def generate_transformation_string(**options):
|
||||
responsive_width = options.pop("responsive_width", cloudinary.config().responsive_width)
|
||||
size = options.pop("size", None)
|
||||
if size:
|
||||
options["width"], options["height"] = size.split("x")
|
||||
width = options.get("width")
|
||||
height = options.get("height")
|
||||
has_layer = ("underlay" in options) or ("overlay" in options)
|
||||
|
||||
crop = options.pop("crop", None)
|
||||
angle = ".".join([str(value) for value in build_array(options.pop("angle", None))])
|
||||
no_html_sizes = has_layer or angle or crop == "fit" or crop == "limit" or responsive_width
|
||||
|
||||
if width and (str(width).startswith("auto") or str(width) == "ow" or is_fraction(width) or no_html_sizes):
|
||||
del options["width"]
|
||||
if height and (str(height) == "oh" or is_fraction(height) or no_html_sizes):
|
||||
del options["height"]
|
||||
|
||||
background = options.pop("background", None)
|
||||
if background:
|
||||
background = background.replace("#", "rgb:")
|
||||
color = options.pop("color", None)
|
||||
if color:
|
||||
color = color.replace("#", "rgb:")
|
||||
|
||||
base_transformations = build_array(options.pop("transformation", None))
|
||||
if any(isinstance(bs, dict) for bs in base_transformations):
|
||||
def recurse(bs):
|
||||
if isinstance(bs, dict):
|
||||
return generate_transformation_string(**bs)[0]
|
||||
else:
|
||||
return generate_transformation_string(transformation=bs)[0]
|
||||
base_transformations = list(map(recurse, base_transformations))
|
||||
named_transformation = None
|
||||
else:
|
||||
named_transformation = ".".join(base_transformations)
|
||||
base_transformations = []
|
||||
|
||||
effect = options.pop("effect", None)
|
||||
if isinstance(effect, list):
|
||||
effect = ":".join([str(x) for x in effect])
|
||||
elif isinstance(effect, dict):
|
||||
effect = ":".join([str(x) for x in list(effect.items())[0]])
|
||||
|
||||
border = options.pop("border", None)
|
||||
if isinstance(border, dict):
|
||||
border_color = border.get("color", "black").replace("#", "rgb:")
|
||||
border = "%(width)spx_solid_%(color)s" % {"color": border_color,
|
||||
"width": str(border.get("width", 2))}
|
||||
|
||||
flags = ".".join(build_array(options.pop("flags", None)))
|
||||
dpr = options.pop("dpr", cloudinary.config().dpr)
|
||||
duration = norm_range_value(options.pop("duration", None))
|
||||
start_offset = norm_range_value(options.pop("start_offset", None))
|
||||
end_offset = norm_range_value(options.pop("end_offset", None))
|
||||
offset = split_range(options.pop("offset", None))
|
||||
if offset:
|
||||
start_offset = norm_range_value(offset[0])
|
||||
end_offset = norm_range_value(offset[1])
|
||||
|
||||
video_codec = process_video_codec_param(options.pop("video_codec", None))
|
||||
|
||||
aspect_ratio = options.pop("aspect_ratio", None)
|
||||
if isinstance(aspect_ratio, Fraction):
|
||||
aspect_ratio = str(aspect_ratio.numerator) + ":" + str(aspect_ratio.denominator)
|
||||
|
||||
overlay = process_layer(options.pop("overlay", None), "overlay")
|
||||
underlay = process_layer(options.pop("underlay", None), "underlay")
|
||||
if_value = process_conditional(options.pop("if", None))
|
||||
|
||||
params = {
|
||||
"a": normalize_expression(angle),
|
||||
"ar": normalize_expression(aspect_ratio),
|
||||
"b": background,
|
||||
"bo": border,
|
||||
"c": crop,
|
||||
"co": color,
|
||||
"dpr": normalize_expression(dpr),
|
||||
"du": normalize_expression(duration),
|
||||
"e": normalize_expression(effect),
|
||||
"eo": normalize_expression(end_offset),
|
||||
"fl": flags,
|
||||
"h": normalize_expression(height),
|
||||
"l": overlay,
|
||||
"o": normalize_expression(options.pop('opacity',None)),
|
||||
"q": normalize_expression(options.pop('quality',None)),
|
||||
"r": normalize_expression(options.pop('radius',None)),
|
||||
"so": normalize_expression(start_offset),
|
||||
"t": named_transformation,
|
||||
"u": underlay,
|
||||
"w": normalize_expression(width),
|
||||
"x": normalize_expression(options.pop('x',None)),
|
||||
"y": normalize_expression(options.pop('y',None)),
|
||||
"vc": video_codec,
|
||||
"z": normalize_expression(options.pop('zoom',None))
|
||||
}
|
||||
simple_params = {
|
||||
"ac": "audio_codec",
|
||||
"af": "audio_frequency",
|
||||
"br": "bit_rate",
|
||||
"cs": "color_space",
|
||||
"d": "default_image",
|
||||
"dl": "delay",
|
||||
"dn": "density",
|
||||
"f": "fetch_format",
|
||||
"g": "gravity",
|
||||
"ki": "keyframe_interval",
|
||||
"p": "prefix",
|
||||
"pg": "page",
|
||||
"sp": "streaming_profile",
|
||||
"vs": "video_sampling",
|
||||
}
|
||||
|
||||
for param, option in simple_params.items():
|
||||
params[param] = options.pop(option, None)
|
||||
|
||||
variables = options.pop('variables',{})
|
||||
var_params = []
|
||||
for key,value in options.items():
|
||||
if re.match(r'^\$', key):
|
||||
var_params.append(u"{0}_{1}".format(key, normalize_expression(str(value))))
|
||||
|
||||
var_params.sort()
|
||||
|
||||
if variables:
|
||||
for var in variables:
|
||||
var_params.append(u"{0}_{1}".format(var[0], normalize_expression(str(var[1]))))
|
||||
|
||||
|
||||
variables = ','.join(var_params)
|
||||
|
||||
sorted_params = sorted([param + "_" + str(value) for param, value in params.items() if (value or value == 0)])
|
||||
if variables:
|
||||
sorted_params.insert(0, str(variables))
|
||||
|
||||
if if_value is not None:
|
||||
sorted_params.insert(0, "if_" + str(if_value))
|
||||
transformation = ",".join(sorted_params)
|
||||
if "raw_transformation" in options:
|
||||
transformation = transformation + "," + options.pop("raw_transformation")
|
||||
transformations = base_transformations + [transformation]
|
||||
if responsive_width:
|
||||
responsive_width_transformation = cloudinary.config().responsive_width_transformation \
|
||||
or DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION
|
||||
transformations += [generate_transformation_string(**responsive_width_transformation)[0]]
|
||||
url = "/".join([trans for trans in transformations if trans])
|
||||
|
||||
if str(width).startswith("auto") or responsive_width:
|
||||
options["responsive"] = True
|
||||
if dpr == "auto":
|
||||
options["hidpi"] = True
|
||||
return url, options
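To make the mapping above concrete: each option is translated to its short code, non-empty codes are sorted and comma-joined, and chained transformations are slash-joined. A small sketch (values are illustrative):

```
# Illustrative only; the option names are parameters handled above.
fragment, leftover = generate_transformation_string(
    width=200, height=100, crop="fill")
# fragment -> "c_fill,h_100,w_200"
# leftover -> remaining options (width/height are kept for HTML attributes)
```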
|
||||
|
||||
|
||||
def is_fraction(width):
|
||||
width = str(width)
|
||||
return re.match(FLOAT_RE, width) and float(width) < 1
|
||||
|
||||
|
||||
def split_range(range):
|
||||
if (isinstance(range, list) or isinstance(range, tuple)) and len(range) >= 2:
|
||||
return [range[0], range[-1]]
|
||||
elif isinstance(range, string_types) and re.match(RANGE_RE, range):
|
||||
return range.split("..", 1)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def norm_range_value(value):
|
||||
if value is None: return None
|
||||
|
||||
match = re.match(RANGE_VALUE_RE, str(value))
|
||||
|
||||
if match is None: return None
|
||||
|
||||
modifier = ''
|
||||
if match.group('modifier') is not None:
|
||||
modifier = 'p'
|
||||
return match.group('value') + modifier
|
||||
|
||||
|
||||
def process_video_codec_param(param):
|
||||
out_param = param
|
||||
if isinstance(out_param, dict):
|
||||
out_param = param['codec']
|
||||
if 'profile' in param:
|
||||
out_param = out_param + ':' + param['profile']
|
||||
if 'level' in param:
|
||||
out_param = out_param + ':' + param['level']
|
||||
return out_param
|
||||
|
||||
|
||||
def cleanup_params(params):
|
||||
return dict([(k, __safe_value(v)) for (k, v) in params.items() if v is not None and not v == ""])
|
||||
|
||||
|
||||
def sign_request(params, options):
|
||||
api_key = options.get("api_key", cloudinary.config().api_key)
|
||||
if not api_key: raise ValueError("Must supply api_key")
|
||||
api_secret = options.get("api_secret", cloudinary.config().api_secret)
|
||||
if not api_secret: raise ValueError("Must supply api_secret")
|
||||
|
||||
params = cleanup_params(params)
|
||||
params["signature"] = api_sign_request(params, api_secret)
|
||||
params["api_key"] = api_key
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def api_sign_request(params_to_sign, api_secret):
|
||||
params = [(k + "=" + (",".join(v) if isinstance(v, list) else str(v))) for k, v in params_to_sign.items() if v]
|
||||
to_sign = "&".join(sorted(params))
|
||||
return hashlib.sha1(to_bytes(to_sign + api_secret)).hexdigest()
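The signature above is the SHA-1 hex digest of the sorted `key=value` pairs joined with `&`, with the API secret appended. A worked illustration with made-up values:

```
# Made-up credentials, purely to show the string that gets hashed.
params = {"timestamp": "1315060510", "public_id": "sample"}
secret = "abcd"
# hashed string: "public_id=sample&timestamp=1315060510" + "abcd"
signature = api_sign_request(params, secret)
```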
|
||||
|
||||
|
||||
def breakpoint_settings_mapper(breakpoint_settings):
|
||||
breakpoint_settings = copy.deepcopy(breakpoint_settings)
|
||||
transformation = breakpoint_settings.get("transformation")
|
||||
if transformation is not None:
|
||||
breakpoint_settings["transformation"], _ = generate_transformation_string(**transformation)
|
||||
return breakpoint_settings
|
||||
|
||||
|
||||
def generate_responsive_breakpoints_string(breakpoints):
|
||||
if breakpoints is None:
|
||||
return None
|
||||
breakpoints = build_array(breakpoints)
|
||||
return json.dumps(list(map(breakpoint_settings_mapper, breakpoints)))
|
||||
|
||||
|
||||
def finalize_source(source, format, url_suffix):
|
||||
source = re.sub(r'([^:])/+', r'\1/', source)
|
||||
if re.match(r'^https?:/', source):
|
||||
source = smart_escape(source)
|
||||
source_to_sign = source
|
||||
else:
|
||||
source = unquote(source)
|
||||
if not PY3: source = source.encode('utf8')
|
||||
source = smart_escape(source)
|
||||
source_to_sign = source
|
||||
if url_suffix is not None:
|
||||
if re.search(r'[\./]', url_suffix): raise ValueError("url_suffix should not include . or /")
|
||||
source = source + "/" + url_suffix
|
||||
if format is not None:
|
||||
source = source + "." + format
|
||||
source_to_sign = source_to_sign + "." + format
|
||||
|
||||
return source, source_to_sign
|
||||
|
||||
|
||||
def finalize_resource_type(resource_type, type, url_suffix, use_root_path, shorten):
|
||||
upload_type = type or "upload"
|
||||
if url_suffix is not None:
|
||||
if resource_type == "image" and upload_type == "upload":
|
||||
resource_type = "images"
|
||||
upload_type = None
|
||||
elif resource_type == "raw" and upload_type == "upload":
|
||||
resource_type = "files"
|
||||
upload_type = None
|
||||
else:
|
||||
raise ValueError("URL Suffix only supported for image/upload and raw/upload")
|
||||
|
||||
if use_root_path:
|
||||
if (resource_type == "image" and upload_type == "upload") or (resource_type == "images" and upload_type is None):
|
||||
resource_type = None
|
||||
upload_type = None
|
||||
else:
|
||||
raise ValueError("Root path only supported for image/upload")
|
||||
|
||||
if shorten and resource_type == "image" and upload_type == "upload":
|
||||
resource_type = "iu"
|
||||
upload_type = None
|
||||
|
||||
return resource_type, upload_type
|
||||
|
||||
|
||||
def unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname, secure,
|
||||
secure_distribution):
|
||||
"""cdn_subdomain and secure_cdn_subdomain
|
||||
1) Customers in shared distribution (e.g. res.cloudinary.com)
|
||||
    if cdn_subdomain is true uses res-[1-5].cloudinary.com for both http and https. Setting secure_cdn_subdomain to false disables this for https.
|
||||
2) Customers with private cdn
|
||||
    if cdn_subdomain is true uses cloudname-res-[1-5].cloudinary.com for http
|
||||
    if secure_cdn_subdomain is true uses cloudname-res-[1-5].cloudinary.com for https (please contact support if you require this)
|
||||
3) Customers with cname
|
||||
    if cdn_subdomain is true uses a[1-5].cname for http. For https, uses the same naming scheme as 1 for shared distribution and as 2 for private distribution."""
|
||||
shared_domain = not private_cdn
|
||||
shard = __crc(source)
|
||||
if secure:
|
||||
if secure_distribution is None or secure_distribution == cloudinary.OLD_AKAMAI_SHARED_CDN:
|
||||
secure_distribution = cloud_name + "-res.cloudinary.com" if private_cdn else cloudinary.SHARED_CDN
|
||||
|
||||
shared_domain = shared_domain or secure_distribution == cloudinary.SHARED_CDN
|
||||
if secure_cdn_subdomain is None and shared_domain:
|
||||
secure_cdn_subdomain = cdn_subdomain
|
||||
|
||||
if secure_cdn_subdomain:
|
||||
secure_distribution = re.sub('res.cloudinary.com', "res-" + shard + ".cloudinary.com", secure_distribution)
|
||||
|
||||
prefix = "https://" + secure_distribution
|
||||
elif cname:
|
||||
subdomain = "a" + shard + "." if cdn_subdomain else ""
|
||||
prefix = "http://" + subdomain + cname
|
||||
else:
|
||||
subdomain = cloud_name + "-res" if private_cdn else "res"
|
||||
if cdn_subdomain: subdomain = subdomain + "-" + shard
|
||||
prefix = "http://" + subdomain + ".cloudinary.com"
|
||||
|
||||
if shared_domain: prefix += "/" + cloud_name
|
||||
|
||||
return prefix
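A few illustrative outcomes of the prefix logic above, assuming a placeholder cloud name of `demo`:

```
# Hypothetical values; "demo" and "media.example.com" are placeholders.
prefix = unsigned_download_url_prefix(
    "sample", "demo", private_cdn=False, cdn_subdomain=False,
    secure_cdn_subdomain=None, cname=None, secure=True,
    secure_distribution=None)
# -> "https://res.cloudinary.com/demo"
# With private_cdn=True and secure=False: "http://demo-res.cloudinary.com"
# With cname="media.example.com" and secure=False: "http://media.example.com/demo"
```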
|
||||
|
||||
|
||||
def merge(*dict_args):
|
||||
result = None
|
||||
for dictionary in dict_args:
|
||||
if dictionary is not None:
|
||||
if result is None:
|
||||
result = dictionary.copy()
|
||||
else:
|
||||
result.update(dictionary)
|
||||
return result
|
||||
|
||||
|
||||
def cloudinary_url(source, **options):
|
||||
original_source = source
|
||||
|
||||
type = options.pop("type", "upload")
|
||||
if type == 'fetch':
|
||||
options["fetch_format"] = options.get("fetch_format", options.pop("format", None))
|
||||
transformation, options = generate_transformation_string(**options)
|
||||
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
version = options.pop("version", None)
|
||||
format = options.pop("format", None)
|
||||
cdn_subdomain = options.pop("cdn_subdomain", cloudinary.config().cdn_subdomain)
|
||||
secure_cdn_subdomain = options.pop("secure_cdn_subdomain", cloudinary.config().secure_cdn_subdomain)
|
||||
cname = options.pop("cname", cloudinary.config().cname)
|
||||
shorten = options.pop("shorten", cloudinary.config().shorten)
|
||||
|
||||
cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name or None)
|
||||
if cloud_name is None:
|
||||
raise ValueError("Must supply cloud_name in tag or in configuration")
|
||||
secure = options.pop("secure", cloudinary.config().secure)
|
||||
private_cdn = options.pop("private_cdn", cloudinary.config().private_cdn)
|
||||
secure_distribution = options.pop("secure_distribution", cloudinary.config().secure_distribution)
|
||||
sign_url = options.pop("sign_url", cloudinary.config().sign_url)
|
||||
api_secret = options.pop("api_secret", cloudinary.config().api_secret)
|
||||
url_suffix = options.pop("url_suffix", None)
|
||||
use_root_path = options.pop("use_root_path", cloudinary.config().use_root_path)
|
||||
auth_token = options.pop("auth_token", None)
|
||||
if auth_token is not False:
|
||||
auth_token = merge(cloudinary.config().auth_token, auth_token)
|
||||
|
||||
if (not source) or type == "upload" and re.match(r'^https?:', source):
|
||||
return original_source, options
|
||||
|
||||
resource_type, type = finalize_resource_type(resource_type, type, url_suffix, use_root_path, shorten)
|
||||
source, source_to_sign = finalize_source(source, format, url_suffix)
|
||||
|
||||
if source_to_sign.find("/") >= 0 \
|
||||
and not re.match(r'^https?:/', source_to_sign) \
|
||||
and not re.match(r'^v[0-9]+', source_to_sign) \
|
||||
and not version:
|
||||
version = "1"
|
||||
if version: version = "v" + str(version)
|
||||
|
||||
transformation = re.sub(r'([^:])/+', r'\1/', transformation)
|
||||
|
||||
signature = None
|
||||
if sign_url and not auth_token:
|
||||
to_sign = "/".join(__compact([transformation, source_to_sign]))
|
||||
signature = "s--" + to_string(
|
||||
base64.urlsafe_b64encode(hashlib.sha1(to_bytes(to_sign + api_secret)).digest())[0:8]) + "--"
|
||||
|
||||
prefix = unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname,
|
||||
secure, secure_distribution)
|
||||
source = "/".join(__compact([prefix, resource_type, type, signature, transformation, version, source]))
|
||||
if sign_url and auth_token:
|
||||
path = urlparse(source).path
|
||||
token = cloudinary.auth_token.generate( **merge(auth_token, {"url": path}))
|
||||
source = "%s?%s" % (source, token)
|
||||
return source, options
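A minimal usage sketch of the URL builder above (the cloud name and public id are placeholders; normally `cloud_name` comes from configuration):

```
# Hypothetical values for illustration.
url, options = cloudinary_url(
    "sample", cloud_name="demo", width=200, crop="fill", format="png")
# url -> "http://res.cloudinary.com/demo/image/upload/c_fill,w_200/sample.png"
```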
|
||||
|
||||
|
||||
def cloudinary_api_url(action='upload', **options):
|
||||
cloudinary_prefix = options.get("upload_prefix", cloudinary.config().upload_prefix) or "https://api.cloudinary.com"
|
||||
cloud_name = options.get("cloud_name", cloudinary.config().cloud_name)
|
||||
if not cloud_name: raise ValueError("Must supply cloud_name")
|
||||
resource_type = options.get("resource_type", "image")
|
||||
return "/".join([cloudinary_prefix, "v1_1", cloud_name, resource_type, action])
|
||||
|
||||
|
||||
# Based on ruby's CGI::unescape. In addition does not escape / :
|
||||
def smart_escape(source,unsafe = r"([^a-zA-Z0-9_.\-\/:]+)"):
|
||||
def pack(m):
|
||||
return to_bytes('%' + "%".join(["%02X" % x for x in struct.unpack('B' * len(m.group(1)), m.group(1))]).upper())
|
||||
return to_string(re.sub(to_bytes(unsafe), pack, to_bytes(source)))
|
||||
|
||||
|
||||
def random_public_id():
|
||||
return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(16))
|
||||
|
||||
|
||||
def signed_preloaded_image(result):
|
||||
filename = ".".join([x for x in [result["public_id"], result["format"]] if x])
|
||||
path = "/".join([result["resource_type"], "upload", "v" + str(result["version"]), filename])
|
||||
return path + "#" + result["signature"]
|
||||
|
||||
|
||||
def now():
|
||||
return str(int(time.time()))
|
||||
|
||||
|
||||
def private_download_url(public_id, format, **options):
|
||||
cloudinary_params = sign_request({
|
||||
"timestamp": now(),
|
||||
"public_id": public_id,
|
||||
"format": format,
|
||||
"type": options.get("type"),
|
||||
"attachment": options.get("attachment"),
|
||||
"expires_at": options.get("expires_at")
|
||||
}, options)
|
||||
|
||||
return cloudinary_api_url("download", **options) + "?" + urlencode(cloudinary_params)
|
||||
|
||||
|
||||
def zip_download_url(tag, **options):
|
||||
cloudinary_params = sign_request({
|
||||
"timestamp": now(),
|
||||
"tag": tag,
|
||||
"transformation": generate_transformation_string(**options)[0]
|
||||
}, options)
|
||||
|
||||
return cloudinary_api_url("download_tag.zip", **options) + "?" + urlencode(cloudinary_params)
|
||||
|
||||
|
||||
def bracketize_seq(params):
|
||||
url_params = dict()
|
||||
for param_name in params:
|
||||
param_value = params[param_name]
|
||||
if isinstance(param_value, list):
|
||||
param_name += "[]"
|
||||
url_params[param_name] = param_value
|
||||
return url_params
|
||||
|
||||
|
||||
def download_archive_url(**options):
|
||||
params = options.copy()
|
||||
params.update(mode="download")
|
||||
cloudinary_params = sign_request(archive_params(**params), options)
|
||||
return cloudinary_api_url("generate_archive", **options) + "?" + urlencode(bracketize_seq(cloudinary_params), True)
|
||||
|
||||
|
||||
def download_zip_url(**options):
|
||||
new_options = options.copy()
|
||||
new_options.update(target_format="zip")
|
||||
return download_archive_url(**new_options)
|
||||
|
||||
def generate_auth_token(**options):
|
||||
token_options = merge(cloudinary.config().auth_token, options)
|
||||
return auth_token.generate(**token_options)
|
||||
|
||||
def archive_params(**options):
|
||||
if options.get("timestamp") is None:
|
||||
timestamp = now()
|
||||
else:
|
||||
timestamp = options.get("timestamp")
|
||||
params = {
|
||||
"allow_missing": options.get("allow_missing"),
|
||||
"async": options.get("async"),
|
||||
"expires_at": options.get("expires_at"),
|
||||
"flatten_folders": options.get("flatten_folders"),
|
||||
"flatten_transformations": options.get("flatten_transformations"),
|
||||
"keep_derived": options.get("keep_derived"),
|
||||
"mode": options.get("mode"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"phash": options.get("phash"),
|
||||
"prefixes": options.get("prefixes") and build_array(options.get("prefixes")),
|
||||
"public_ids": options.get("public_ids") and build_array(options.get("public_ids")),
|
||||
"skip_transformation_name": options.get("skip_transformation_name"),
|
||||
"tags": options.get("tags") and build_array(options.get("tags")),
|
||||
"target_format": options.get("target_format"),
|
||||
"target_public_id": options.get("target_public_id"),
|
||||
"target_tags": options.get("target_tags") and build_array(options.get("target_tags")),
|
||||
"timestamp": timestamp,
|
||||
"transformations": build_eager(options.get("transformations")),
|
||||
"type": options.get("type"),
|
||||
"use_original_filename": options.get("use_original_filename"),
|
||||
}
|
||||
return params
|
||||
|
||||
|
||||
def build_eager(transformations):
|
||||
if transformations is None:
|
||||
return None
|
||||
eager = []
|
||||
for tr in build_array(transformations):
|
||||
if isinstance(tr, string_types):
|
||||
single_eager = tr
|
||||
else:
|
||||
ext = tr.get("format")
|
||||
single_eager = "/".join([x for x in [generate_transformation_string(**tr)[0], ext] if x])
|
||||
eager.append(single_eager)
|
||||
return "|".join(eager)
|
||||
|
||||
|
||||
def build_custom_headers(headers):
|
||||
if headers is None:
|
||||
return None
|
||||
elif isinstance(headers, list):
|
||||
pass
|
||||
elif isinstance(headers, dict):
|
||||
headers = [k + ": " + v for k, v in headers.items()]
|
||||
else:
|
||||
return headers
|
||||
return "\n".join(headers)
|
||||
|
||||
|
||||
def build_upload_params(**options):
|
||||
params = {"timestamp": now(),
|
||||
"transformation": generate_transformation_string(**options)[0],
|
||||
"public_id": options.get("public_id"),
|
||||
"callback": options.get("callback"),
|
||||
"format": options.get("format"),
|
||||
"type": options.get("type"),
|
||||
"backup": options.get("backup"),
|
||||
"faces": options.get("faces"),
|
||||
"image_metadata": options.get("image_metadata"),
|
||||
"exif": options.get("exif"),
|
||||
"colors": options.get("colors"),
|
||||
"headers": build_custom_headers(options.get("headers")),
|
||||
"eager": build_eager(options.get("eager")),
|
||||
"use_filename": options.get("use_filename"),
|
||||
"unique_filename": options.get("unique_filename"),
|
||||
"discard_original_filename": options.get("discard_original_filename"),
|
||||
"invalidate": options.get("invalidate"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"eager_notification_url": options.get("eager_notification_url"),
|
||||
"eager_async": options.get("eager_async"),
|
||||
"proxy": options.get("proxy"),
|
||||
"folder": options.get("folder"),
|
||||
"overwrite": options.get("overwrite"),
|
||||
"tags": options.get("tags") and ",".join(build_array(options["tags"])),
|
||||
"allowed_formats": options.get("allowed_formats") and ",".join(build_array(options["allowed_formats"])),
|
||||
"face_coordinates": encode_double_array(options.get("face_coordinates")),
|
||||
"custom_coordinates": encode_double_array(options.get("custom_coordinates")),
|
||||
"context": encode_context(options.get("context")),
|
||||
"moderation": options.get("moderation"),
|
||||
"raw_convert": options.get("raw_convert"),
|
||||
"quality_override": options.get("quality_override"),
|
||||
"ocr": options.get("ocr"),
|
||||
"categorization": options.get("categorization"),
|
||||
"detection": options.get("detection"),
|
||||
"similarity_search": options.get("similarity_search"),
|
||||
"background_removal": options.get("background_removal"),
|
||||
"upload_preset": options.get("upload_preset"),
|
||||
"phash": options.get("phash"),
|
||||
"return_delete_token": options.get("return_delete_token"),
|
||||
"auto_tagging": options.get("auto_tagging") and str(options.get("auto_tagging")),
|
||||
"responsive_breakpoints": generate_responsive_breakpoints_string(options.get("responsive_breakpoints")),
|
||||
"async": options.get("async"),
|
||||
"access_control": options.get("access_control") and json_encode(build_list_of_dicts(options.get("access_control")))}
|
||||
return params
|
||||
|
||||
|
||||
def __process_text_options(layer, layer_parameter):
|
||||
font_family = layer.get("font_family")
|
||||
font_size = layer.get("font_size")
|
||||
keywords = []
|
||||
for attr, default_value in __LAYER_KEYWORD_PARAMS:
|
||||
attr_value = layer.get(attr)
|
||||
if attr_value != default_value and attr_value is not None:
|
||||
keywords.append(attr_value)
|
||||
|
||||
letter_spacing = layer.get("letter_spacing")
|
||||
if letter_spacing is not None:
|
||||
keywords.append("letter_spacing_" + str(letter_spacing))
|
||||
|
||||
line_spacing = layer.get("line_spacing")
|
||||
if line_spacing is not None:
|
||||
keywords.append("line_spacing_" + str(line_spacing))
|
||||
|
||||
if font_size is None and font_family is None and len(keywords) == 0:
|
||||
return None
|
||||
|
||||
if font_family is None:
|
||||
raise ValueError("Must supply font_family for text in " + layer_parameter)
|
||||
|
||||
if font_size is None:
|
||||
raise ValueError("Must supply font_size for text in " + layer_parameter)
|
||||
|
||||
keywords.insert(0, font_size)
|
||||
keywords.insert(0, font_family)
|
||||
|
||||
return '_'.join([str(k) for k in keywords])
|
||||
|
||||
|
||||
def process_layer(layer, layer_parameter):
|
||||
if isinstance(layer, string_types) and layer.startswith("fetch:"):
|
||||
layer = {"url": layer[len('fetch:'):]}
|
||||
if not isinstance(layer, dict):
|
||||
return layer
|
||||
|
||||
resource_type = layer.get("resource_type")
|
||||
text = layer.get("text")
|
||||
type = layer.get("type")
|
||||
public_id = layer.get("public_id")
|
||||
format = layer.get("format")
|
||||
fetch = layer.get("url")
|
||||
components = list()
|
||||
|
||||
if text is not None and resource_type is None:
|
||||
resource_type = "text"
|
||||
|
||||
if fetch and resource_type is None:
|
||||
resource_type = "fetch"
|
||||
|
||||
if public_id is not None and format is not None:
|
||||
public_id = public_id + "." + format
|
||||
|
||||
if public_id is None and resource_type != "text" and resource_type != "fetch":
|
||||
raise ValueError("Must supply public_id for for non-text " + layer_parameter)
|
||||
|
||||
if resource_type is not None and resource_type != "image":
|
||||
components.append(resource_type)
|
||||
|
||||
if type is not None and type != "upload":
|
||||
components.append(type)
|
||||
|
||||
if resource_type == "text" or resource_type == "subtitles":
|
||||
if public_id is None and text is None:
|
||||
raise ValueError("Must supply either text or public_id in " + layer_parameter)
|
||||
|
||||
text_options = __process_text_options(layer, layer_parameter)
|
||||
|
||||
if text_options is not None:
|
||||
components.append(text_options)
|
||||
|
||||
if public_id is not None:
|
||||
public_id = public_id.replace("/", ':')
|
||||
components.append(public_id)
|
||||
|
||||
if text is not None:
|
||||
var_pattern = VAR_NAME_RE
|
||||
match = re.findall(var_pattern,text)
|
||||
|
||||
parts= filter(lambda p: p is not None, re.split(var_pattern,text))
|
||||
encoded_text = []
|
||||
for part in parts:
|
||||
if re.match(var_pattern,part):
|
||||
encoded_text.append(part)
|
||||
else:
|
||||
encoded_text.append(smart_escape(smart_escape(part, r"([,/])")))
|
||||
|
||||
text = ''.join(encoded_text)
|
||||
# text = text.replace("%2C", "%252C")
|
||||
# text = text.replace("/", "%252F")
|
||||
components.append(text)
|
||||
elif resource_type == "fetch":
|
||||
b64 = base64_encode_url(fetch)
|
||||
components.append(b64)
|
||||
else:
|
||||
public_id = public_id.replace("/", ':')
|
||||
components.append(public_id)
|
||||
|
||||
return ':'.join(components)
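To illustrate the layer encoding above: a text layer joins its styling keywords with the escaped text, while an image or video layer is its public id with `/` replaced by `:` (values illustrative):

```
# Illustrative inputs for the helper above.
process_layer({"text": "Hello World", "font_family": "Arial",
               "font_size": 18}, "overlay")
# -> "text:Arial_18:Hello%20World"
process_layer({"resource_type": "video", "public_id": "folder/intro"},
              "overlay")
# -> "video:folder:intro"
```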
|
||||
|
||||
IF_OPERATORS = {
|
||||
"=": 'eq',
|
||||
"!=": 'ne',
|
||||
"<": 'lt',
|
||||
">": 'gt',
|
||||
"<=": 'lte',
|
||||
">=": 'gte',
|
||||
"&&": 'and',
|
||||
"||": 'or',
|
||||
"*": 'mul',
|
||||
"/": 'div',
|
||||
"+": 'add',
|
||||
"-": 'sub'
|
||||
}
|
||||
|
||||
PREDEFINED_VARS = {
|
||||
"aspect_ratio": "ar",
|
||||
"current_page": "cp",
|
||||
"face_count": "fc",
|
||||
"height": "h",
|
||||
"initial_aspect_ratio": "iar",
|
||||
"initial_height": "ih",
|
||||
"initial_width": "iw",
|
||||
"page_count": "pc",
|
||||
"page_x": "px",
|
||||
"page_y": "py",
|
||||
"tags": "tags",
|
||||
"width": "w"
|
||||
}
|
||||
|
||||
replaceRE = "((\\|\\||>=|<=|&&|!=|>|=|<|/|-|\\+|\\*)(?=[ _])|" + '|'.join(PREDEFINED_VARS.keys())+ ")"
|
||||
|
||||
|
||||
def translate_if(match):
|
||||
name = match.group(0)
|
||||
return IF_OPERATORS.get(name,
|
||||
PREDEFINED_VARS.get(name,
|
||||
name))
|
||||
|
||||
def process_conditional(conditional):
|
||||
if conditional is None:
|
||||
return conditional
|
||||
result = normalize_expression(conditional)
|
||||
return result
|
||||
|
||||
def normalize_expression(expression):
|
||||
if re.match(r'^!.+!$',str(expression)): # quoted string
|
||||
return expression
|
||||
elif expression:
|
||||
result = str(expression)
|
||||
result = re.sub(replaceRE, translate_if, result)
|
||||
result = re.sub('[ _]+', '_', result)
|
||||
return result
|
||||
else:
|
||||
return expression
|
||||
|
||||
def __join_pair(key, value):
|
||||
if value is None or value == "":
|
||||
return None
|
||||
elif value is True:
|
||||
return key
|
||||
else:
|
||||
return u"{0}=\"{1}\"".format(key, value)
|
||||
|
||||
|
||||
def html_attrs(attrs, only=None):
|
||||
return ' '.join(sorted([__join_pair(key, value) for key, value in attrs.items() if only is None or key in only]))
|
||||
|
||||
|
||||
def __safe_value(v):
|
||||
if isinstance(v, bool):
|
||||
return "1" if v else "0"
|
||||
else:
|
||||
return v
|
||||
|
||||
|
||||
def __crc(source):
|
||||
return str((zlib.crc32(to_bytearray(source)) & 0xffffffff) % 5 + 1)
|
||||
|
||||
|
||||
def __compact(array):
|
||||
return filter(lambda x: x, array)
|
||||
|
||||
|
||||
def base64_encode_url(url):
|
||||
"""
|
||||
    Returns the Base64-encoded version of url.
|
||||
    The method unquotes the url first, so an already-quoted url is not encoded twice.
|
||||
|
||||
:param str url:
|
||||
        the url to encode. The value is URI-decoded and then
|
||||
re-encoded before converting to base64 representation
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
url = unquote(url)
|
||||
except:
|
||||
pass
|
||||
url = smart_escape(url)
|
||||
b64 = base64.b64encode(url.encode('utf-8'))
|
||||
return b64.decode('ascii')
|
||||
|
||||
|
||||
def __json_serializer(obj):
|
||||
"""JSON serializer for objects not serializable by default json code"""
|
||||
if isinstance(obj, (datetime, date)):
|
||||
return obj.isoformat()
|
||||
raise TypeError("Object of type %s is not JSON serializable" % type(obj))
|
||||
49
lib/plexapi/__init__.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import logging
|
||||
import os
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from platform import uname
|
||||
from plexapi.config import PlexConfig, reset_base_headers
|
||||
from plexapi.utils import SecretsFilter
|
||||
from uuid import getnode
|
||||
|
||||
# Load User Defined Config
|
||||
DEFAULT_CONFIG_PATH = os.path.expanduser('~/.config/plexapi/config.ini')
|
||||
CONFIG_PATH = os.environ.get('PLEXAPI_CONFIG_PATH', DEFAULT_CONFIG_PATH)
|
||||
CONFIG = PlexConfig(CONFIG_PATH)
|
||||
|
||||
# PlexAPI Settings
|
||||
PROJECT = 'PlexAPI'
|
||||
VERSION = '3.0.6'
|
||||
TIMEOUT = CONFIG.get('plexapi.timeout', 30, int)
|
||||
X_PLEX_CONTAINER_SIZE = CONFIG.get('plexapi.container_size', 100, int)
|
||||
|
||||
# Plex Header Configuration
|
||||
X_PLEX_PROVIDES = CONFIG.get('header.provides', 'controller')
|
||||
X_PLEX_PLATFORM = CONFIG.get('header.platorm', uname()[0])
|
||||
X_PLEX_PLATFORM_VERSION = CONFIG.get('header.platform_version', uname()[2])
|
||||
X_PLEX_PRODUCT = CONFIG.get('header.product', PROJECT)
|
||||
X_PLEX_VERSION = CONFIG.get('header.version', VERSION)
|
||||
X_PLEX_DEVICE = CONFIG.get('header.device', X_PLEX_PLATFORM)
|
||||
X_PLEX_DEVICE_NAME = CONFIG.get('header.device_name', uname()[1])
|
||||
X_PLEX_IDENTIFIER = CONFIG.get('header.identifier', str(hex(getnode())))
|
||||
BASE_HEADERS = reset_base_headers()
|
||||
|
||||
# Logging Configuration
|
||||
log = logging.getLogger('plexapi')
|
||||
logfile = CONFIG.get('log.path')
|
||||
logformat = CONFIG.get('log.format', '%(asctime)s %(module)12s:%(lineno)-4s %(levelname)-9s %(message)s')
|
||||
loglevel = CONFIG.get('log.level', 'INFO').upper()
|
||||
loghandler = logging.NullHandler()
|
||||
|
||||
if logfile: # pragma: no cover
|
||||
logbackups = CONFIG.get('log.backup_count', 3, int)
|
||||
logbytes = CONFIG.get('log.rotate_bytes', 512000, int)
|
||||
loghandler = RotatingFileHandler(os.path.expanduser(logfile), 'a', logbytes, logbackups)
|
||||
|
||||
loghandler.setFormatter(logging.Formatter(logformat))
|
||||
log.addHandler(loghandler)
|
||||
log.setLevel(loglevel)
|
||||
logfilter = SecretsFilter()
|
||||
if CONFIG.get('log.show_secrets', '').lower() != 'true':
|
||||
log.addFilter(logfilter)
|
||||
58
lib/plexapi/alert.py
Normal file
@@ -0,0 +1,58 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import json
|
||||
import threading
|
||||
import websocket
|
||||
from plexapi import log
|
||||
|
||||
|
||||
class AlertListener(threading.Thread):
|
||||
""" Creates a websocket connection to the PlexServer to optionally recieve alert notifications.
|
||||
These often include messages from Plex about media scans as well as updates to currently running
|
||||
        Transcode Sessions. This class implements threading.Thread, therefore to start monitoring
|
||||
alerts you must call .start() on the object once it's created. When calling
|
||||
`PlexServer.startAlertListener()`, the thread will be started for you.
|
||||
|
||||
Parameters:
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer this listener is connected to.
|
||||
            callback (func): Callback function to call on received messages. The callback function
|
||||
will be sent a single argument 'data' which will contain a dictionary of data
|
||||
                received from the server. :samp:`def my_callback(data): ...`
|
||||
"""
|
||||
key = '/:/websockets/notifications'
|
||||
|
||||
def __init__(self, server, callback=None):
|
||||
super(AlertListener, self).__init__()
|
||||
self.daemon = True
|
||||
self._server = server
|
||||
self._callback = callback
|
||||
self._ws = None
|
||||
|
||||
def run(self):
|
||||
# create the websocket connection
|
||||
url = self._server.url(self.key, includeToken=True).replace('http', 'ws')
|
||||
log.info('Starting AlertListener: %s', url)
|
||||
self._ws = websocket.WebSocketApp(url, on_message=self._onMessage,
|
||||
on_error=self._onError)
|
||||
self._ws.run_forever()
|
||||
|
||||
def stop(self):
|
||||
""" Stop the AlertListener thread. Once the notifier is stopped, it cannot be diractly
|
||||
started again. You must call :func:`plexapi.server.PlexServer.startAlertListener()`
|
||||
from a PlexServer instance.
|
||||
"""
|
||||
log.info('Stopping AlertListener.')
|
||||
self._ws.close()
|
||||
|
||||
def _onMessage(self, ws, message):
|
||||
""" Called when websocket message is recieved. """
|
||||
try:
|
||||
data = json.loads(message)['NotificationContainer']
|
||||
log.debug('Alert: %s %s %s', *data)
|
||||
if self._callback:
|
||||
self._callback(data)
|
||||
except Exception as err: # pragma: no cover
|
||||
log.error('AlertListener Msg Error: %s', err)
|
||||
|
||||
def _onError(self, ws, err): # pragma: no cover
|
||||
""" Called when websocket error is recieved. """
|
||||
log.error('AlertListener Error: %s' % err)
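A minimal usage sketch for the listener above, assuming `PlexServer.startAlertListener()` creates and starts it as the class docstring describes (the URL and token are placeholders):

```
# Hypothetical connection details, for illustration only.
from plexapi.server import PlexServer

def on_alert(data):
    # data is the decoded NotificationContainer dictionary
    print(data.get('type'), data.get('size'))

plex = PlexServer('http://localhost:32400', token='<plex-token>')
listener = plex.startAlertListener(callback=on_alert)
# ... later:
listener.stop()
```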
|
||||
304
lib/plexapi/audio.py
Normal file
@@ -0,0 +1,304 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import media, utils
|
||||
from plexapi.base import Playable, PlexPartialObject
|
||||
|
||||
|
||||
class Audio(PlexPartialObject):
|
||||
""" Base class for audio :class:`~plexapi.audio.Artist`, :class:`~plexapi.audio.Album`
|
||||
and :class:`~plexapi.audio.Track` objects.
|
||||
|
||||
Attributes:
|
||||
addedAt (datetime): Datetime this item was added to the library.
|
||||
            index (str): Index Number (often the track number).
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
lastViewedAt (datetime): Datetime item was last accessed.
|
||||
librarySectionID (int): :class:`~plexapi.library.LibrarySection` ID.
|
||||
listType (str): Hardcoded as 'audio' (useful for search filters).
|
||||
ratingKey (int): Unique key identifying this item.
|
||||
summary (str): Summary of the artist, track, or album.
|
||||
thumb (str): URL to thumbnail image.
|
||||
title (str): Artist, Album or Track title. (Jason Mraz, We Sing, Lucky, etc.)
|
||||
titleSort (str): Title to use when sorting (defaults to title).
|
||||
type (str): 'artist', 'album', or 'track'.
|
||||
            updatedAt (datetime): Datetime this item was updated.
|
||||
viewCount (int): Count of times this item was accessed.
|
||||
"""
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.listType = 'audio'
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.index = data.attrib.get('index')
|
||||
self.key = data.attrib.get('key')
|
||||
self.lastViewedAt = utils.toDatetime(data.attrib.get('lastViewedAt'))
|
||||
self.librarySectionID = data.attrib.get('librarySectionID')
|
||||
self.ratingKey = utils.cast(int, data.attrib.get('ratingKey'))
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.titleSort = data.attrib.get('titleSort', self.title)
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.viewCount = utils.cast(int, data.attrib.get('viewCount', 0))
|
||||
|
||||
@property
|
||||
def thumbUrl(self):
|
||||
""" Return url to for the thumbnail image. """
|
||||
        key = self.firstAttr('thumb', 'parentThumb', 'grandparentThumb')
|
||||
return self._server.url(key, includeToken=True) if key else None
|
||||
|
||||
@property
|
||||
def artUrl(self):
|
||||
""" Return the first art url starting on the most specific for that item."""
|
||||
art = self.firstAttr('art', 'grandparentArt')
|
||||
return self._server.url(art, includeToken=True) if art else None
|
||||
|
||||
def url(self, part):
|
||||
""" Returns the full URL for this audio item. Typically used for getting a specific track. """
|
||||
return self._server.url(part, includeToken=True) if part else None
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Artist(Audio):
|
||||
""" Represents a single audio artist.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'artist'
|
||||
art (str): Artist artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
            countries (list): List of :class:`~plexapi.media.Country` objects this artist represents.
|
||||
            genres (list): List of :class:`~plexapi.media.Genre` objects this artist represents.
|
||||
guid (str): Unknown (unique ID; com.plexapp.agents.plexmusic://gracenote/artist/05517B8701668D28?lang=en)
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
location (str): Filepath this artist is found on disk.
|
||||
similar (list): List of :class:`~plexapi.media.Similar` artists.
|
||||
"""
|
||||
TAG = 'Directory'
|
||||
TYPE = 'artist'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Audio._loadData(self, data)
|
||||
self.art = data.attrib.get('art')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.key = self.key.replace('/children', '') # FIX_BUG_50
|
||||
self.locations = self.listAttrs(data, 'path', etag='Location')
|
||||
self.countries = self.findItems(data, media.Country)
|
||||
self.genres = self.findItems(data, media.Genre)
|
||||
self.similar = self.findItems(data, media.Similar)
|
||||
self.collections = self.findItems(data, media.Collection)
|
||||
|
||||
def __iter__(self):
|
||||
for album in self.albums():
|
||||
yield album
|
||||
|
||||
def album(self, title):
|
||||
""" Returns the :class:`~plexapi.audio.Album` that matches the specified title.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the album to return.
|
||||
"""
|
||||
key = '%s/children' % self.key
|
||||
return self.fetchItem(key, title__iexact=title)
|
||||
|
||||
def albums(self, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.audio.Album` objects by this artist. """
|
||||
key = '%s/children' % self.key
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def track(self, title):
|
||||
""" Returns the :class:`~plexapi.audio.Track` that matches the specified title.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the track to return.
|
||||
"""
|
||||
key = '%s/allLeaves' % self.key
|
||||
return self.fetchItem(key, title__iexact=title)
|
||||
|
||||
def tracks(self, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.audio.Track` objects by this artist. """
|
||||
key = '%s/allLeaves' % self.key
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def get(self, title):
|
||||
""" Alias of :func:`~plexapi.audio.Artist.track`. """
|
||||
return self.track(title)
|
||||
|
||||
def download(self, savepath=None, keep_orginal_name=False, **kwargs):
|
||||
""" Downloads all tracks for this artist to the specified location.
|
||||
|
||||
Parameters:
|
||||
                savepath (str): Directory path to save the downloaded tracks to.
|
||||
keep_orginal_name (bool): Set True to keep the original filename as stored in
|
||||
the Plex server. False will create a new filename with the format
|
||||
"<Atrist> - <Album> <Track>".
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL()` will
|
||||
be returned and the additional arguments passed in will be sent to that
|
||||
function. If kwargs is not specified, the media items will be downloaded
|
||||
and saved to disk.
|
||||
"""
|
||||
filepaths = []
|
||||
for album in self.albums():
|
||||
for track in album.tracks():
|
||||
filepaths += track.download(savepath, keep_orginal_name, **kwargs)
|
||||
return filepaths
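A usage sketch for the download helper above (the section and artist names are placeholders, and `plex` is assumed to be a connected `PlexServer` instance):

```
# Hypothetical usage; note the parameter is spelled keep_orginal_name
# in the code above.
artist = plex.library.section('Music').get('Some Artist')
paths = artist.download(savepath='/tmp/music', keep_orginal_name=True)
print(len(paths), 'tracks saved')
```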
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Album(Audio):
|
||||
""" Represents a single audio album.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'album'
|
||||
art (str): Album artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
            genres (list): List of :class:`~plexapi.media.Genre` objects this album represents.
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
originallyAvailableAt (datetime): Datetime this album was released.
|
||||
parentKey (str): API URL of this artist.
|
||||
parentRatingKey (int): Unique key identifying artist.
|
||||
parentThumb (str): URL to artist thumbnail image.
|
||||
parentTitle (str): Name of the artist for this album.
|
||||
studio (str): Studio that released this album.
|
||||
year (int): Year this album was released.
|
||||
"""
|
||||
TAG = 'Directory'
|
||||
TYPE = 'album'
|
||||
|
||||
def __iter__(self):
|
||||
for track in self.tracks:
|
||||
yield track
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Audio._loadData(self, data)
|
||||
self.art = data.attrib.get('art')
|
||||
self.key = self.key.replace('/children', '') # fixes bug #50
|
||||
self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d')
|
||||
self.parentKey = data.attrib.get('parentKey')
|
||||
self.parentRatingKey = data.attrib.get('parentRatingKey')
|
||||
self.parentThumb = data.attrib.get('parentThumb')
|
||||
self.parentTitle = data.attrib.get('parentTitle')
|
||||
self.studio = data.attrib.get('studio')
|
||||
self.year = utils.cast(int, data.attrib.get('year'))
|
||||
self.genres = self.findItems(data, media.Genre)
|
||||
self.collections = self.findItems(data, media.Collection)
|
||||
self.labels = self.findItems(data, media.Label)
|
||||
|
||||
def track(self, title):
|
||||
""" Returns the :class:`~plexapi.audio.Track` that matches the specified title.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the track to return.
|
||||
"""
|
||||
key = '%s/children' % self.key
|
||||
return self.fetchItem(key, title__iexact=title)
|
||||
|
||||
def tracks(self, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.audio.Track` objects in this album. """
|
||||
key = '%s/children' % self.key
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def get(self, title):
|
||||
""" Alias of :func:`~plexapi.audio.Album.track`. """
|
||||
return self.track(title)
|
||||
|
||||
def artist(self):
|
||||
""" Return :func:`~plexapi.audio.Artist` of this album. """
|
||||
return self.fetchItem(self.parentKey)
|
||||
|
||||
def download(self, savepath=None, keep_orginal_name=False, **kwargs):
|
||||
""" Downloads all tracks for this artist to the specified location.
|
||||
|
||||
Parameters:
|
||||
                savepath (str): Directory path to save the downloaded tracks to.
|
||||
keep_orginal_name (bool): Set True to keep the original filename as stored in
|
||||
the Plex server. False will create a new filename with the format
|
||||
"<Atrist> - <Album> <Track>".
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL()` will
|
||||
be returned and the additional arguments passed in will be sent to that
|
||||
function. If kwargs is not specified, the media items will be downloaded
|
||||
and saved to disk.
|
||||
"""
|
||||
filepaths = []
|
||||
for track in self.tracks():
|
||||
filepaths += track.download(savepath, keep_orginal_name, **kwargs)
|
||||
return filepaths
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Track(Audio, Playable):
|
||||
""" Represents a single audio track.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'track'
|
||||
art (str): Track artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
chapterSource (TYPE): Unknown
|
||||
duration (int): Length of this track in milliseconds.
|
||||
grandparentArt (str): Artist artwork.
|
||||
grandparentKey (str): Artist API URL.
|
||||
grandparentRatingKey (str): Unique key identifying artist.
|
||||
grandparentThumb (str): URL to artist thumbnail image.
|
||||
grandparentTitle (str): Name of the artist for this track.
|
||||
guid (str): Unknown (unique ID).
|
||||
media (list): List of :class:`~plexapi.media.Media` objects for this track.
|
||||
moods (list): List of :class:`~plexapi.media.Mood` objects for this track.
|
||||
originalTitle (str): Original track title (if translated).
|
||||
parentIndex (int): Album index.
|
||||
parentKey (str): Album API URL.
|
||||
parentRatingKey (int): Unique key identifying album.
|
||||
parentThumb (str): URL to album thumbnail image.
|
||||
parentTitle (str): Name of the album for this track.
|
||||
primaryExtraKey (str): Unknown
|
||||
ratingCount (int): Unknown
|
||||
userRating (float): Rating of this track (0.0 - 10.0) equaling (0 stars - 5 stars)
|
||||
viewOffset (int): Unknown
|
||||
year (int): Year this track was released.
|
||||
sessionKey (int): Session Key (active sessions only).
|
||||
usernames (str): Username of person playing this track (active sessions only).
|
||||
player (str): :class:`~plexapi.client.PlexClient` for playing track (active sessions only).
|
||||
transcodeSessions (None): :class:`~plexapi.media.TranscodeSession` for playing
|
||||
track (active sessions only).
|
||||
"""
|
||||
TAG = 'Track'
|
||||
TYPE = 'track'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Audio._loadData(self, data)
|
||||
Playable._loadData(self, data)
|
||||
self.art = data.attrib.get('art')
|
||||
self.chapterSource = data.attrib.get('chapterSource')
|
||||
self.duration = utils.cast(int, data.attrib.get('duration'))
|
||||
self.grandparentArt = data.attrib.get('grandparentArt')
|
||||
self.grandparentKey = data.attrib.get('grandparentKey')
|
||||
self.grandparentRatingKey = data.attrib.get('grandparentRatingKey')
|
||||
self.grandparentThumb = data.attrib.get('grandparentThumb')
|
||||
self.grandparentTitle = data.attrib.get('grandparentTitle')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.originalTitle = data.attrib.get('originalTitle')
|
||||
self.parentIndex = data.attrib.get('parentIndex')
|
||||
self.parentKey = data.attrib.get('parentKey')
|
||||
self.parentRatingKey = data.attrib.get('parentRatingKey')
|
||||
self.parentThumb = data.attrib.get('parentThumb')
|
||||
self.parentTitle = data.attrib.get('parentTitle')
|
||||
self.primaryExtraKey = data.attrib.get('primaryExtraKey')
|
||||
self.ratingCount = utils.cast(int, data.attrib.get('ratingCount'))
|
||||
self.userRating = utils.cast(float, data.attrib.get('userRating', 0))
|
||||
self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0))
|
||||
self.year = utils.cast(int, data.attrib.get('year'))
|
||||
self.media = self.findItems(data, media.Media)
|
||||
self.moods = self.findItems(data, media.Mood)
|
||||
|
||||
def _prettyfilename(self):
|
||||
""" Returns a filename for use in download. """
|
||||
return '%s - %s %s' % (self.grandparentTitle, self.parentTitle, self.title)
|
||||
|
||||
def album(self):
|
||||
""" Return this track's :class:`~plexapi.audio.Album`. """
|
||||
return self.fetchItem(self.parentKey)
|
||||
|
||||
def artist(self):
|
||||
""" Return this track's :class:`~plexapi.audio.Artist`. """
|
||||
return self.fetchItem(self.grandparentKey)
|
||||
lib/plexapi/base.py (new file, 581 lines)
@@ -0,0 +1,581 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from plexapi import log, utils
|
||||
from plexapi.compat import quote_plus, urlencode
|
||||
from plexapi.exceptions import BadRequest, NotFound, UnknownType, Unsupported
|
||||
from plexapi.utils import tag_helper
|
||||
|
||||
OPERATORS = {
|
||||
'exact': lambda v, q: v == q,
|
||||
'iexact': lambda v, q: v.lower() == q.lower(),
|
||||
'contains': lambda v, q: q in v,
|
||||
'icontains': lambda v, q: q.lower() in v.lower(),
|
||||
'ne': lambda v, q: v != q,
|
||||
'in': lambda v, q: v in q,
|
||||
'gt': lambda v, q: v > q,
|
||||
'gte': lambda v, q: v >= q,
|
||||
'lt': lambda v, q: v < q,
|
||||
'lte': lambda v, q: v <= q,
|
||||
'startswith': lambda v, q: v.startswith(q),
|
||||
'istartswith': lambda v, q: v.lower().startswith(q),
|
||||
'endswith': lambda v, q: v.endswith(q),
|
||||
'iendswith': lambda v, q: v.lower().endswith(q),
|
||||
'exists': lambda v, q: v is not None if q else v is None,
|
||||
'regex': lambda v, q: re.match(q, v),
|
||||
'iregex': lambda v, q: re.match(q, v, flags=re.IGNORECASE),
|
||||
}
|
||||
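A short sketch of how these operator lambdas are used: a filter suffix such as `__icontains` is resolved to one of the entries above and applied to the attribute value.

```
from plexapi.base import OPERATORS

# title__icontains='matrix' resolves to the 'icontains' lambda:
print(OPERATORS['icontains']('The Matrix Reloaded', 'matrix'))  # True
print(OPERATORS['gte'](1999, 1980))                             # True
```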
|
||||
|
||||
class PlexObject(object):
|
||||
""" Base class for all Plex objects.
|
||||
|
||||
Parameters:
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer this client is connected to (optional)
|
||||
data (ElementTree): Response from PlexServer used to build this object (optional).
|
||||
initpath (str): Relative path requested when retrieving specified `data` (optional).
|
||||
"""
|
||||
TAG = None # xml element tag
|
||||
TYPE = None # xml element type
|
||||
key = None # plex relative url
|
||||
|
||||
def __init__(self, server, data, initpath=None):
|
||||
self._server = server
|
||||
self._data = data
|
||||
self._initpath = initpath or self.key
|
||||
self._details_key = ''
|
||||
if data is not None:
|
||||
self._loadData(data)
|
||||
|
||||
def __repr__(self):
|
||||
uid = self._clean(self.firstAttr('_baseurl', 'key', 'id', 'playQueueID', 'uri'))
|
||||
name = self._clean(self.firstAttr('title', 'name', 'username', 'product', 'tag', 'value'))
|
||||
return '<%s>' % ':'.join([p for p in [self.__class__.__name__, uid, name] if p])
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
# dont overwrite an attr with None unless its a private variable
|
||||
if value is not None or attr.startswith('_') or attr not in self.__dict__:
|
||||
self.__dict__[attr] = value
|
||||
|
||||
def _clean(self, value):
|
||||
""" Clean attr value for display in __repr__. """
|
||||
if value:
|
||||
value = str(value).replace('/library/metadata/', '')
|
||||
value = value.replace('/children', '')
|
||||
return value.replace(' ', '-')[:20]
|
||||
|
||||
def _buildItem(self, elem, cls=None, initpath=None):
|
||||
""" Factory function to build objects based on registered PLEXOBJECTS. """
|
||||
# cls is specified, build the object and return
|
||||
initpath = initpath or self._initpath
|
||||
if cls is not None:
|
||||
return cls(self._server, elem, initpath)
|
||||
# cls is not specified, try looking it up in PLEXOBJECTS
|
||||
etype = elem.attrib.get('type', elem.attrib.get('streamType'))
|
||||
ehash = '%s.%s' % (elem.tag, etype) if etype else elem.tag
|
||||
ecls = utils.PLEXOBJECTS.get(ehash, utils.PLEXOBJECTS.get(elem.tag))
|
||||
# log.debug('Building %s as %s', elem.tag, ecls.__name__)
|
||||
if ecls is not None:
|
||||
return ecls(self._server, elem, initpath)
|
||||
raise UnknownType("Unknown library type <%s type='%s'../>" % (elem.tag, etype))
|
||||
|
||||
def _buildItemOrNone(self, elem, cls=None, initpath=None):
|
||||
""" Calls :func:`~plexapi.base.PlexObject._buildItem()` but returns
|
||||
None if elem is an unknown type.
|
||||
"""
|
||||
try:
|
||||
return self._buildItem(elem, cls, initpath)
|
||||
except UnknownType:
|
||||
return None
|
||||
|
||||
def fetchItem(self, ekey, cls=None, **kwargs):
|
||||
""" Load the specified key to find and build the first item with the
|
||||
specified tag and attrs. If no tag or attrs are specified then
|
||||
the first item in the result set is returned.
|
||||
|
||||
Parameters:
|
||||
ekey (str or int): Path in Plex to fetch items from. If an int is passed
|
||||
in, the key will be translated to /library/metadata/<key>. This allows
|
||||
fetching an item only knowing its key-id.
|
||||
cls (:class:`~plexapi.base.PlexObject`): If you know the class of the
|
||||
items to be fetched, passing this in will help the parser ensure
|
||||
it only returns those items. By default we convert the xml elements
|
||||
with the best guess PlexObjects based on tag and type attrs.
|
||||
etag (str): Only fetch items with the specified tag.
|
||||
**kwargs (dict): Optionally add attribute filters on the items to fetch. For
|
||||
example, passing in viewCount=0 will only return matching items. Filtering
|
||||
is done before the Python objects are built to help keep things speedy.
|
||||
Note: Because some attribute names are already used as arguments to this
|
||||
function, such as 'tag', you may still reference the attr tag by appending
|
||||
an underscore. For example, passing in _tag='foobar' will return all items
|
||||
where tag='foobar'. Also Note: Case very much matters when specifying kwargs
|
||||
-- Optionally, operators can be specified by appending them
|
||||
to the end of the attribute name for more complex lookups. For example,
|
||||
passing in viewCount__gte=0 will return all items where viewCount >= 0.
|
||||
Available operations include:
|
||||
|
||||
* __contains: Value contains specified arg.
|
||||
* __endswith: Value ends with specified arg.
|
||||
* __exact: Value matches specified arg.
|
||||
* __exists (bool): Value is or is not present in the attrs.
|
||||
* __gt: Value is greater than specified arg.
|
||||
* __gte: Value is greater than or equal to specified arg.
|
||||
* __icontains: Case insensitive value contains specified arg.
|
||||
* __iendswith: Case insensitive value ends with specified arg.
|
||||
* __iexact: Case insensitive value matches specified arg.
|
||||
* __in: Value is in a specified list or tuple.
|
||||
* __iregex: Case insensitive value matches the specified regular expression.
|
||||
* __istartswith: Case insensitive value starts with specified arg.
|
||||
* __lt: Value is less than specified arg.
|
||||
* __lte: Value is less than or equal to specified arg.
|
||||
* __regex: Value matches the specified regular expression.
|
||||
* __startswith: Value starts with specified arg.
|
||||
"""
|
||||
if isinstance(ekey, int):
|
||||
ekey = '/library/metadata/%s' % ekey
|
||||
for elem in self._server.query(ekey):
|
||||
if self._checkAttrs(elem, **kwargs):
|
||||
return self._buildItem(elem, cls, ekey)
|
||||
clsname = cls.__name__ if cls else 'None'
|
||||
raise NotFound('Unable to find elem: cls=%s, attrs=%s' % (clsname, kwargs))
|
||||
|
||||
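A usage sketch of the attribute filters described in the docstring above; the token and section id are placeholders:

```
from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='<PLEX_TOKEN>')           # placeholder
unwatched = plex.fetchItems('/library/sections/1/all', viewCount__exact=0)  # placeholder section id
recent = plex.fetchItems('/library/sections/1/all', year__gte=2015, title__icontains='star')
```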
def fetchItems(self, ekey, cls=None, **kwargs):
|
||||
""" Load the specified key to find and build all items with the specified tag
|
||||
and attrs. See :func:`~plexapi.base.PlexObject.fetchItem` for more details
|
||||
on how this is used.
|
||||
"""
|
||||
data = self._server.query(ekey)
|
||||
return self.findItems(data, cls, ekey, **kwargs)
|
||||
|
||||
def findItems(self, data, cls=None, initpath=None, **kwargs):
|
||||
""" Load the specified data to find and build all items with the specified tag
|
||||
and attrs. See :func:`~plexapi.base.PlexObject.fetchItem` for more details
|
||||
on how this is used.
|
||||
"""
|
||||
# filter on cls attrs if specified
|
||||
if cls and cls.TAG and 'tag' not in kwargs:
|
||||
kwargs['etag'] = cls.TAG
|
||||
if cls and cls.TYPE and 'type' not in kwargs:
|
||||
kwargs['type'] = cls.TYPE
|
||||
# loop through all data elements to find matches
|
||||
items = []
|
||||
for elem in data:
|
||||
if self._checkAttrs(elem, **kwargs):
|
||||
item = self._buildItemOrNone(elem, cls, initpath)
|
||||
if item is not None:
|
||||
items.append(item)
|
||||
return items
|
||||
|
||||
def firstAttr(self, *attrs):
|
||||
""" Return the first attribute in attrs that is not None. """
|
||||
for attr in attrs:
|
||||
value = self.__dict__.get(attr)
|
||||
if value is not None:
|
||||
return value
|
||||
|
||||
def listAttrs(self, data, attr, **kwargs):
|
||||
results = []
|
||||
for elem in data:
|
||||
kwargs['%s__exists' % attr] = True
|
||||
if self._checkAttrs(elem, **kwargs):
|
||||
results.append(elem.attrib.get(attr))
|
||||
return results
|
||||
|
||||
def reload(self, key=None):
|
||||
""" Reload the data for this object from self.key. """
|
||||
key = key or self._details_key or self.key
|
||||
if not key:
|
||||
raise Unsupported('Cannot reload an object not built from a URL.')
|
||||
self._initpath = key
|
||||
data = self._server.query(key)
|
||||
self._loadData(data[0])
|
||||
return self
|
||||
|
||||
def _checkAttrs(self, elem, **kwargs):
|
||||
attrsFound = {}
|
||||
for attr, query in kwargs.items():
|
||||
attr, op, operator = self._getAttrOperator(attr)
|
||||
values = self._getAttrValue(elem, attr)
|
||||
# special case query in (None, 0, '') to include missing attr
|
||||
if op == 'exact' and not values and query in (None, 0, ''):
|
||||
return True
|
||||
# assume the attr we're looking for is missing until a matching value is found
|
||||
attrsFound[attr] = False
|
||||
for value in values:
|
||||
value = self._castAttrValue(op, query, value)
|
||||
if operator(value, query):
|
||||
attrsFound[attr] = True
|
||||
break
|
||||
# log.debug('Checking %s for %s found: %s', elem.tag, kwargs, attrsFound)
|
||||
return all(attrsFound.values())
|
||||
|
||||
def _getAttrOperator(self, attr):
|
||||
for op, operator in OPERATORS.items():
|
||||
if attr.endswith('__%s' % op):
|
||||
attr = attr.rsplit('__', 1)[0]
|
||||
return attr, op, operator
|
||||
# default to exact match
|
||||
return attr, 'exact', OPERATORS['exact']
|
||||
|
||||
def _getAttrValue(self, elem, attrstr, results=None):
|
||||
# log.debug('Fetching %s in %s', attrstr, elem.tag)
|
||||
parts = attrstr.split('__', 1)
|
||||
attr = parts[0]
|
||||
attrstr = parts[1] if len(parts) == 2 else None
|
||||
if attrstr:
|
||||
results = [] if results is None else results
|
||||
for child in [c for c in elem if c.tag.lower() == attr.lower()]:
|
||||
results += self._getAttrValue(child, attrstr, results)
|
||||
return [r for r in results if r is not None]
|
||||
# check if we're looking for the element tag itself
|
||||
if attr.lower() == 'etag':
|
||||
return [elem.tag]
|
||||
# loop through attrs so we can perform a case-insensitive match
|
||||
for _attr, value in elem.attrib.items():
|
||||
if attr.lower() == _attr.lower():
|
||||
return [value]
|
||||
return []
|
||||
|
||||
def _castAttrValue(self, op, query, value):
|
||||
if op == 'exists':
|
||||
return value
|
||||
if isinstance(query, bool):
|
||||
return bool(int(value))
|
||||
if isinstance(query, int) and '.' in value:
|
||||
return float(value)
|
||||
if isinstance(query, int):
|
||||
return int(value)
|
||||
if isinstance(query, float):
|
||||
return float(value)
|
||||
return value
|
||||
|
||||
def _loadData(self, data):
|
||||
raise NotImplementedError('Abstract method not implemented.')
|
||||
|
||||
|
||||
class PlexPartialObject(PlexObject):
|
||||
""" Not all objects in the Plex listings return the complete list of elements
|
||||
for the object. This object will allow you to assume each object is complete,
|
||||
and if the specified value you request is None it will fetch the full object
|
||||
automatically and update itself.
|
||||
"""
|
||||
|
||||
def __eq__(self, other):
|
||||
return other is not None and self.key == other.key
|
||||
|
||||
def __hash__(self):
|
||||
return hash(repr(self))
|
||||
|
||||
def __iter__(self):
|
||||
yield self
|
||||
|
||||
def __getattribute__(self, attr):
|
||||
# Dragons inside.. :-/
|
||||
value = super(PlexPartialObject, self).__getattribute__(attr)
|
||||
# Check a few cases where we don't want to reload
|
||||
if attr == 'key' or attr.startswith('_'): return value
|
||||
if value not in (None, []): return value
|
||||
if self.isFullObject(): return value
|
||||
# Log the reload.
|
||||
clsname = self.__class__.__name__
|
||||
title = self.__dict__.get('title', self.__dict__.get('name'))
|
||||
objname = "%s '%s'" % (clsname, title) if title else clsname
|
||||
log.debug("Reloading %s for attr '%s'" % (objname, attr))
|
||||
# Reload and return the value
|
||||
self.reload()
|
||||
return super(PlexPartialObject, self).__getattribute__(attr)
|
||||
|
||||
def analyze(self):
|
||||
""" Tell Plex Media Server to performs analysis on it this item to gather
|
||||
information. Analysis includes:
|
||||
|
||||
* Gather Media Properties: All of the media you add to a Library has
|
||||
properties that are useful to know–whether it's a video file, a
|
||||
music track, or one of your photos (container, codec, resolution, etc).
|
||||
* Generate Default Artwork: Artwork will automatically be grabbed from a
|
||||
video file. A background image will be pulled out as well as a
|
||||
smaller image to be used for poster/thumbnail type purposes.
|
||||
* Generate Video Preview Thumbnails: Video preview thumbnails are created,
|
||||
if you have that feature enabled. Video preview thumbnails allow
|
||||
graphical seeking in some Apps. It's also used in the Plex Web App Now
|
||||
Playing screen to show a graphical representation of where playback
|
||||
is. Video preview thumbnails creation is a CPU-intensive process akin
|
||||
to transcoding the file.
|
||||
"""
|
||||
key = '/%s/analyze' % self.key.lstrip('/')
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def isFullObject(self):
|
||||
""" Retruns True if this is already a full object. A full object means all attributes
|
||||
were populated from the api path representing only this item. For example, the
|
||||
search result for a movie often contains only a portion of the attributes that the full
|
||||
object (main url) for that movie contains.
|
||||
"""
|
||||
return not self.key or self.key == self._initpath
|
||||
|
||||
def isPartialObject(self):
|
||||
""" Returns True if this is not a full object. """
|
||||
return not self.isFullObject()
|
||||
|
||||
def edit(self, **kwargs):
|
||||
""" Edit an object.
|
||||
|
||||
Parameters:
|
||||
kwargs (dict): Dict of settings to edit.
|
||||
|
||||
Example:
|
||||
{'type': 1,
|
||||
'id': movie.ratingKey,
|
||||
'collection[0].tag.tag': 'Super',
|
||||
'collection.locked': 0}
|
||||
"""
|
||||
if 'id' not in kwargs:
|
||||
kwargs['id'] = self.ratingKey
|
||||
if 'type' not in kwargs:
|
||||
kwargs['type'] = utils.searchType(self.type)
|
||||
|
||||
part = '/library/sections/%s/all?%s' % (self.librarySectionID,
|
||||
urlencode(kwargs))
|
||||
self._server.query(part, method=self._server._session.put)
|
||||
|
||||
def _edit_tags(self, tag, items, locked=True, remove=False):
|
||||
""" Helper to edit and refresh a tags.
|
||||
|
||||
Parameters:
|
||||
tag (str): tag name
|
||||
items (list): list of tags to add
|
||||
locked (bool): lock this field.
|
||||
remove (bool): If True, remove the tags given in items instead of adding them.
|
||||
"""
|
||||
if not isinstance(items, list):
|
||||
items = [items]
|
||||
value = getattr(self, tag + 's')
|
||||
existing_cols = [t.tag for t in value if t and remove is False]
|
||||
d = tag_helper(tag, existing_cols + items, locked, remove)
|
||||
self.edit(**d)
|
||||
self.refresh()
|
||||
|
||||
def addCollection(self, collections):
|
||||
""" Add a collection(s).
|
||||
|
||||
Parameters:
|
||||
collections (list): list of strings
|
||||
"""
|
||||
self._edit_tags('collection', collections)
|
||||
|
||||
def removeCollection(self, collections):
|
||||
""" Remove a collection(s). """
|
||||
self._edit_tags('collection', collections, remove=True)
|
||||
|
||||
def addLabel(self, labels):
|
||||
""" Add a label(s). """
|
||||
self._edit_tags('label', labels)
|
||||
|
||||
def removeLabel(self, labels):
|
||||
""" Remove a label(s). """
|
||||
self._edit_tags('label', labels, remove=True)
|
||||
|
||||
def addGenre(self, genres):
|
||||
""" Add a genre(s). """
|
||||
self._edit_tags('genre', genres)
|
||||
|
||||
def removeGenre(self, genres):
|
||||
""" Remove a genre(s). """
|
||||
self._edit_tags('genre', genres, remove=True)
|
||||
|
||||
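A small sketch of the tag helpers above (they all route through edit() and then refresh()); the URL, token, and rating key are placeholders:

```
from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='<PLEX_TOKEN>')   # placeholder
movie = plex.fetchItem(1234)                                         # placeholder movie ratingKey
movie.addCollection('Open Source Films')
movie.addLabel(['Favorites'])
movie.removeGenre('Horror')
```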
def refresh(self):
|
||||
""" Refreshing a Library or individual item causes the metadata for the item to be
|
||||
refreshed, even if it already has metadata. You can think of refreshing as
|
||||
"update metadata for the requested item even if it already has some". You should
|
||||
refresh a Library or individual item if:
|
||||
|
||||
* You've changed the Library Metadata Agent.
|
||||
* You've added "Local Media Assets" (such as artwork, theme music, external
|
||||
subtitle files, etc.)
|
||||
* You want to freshen the item posters, summary, etc.
|
||||
* There's a problem with the poster image that's been downloaded.
|
||||
* Items are missing posters or other downloaded information. This is possible if
|
||||
the refresh process is interrupted (the Server is turned off, internet
|
||||
connection dies, etc).
|
||||
"""
|
||||
key = '%s/refresh' % self.key
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def section(self):
|
||||
""" Returns the :class:`~plexapi.library.LibrarySection` this item belongs to. """
|
||||
return self._server.library.sectionByID(self.librarySectionID)
|
||||
|
||||
def delete(self):
|
||||
""" Delete a media element. This has to be enabled under settings > server > library in plex webui. """
|
||||
try:
|
||||
return self._server.query(self.key, method=self._server._session.delete)
|
||||
except BadRequest: # pragma: no cover
|
||||
log.error('Failed to delete %s. This could be because you '
|
||||
"haven't allowed items to be deleted" % self.key)
|
||||
raise
|
||||
|
||||
# The photo tag can't be built at the moment. TODO
|
||||
# def arts(self):
|
||||
# part = '%s/arts' % self.key
|
||||
# return self.fetchItem(part)
|
||||
|
||||
# def poster(self):
|
||||
# part = '%s/posters' % self.key
|
||||
# return self.fetchItem(part, etag='Photo')
|
||||
|
||||
|
||||
class Playable(object):
|
||||
""" This is a general place to store functions specific to media that is Playable.
|
||||
Things were getting mixed up a bit when dealing with Shows, Season, Artists,
|
||||
Albums which are all not playable.
|
||||
|
||||
Attributes:
|
||||
sessionKey (int): Active session key.
|
||||
usernames (list<str>): Usernames of the people playing this item (for active sessions).
|
||||
players (:class:`~plexapi.client.PlexClient`): Client objects playing this item (for active sessions).
|
||||
session (:class:`~plexapi.media.Session`): Session object, for a playing media file.
|
||||
transcodeSessions (:class:`~plexapi.media.TranscodeSession`): Transcode Session object
|
||||
if item is being transcoded (None otherwise).
|
||||
viewedAt (datetime): Datetime item was last viewed (history).
|
||||
playlistItemID (int): Playlist item ID (only populated for :class:`~plexapi.playlist.Playlist` items).
|
||||
"""
|
||||
|
||||
def _loadData(self, data):
|
||||
self.sessionKey = utils.cast(int, data.attrib.get('sessionKey')) # session
|
||||
self.usernames = self.listAttrs(data, 'title', etag='User') # session
|
||||
self.players = self.findItems(data, etag='Player') # session
|
||||
self.transcodeSessions = self.findItems(data, etag='TranscodeSession') # session
|
||||
self.session = self.findItems(data, etag='Session') # session
|
||||
self.viewedAt = utils.toDatetime(data.attrib.get('viewedAt')) # history
|
||||
self.playlistItemID = utils.cast(int, data.attrib.get('playlistItemID')) # playlist
|
||||
|
||||
def isFullObject(self):
|
||||
""" Retruns True if this is already a full object. A full object means all attributes
|
||||
were populated from the api path representing only this item. For example, the
|
||||
search result for a movie often contains only a portion of the attributes that the full
|
||||
object (main url) for that movie contains.
|
||||
"""
|
||||
return self._details_key == self._initpath or not self.key
|
||||
|
||||
def getStreamURL(self, **params):
|
||||
""" Returns a stream url that may be used by external applications such as VLC.
|
||||
|
||||
Parameters:
|
||||
**params (dict): optional parameters to manipulate the playback when accessing
|
||||
the stream. A few known parameters include: maxVideoBitrate, videoResolution,
|
||||
offset, copyts, protocol, mediaIndex, platform.
|
||||
|
||||
Raises:
|
||||
Unsupported: When the item doesn't support fetching a stream URL.
|
||||
"""
|
||||
if self.TYPE not in ('movie', 'episode', 'track'):
|
||||
raise Unsupported('Fetching stream URL for %s is unsupported.' % self.TYPE)
|
||||
mvb = params.get('maxVideoBitrate')
|
||||
vr = params.get('videoResolution', '')
|
||||
params = {
|
||||
'path': self.key,
|
||||
'offset': params.get('offset', 0),
|
||||
'copyts': params.get('copyts', 1),
|
||||
'protocol': params.get('protocol'),
|
||||
'mediaIndex': params.get('mediaIndex', 0),
|
||||
'X-Plex-Platform': params.get('platform', 'Chrome'),
|
||||
'maxVideoBitrate': max(mvb, 64) if mvb else None,
|
||||
'videoResolution': vr if re.match('^\d+x\d+$', vr) else None
|
||||
}
|
||||
# remove None values
|
||||
params = {k: v for k, v in params.items() if v is not None}
|
||||
streamtype = 'audio' if self.TYPE in ('track', 'album') else 'video'
|
||||
# sort the keys so the query string is deterministic (keeps tests stable)
|
||||
sorted_params = sorted(params.items(), key=lambda val: val[0])
|
||||
return self._server.url('/%s/:/transcode/universal/start.m3u8?%s' %
|
||||
(streamtype, urlencode(sorted_params)), includeToken=True)
|
||||
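A usage sketch for getStreamURL(); the URL, token, and rating key are placeholders:

```
from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='<PLEX_TOKEN>')   # placeholder
movie = plex.fetchItem(1234)                                         # placeholder movie ratingKey
print(movie.getStreamURL())                                          # default transcode settings
print(movie.getStreamURL(videoResolution='1280x720', maxVideoBitrate=2000))
```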
|
||||
def iterParts(self):
|
||||
""" Iterates over the parts of this media item. """
|
||||
for item in self.media:
|
||||
for part in item.parts:
|
||||
yield part
|
||||
|
||||
def split(self):
|
||||
"""Split a duplicate."""
|
||||
key = '%s/split' % self.key
|
||||
return self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def unmatch(self):
|
||||
"""Unmatch a media file."""
|
||||
key = '%s/unmatch' % self.key
|
||||
return self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def play(self, client):
|
||||
""" Start playback on the specified client.
|
||||
|
||||
Parameters:
|
||||
client (:class:`~plexapi.client.PlexClient`): Client to start playing on.
|
||||
"""
|
||||
client.playMedia(self)
|
||||
|
||||
def download(self, savepath=None, keep_orginal_name=False, **kwargs):
|
||||
""" Downloads this items media to the specified location. Returns a list of
|
||||
filepaths that have been saved to disk.
|
||||
|
||||
Parameters:
|
||||
savepath (str): Directory path where the downloaded files will be saved (optional).
|
||||
keep_orginal_name (bool): Set True to keep the original filename as stored in
|
||||
the Plex server. False will create a new filename with the format
|
||||
"<Artist> - <Album> <Track>".
|
||||
kwargs (dict): If specified, a :func:`~plexapi.audio.Track.getStreamURL()` will
|
||||
be returned and the additional arguments passed in will be sent to that
|
||||
function. If kwargs is not specified, the media items will be downloaded
|
||||
and saved to disk.
|
||||
"""
|
||||
filepaths = []
|
||||
locations = [i for i in self.iterParts() if i]
|
||||
for location in locations:
|
||||
filename = location.file
|
||||
if keep_orginal_name is False:
|
||||
filename = '%s.%s' % (self._prettyfilename(), location.container)
|
||||
# This is a lot slower but allows transcoding.
|
||||
if kwargs:
|
||||
download_url = self.getStreamURL(**kwargs)
|
||||
else:
|
||||
download_url = self._server.url('%s?download=1' % location.key)
|
||||
filepath = utils.download(download_url, self._server._token, filename=filename,
|
||||
savepath=savepath, session=self._server._session)
|
||||
if filepath:
|
||||
filepaths.append(filepath)
|
||||
return filepaths
|
||||
|
||||
def stop(self, reason=''):
|
||||
""" Stop playback for a media item. """
|
||||
key = '/status/sessions/terminate?sessionId=%s&reason=%s' % (self.session[0].id, quote_plus(reason))
|
||||
return self._server.query(key)
|
||||
|
||||
def updateProgress(self, time, state='stopped'):
|
||||
""" Set the watched progress for this video.
|
||||
|
||||
Note that setting the time to 0 will not work.
|
||||
Use `markWatched` or `markUnwatched` to achieve
|
||||
that goal.
|
||||
|
||||
Parameters:
|
||||
time (int): milliseconds watched
|
||||
state (string): state of the video, default 'stopped'
|
||||
"""
|
||||
key = '/:/progress?key=%s&identifier=com.plexapp.plugins.library&time=%d&state=%s' % (self.ratingKey,
|
||||
time, state)
|
||||
self._server.query(key)
|
||||
self.reload()
|
||||
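A usage sketch for updateProgress(); note the time is in milliseconds. The URL, token, and rating key are placeholders:

```
from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='<PLEX_TOKEN>')   # placeholder
episode = plex.fetchItem(2468)                                       # placeholder episode ratingKey
episode.updateProgress(10 * 60 * 1000)                               # mark ten minutes as watched
```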
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Release(PlexObject):
|
||||
TAG = 'Release'
|
||||
key = '/updater/status'
|
||||
|
||||
def _loadData(self, data):
|
||||
self.download_key = data.attrib.get('key')
|
||||
self.version = data.attrib.get('version')
|
||||
self.added = data.attrib.get('added')
|
||||
self.fixed = data.attrib.get('fixed')
|
||||
self.downloadURL = data.attrib.get('downloadURL')
|
||||
self.state = data.attrib.get('state')
|
||||
lib/plexapi/client.py (new file, 527 lines)
@@ -0,0 +1,527 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import requests
|
||||
|
||||
from requests.status_codes import _codes as codes
|
||||
from plexapi import BASE_HEADERS, CONFIG, TIMEOUT
|
||||
from plexapi import log, logfilter, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.compat import ElementTree
|
||||
from plexapi.exceptions import BadRequest, Unsupported
|
||||
from plexapi.playqueue import PlayQueue
|
||||
|
||||
|
||||
DEFAULT_MTYPE = 'video'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class PlexClient(PlexObject):
|
||||
""" Main class for interacting with a Plex client. This class can connect
|
||||
directly to the client and control it or proxy commands through your
|
||||
Plex Server. To better understand the Plex client APIs, read this page:
|
||||
https://github.com/plexinc/plex-media-player/wiki/Remote-control-API
|
||||
|
||||
Parameters:
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer this client is connected to (optional).
|
||||
data (ElementTree): Response from PlexServer used to build this object (optional).
|
||||
initpath (str): Path used to generate data.
|
||||
baseurl (str): HTTP URL to connect directly to this client.
|
||||
token (str): X-Plex-Token used for authentication (optional).
|
||||
session (:class:`~requests.Session`): requests.Session object if you want more control (optional).
|
||||
timeout (int): timeout in seconds on initial connect to client (default config.TIMEOUT).
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Player'
|
||||
key (str): '/resources'
|
||||
device (str): Best guess on the type of device this is (PS, iPhone, Linux, etc).
|
||||
deviceClass (str): Device class (pc, phone, etc).
|
||||
machineIdentifier (str): Unique ID for this device.
|
||||
model (str): Unknown
|
||||
platform (str): Unknown
|
||||
platformVersion (str): Device platform version.
|
||||
product (str): Client Product (Plex for iOS, etc).
|
||||
protocol (str): Always seems to be 'plex'.
|
||||
protocolCapabilities (list<str>): List of client capabilities (navigation, playback,
|
||||
timeline, mirror, playqueues).
|
||||
protocolVersion (str): Protocol version (1, future proofing?)
|
||||
server (:class:`~plexapi.server.PlexServer`): Server this client is connected to.
|
||||
session (:class:`~requests.Session`): Session object used for connection.
|
||||
state (str): Unknown
|
||||
title (str): Name of this client (John's iPhone, etc).
|
||||
token (str): X-Plex-Token used for authentication.
|
||||
vendor (str): Unknown
|
||||
version (str): Device version (4.6.1, etc).
|
||||
_baseurl (str): HTTP address of the client.
|
||||
_token (str): Token used to access this client.
|
||||
_session (obj): Requests session object used to access this client.
|
||||
_proxyThroughServer (bool): Set to True after calling
|
||||
:func:`~plexapi.client.PlexClient.proxyThroughServer()` (default False).
|
||||
"""
|
||||
TAG = 'Player'
|
||||
key = '/resources'
|
||||
|
||||
def __init__(self, server=None, data=None, initpath=None, baseurl=None,
|
||||
token=None, connect=True, session=None, timeout=None):
|
||||
super(PlexClient, self).__init__(server, data, initpath)
|
||||
self._baseurl = baseurl.strip('/') if baseurl else None
|
||||
self._token = logfilter.add_secret(token)
|
||||
self._showSecrets = CONFIG.get('log.show_secrets', '').lower() == 'true'
|
||||
server_session = server._session if server else None
|
||||
self._session = session or server_session or requests.Session()
|
||||
self._proxyThroughServer = False
|
||||
self._commandId = 0
|
||||
if not any([data, initpath, baseurl, token]):
|
||||
self._baseurl = CONFIG.get('auth.client_baseurl', 'http://localhost:32433')
|
||||
self._token = logfilter.add_secret(CONFIG.get('auth.client_token'))
|
||||
if connect and self._baseurl:
|
||||
self.connect(timeout=timeout)
|
||||
|
||||
def _nextCommandId(self):
|
||||
self._commandId += 1
|
||||
return self._commandId
|
||||
|
||||
def connect(self, timeout=None):
|
||||
""" Alias of reload as any subsequent requests to this client will be
|
||||
made directly to the device even if the object attributes were initially
|
||||
populated from a PlexServer.
|
||||
"""
|
||||
if not self.key:
|
||||
raise Unsupported('Cannot reload an object not built from a URL.')
|
||||
self._initpath = self.key
|
||||
data = self.query(self.key, timeout=timeout)
|
||||
self._loadData(data[0])
|
||||
return self
|
||||
|
||||
def reload(self):
|
||||
""" Alias to self.connect(). """
|
||||
return self.connect()
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.deviceClass = data.attrib.get('deviceClass')
|
||||
self.machineIdentifier = data.attrib.get('machineIdentifier')
|
||||
self.product = data.attrib.get('product')
|
||||
self.protocol = data.attrib.get('protocol')
|
||||
self.protocolCapabilities = data.attrib.get('protocolCapabilities', '').split(',')
|
||||
self.protocolVersion = data.attrib.get('protocolVersion')
|
||||
self.platform = data.attrib.get('platform')
|
||||
self.platformVersion = data.attrib.get('platformVersion')
|
||||
self.title = data.attrib.get('title') or data.attrib.get('name')
|
||||
# Active session details
|
||||
# Since protocolCapabilities is missing from /sessions we can't really control this player without
|
||||
# creating a client manually.
|
||||
# Add this in next breaking release.
|
||||
# if self._initpath == 'status/sessions':
|
||||
self.device = data.attrib.get('device') # session
|
||||
self.model = data.attrib.get('model') # session
|
||||
self.state = data.attrib.get('state') # session
|
||||
self.vendor = data.attrib.get('vendor') # session
|
||||
self.version = data.attrib.get('version') # session
|
||||
self.local = utils.cast(bool, data.attrib.get('local', 0))
|
||||
self.address = data.attrib.get('address') # session
|
||||
self.remotePublicAddress = data.attrib.get('remotePublicAddress')
|
||||
self.userID = data.attrib.get('userID')
|
||||
|
||||
def _headers(self, **kwargs):
|
||||
""" Returns a dict of all default headers for Client requests. """
|
||||
headers = BASE_HEADERS
|
||||
if self._token:
|
||||
headers['X-Plex-Token'] = self._token
|
||||
headers.update(kwargs)
|
||||
return headers
|
||||
|
||||
def proxyThroughServer(self, value=True, server=None):
|
||||
""" Tells this PlexClient instance to proxy all future commands through the PlexServer.
|
||||
Useful if you do not wish to connect directly to the Client device itself.
|
||||
|
||||
Parameters:
|
||||
value (bool): Enable or disable proxying (optional, default True).
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.Unsupported`: Cannot use client proxy with unknown server.
|
||||
"""
|
||||
if server:
|
||||
self._server = server
|
||||
if value is True and not self._server:
|
||||
raise Unsupported('Cannot use client proxy with unknown server.')
|
||||
self._proxyThroughServer = value
|
||||
|
||||
def query(self, path, method=None, headers=None, timeout=None, **kwargs):
|
||||
""" Main method used to handle HTTPS requests to the Plex client. This method helps
|
||||
by encoding the response to utf-8 and parsing the returned XML into an
|
||||
ElementTree object. Returns None if no data exists in the response.
|
||||
"""
|
||||
url = self.url(path)
|
||||
method = method or self._session.get
|
||||
timeout = timeout or TIMEOUT
|
||||
log.debug('%s %s', method.__name__.upper(), url)
|
||||
headers = self._headers(**headers or {})
|
||||
response = method(url, headers=headers, timeout=timeout, **kwargs)
|
||||
if response.status_code not in (200, 201):
|
||||
codename = codes.get(response.status_code)[0]
|
||||
errtext = response.text.replace('\n', ' ')
|
||||
log.warning('BadRequest (%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
raise BadRequest('(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext))
|
||||
data = response.text.encode('utf8')
|
||||
return ElementTree.fromstring(data) if data.strip() else None
|
||||
|
||||
def sendCommand(self, command, proxy=None, **params):
|
||||
""" Convenience wrapper around :func:`~plexapi.client.PlexClient.query()` to more easily
|
||||
send simple commands to the client. Returns an ElementTree object containing
|
||||
the response.
|
||||
|
||||
Parameters:
|
||||
command (str): Command to be sent in the format '<controller>/<command>'.
|
||||
proxy (bool): Set True to proxy this command through the PlexServer.
|
||||
**params (dict): Additional GET parameters to include with the command.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.Unsupported`: When we detect the client
|
||||
doesn't support this capability.
|
||||
"""
|
||||
command = command.strip('/')
|
||||
controller = command.split('/')[0]
|
||||
if controller not in self.protocolCapabilities:
|
||||
log.debug("Client %s doesn't support the %s controller. "
|
||||
"What you're trying might not work." % (self.title, controller))
|
||||
|
||||
params['commandID'] = self._nextCommandId()
|
||||
key = '/player/%s%s' % (command, utils.joinArgs(params))
|
||||
headers = {'X-Plex-Target-Client-Identifier': self.machineIdentifier}
|
||||
proxy = self._proxyThroughServer if proxy is None else proxy
|
||||
if proxy:
|
||||
return self._server.query(key, headers=headers)
|
||||
return self.query(key, headers=headers)
|
||||
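A usage sketch for controlling a client, either directly or proxied through the server; the addresses and tokens are placeholders:

```
from plexapi.client import PlexClient
from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='<PLEX_TOKEN>')               # placeholder
client = PlexClient(baseurl='http://192.168.1.50:32500', token='<PLEX_TOKEN>')  # placeholder client address
client.proxyThroughServer(server=plex)   # route commands via the PMS instead of direct HTTP
client.sendCommand('navigation/home')    # equivalent to client.goToHome()
```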
|
||||
def url(self, key, includeToken=False):
|
||||
""" Build a URL string with proper token argument. Token will be appended to the URL
|
||||
if either includeToken is True or CONFIG.log.show_secrets is 'true'.
|
||||
"""
|
||||
if not self._baseurl:
|
||||
raise BadRequest('PlexClient object missing baseurl.')
|
||||
if self._token and (includeToken or self._showSecrets):
|
||||
delim = '&' if '?' in key else '?'
|
||||
return '%s%s%sX-Plex-Token=%s' % (self._baseurl, key, delim, self._token)
|
||||
return '%s%s' % (self._baseurl, key)
|
||||
|
||||
# ---------------------
|
||||
# Navigation Commands
|
||||
# These commands navigate around the user-interface.
|
||||
def contextMenu(self):
|
||||
""" Open the context menu on the client. """
|
||||
self.sendCommand('navigation/contextMenu')
|
||||
|
||||
def goBack(self):
|
||||
""" Navigate back one position. """
|
||||
self.sendCommand('navigation/back')
|
||||
|
||||
def goToHome(self):
|
||||
""" Go directly to the home screen. """
|
||||
self.sendCommand('navigation/home')
|
||||
|
||||
def goToMusic(self):
|
||||
""" Go directly to the playing music panel. """
|
||||
self.sendCommand('navigation/music')
|
||||
|
||||
def moveDown(self):
|
||||
""" Move selection down a position. """
|
||||
self.sendCommand('navigation/moveDown')
|
||||
|
||||
def moveLeft(self):
|
||||
""" Move selection left a position. """
|
||||
self.sendCommand('navigation/moveLeft')
|
||||
|
||||
def moveRight(self):
|
||||
""" Move selection right a position. """
|
||||
self.sendCommand('navigation/moveRight')
|
||||
|
||||
def moveUp(self):
|
||||
""" Move selection up a position. """
|
||||
self.sendCommand('navigation/moveUp')
|
||||
|
||||
def nextLetter(self):
|
||||
""" Jump to next letter in the alphabet. """
|
||||
self.sendCommand('navigation/nextLetter')
|
||||
|
||||
def pageDown(self):
|
||||
""" Move selection down a full page. """
|
||||
self.sendCommand('navigation/pageDown')
|
||||
|
||||
def pageUp(self):
|
||||
""" Move selection up a full page. """
|
||||
self.sendCommand('navigation/pageUp')
|
||||
|
||||
def previousLetter(self):
|
||||
""" Jump to previous letter in the alphabet. """
|
||||
self.sendCommand('navigation/previousLetter')
|
||||
|
||||
def select(self):
|
||||
""" Select element at the current position. """
|
||||
self.sendCommand('navigation/select')
|
||||
|
||||
def toggleOSD(self):
|
||||
""" Toggle the on screen display during playback. """
|
||||
self.sendCommand('navigation/toggleOSD')
|
||||
|
||||
def goToMedia(self, media, **params):
|
||||
""" Navigate directly to the specified media page.
|
||||
|
||||
Parameters:
|
||||
media (:class:`~plexapi.media.Media`): Media object to navigate to.
|
||||
**params (dict): Additional GET parameters to include with the command.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.Unsupported`: When no PlexServer specified in this object.
|
||||
"""
|
||||
if not self._server:
|
||||
raise Unsupported('A server must be specified before using this command.')
|
||||
server_url = media._server._baseurl.split(':')
|
||||
self.sendCommand('mirror/details', **dict({
|
||||
'machineIdentifier': self._server.machineIdentifier,
|
||||
'address': server_url[1].strip('/'),
|
||||
'port': server_url[-1],
|
||||
'key': media.key,
|
||||
}, **params))
|
||||
|
||||
# -------------------
|
||||
# Playback Commands
|
||||
# Most of the playback commands take a mandatory mtype {'music','photo','video'} argument,
|
||||
# to specify which media type to apply the command to, (except for playMedia). This
|
||||
# is in case there are multiple things happening (e.g. music in the background, photo
|
||||
# slideshow in the foreground).
|
||||
def pause(self, mtype=DEFAULT_MTYPE):
|
||||
""" Pause the currently playing media type.
|
||||
|
||||
Parameters:
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/pause', type=mtype)
|
||||
|
||||
def play(self, mtype=DEFAULT_MTYPE):
|
||||
""" Start playback for the specified media type.
|
||||
|
||||
Parameters:
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/play', type=mtype)
|
||||
|
||||
def refreshPlayQueue(self, playQueueID, mtype=DEFAULT_MTYPE):
|
||||
""" Refresh the specified Playqueue.
|
||||
|
||||
Parameters:
|
||||
playQueueID (str): Playqueue ID.
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand(
|
||||
'playback/refreshPlayQueue', playQueueID=playQueueID, type=mtype)
|
||||
|
||||
def seekTo(self, offset, mtype=DEFAULT_MTYPE):
|
||||
""" Seek to the specified offset (ms) during playback.
|
||||
|
||||
Parameters:
|
||||
offset (int): Position to seek to (milliseconds).
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/seekTo', offset=offset, type=mtype)
|
||||
|
||||
def skipNext(self, mtype=DEFAULT_MTYPE):
|
||||
""" Skip to the next playback item.
|
||||
|
||||
Parameters:
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/skipNext', type=mtype)
|
||||
|
||||
def skipPrevious(self, mtype=DEFAULT_MTYPE):
|
||||
""" Skip to previous playback item.
|
||||
|
||||
Parameters:
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/skipPrevious', type=mtype)
|
||||
|
||||
def skipTo(self, key, mtype=DEFAULT_MTYPE):
|
||||
""" Skip to the playback item with the specified key.
|
||||
|
||||
Parameters:
|
||||
key (str): Key of the media item to skip to.
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/skipTo', key=key, type=mtype)
|
||||
|
||||
def stepBack(self, mtype=DEFAULT_MTYPE):
|
||||
""" Step backward a chunk of time in the current playback item.
|
||||
|
||||
Parameters:
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/stepBack', type=mtype)
|
||||
|
||||
def stepForward(self, mtype=DEFAULT_MTYPE):
|
||||
""" Step forward a chunk of time in the current playback item.
|
||||
|
||||
Parameters:
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/stepForward', type=mtype)
|
||||
|
||||
def stop(self, mtype=DEFAULT_MTYPE):
|
||||
""" Stop the currently playing item.
|
||||
|
||||
Parameters:
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.sendCommand('playback/stop', type=mtype)
|
||||
|
||||
def setRepeat(self, repeat, mtype=DEFAULT_MTYPE):
|
||||
""" Enable repeat for the specified playback items.
|
||||
|
||||
Parameters:
|
||||
repeat (int): Repeat mode (0=off, 1=repeatone, 2=repeatall).
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.setParameters(repeat=repeat, mtype=mtype)
|
||||
|
||||
def setShuffle(self, shuffle, mtype=DEFAULT_MTYPE):
|
||||
""" Enable shuffle for the specified playback items.
|
||||
|
||||
Parameters:
|
||||
shuffle (int): Shuffle mode (0=off, 1=on)
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.setParameters(shuffle=shuffle, mtype=mtype)
|
||||
|
||||
def setVolume(self, volume, mtype=DEFAULT_MTYPE):
|
||||
""" Enable volume for the current playback item.
|
||||
|
||||
Parameters:
|
||||
volume (int): Volume level (0-100).
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.setParameters(volume=volume, mtype=mtype)
|
||||
|
||||
def setAudioStream(self, audioStreamID, mtype=DEFAULT_MTYPE):
|
||||
""" Select the audio stream for the current playback item (only video).
|
||||
|
||||
Parameters:
|
||||
audioStreamID (str): ID of the audio stream from the media object.
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.setStreams(audioStreamID=audioStreamID, mtype=mtype)
|
||||
|
||||
def setSubtitleStream(self, subtitleStreamID, mtype=DEFAULT_MTYPE):
|
||||
""" Select the subtitle stream for the current playback item (only video).
|
||||
|
||||
Parameters:
|
||||
subtitleStreamID (str): ID of the subtitle stream from the media object.
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.setStreams(subtitleStreamID=subtitleStreamID, mtype=mtype)
|
||||
|
||||
def setVideoStream(self, videoStreamID, mtype=DEFAULT_MTYPE):
|
||||
""" Select the video stream for the current playback item (only video).
|
||||
|
||||
Parameters:
|
||||
videoStreamID (str): ID of the video stream from the media object.
|
||||
mtype (str): Media type to take action against (music, photo, video).
|
||||
"""
|
||||
self.setStreams(videoStreamID=videoStreamID, mtype=mtype)
|
||||
|
||||
def playMedia(self, media, offset=0, **params):
|
||||
""" Start playback of the specified media item. See also:
|
||||
|
||||
Parameters:
|
||||
media (:class:`~plexapi.media.Media`): Media item to be played back
|
||||
(movie, music, photo, playlist, playqueue).
|
||||
offset (int): Number of milliseconds at which to start playing with zero
|
||||
representing the beginning (default 0).
|
||||
**params (dict): Optional additional parameters to include in the playback request. See
|
||||
also: https://github.com/plexinc/plex-media-player/wiki/Remote-control-API#modified-commands
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.Unsupported`: When no PlexServer specified in this object.
|
||||
"""
|
||||
if not self._server:
|
||||
raise Unsupported('A server must be specified before using this command.')
|
||||
server_url = media._server._baseurl.split(':')
|
||||
|
||||
if self.product != 'OpenPHT':
|
||||
try:
|
||||
self.sendCommand('timeline/subscribe', port=server_url[1].strip('/'), protocol='http')
|
||||
except: # noqa: E722
|
||||
# some clients don't need or like this and raise HTTP 400.
|
||||
# We want to include the exception in the log,
|
||||
# but it might still work so we swallow it.
|
||||
log.exception('%s failed to subscribe ' % self.title)
|
||||
|
||||
playqueue = media if isinstance(media, PlayQueue) else self._server.createPlayQueue(media)
|
||||
self.sendCommand('playback/playMedia', **dict({
|
||||
'machineIdentifier': self._server.machineIdentifier,
|
||||
'address': server_url[1].strip('/'),
|
||||
'port': server_url[-1],
|
||||
'offset': offset,
|
||||
'key': media.key,
|
||||
'token': media._server._token,
|
||||
'containerKey': '/playQueues/%s?window=100&own=1' % playqueue.playQueueID,
|
||||
}, **params))
|
||||
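A usage sketch for playMedia(), assuming the server exposes a client lookup by name (PlexServer.client); the URL, token, client name, and rating key are placeholders:

```
from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='<PLEX_TOKEN>')   # placeholder
client = plex.client('Living Room TV')                               # placeholder client name
movie = plex.fetchItem(1234)                                          # placeholder movie ratingKey
client.playMedia(movie, offset=5 * 60 * 1000)                         # start five minutes in
```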
|
||||
def setParameters(self, volume=None, shuffle=None, repeat=None, mtype=DEFAULT_MTYPE):
|
||||
""" Set multiple playback parameters at once.
|
||||
|
||||
Parameters:
|
||||
volume (int): Volume level (0-100; optional).
|
||||
shuffle (int): Shuffle mode (0=off, 1=on; optional).
|
||||
repeat (int): Repeat mode (0=off, 1=repeatone, 2=repeatall; optional).
|
||||
mtype (str): Media type to take action against (optional music, photo, video).
|
||||
"""
|
||||
params = {}
|
||||
if repeat is not None:
|
||||
params['repeat'] = repeat
|
||||
if shuffle is not None:
|
||||
params['shuffle'] = shuffle
|
||||
if volume is not None:
|
||||
params['volume'] = volume
|
||||
if mtype is not None:
|
||||
params['type'] = mtype
|
||||
self.sendCommand('playback/setParameters', **params)
|
||||
|
||||
def setStreams(self, audioStreamID=None, subtitleStreamID=None, videoStreamID=None, mtype=DEFAULT_MTYPE):
|
||||
""" Select multiple playback streams at once.
|
||||
|
||||
Parameters:
|
||||
audioStreamID (str): ID of the audio stream from the media object.
|
||||
subtitleStreamID (str): ID of the subtitle stream from the media object.
|
||||
videoStreamID (str): ID of the video stream from the media object.
|
||||
mtype (str): Media type to take action against (optional music, photo, video).
|
||||
"""
|
||||
params = {}
|
||||
if audioStreamID is not None:
|
||||
params['audioStreamID'] = audioStreamID
|
||||
if subtitleStreamID is not None:
|
||||
params['subtitleStreamID'] = subtitleStreamID
|
||||
if videoStreamID is not None:
|
||||
params['videoStreamID'] = videoStreamID
|
||||
if mtype is not None:
|
||||
params['type'] = mtype
|
||||
self.sendCommand('playback/setStreams', **params)
|
||||
|
||||
# -------------------
|
||||
# Timeline Commands
|
||||
def timeline(self):
|
||||
""" Poll the current timeline and return the XML response. """
|
||||
return self.sendCommand('timeline/poll', wait=1)
|
||||
|
||||
def isPlayingMedia(self, includePaused=False):
|
||||
""" Returns True if any media is currently playing.
|
||||
|
||||
Parameters:
|
||||
includePaused (bool): Set True to treat currently paused items
|
||||
as playing (optional; default False).
|
||||
"""
|
||||
for mediatype in self.timeline():
|
||||
if mediatype.get('state') == 'playing':
|
||||
return True
|
||||
if includePaused and mediatype.get('state') == 'paused':
|
||||
return True
|
||||
return False
|
||||
lib/plexapi/compat.py (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Python 2/3 compatibility
|
||||
# Always try Py3 first
|
||||
import os
|
||||
from sys import version_info
|
||||
|
||||
ustr = str
|
||||
if version_info < (3,):
|
||||
ustr = unicode
|
||||
|
||||
try:
|
||||
string_type = basestring
|
||||
except NameError:
|
||||
string_type = str
|
||||
|
||||
try:
|
||||
from urllib.parse import urlencode
|
||||
except ImportError:
|
||||
from urllib import urlencode
|
||||
|
||||
try:
|
||||
from urllib.parse import quote
|
||||
except ImportError:
|
||||
from urllib import quote
|
||||
|
||||
try:
|
||||
from urllib.parse import quote_plus
|
||||
except ImportError:
|
||||
from urllib import quote_plus
|
||||
|
||||
try:
|
||||
from urllib.parse import unquote
|
||||
except ImportError:
|
||||
from urllib import unquote
|
||||
|
||||
try:
|
||||
from configparser import ConfigParser
|
||||
except ImportError:
|
||||
from ConfigParser import ConfigParser
|
||||
|
||||
try:
|
||||
from xml.etree import cElementTree as ElementTree
|
||||
except ImportError:
|
||||
from xml.etree import ElementTree
|
||||
|
||||
|
||||
def makedirs(name, mode=0o777, exist_ok=False):
|
||||
""" Mimicks os.makedirs() from Python 3. """
|
||||
try:
|
||||
os.makedirs(name, mode)
|
||||
except OSError:
|
||||
if not os.path.isdir(name) or not exist_ok:
|
||||
raise
|
||||
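A tiny sketch of the exist_ok behaviour this shim adds:

```
from plexapi.compat import makedirs

makedirs('/tmp/plexapi-downloads', exist_ok=True)   # creates the directory
makedirs('/tmp/plexapi-downloads', exist_ok=True)   # second call is a no-op instead of raising
```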
lib/plexapi/config.py (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from plexapi.compat import ConfigParser
|
||||
|
||||
|
||||
class PlexConfig(ConfigParser):
|
||||
""" PlexAPI configuration object. Settings are stored in an INI file within the
|
||||
user's home directory and can be overridden after importing plexapi by simply
|
||||
setting the value. See the documentation section 'Configuration' for more
|
||||
details on available options.
|
||||
|
||||
Parameters:
|
||||
path (str): Path of the configuration file to load.
|
||||
"""
|
||||
def __init__(self, path):
|
||||
ConfigParser.__init__(self)
|
||||
self.read(path)
|
||||
self.data = self._asDict()
|
||||
|
||||
def get(self, key, default=None, cast=None):
|
||||
""" Returns the specified configuration value or <default> if not found.
|
||||
|
||||
Parameters:
|
||||
key (str): Configuration variable to load in the format '<section>.<variable>'.
|
||||
default: Default value to use if key not found.
|
||||
cast (func): Cast the value to the specified type before returning.
|
||||
"""
|
||||
try:
|
||||
# First: check environment variable is set
|
||||
envkey = 'PLEXAPI_%s' % key.upper().replace('.', '_')
|
||||
value = os.environ.get(envkey)
|
||||
if value is None:
|
||||
# Second: check the config file has attr
|
||||
section, name = key.lower().split('.')
|
||||
value = self.data.get(section, {}).get(name, default)
|
||||
return cast(value) if cast else value
|
||||
except: # noqa: E722
|
||||
return default
|
||||
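A sketch of the lookup order described above: an environment variable of the form PLEXAPI_<SECTION>_<NAME> wins over the config file. The option name used here is illustrative:

```
import os
from plexapi import CONFIG

print(CONFIG.get('auth.server_baseurl'))                              # value from the ini file, if any
os.environ['PLEXAPI_AUTH_SERVER_BASEURL'] = 'http://otherhost:32400'  # env var takes precedence
print(CONFIG.get('auth.server_baseurl'))                              # now 'http://otherhost:32400'
```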
|
||||
def _asDict(self):
|
||||
""" Returns all configuration values as a dictionary. """
|
||||
config = defaultdict(dict)
|
||||
for section in self._sections:
|
||||
for name, value in self._sections[section].items():
|
||||
if name != '__name__':
|
||||
config[section.lower()][name.lower()] = value
|
||||
return dict(config)
|
||||
|
||||
|
||||
def reset_base_headers():
|
||||
""" Convenience function returns a dict of all base X-Plex-* headers for session requests. """
|
||||
import plexapi
|
||||
return {
|
||||
'X-Plex-Platform': plexapi.X_PLEX_PLATFORM,
|
||||
'X-Plex-Platform-Version': plexapi.X_PLEX_PLATFORM_VERSION,
|
||||
'X-Plex-Provides': plexapi.X_PLEX_PROVIDES,
|
||||
'X-Plex-Product': plexapi.X_PLEX_PRODUCT,
|
||||
'X-Plex-Version': plexapi.X_PLEX_VERSION,
|
||||
'X-Plex-Device': plexapi.X_PLEX_DEVICE,
|
||||
'X-Plex-Device-Name': plexapi.X_PLEX_DEVICE_NAME,
|
||||
'X-Plex-Client-Identifier': plexapi.X_PLEX_IDENTIFIER,
|
||||
}
|
||||
lib/plexapi/exceptions.py (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
class PlexApiException(Exception):
|
||||
""" Base class for all PlexAPI exceptions. """
|
||||
pass
|
||||
|
||||
|
||||
class BadRequest(PlexApiException):
|
||||
""" An invalid request, generally a user error. """
|
||||
pass
|
||||
|
||||
|
||||
class NotFound(PlexApiException):
|
||||
""" Request media item or device is not found. """
|
||||
pass
|
||||
|
||||
|
||||
class UnknownType(PlexApiException):
|
||||
""" Unknown library type. """
|
||||
pass
|
||||
|
||||
|
||||
class Unsupported(PlexApiException):
|
||||
""" Unsupported client request. """
|
||||
pass
|
||||
|
||||
|
||||
class Unauthorized(PlexApiException):
|
||||
""" Invalid username or password. """
|
||||
pass
|
||||
lib/plexapi/library.py (new file, 716 lines)
@@ -0,0 +1,716 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import X_PLEX_CONTAINER_SIZE, log, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.compat import unquote, urlencode, quote_plus
|
||||
from plexapi.media import MediaTag
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
|
||||
|
||||
class Library(PlexObject):
|
||||
""" Represents a PlexServer library. This contains all sections of media defined
|
||||
in your Plex server including video, shows and audio.
|
||||
|
||||
Attributes:
|
||||
key (str): '/library'
|
||||
identifier (str): Unknown ('com.plexapp.plugins.library').
|
||||
mediaTagVersion (str): Unknown (/system/bundle/media/flags/)
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer this client is connected to.
|
||||
title1 (str): 'Plex Library' (not sure how useful this is).
|
||||
title2 (str): Second title (this is blank on my setup).
|
||||
"""
|
||||
key = '/library'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self._sectionsByID = {} # cached Section UUIDs
|
||||
self.identifier = data.attrib.get('identifier')
|
||||
self.mediaTagVersion = data.attrib.get('mediaTagVersion')
|
||||
self.title1 = data.attrib.get('title1')
|
||||
self.title2 = data.attrib.get('title2')
|
||||
|
||||
def sections(self):
|
||||
""" Returns a list of all media sections in this library. Library sections may be any of
|
||||
:class:`~plexapi.library.MovieSection`, :class:`~plexapi.library.ShowSection`,
|
||||
:class:`~plexapi.library.MusicSection`, :class:`~plexapi.library.PhotoSection`.
|
||||
"""
|
||||
key = '/library/sections'
|
||||
sections = []
|
||||
for elem in self._server.query(key):
|
||||
for cls in (MovieSection, ShowSection, MusicSection, PhotoSection):
|
||||
if elem.attrib.get('type') == cls.TYPE:
|
||||
section = cls(self._server, elem, key)
|
||||
self._sectionsByID[section.key] = section
|
||||
sections.append(section)
|
||||
return sections
|
||||
|
||||
def section(self, title=None):
|
||||
""" Returns the :class:`~plexapi.library.LibrarySection` that matches the specified title.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the section to return.
|
||||
"""
|
||||
for section in self.sections():
|
||||
if section.title.lower() == title.lower():
|
||||
return section
|
||||
raise NotFound('Invalid library section: %s' % title)
|
||||
|
||||
def sectionByID(self, sectionID):
|
||||
""" Returns the :class:`~plexapi.library.LibrarySection` that matches the specified sectionID.
|
||||
|
||||
Parameters:
|
||||
sectionID (str): ID of the section to return.
|
||||
"""
|
||||
if not self._sectionsByID or sectionID not in self._sectionsByID:
|
||||
self.sections()
|
||||
return self._sectionsByID[sectionID]
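# Usage sketch (editor's example) of the three lookups above; 'plex' is an assumed,
# already-connected PlexServer instance.
for section in plex.library.sections():
    print(section.title, section.TYPE)
movies = plex.library.section('Movies')          # lookup by title (case-insensitive)
same = plex.library.sectionByID(movies.key)      # lookup by the section key/id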
|
||||
|
||||
def all(self, **kwargs):
|
||||
""" Returns a list of all media from all library sections.
|
||||
This may be a very large dataset to retrieve.
|
||||
"""
|
||||
items = []
|
||||
for section in self.sections():
|
||||
for item in section.all(**kwargs):
|
||||
items.append(item)
|
||||
return items
|
||||
|
||||
def onDeck(self):
|
||||
""" Returns a list of all media items on deck. """
|
||||
return self.fetchItems('/library/onDeck')
|
||||
|
||||
def recentlyAdded(self):
|
||||
""" Returns a list of all media items recently added. """
|
||||
return self.fetchItems('/library/recentlyAdded')
|
||||
|
||||
def search(self, title=None, libtype=None, **kwargs):
|
||||
""" Searching within a library section is much more powerful. It seems certain
|
||||
attributes on the media objects can be targeted to filter this search down
|
||||
a bit, but I haven't found the documentation for it.
|
||||
|
||||
Example: "studio=Comedy%20Central" or "year=1999" "title=Kung Fu" all work. Other items
|
||||
such as actor=<id> seem to work, but require you already know the id of the actor.
|
||||
TLDR: This is untested but seems to work. Use library section search when you can.
|
||||
"""
|
||||
args = {}
|
||||
if title:
|
||||
args['title'] = title
|
||||
if libtype:
|
||||
args['type'] = utils.searchType(libtype)
|
||||
for attr, value in kwargs.items():
|
||||
args[attr] = value
|
||||
key = '/library/all%s' % utils.joinArgs(args)
|
||||
return self.fetchItems(key)
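# Sketch of the loose library-wide search described in the docstring above; filter
# names are passed straight through as query arguments, so they are only illustrative.
hits = plex.library.search(title='Kung Fu', libtype='movie', year=1999)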
|
||||
|
||||
def cleanBundles(self):
|
||||
""" Poster images and other metadata for items in your library are kept in "bundle"
|
||||
packages. When you remove items from your library, these bundles aren't immediately
|
||||
removed. Removing these old bundles can reduce the size of your install. By default, your
|
||||
server will automatically clean up old bundles once a week as part of Scheduled Tasks.
|
||||
"""
|
||||
# TODO: Should this check the response for success or the correct mediaprefix?
|
||||
self._server.query('/library/clean/bundles')
|
||||
|
||||
def emptyTrash(self):
|
||||
""" If a library has items in the Library Trash, use this option to empty the Trash. """
|
||||
for section in self.sections():
|
||||
section.emptyTrash()
|
||||
|
||||
def optimize(self):
|
||||
""" The Optimize option cleans up the server database from unused or fragmented data.
|
||||
For example, if you have deleted or added an entire library or many items in a
|
||||
library, you may like to optimize the database.
|
||||
"""
|
||||
self._server.query('/library/optimize')
|
||||
|
||||
def update(self):
|
||||
""" Scan this library for new items."""
|
||||
self._server.query('/library/sections/all/refresh')
|
||||
|
||||
def cancelUpdate(self):
|
||||
""" Cancel a library update. """
|
||||
key = '/library/sections/all/refresh'
|
||||
self._server.query(key, method=self._server._session.delete)
|
||||
|
||||
def refresh(self):
|
||||
""" Forces a download of fresh media information from the internet.
|
||||
This can take a long time. Any locked fields are not modified.
|
||||
"""
|
||||
self._server.query('/library/sections/all/refresh?force=1')
|
||||
|
||||
def deleteMediaPreviews(self):
|
||||
""" Delete the preview thumbnails for the all sections. This cannot be
|
||||
undone. Recreating media preview files can take hours or even days.
|
||||
"""
|
||||
for section in self.sections():
|
||||
section.deleteMediaPreviews()
|
||||
|
||||
def add(self, name='', type='', agent='', scanner='', location='', language='en', *args, **kwargs):
|
||||
""" Simplified add for the most common options.
|
||||
|
||||
Parameters:
|
||||
name (str): Name of the library
|
||||
agent (str): Example com.plexapp.agents.imdb
|
||||
type (str): movie, show, # check me
|
||||
location (str): /path/to/files
|
||||
language (str): Two letter language code (ex: en).
|
||||
kwargs (dict): Advanced options should be passed as a dict, where the id is the key.
|
||||
|
||||
**Photo Preferences**
|
||||
|
||||
* **agent** (str): com.plexapp.agents.none
|
||||
* **enableAutoPhotoTags** (bool): Tag photos. Default value false.
|
||||
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
|
||||
* **includeInGlobal** (bool): Include in dashboard. Default value true.
|
||||
* **scanner** (str): Plex Photo Scanner
|
||||
|
||||
**Movie Preferences**
|
||||
|
||||
* **agent** (str): com.plexapp.agents.none, com.plexapp.agents.imdb, com.plexapp.agents.themoviedb
|
||||
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
|
||||
* **enableCinemaTrailers** (bool): Enable Cinema Trailers. Default value true.
|
||||
* **includeInGlobal** (bool): Include in dashboard. Default value true.
|
||||
* **scanner** (str): Plex Movie Scanner, Plex Video Files Scanner
|
||||
|
||||
**IMDB Movie Options** (com.plexapp.agents.imdb)
|
||||
|
||||
* **title** (bool): Localized titles. Default value false.
|
||||
* **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
|
||||
* **only_trailers** (bool): Skip extras which aren't trailers. Default value false.
|
||||
* **redband** (bool): Use red band (restricted audiences) trailers when available. Default value false.
|
||||
* **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
|
||||
* **cast_list** (int): Cast List Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
|
||||
* **ratings** (int): Ratings Source, Default value 0 Possible options:
|
||||
0:Rotten Tomatoes, 1:IMDb, 2:The Movie Database.
|
||||
* **summary** (int): Plot Summary Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
|
||||
* **country** (int): Default value 46 Possible options 0:Argentina, 1:Australia, 2:Austria,
|
||||
3:Belgium, 4:Belize, 5:Bolivia, 6:Brazil, 7:Canada, 8:Chile, 9:Colombia, 10:Costa Rica,
|
||||
11:Czech Republic, 12:Denmark, 13:Dominican Republic, 14:Ecuador, 15:El Salvador,
|
||||
16:France, 17:Germany, 18:Guatemala, 19:Honduras, 20:Hong Kong SAR, 21:Ireland,
|
||||
22:Italy, 23:Jamaica, 24:Korea, 25:Liechtenstein, 26:Luxembourg, 27:Mexico, 28:Netherlands,
|
||||
29:New Zealand, 30:Nicaragua, 31:Panama, 32:Paraguay, 33:Peru, 34:Portugal,
|
||||
35:Peoples Republic of China, 36:Puerto Rico, 37:Russia, 38:Singapore, 39:South Africa,
|
||||
40:Spain, 41:Sweden, 42:Switzerland, 43:Taiwan, 44:Trinidad, 45:United Kingdom,
|
||||
46:United States, 47:Uruguay, 48:Venezuela.
|
||||
* **collections** (bool): Use collection info from The Movie Database. Default value false.
|
||||
* **localart** (bool): Prefer artwork based on library language. Default value true.
|
||||
* **adult** (bool): Include adult content. Default value false.
|
||||
* **usage** (bool): Send anonymous usage data to Plex. Default value true.
|
||||
|
||||
**TheMovieDB Movie Options** (com.plexapp.agents.themoviedb)
|
||||
|
||||
* **collections** (bool): Use collection info from The Movie Database. Default value false.
|
||||
* **localart** (bool): Prefer artwork based on library language. Default value true.
|
||||
* **adult** (bool): Include adult content. Default value false.
|
||||
* **country** (int): Country (used for release date and content rating). Default value 47 Possible
|
||||
options 0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize, 6:Bolivia, 7:Brazil, 8:Canada,
|
||||
9:Chile, 10:Colombia, 11:Costa Rica, 12:Czech Republic, 13:Denmark, 14:Dominican Republic, 15:Ecuador,
|
||||
16:El Salvador, 17:France, 18:Germany, 19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland,
|
||||
23:Italy, 24:Jamaica, 25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands,
|
||||
30:New Zealand, 31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
|
||||
36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore, 40:South Africa, 41:Spain,
|
||||
42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad, 46:United Kingdom, 47:United States, 48:Uruguay,
|
||||
49:Venezuela.
|
||||
|
||||
**Show Preferences**
|
||||
|
||||
* **agent** (str): com.plexapp.agents.none, com.plexapp.agents.thetvdb, com.plexapp.agents.themoviedb
|
||||
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
|
||||
* **episodeSort** (int): Episode order. Default -1 Possible options: 0:Oldest first, 1:Newest first.
|
||||
* **flattenSeasons** (int): Seasons. Default value 0 Possible options: 0:Show,1:Hide.
|
||||
* **includeInGlobal** (bool): Include in dashboard. Default value true.
|
||||
* **scanner** (str): Plex Series Scanner
|
||||
|
||||
**TheTVDB Show Options** (com.plexapp.agents.thetvdb)
|
||||
|
||||
* **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
|
||||
* **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
|
||||
|
||||
**TheMovieDB Show Options** (com.plexapp.agents.themoviedb)
|
||||
|
||||
* **collections** (bool): Use collection info from The Movie Database. Default value false.
|
||||
* **localart** (bool): Prefer artwork based on library language. Default value true.
|
||||
* **adult** (bool): Include adult content. Default value false.
|
||||
* **country** (int): Country (used for release date and content rating). Default value 47 options
|
||||
0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize, 6:Bolivia, 7:Brazil, 8:Canada, 9:Chile,
|
||||
10:Colombia, 11:Costa Rica, 12:Czech Republic, 13:Denmark, 14:Dominican Republic, 15:Ecuador,
|
||||
16:El Salvador, 17:France, 18:Germany, 19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland,
|
||||
23:Italy, 24:Jamaica, 25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands,
|
||||
30:New Zealand, 31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
|
||||
36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore, 40:South Africa,
|
||||
41:Spain, 42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad, 46:United Kingdom, 47:United States,
|
||||
48:Uruguay, 49:Venezuela.
|
||||
|
||||
**Other Video Preferences**
|
||||
|
||||
* **agent** (str): com.plexapp.agents.none, com.plexapp.agents.imdb, com.plexapp.agents.themoviedb
|
||||
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
|
||||
* **enableCinemaTrailers** (bool): Enable Cinema Trailers. Default value true.
|
||||
* **includeInGlobal** (bool): Include in dashboard. Default value true.
|
||||
* **scanner** (str): Plex Movie Scanner, Plex Video Files Scanner
|
||||
|
||||
**IMDB Other Video Options** (com.plexapp.agents.imdb)
|
||||
|
||||
* **title** (bool): Localized titles. Default value false.
|
||||
* **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
|
||||
* **only_trailers** (bool): Skip extras which aren't trailers. Default value false.
|
||||
* **redband** (bool): Use red band (restricted audiences) trailers when available. Default value false.
|
||||
* **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
|
||||
* **cast_list** (int): Cast List Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
|
||||
* **ratings** (int): Ratings Source Default value 0 Possible options:
|
||||
0:Rotten Tomatoes,1:IMDb,2:The Movie Database.
|
||||
* **summary** (int): Plot Summary Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
|
||||
* **country** (int): Country: Default value 46 Possible options: 0:Argentina, 1:Australia, 2:Austria,
|
||||
3:Belgium, 4:Belize, 5:Bolivia, 6:Brazil, 7:Canada, 8:Chile, 9:Colombia, 10:Costa Rica,
|
||||
11:Czech Republic, 12:Denmark, 13:Dominican Republic, 14:Ecuador, 15:El Salvador, 16:France,
|
||||
17:Germany, 18:Guatemala, 19:Honduras, 20:Hong Kong SAR, 21:Ireland, 22:Italy, 23:Jamaica,
|
||||
24:Korea, 25:Liechtenstein, 26:Luxembourg, 27:Mexico, 28:Netherlands, 29:New Zealand, 30:Nicaragua,
|
||||
31:Panama, 32:Paraguay, 33:Peru, 34:Portugal, 35:Peoples Republic of China, 36:Puerto Rico,
|
||||
37:Russia, 38:Singapore, 39:South Africa, 40:Spain, 41:Sweden, 42:Switzerland, 43:Taiwan, 44:Trinidad,
|
||||
45:United Kingdom, 46:United States, 47:Uruguay, 48:Venezuela.
|
||||
* **collections** (bool): Use collection info from The Movie Database. Default value false.
|
||||
* **localart** (bool): Prefer artwork based on library language. Default value true.
|
||||
* **adult** (bool): Include adult content. Default value false.
|
||||
* **usage** (bool): Send anonymous usage data to Plex. Default value true.
|
||||
|
||||
**TheMovieDB Other Video Options** (com.plexapp.agents.themoviedb)
|
||||
|
||||
* **collections** (bool): Use collection info from The Movie Database. Default value false.
|
||||
* **localart** (bool): Prefer artwork based on library language. Default value true.
|
||||
* **adult** (bool): Include adult content. Default value false.
|
||||
* **country** (int): Country (used for release date and content rating). Default
|
||||
value 47 Possible options 0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize,
|
||||
6:Bolivia, 7:Brazil, 8:Canada, 9:Chile, 10:Colombia, 11:Costa Rica, 12:Czech Republic,
|
||||
13:Denmark, 14:Dominican Republic, 15:Ecuador, 16:El Salvador, 17:France, 18:Germany,
|
||||
19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland, 23:Italy, 24:Jamaica,
|
||||
25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands, 30:New Zealand,
|
||||
31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
|
||||
36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore,
|
||||
40:South Africa, 41:Spain, 42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad,
|
||||
46:United Kingdom, 47:United States, 48:Uruguay, 49:Venezuela.
|
||||
"""
|
||||
part = '/library/sections?name=%s&type=%s&agent=%s&scanner=%s&language=%s&location=%s' % (
|
||||
quote_plus(name), type, agent, quote_plus(scanner), language, quote_plus(location)) # noqa E126
|
||||
if kwargs:
|
||||
part += '&' + urlencode(kwargs)
|
||||
return self._server.query(part, method=self._server._session.post)
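# Sketch of Library.add() using a few of the advanced options documented above.
# The name, path and option values are illustrative only.
plex.library.add(name='Movies', type='movie',
                 agent='com.plexapp.agents.imdb',
                 scanner='Plex Movie Scanner',
                 location='/data/movies', language='en',
                 **{'enableCinemaTrailers': 0, 'collections': 1})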
|
||||
|
||||
|
||||
class LibrarySection(PlexObject):
|
||||
""" Base class for a single library section.
|
||||
|
||||
Attributes:
|
||||
ALLOWED_FILTERS (tuple): ()
|
||||
ALLOWED_SORT (tuple): ()
|
||||
BOOLEAN_FILTERS (tuple<str>): ('unwatched', 'duplicate')
|
||||
server (:class:`~plexapi.server.PlexServer`): Server this client is connected to.
|
||||
initpath (str): Path requested when building this object.
|
||||
agent (str): Unknown (com.plexapp.agents.imdb, etc)
|
||||
allowSync (bool): True if you allow syncing content from this section.
|
||||
art (str): Wallpaper artwork used to represent this section.
|
||||
composite (str): Composite image used to represent this section.
|
||||
createdAt (datetime): Datetime this library section was created.
|
||||
filters (str): Unknown
|
||||
key (str): Key (or ID) of this library section.
|
||||
language (str): Language represented in this section (en, xn, etc).
|
||||
locations (str): Paths on disk where section content is stored.
|
||||
refreshing (str): True if this section is currently being refreshed.
|
||||
scanner (str): Internal scanner used to find media (Plex Movie Scanner, Plex Premium Music Scanner, etc.)
|
||||
thumb (str): Thumbnail image used to represent this section.
|
||||
title (str): Title of this section.
|
||||
type (str): Type of content section represents (movie, artist, photo, show).
|
||||
updatedAt (datetime): Datetime this library section was last updated.
|
||||
uuid (str): Unique id for this section (32258d7c-3e6c-4ac5-98ad-bad7a3b78c63)
|
||||
"""
|
||||
ALLOWED_FILTERS = ()
|
||||
ALLOWED_SORT = ()
|
||||
BOOLEAN_FILTERS = ('unwatched', 'duplicate')
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.agent = data.attrib.get('agent')
|
||||
self.allowSync = utils.cast(bool, data.attrib.get('allowSync'))
|
||||
self.art = data.attrib.get('art')
|
||||
self.composite = data.attrib.get('composite')
|
||||
self.createdAt = utils.toDatetime(data.attrib.get('createdAt'))
|
||||
self.filters = data.attrib.get('filters')
|
||||
self.key = data.attrib.get('key') # invalid key from plex
|
||||
self.language = data.attrib.get('language')
|
||||
self.locations = self.listAttrs(data, 'path', etag='Location')
|
||||
self.refreshing = utils.cast(bool, data.attrib.get('refreshing'))
|
||||
self.scanner = data.attrib.get('scanner')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.uuid = data.attrib.get('uuid')
|
||||
|
||||
def delete(self):
|
||||
""" Delete a library section. """
|
||||
try:
|
||||
return self._server.query('/library/sections/%s' % self.key, method=self._server._session.delete)
|
||||
except BadRequest: # pragma: no cover
|
||||
msg = 'Failed to delete library %s' % self.key
|
||||
msg += ' You may need to allow this permission in your Plex settings.'
|
||||
log.error(msg)
|
||||
raise
|
||||
|
||||
def edit(self, **kwargs):
|
||||
""" Edit a library (Note: agent is required). See :class:`~plexapi.library.Library` for example usage.
|
||||
|
||||
Parameters:
|
||||
kwargs (dict): Dict of settings to edit.
|
||||
"""
|
||||
part = '/library/sections/%s?%s' % (self.key, urlencode(kwargs))
|
||||
self._server.query(part, method=self._server._session.put)
|
||||
|
||||
# Reload this way since self.key doesn't have a full path, but is simply an id.
|
||||
for s in self._server.library.sections():
|
||||
if s.key == self.key:
|
||||
return s
|
||||
|
||||
def get(self, title):
|
||||
""" Returns the media item with the specified title.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the item to return.
|
||||
"""
|
||||
key = '/library/sections/%s/all' % self.key
|
||||
return self.fetchItem(key, title__iexact=title)
|
||||
|
||||
def all(self, **kwargs):
|
||||
""" Returns a list of media from this library section. """
|
||||
key = '/library/sections/%s/all' % self.key
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def onDeck(self):
|
||||
""" Returns a list of media items on deck from this library section. """
|
||||
key = '/library/sections/%s/onDeck' % self.key
|
||||
return self.fetchItems(key)
|
||||
|
||||
def recentlyAdded(self, maxresults=50):
|
||||
""" Returns a list of media items recently added from this library section.
|
||||
|
||||
Parameters:
|
||||
maxresults (int): Max number of items to return (default 50).
|
||||
"""
|
||||
return self.search(sort='addedAt:desc', maxresults=maxresults)
|
||||
|
||||
def analyze(self):
|
||||
""" Run an analysis on all of the items in this library section. See
|
||||
See :func:`~plexapi.base.PlexPartialObject.analyze` for more details.
|
||||
"""
|
||||
key = '/library/sections/%s/analyze' % self.key
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def emptyTrash(self):
|
||||
""" If a section has items in the Trash, use this option to empty the Trash. """
|
||||
key = '/library/sections/%s/emptyTrash' % self.key
|
||||
self._server.query(key, method=self._server._session.put)
|
||||
|
||||
def update(self):
|
||||
""" Scan this section for new media. """
|
||||
key = '/library/sections/%s/refresh' % self.key
|
||||
self._server.query(key)
|
||||
|
||||
def cancelUpdate(self):
|
||||
""" Cancel update of this Library Section. """
|
||||
key = '/library/sections/%s/refresh' % self.key
|
||||
self._server.query(key, method=self._server._session.delete)
|
||||
|
||||
def refresh(self):
|
||||
""" Forces a download of fresh media information from the internet.
|
||||
This can take a long time. Any locked fields are not modified.
|
||||
"""
|
||||
key = '/library/sections/%s/refresh?force=1' % self.key
|
||||
self._server.query(key)
|
||||
|
||||
def deleteMediaPreviews(self):
|
||||
""" Delete the preview thumbnails for items in this library. This cannot
|
||||
be undone. Recreating media preview files can take hours or even days.
|
||||
"""
|
||||
key = '/library/sections/%s/indexes' % self.key
|
||||
self._server.query(key, method=self._server._session.delete)
|
||||
|
||||
def listChoices(self, category, libtype=None, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.library.FilterChoice` objects for the
|
||||
specified category and libtype. kwargs can be any of the same kwargs in
|
||||
:func:`plexapi.library.LibrarySection.search()` to help narrow down the choices
|
||||
to only those that matter in your current context.
|
||||
|
||||
Parameters:
|
||||
category (str): Category to list choices for (genre, contentRating, etc).
|
||||
libtype (int): Library type of item filter.
|
||||
**kwargs (dict): Additional kwargs to narrow down the choices.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.BadRequest`: Cannot include kwarg equal to specified category.
|
||||
"""
|
||||
# TODO: Should this be moved to base?
|
||||
if category in kwargs:
|
||||
raise BadRequest('Cannot include kwarg equal to specified category: %s' % category)
|
||||
args = {}
|
||||
for subcategory, value in kwargs.items():
|
||||
args[category] = self._cleanSearchFilter(subcategory, value)
|
||||
if libtype is not None:
|
||||
args['type'] = utils.searchType(libtype)
|
||||
key = '/library/sections/%s/%s%s' % (self.key, category, utils.joinArgs(args))
|
||||
return self.fetchItems(key, cls=FilterChoice)
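# Sketch of listChoices(): list the available genre filter values for a section
# ('movies' is an assumed MovieSection instance).
for choice in movies.listChoices('genre'):
    print(choice.title, choice.key)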
|
||||
|
||||
def search(self, title=None, sort=None, maxresults=999999, libtype=None, **kwargs):
|
||||
""" Search the library. If there are many results, they will be fetched from the server
|
||||
in batches of X_PLEX_CONTAINER_SIZE amounts. If you're only looking for the first <num>
|
||||
results, it would be wise to set the maxresults option to that amount so this function
|
||||
doesn't iterate over all results on the server.
|
||||
|
||||
Parameters:
|
||||
title (str): General string query to search for (optional).
|
||||
sort (str): column:dir; column can be any of {addedAt, originallyAvailableAt, lastViewedAt,
|
||||
titleSort, rating, mediaHeight, duration}. dir can be asc or desc (optional).
|
||||
maxresults (int): Only return the specified number of results (optional).
|
||||
libtype (str): Filter results to a specific libtype (movie, show, episode, artist,
|
||||
album, track; optional).
|
||||
**kwargs (dict): Any of the available filters for the current library section. Partial string
|
||||
matches are allowed. Multiple matches are OR'd together. All inputs will be compared with the
|
||||
available options and a warning logged if the option does not appear valid.
|
||||
|
||||
* unwatched: Display or hide unwatched content (True, False). [all]
|
||||
* duplicate: Display or hide duplicate items (True, False). [movie]
|
||||
* actor: List of actors to search ([actor_or_id, ...]). [movie]
|
||||
* collection: List of collections to search within ([collection_or_id, ...]). [all]
|
||||
* contentRating: List of content ratings to search within ([rating_or_key, ...]). [movie,tv]
|
||||
* country: List of countries to search within ([country_or_key, ...]). [movie,music]
|
||||
* decade: List of decades to search within ([yyy0, ...]). [movie]
|
||||
* director: List of directors to search ([director_or_id, ...]). [movie]
|
||||
* genre: List of genres to search within ([genre_or_id, ...]). [all]
|
||||
* network: List of TV networks to search within ([network_or_key, ...]). [tv]
|
||||
* resolution: List of video resolutions to search within ([resolution_or_key, ...]). [movie]
|
||||
* studio: List of studios to search within ([studio_or_key, ...]). [music]
|
||||
* year: List of years to search within ([yyyy, ...]). [all]
|
||||
"""
|
||||
# cleanup the core arguments
|
||||
args = {}
|
||||
for category, value in kwargs.items():
|
||||
args[category] = self._cleanSearchFilter(category, value, libtype)
|
||||
if title is not None:
|
||||
args['title'] = title
|
||||
if sort is not None:
|
||||
args['sort'] = self._cleanSearchSort(sort)
|
||||
if libtype is not None:
|
||||
args['type'] = utils.searchType(libtype)
|
||||
# iterate over the results
|
||||
results, subresults = [], '_init'
|
||||
args['X-Plex-Container-Start'] = 0
|
||||
args['X-Plex-Container-Size'] = min(X_PLEX_CONTAINER_SIZE, maxresults)
|
||||
while subresults and maxresults > len(results):
|
||||
key = '/library/sections/%s/all%s' % (self.key, utils.joinArgs(args))
|
||||
subresults = self.fetchItems(key)
|
||||
results += subresults[:maxresults - len(results)]
|
||||
args['X-Plex-Container-Start'] += args['X-Plex-Container-Size']
|
||||
return results
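# Sketch of the section search described above, combining a boolean filter, a tag
# filter and a sort key that MovieSection permits further down in ALLOWED_FILTERS /
# ALLOWED_SORT ('movies' is an assumed MovieSection instance).
fresh = movies.search(unwatched=True, sort='addedAt:desc', maxresults=10)
comedies = movies.search(genre='Comedy', decade=1990)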
|
||||
|
||||
def _cleanSearchFilter(self, category, value, libtype=None):
|
||||
# check a few things before we begin
|
||||
if category not in self.ALLOWED_FILTERS:
|
||||
raise BadRequest('Unknown filter category: %s' % category)
|
||||
if category in self.BOOLEAN_FILTERS:
|
||||
return '1' if value else '0'
|
||||
if not isinstance(value, (list, tuple)):
|
||||
value = [value]
|
||||
# convert list of values to list of keys or ids
|
||||
result = set()
|
||||
choices = self.listChoices(category, libtype)
|
||||
lookup = {c.title.lower(): unquote(unquote(c.key)) for c in choices}
|
||||
allowed = set(c.key for c in choices)
|
||||
for item in value:
|
||||
item = str((item.id or item.tag) if isinstance(item, MediaTag) else item).lower()
|
||||
# find most logical choice(s) to use in url
|
||||
if item in allowed: result.add(item); continue
|
||||
if item in lookup: result.add(lookup[item]); continue
|
||||
matches = [k for t, k in lookup.items() if item in t]
|
||||
if matches: result.update(matches); continue  # update() instead of map(), which is lazy on Python 3
|
||||
# nothing matched; use raw item value
|
||||
log.warning('Filter value not listed, using raw item value: %s' % item)
|
||||
result.add(item)
|
||||
return ','.join(result)
|
||||
|
||||
def _cleanSearchSort(self, sort):
|
||||
sort = '%s:asc' % sort if ':' not in sort else sort
|
||||
scol, sdir = sort.lower().split(':')
|
||||
lookup = {s.lower(): s for s in self.ALLOWED_SORT}
|
||||
if scol not in lookup:
|
||||
raise BadRequest('Unknown sort column: %s' % scol)
|
||||
if sdir not in ('asc', 'desc'):
|
||||
raise BadRequest('Unknown sort dir: %s' % sdir)
|
||||
return '%s:%s' % (lookup[scol], sdir)
|
||||
|
||||
|
||||
class MovieSection(LibrarySection):
|
||||
""" Represents a :class:`~plexapi.library.LibrarySection` section containing movies.
|
||||
|
||||
Attributes:
|
||||
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('unwatched',
|
||||
'duplicate', 'year', 'decade', 'genre', 'contentRating', 'collection',
|
||||
'director', 'actor', 'country', 'studio', 'resolution', 'guid', 'label')
|
||||
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt',
|
||||
'originallyAvailableAt', 'lastViewedAt', 'titleSort', 'rating',
|
||||
'mediaHeight', 'duration')
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'movie'
|
||||
"""
|
||||
ALLOWED_FILTERS = ('unwatched', 'duplicate', 'year', 'decade', 'genre', 'contentRating',
|
||||
'collection', 'director', 'actor', 'country', 'studio', 'resolution',
|
||||
'guid', 'label')
|
||||
ALLOWED_SORT = ('addedAt', 'originallyAvailableAt', 'lastViewedAt', 'titleSort', 'rating',
|
||||
'mediaHeight', 'duration')
|
||||
TAG = 'Directory'
|
||||
TYPE = 'movie'
|
||||
|
||||
|
||||
class ShowSection(LibrarySection):
|
||||
""" Represents a :class:`~plexapi.library.LibrarySection` section containing tv shows.
|
||||
|
||||
Attributes:
|
||||
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('unwatched',
|
||||
'year', 'genre', 'contentRating', 'network', 'collection', 'guid', 'label')
|
||||
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt', 'lastViewedAt',
|
||||
'originallyAvailableAt', 'titleSort', 'rating', 'unwatched')
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'show'
|
||||
"""
|
||||
ALLOWED_FILTERS = ('unwatched', 'year', 'genre', 'contentRating', 'network', 'collection',
|
||||
'guid', 'duplicate', 'label')
|
||||
ALLOWED_SORT = ('addedAt', 'lastViewedAt', 'originallyAvailableAt', 'titleSort',
|
||||
'rating', 'unwatched')
|
||||
TAG = 'Directory'
|
||||
TYPE = 'show'
|
||||
|
||||
def searchShows(self, **kwargs):
|
||||
""" Search for a show. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
return self.search(libtype='show', **kwargs)
|
||||
|
||||
def searchEpisodes(self, **kwargs):
|
||||
""" Search for an episode. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
return self.search(libtype='episode', **kwargs)
|
||||
|
||||
def recentlyAdded(self, libtype='episode', maxresults=50):
|
||||
""" Returns a list of recently added episodes from this library section.
|
||||
|
||||
Parameters:
|
||||
maxresults (int): Max number of items to return (default 50).
|
||||
"""
|
||||
return self.search(sort='addedAt:desc', libtype=libtype, maxresults=maxresults)
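# Sketch of the show-specific helpers above ('plex' is an assumed PlexServer and the
# 'TV Shows' section title is illustrative).
shows = plex.library.section('TV Shows')
dramas = shows.searchShows(genre='Drama')       # libtype='show'
latest = shows.recentlyAdded(maxresults=5)      # newest episodes first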
|
||||
|
||||
|
||||
class MusicSection(LibrarySection):
|
||||
""" Represents a :class:`~plexapi.library.LibrarySection` section containing music artists.
|
||||
|
||||
Attributes:
|
||||
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('genre',
|
||||
'country', 'collection')
|
||||
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt',
|
||||
'lastViewedAt', 'viewCount', 'titleSort')
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'artist'
|
||||
"""
|
||||
ALLOWED_FILTERS = ('genre', 'country', 'collection', 'mood')
|
||||
ALLOWED_SORT = ('addedAt', 'lastViewedAt', 'viewCount', 'titleSort')
|
||||
TAG = 'Directory'
|
||||
TYPE = 'artist'
|
||||
|
||||
def albums(self):
|
||||
""" Returns a list of :class:`~plexapi.audio.Album` objects in this section. """
|
||||
key = '/library/sections/%s/albums' % self.key
|
||||
return self.fetchItems(key)
|
||||
|
||||
def searchArtists(self, **kwargs):
|
||||
""" Search for an artist. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
return self.search(libtype='artist', **kwargs)
|
||||
|
||||
def searchAlbums(self, **kwargs):
|
||||
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
return self.search(libtype='album', **kwargs)
|
||||
|
||||
def searchTracks(self, **kwargs):
|
||||
""" Search for a track. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
return self.search(libtype='track', **kwargs)
|
||||
|
||||
|
||||
class PhotoSection(LibrarySection):
|
||||
""" Represents a :class:`~plexapi.library.LibrarySection` section containing photos.
|
||||
|
||||
Attributes:
|
||||
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('all', 'iso',
|
||||
'make', 'lens', 'aperture', 'exposure')
|
||||
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt')
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'photo'
|
||||
"""
|
||||
ALLOWED_FILTERS = ('all', 'iso', 'make', 'lens', 'aperture', 'exposure')
|
||||
ALLOWED_SORT = ('addedAt',)
|
||||
TAG = 'Directory'
|
||||
TYPE = 'photo'
|
||||
|
||||
def searchAlbums(self, title, **kwargs):
|
||||
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
key = '/library/sections/%s/all?type=14' % self.key
|
||||
return self.fetchItems(key, title=title)
|
||||
|
||||
def searchPhotos(self, title, **kwargs):
|
||||
""" Search for a photo. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
|
||||
key = '/library/sections/%s/all?type=13' % self.key
|
||||
return self.fetchItems(key, title=title)
|
||||
|
||||
|
||||
class FilterChoice(PlexObject):
|
||||
""" Represents a single filter choice. These objects are gathered when using filters
|
||||
while searching for library items and are the objects returned in the result set of
|
||||
:func:`~plexapi.library.LibrarySection.listChoices()`.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer this client is connected to.
|
||||
initpath (str): Relative path requested when retrieving specified `data` (optional).
|
||||
fastKey (str): API path to quickly list all items in this filter
|
||||
(/library/sections/<section>/all?genre=<key>)
|
||||
key (str): Short key (id) of this filter option (used as <key> in fastKey above).
|
||||
thumb (str): Thumbnail used to represent this filter option.
|
||||
title (str): Human readable name for this filter option.
|
||||
type (str): Filter type (genre, contentRating, etc).
|
||||
"""
|
||||
TAG = 'Directory'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.fastKey = data.attrib.get('fastKey')
|
||||
self.key = data.attrib.get('key')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Hub(PlexObject):
|
||||
""" Represents a single Hub (or category) in the PlexServer search.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Hub'
|
||||
hubIdentifier (str): Unknown.
|
||||
size (int): Number of items found.
|
||||
title (str): Title of this Hub.
|
||||
type (str): Type of items in the Hub.
|
||||
items (str): List of items in the Hub.
|
||||
"""
|
||||
TAG = 'Hub'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.hubIdentifier = data.attrib.get('hubIdentifier')
|
||||
self.size = utils.cast(int, data.attrib.get('size'))
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.items = self.findItems(data)
|
||||
|
||||
def __len__(self):
|
||||
return self.size
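# Sketch of iterating hubs from PlexServer.search(), as referenced in the Hub
# docstring above and the MediaTag docstring in media.py below ('plex' is an
# assumed PlexServer instance).
for hub in plex.search('star'):
    print(hub.title, hub.type, len(hub))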
|
||||
504
lib/plexapi/media.py
Normal file
@@ -0,0 +1,504 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import log, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.exceptions import BadRequest
|
||||
from plexapi.utils import cast
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Media(PlexObject):
|
||||
""" Container object for all MediaPart objects. Provides useful data about the
|
||||
video this media belongs to, such as video framerate, resolution, etc.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Media'
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer object this is from.
|
||||
initpath (str): Relative path requested when retrieving specified data.
|
||||
video (str): Video this media belongs to.
|
||||
aspectRatio (float): Aspect ratio of the video (ex: 2.35).
|
||||
audioChannels (int): Number of audio channels for this video (ex: 6).
|
||||
audioCodec (str): Audio codec used within the video (ex: ac3).
|
||||
bitrate (int): Bitrate of the video (ex: 1624)
|
||||
container (str): Container this video is in (ex: avi).
|
||||
duration (int): Length of the video in milliseconds (ex: 6990483).
|
||||
height (int): Height of the video in pixels (ex: 256).
|
||||
id (int): Plex ID of this media item (ex: 46184).
|
||||
has64bitOffsets (bool): True if video has 64 bit offsets (?).
|
||||
optimizedForStreaming (bool): True if video is optimized for streaming.
|
||||
videoCodec (str): Video codec used within the video (ex: ac3).
|
||||
videoFrameRate (str): Video frame rate (ex: 24p).
|
||||
videoResolution (str): Video resolution (ex: sd).
|
||||
width (int): Width of the video in pixels (ex: 608).
|
||||
parts (list<:class:`~plexapi.media.MediaPart`>): List of MediaParts in this video.
|
||||
"""
|
||||
TAG = 'Media'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.aspectRatio = cast(float, data.attrib.get('aspectRatio'))
|
||||
self.audioChannels = cast(int, data.attrib.get('audioChannels'))
|
||||
self.audioCodec = data.attrib.get('audioCodec')
|
||||
self.bitrate = cast(int, data.attrib.get('bitrate'))
|
||||
self.container = data.attrib.get('container')
|
||||
self.duration = cast(int, data.attrib.get('duration'))
|
||||
self.height = cast(int, data.attrib.get('height'))
|
||||
self.id = cast(int, data.attrib.get('id'))
|
||||
self.has64bitOffsets = cast(bool, data.attrib.get('has64bitOffsets'))
|
||||
self.optimizedForStreaming = cast(bool, data.attrib.get('optimizedForStreaming'))
|
||||
self.videoCodec = data.attrib.get('videoCodec')
|
||||
self.videoFrameRate = data.attrib.get('videoFrameRate')
|
||||
self.videoResolution = data.attrib.get('videoResolution')
|
||||
self.width = cast(int, data.attrib.get('width'))
|
||||
self.parts = self.findItems(data, MediaPart)
|
||||
|
||||
def delete(self):
|
||||
part = self._initpath + '/media/%s' % self.id
|
||||
try:
|
||||
return self._server.query(part, method=self._server._session.delete)
|
||||
except BadRequest:
|
||||
log.error("Failed to delete %s. This could be because you havn't allowed "
|
||||
"items to be deleted" % part)
|
||||
raise
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class MediaPart(PlexObject):
|
||||
""" Represents a single media part (often a single file) for the media this belongs to.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Part'
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer object this is from.
|
||||
initpath (str): Relative path requested when retrieving specified data.
|
||||
media (:class:`~plexapi.media.Media`): Media object this part belongs to.
|
||||
container (str): Container type of this media part (ex: avi).
|
||||
duration (int): Length of this media part in milliseconds.
|
||||
file (str): Path to this file on disk (ex: /media/Movies/Cars.(2006)/Cars.cd2.avi)
|
||||
id (int): Unique ID of this media part.
|
||||
indexes (str, None): None or SD.
|
||||
key (str): Key used to access this media part (ex: /library/parts/46618/1389985872/file.avi).
|
||||
size (int): Size of this file in bytes (ex: 733884416).
|
||||
streams (list<:class:`~plexapi.media.MediaPartStream`>): List of streams in this media part.
|
||||
"""
|
||||
TAG = 'Part'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.container = data.attrib.get('container')
|
||||
self.duration = cast(int, data.attrib.get('duration'))
|
||||
self.file = data.attrib.get('file')
|
||||
self.id = cast(int, data.attrib.get('id'))
|
||||
self.indexes = data.attrib.get('indexes')
|
||||
self.key = data.attrib.get('key')
|
||||
self.size = cast(int, data.attrib.get('size'))
|
||||
self.streams = self._buildStreams(data)
|
||||
|
||||
def _buildStreams(self, data):
|
||||
streams = []
|
||||
for elem in data:
|
||||
for cls in (VideoStream, AudioStream, SubtitleStream):
|
||||
if elem.attrib.get('streamType') == str(cls.STREAMTYPE):
|
||||
streams.append(cls(self._server, elem, self._initpath))
|
||||
return streams
|
||||
|
||||
def videoStreams(self):
|
||||
""" Returns a list of :class:`~plexapi.media.VideoStream` objects in this MediaPart. """
|
||||
return [stream for stream in self.streams if stream.streamType == VideoStream.STREAMTYPE]
|
||||
|
||||
def audioStreams(self):
|
||||
""" Returns a list of :class:`~plexapi.media.AudioStream` objects in this MediaPart. """
|
||||
return [stream for stream in self.streams if stream.streamType == AudioStream.STREAMTYPE]
|
||||
|
||||
def subtitleStreams(self):
|
||||
""" Returns a list of :class:`~plexapi.media.SubtitleStream` objects in this MediaPart. """
|
||||
return [stream for stream in self.streams if stream.streamType == SubtitleStream.STREAMTYPE]
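# Sketch: walking the Media -> MediaPart -> stream hierarchy documented above for a
# single item ('movie' is an assumed video item returned by a library search, and
# 'movie.media' is assumed to hold its Media objects).
for media in movie.media:
    print(media.videoCodec, media.videoResolution, media.bitrate)
    for part in media.parts:
        print(part.file, part.size)
        for sub in part.subtitleStreams():
            print(sub.language, sub.format)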
|
||||
|
||||
|
||||
class MediaPartStream(PlexObject):
|
||||
""" Base class for media streams. These consist of video, audio and subtitles.
|
||||
|
||||
Attributes:
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer object this is from.
|
||||
initpath (str): Relative path requested when retrieving specified data.
|
||||
part (:class:`~plexapi.media.MediaPart`): Media part this stream belongs to.
|
||||
codec (str): Codec of this stream (ex: srt, ac3, mpeg4).
|
||||
codecID (str): Codec ID (ex: XVID).
|
||||
id (int): Unique stream ID on this server.
|
||||
index (int): Unknown
|
||||
language (str): Stream language (ex: English, ไทย).
|
||||
languageCode (str): Ascii code for language (ex: eng, tha).
|
||||
selected (bool): True if this stream is selected.
|
||||
streamType (int): Stream type (1=:class:`~plexapi.media.VideoStream`,
|
||||
2=:class:`~plexapi.media.AudioStream`, 3=:class:`~plexapi.media.SubtitleStream`).
|
||||
type (int): Alias for streamType.
|
||||
"""
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.codec = data.attrib.get('codec')
|
||||
self.codecID = data.attrib.get('codecID')
|
||||
self.id = cast(int, data.attrib.get('id'))
|
||||
self.index = cast(int, data.attrib.get('index', '-1'))
|
||||
self.language = data.attrib.get('language')
|
||||
self.languageCode = data.attrib.get('languageCode')
|
||||
self.selected = cast(bool, data.attrib.get('selected', '0'))
|
||||
self.streamType = cast(int, data.attrib.get('streamType'))
|
||||
self.type = cast(int, data.attrib.get('streamType'))
|
||||
|
||||
@staticmethod
|
||||
def parse(server, data, initpath): # pragma: no cover seems to be dead code.
|
||||
""" Factory method returns a new MediaPartStream from xml data. """
|
||||
STREAMCLS = {1: VideoStream, 2: AudioStream, 3: SubtitleStream}
|
||||
stype = cast(int, data.attrib.get('streamType'))
|
||||
cls = STREAMCLS.get(stype, MediaPartStream)
|
||||
return cls(server, data, initpath)
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class VideoStream(MediaPartStream):
|
||||
""" Respresents a video stream within a :class:`~plexapi.media.MediaPart`.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Stream'
|
||||
STREAMTYPE (int): 1
|
||||
bitDepth (int): Bit depth (ex: 8).
|
||||
bitrate (int): Bitrate (ex: 1169)
|
||||
cabac (int): Unknown
|
||||
chromaSubsampling (str): Chroma Subsampling (ex: 4:2:0).
|
||||
colorSpace (str): Unknown
|
||||
duration (int): Duration of video stream in milliseconds.
|
||||
frameRate (float): Frame rate (ex: 23.976)
|
||||
frameRateMode (str): Unknown
|
||||
hasScallingMatrix (bool): True if video stream has a scaling matrix.
|
||||
height (int): Height of video stream.
|
||||
level (int): Video stream level (?).
|
||||
profile (str): Video stream profile (ex: asp).
|
||||
refFrames (int): Unknown
|
||||
scanType (str): Video stream scan type (ex: progressive).
|
||||
title (str): Title of this video stream.
|
||||
width (int): Width of video stream.
|
||||
"""
|
||||
TAG = 'Stream'
|
||||
STREAMTYPE = 1
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
super(VideoStream, self)._loadData(data)
|
||||
self.bitDepth = cast(int, data.attrib.get('bitDepth'))
|
||||
self.bitrate = cast(int, data.attrib.get('bitrate'))
|
||||
self.cabac = cast(int, data.attrib.get('cabac'))
|
||||
self.chromaSubsampling = data.attrib.get('chromaSubsampling')
|
||||
self.colorSpace = data.attrib.get('colorSpace')
|
||||
self.duration = cast(int, data.attrib.get('duration'))
|
||||
self.frameRate = cast(float, data.attrib.get('frameRate'))
|
||||
self.frameRateMode = data.attrib.get('frameRateMode')
|
||||
self.hasScallingMatrix = cast(bool, data.attrib.get('hasScallingMatrix'))
|
||||
self.height = cast(int, data.attrib.get('height'))
|
||||
self.level = cast(int, data.attrib.get('level'))
|
||||
self.profile = data.attrib.get('profile')
|
||||
self.refFrames = cast(int, data.attrib.get('refFrames'))
|
||||
self.scanType = data.attrib.get('scanType')
|
||||
self.title = data.attrib.get('title')
|
||||
self.width = cast(int, data.attrib.get('width'))
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class AudioStream(MediaPartStream):
|
||||
""" Respresents a audio stream within a :class:`~plexapi.media.MediaPart`.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Stream'
|
||||
STREAMTYPE (int): 2
|
||||
audioChannelLayout (str): Audio channel layout (ex: 5.1(side)).
|
||||
bitDepth (int): Bit depth (ex: 16).
|
||||
bitrate (int): Audio bitrate (ex: 448).
|
||||
bitrateMode (str): Bitrate mode (ex: cbr).
|
||||
channels (int): number of channels in this stream (ex: 6).
|
||||
dialogNorm (int): Unknown (ex: -27).
|
||||
duration (int): Duration of audio stream in milliseconds.
|
||||
samplingRate (int): Sampling rate (ex: xxx)
|
||||
title (str): Title of this audio stream.
|
||||
"""
|
||||
TAG = 'Stream'
|
||||
STREAMTYPE = 2
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
super(AudioStream, self)._loadData(data)
|
||||
self.audioChannelLayout = data.attrib.get('audioChannelLayout')
|
||||
self.bitDepth = cast(int, data.attrib.get('bitDepth'))
|
||||
self.bitrate = cast(int, data.attrib.get('bitrate'))
|
||||
self.bitrateMode = data.attrib.get('bitrateMode')
|
||||
self.channels = cast(int, data.attrib.get('channels'))
|
||||
self.dialogNorm = cast(int, data.attrib.get('dialogNorm'))
|
||||
self.duration = cast(int, data.attrib.get('duration'))
|
||||
self.samplingRate = cast(int, data.attrib.get('samplingRate'))
|
||||
self.title = data.attrib.get('title')
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class SubtitleStream(MediaPartStream):
|
||||
""" Respresents a audio stream within a :class:`~plexapi.media.MediaPart`.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Stream'
|
||||
STREAMTYPE (int): 3
|
||||
format (str): Subtitle format (ex: srt).
|
||||
key (str): Key of this subtitle stream (ex: /library/streams/212284).
|
||||
title (str): Title of this subtitle stream.
|
||||
"""
|
||||
TAG = 'Stream'
|
||||
STREAMTYPE = 3
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
super(SubtitleStream, self)._loadData(data)
|
||||
self.format = data.attrib.get('format')
|
||||
self.key = data.attrib.get('key')
|
||||
self.title = data.attrib.get('title')
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Session(PlexObject):
|
||||
""" Represents a current session. """
|
||||
TAG = 'Session'
|
||||
|
||||
def _loadData(self, data):
|
||||
self.id = data.attrib.get('id')
|
||||
self.bandwidth = utils.cast(int, data.attrib.get('bandwidth'))
|
||||
self.location = data.attrib.get('location')
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class TranscodeSession(PlexObject):
|
||||
""" Represents a current transcode session.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'TranscodeSession'
|
||||
TODO: Document this.
|
||||
"""
|
||||
TAG = 'TranscodeSession'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.audioChannels = cast(int, data.attrib.get('audioChannels'))
|
||||
self.audioCodec = data.attrib.get('audioCodec')
|
||||
self.audioDecision = data.attrib.get('audioDecision')
|
||||
self.container = data.attrib.get('container')
|
||||
self.context = data.attrib.get('context')
|
||||
self.duration = cast(int, data.attrib.get('duration'))
|
||||
self.height = cast(int, data.attrib.get('height'))
|
||||
self.key = data.attrib.get('key')
|
||||
self.progress = cast(float, data.attrib.get('progress'))
|
||||
self.protocol = data.attrib.get('protocol')
|
||||
self.remaining = cast(int, data.attrib.get('remaining'))
|
||||
self.speed = cast(int, data.attrib.get('speed'))
|
||||
self.throttled = cast(int, data.attrib.get('throttled'))
|
||||
self.sourceVideoCodec = data.attrib.get('sourceVideoCodec')
|
||||
self.videoCodec = data.attrib.get('videoCodec')
|
||||
self.videoDecision = data.attrib.get('videoDecision')
|
||||
self.width = cast(int, data.attrib.get('width'))
|
||||
|
||||
|
||||
class MediaTag(PlexObject):
|
||||
""" Base class for media tags used for filtering and searching your library
|
||||
items or navigating the metadata of media items in your library. Tags are
|
||||
the construct used for things such as Country, Director, Genre, etc.
|
||||
|
||||
Attributes:
|
||||
server (:class:`~plexapi.server.PlexServer`): Server this client is connected to.
|
||||
id (int): Tag ID (This seems meaningless except to use it as a unique id).
|
||||
role (str): Unknown
|
||||
tag (str): Name of the tag. This will be Animation, SciFi etc for Genres. The name of
|
||||
the person for Directors and Roles (ex: Animation, Stephen Graham, etc).
|
||||
<Hub_Search_Attributes>: Attributes only applicable in search results from
|
||||
PlexServer :func:`~plexapi.server.PlexServer.search()`. They provide details of which
|
||||
library section the tag was found as well as the url to dig deeper into the results.
|
||||
|
||||
* key (str): API URL to dig deeper into this tag (ex: /library/sections/1/all?actor=9081).
|
||||
* librarySectionID (int): Section ID this tag was generated from.
|
||||
* librarySectionTitle (str): Library section title this tag was found in.
|
||||
* librarySectionType (str): Media type of the library section this tag was found in.
|
||||
* tagType (int): Tag type ID.
|
||||
* thumb (str): URL to thumbnail image.
|
||||
"""
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.id = cast(int, data.attrib.get('id'))
|
||||
self.role = data.attrib.get('role')
|
||||
self.tag = data.attrib.get('tag')
|
||||
# additional attributes only from hub search
|
||||
self.key = data.attrib.get('key')
|
||||
self.librarySectionID = cast(int, data.attrib.get('librarySectionID'))
|
||||
self.librarySectionTitle = data.attrib.get('librarySectionTitle')
|
||||
self.librarySectionType = data.attrib.get('librarySectionType')
|
||||
self.tagType = cast(int, data.attrib.get('tagType'))
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
|
||||
def items(self, *args, **kwargs):
|
||||
""" Return the list of items within this tag. This function is only applicable
|
||||
in search results from PlexServer :func:`~plexapi.server.PlexServer.search()`.
|
||||
"""
|
||||
if not self.key:
|
||||
raise BadRequest('Key is not defined for this tag: %s' % self.tag)
|
||||
return self.fetchItems(self.key)
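# Sketch: MediaTag objects double as search filters; _cleanSearchFilter() in
# library.py above accepts them directly, so a Genre tag taken from one movie can
# seed a search for similar ones ('movies' is an assumed MovieSection and
# 'movie.genres' an assumed attribute of the fetched item).
movie = movies.get('Cars')
similar = movies.search(genre=movie.genres[0])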
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Collection(MediaTag):
|
||||
""" Represents a single Collection media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Collection'
|
||||
FILTER (str): 'collection'
|
||||
"""
|
||||
TAG = 'Collection'
|
||||
FILTER = 'collection'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Label(MediaTag):
|
||||
""" Represents a single label media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'label'
|
||||
FILTER (str): 'label'
|
||||
"""
|
||||
TAG = 'Label'
|
||||
FILTER = 'label'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Country(MediaTag):
|
||||
""" Represents a single Country media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Country'
|
||||
FILTER (str): 'country'
|
||||
"""
|
||||
TAG = 'Country'
|
||||
FILTER = 'country'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Director(MediaTag):
|
||||
""" Represents a single Director media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Director'
|
||||
FILTER (str): 'director'
|
||||
"""
|
||||
TAG = 'Director'
|
||||
FILTER = 'director'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Genre(MediaTag):
|
||||
""" Represents a single Genre media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Genre'
|
||||
FILTER (str): 'genre'
|
||||
"""
|
||||
TAG = 'Genre'
|
||||
FILTER = 'genre'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Mood(MediaTag):
|
||||
""" Represents a single Mood media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Mood'
|
||||
FILTER (str): 'mood'
|
||||
"""
|
||||
TAG = 'Mood'
|
||||
FILTER = 'mood'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Producer(MediaTag):
|
||||
""" Represents a single Producer media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Producer'
|
||||
FILTER (str): 'producer'
|
||||
"""
|
||||
TAG = 'Producer'
|
||||
FILTER = 'producer'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Role(MediaTag):
|
||||
""" Represents a single Role (actor/actress) media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Role'
|
||||
FILTER (str): 'role'
|
||||
"""
|
||||
TAG = 'Role'
|
||||
FILTER = 'role'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Similar(MediaTag):
|
||||
""" Represents a single Similar media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Similar'
|
||||
FILTER (str): 'similar'
|
||||
"""
|
||||
TAG = 'Similar'
|
||||
FILTER = 'similar'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Writer(MediaTag):
|
||||
""" Represents a single Writer media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Writer'
|
||||
FILTER (str): 'writer'
|
||||
"""
|
||||
TAG = 'Writer'
|
||||
FILTER = 'writer'
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Chapter(PlexObject):
|
||||
""" Represents a single Writer media tag.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Chapter'
|
||||
"""
|
||||
TAG = 'Chapter'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.id = cast(int, data.attrib.get('id', 0))
|
||||
self.filter = data.attrib.get('filter') # I couldn't filter on it anyways
|
||||
self.tag = data.attrib.get('tag')
|
||||
self.title = self.tag
|
||||
self.index = cast(int, data.attrib.get('index'))
|
||||
self.start = cast(int, data.attrib.get('startTimeOffset'))
|
||||
self.end = cast(int, data.attrib.get('endTimeOffset'))
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Field(PlexObject):
|
||||
""" Represents a single Field.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Field'
|
||||
"""
|
||||
TAG = 'Field'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.name = data.attrib.get('name')
|
||||
self.locked = cast(bool, data.attrib.get('locked'))
|
||||
727
lib/plexapi/myplex.py
Normal file
@@ -0,0 +1,727 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import copy
|
||||
import requests
|
||||
import time
|
||||
from requests.status_codes import _codes as codes
|
||||
from plexapi import BASE_HEADERS, CONFIG, TIMEOUT
|
||||
from plexapi import log, logfilter, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.client import PlexClient
|
||||
from plexapi.compat import ElementTree
|
||||
from plexapi.library import LibrarySection
|
||||
from plexapi.server import PlexServer
|
||||
from plexapi.utils import joinArgs
|
||||
|
||||
|
||||
class MyPlexAccount(PlexObject):
|
||||
""" MyPlex account and profile information. This object represents the data found Account on
|
||||
the myplex.tv servers at the url https://plex.tv/users/account. You may create this object
|
||||
directly by passing in your username & password (or token). There is also a convenience
|
||||
method provided at :class:`~plexapi.server.PlexServer.myPlexAccount()` which will create
|
||||
and return this object.
|
||||
|
||||
Parameters:
|
||||
username (str): Your MyPlex username.
|
||||
password (str): Your MyPlex password.
|
||||
session (requests.Session, optional): Use your own session object if you want to
|
||||
cache the http responses from PMS
|
||||
timeout (int): timeout in seconds on initial connect to myplex (default config.TIMEOUT).
|
||||
|
||||
Attributes:
|
||||
SIGNIN (str): 'https://plex.tv/users/sign_in.xml'
|
||||
key (str): 'https://plex.tv/users/account'
|
||||
authenticationToken (str): Unknown.
|
||||
certificateVersion (str): Unknown.
|
||||
cloudSyncDevice (str): Unknown.
|
||||
email (str): Your current Plex email address.
|
||||
entitlements (List<str>): List of devices you're allowed to use with this account.
|
||||
guest (bool): Unknown.
|
||||
home (bool): Unknown.
|
||||
homeSize (int): Unknown.
|
||||
id (str): Your Plex account ID.
|
||||
locale (str): Your Plex locale
|
||||
mailing_list_status (str): Your current mailing list status.
|
||||
maxHomeSize (int): Unknown.
|
||||
queueEmail (str): Email address to add items to your `Watch Later` queue.
|
||||
queueUid (str): Unknown.
|
||||
restricted (bool): Unknown.
|
||||
roles (List<str>): List of account roles. PlexPass membership is listed here.
|
||||
scrobbleTypes (str): Description
|
||||
secure (bool): Description
|
||||
subscriptionActive (bool): True if your subscription is active.
|
||||
subscriptionFeatures: (List<str>) List of features allowed on your subscription.
|
||||
subscriptionPlan (str): Name of subscription plan.
|
||||
subscriptionStatus (str): String representation of `subscriptionActive`.
|
||||
thumb (str): URL of your account thumbnail.
|
||||
title (str): Unknown. - Looks like an alias for `username`.
|
||||
username (str): Your account username.
|
||||
uuid (str): Unknown.
|
||||
_token (str): Token used to access this client.
|
||||
_session (obj): Requests session object used to access this client.
|
||||
"""
|
||||
FRIENDINVITE = 'https://plex.tv/api/servers/{machineId}/shared_servers' # post with data
|
||||
FRIENDSERVERS = 'https://plex.tv/api/servers/{machineId}/shared_servers/{serverId}' # put with data
|
||||
PLEXSERVERS = 'https://plex.tv/api/servers/{machineId}' # get
|
||||
FRIENDUPDATE = 'https://plex.tv/api/friends/{userId}' # put with args, delete
|
||||
REMOVEINVITE = 'https://plex.tv/api/invites/requested/{userId}?friend=0&server=1&home=0' # delete
|
||||
REQUESTED = 'https://plex.tv/api/invites/requested' # get
|
||||
REQUESTS = 'https://plex.tv/api/invites/requests' # get
|
||||
SIGNIN = 'https://plex.tv/users/sign_in.xml' # get with auth
|
||||
WEBHOOKS = 'https://plex.tv/api/v2/user/webhooks' # get, post with data
|
||||
# Key may someday switch to the following url. For now the current value works.
|
||||
# https://plex.tv/api/v2/user?X-Plex-Token={token}&X-Plex-Client-Identifier={clientId}
|
||||
key = 'https://plex.tv/users/account'
|
||||
|
||||
def __init__(self, username=None, password=None, token=None, session=None, timeout=None):
|
||||
self._token = token
|
||||
self._session = session or requests.Session()
|
||||
data, initpath = self._signin(username, password, timeout)
|
||||
super(MyPlexAccount, self).__init__(self, data, initpath)
|
||||
|
||||
def _signin(self, username, password, timeout):
|
||||
if self._token:
|
||||
return self.query(self.key), self.key
|
||||
username = username or CONFIG.get('auth.myplex_username')
|
||||
password = password or CONFIG.get('auth.myplex_password')
|
||||
data = self.query(self.SIGNIN, method=self._session.post, auth=(username, password), timeout=timeout)
|
||||
return data, self.SIGNIN
|
||||
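# Example usage (illustrative sketch; the credentials below are placeholders):
#
#     from plexapi.myplex import MyPlexAccount
#     account = MyPlexAccount('user@example.com', 'password')      # username/password sign-in
#     account = MyPlexAccount(token='xxxxxxxxxxxxxxxxxxxx')        # or reuse an existing token
#     print(account.username, account.email)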
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self._token = logfilter.add_secret(data.attrib.get('authenticationToken'))
|
||||
self._webhooks = []
|
||||
self.authenticationToken = self._token
|
||||
self.certificateVersion = data.attrib.get('certificateVersion')
|
||||
self.cloudSyncDevice = data.attrib.get('cloudSyncDevice')
|
||||
self.email = data.attrib.get('email')
|
||||
self.guest = utils.cast(bool, data.attrib.get('guest'))
|
||||
self.home = utils.cast(bool, data.attrib.get('home'))
|
||||
self.homeSize = utils.cast(int, data.attrib.get('homeSize'))
|
||||
self.id = data.attrib.get('id')
|
||||
self.locale = data.attrib.get('locale')
|
||||
self.mailing_list_status = data.attrib.get('mailing_list_status')
|
||||
self.maxHomeSize = utils.cast(int, data.attrib.get('maxHomeSize'))
|
||||
self.queueEmail = data.attrib.get('queueEmail')
|
||||
self.queueUid = data.attrib.get('queueUid')
|
||||
self.restricted = utils.cast(bool, data.attrib.get('restricted'))
|
||||
self.scrobbleTypes = data.attrib.get('scrobbleTypes')
|
||||
self.secure = utils.cast(bool, data.attrib.get('secure'))
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.username = data.attrib.get('username')
|
||||
self.uuid = data.attrib.get('uuid')
|
||||
# TODO: Fetch missing MyPlexAccount attributes
|
||||
self.subscriptionActive = None # renamed on server
|
||||
self.subscriptionStatus = None # renamed on server
|
||||
self.subscriptionPlan = None # renamed on server
|
||||
self.subscriptionFeatures = None # renamed on server
|
||||
self.roles = None
|
||||
self.entitlements = None
|
||||
|
||||
def device(self, name):
|
||||
""" Returns the :class:`~plexapi.myplex.MyPlexDevice` that matches the name specified.
|
||||
|
||||
Parameters:
|
||||
name (str): Name to match against.
|
||||
"""
|
||||
for device in self.devices():
|
||||
if device.name.lower() == name.lower():
|
||||
return device
|
||||
raise NotFound('Unable to find device %s' % name)
|
||||
|
||||
def devices(self):
|
||||
""" Returns a list of all :class:`~plexapi.myplex.MyPlexDevice` objects connected to the server. """
|
||||
data = self.query(MyPlexDevice.key)
|
||||
return [MyPlexDevice(self, elem) for elem in data]
|
||||
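# Example usage (illustrative sketch; assumes a device named 'My iPhone' is linked to the account):
#
#     for device in account.devices():
#         print(device.name, device.product)
#     phone = account.device('My iPhone')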
|
||||
def _headers(self, **kwargs):
|
||||
""" Returns dict containing base headers for all requests to the server. """
|
||||
headers = BASE_HEADERS.copy()
|
||||
if self._token:
|
||||
headers['X-Plex-Token'] = self._token
|
||||
headers.update(kwargs)
|
||||
return headers
|
||||
|
||||
def query(self, url, method=None, headers=None, timeout=None, **kwargs):
|
||||
method = method or self._session.get
|
||||
timeout = timeout or TIMEOUT
|
||||
log.debug('%s %s %s', method.__name__.upper(), url, kwargs.get('json', ''))
|
||||
headers = self._headers(**headers or {})
|
||||
response = method(url, headers=headers, timeout=timeout, **kwargs)
|
||||
if response.status_code not in (200, 201, 204): # pragma: no cover
|
||||
codename = codes.get(response.status_code)[0]
|
||||
errtext = response.text.replace('\n', ' ')
|
||||
log.warning('BadRequest (%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
raise BadRequest('(%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
data = response.text.encode('utf8')
|
||||
return ElementTree.fromstring(data) if data.strip() else None
|
||||
|
||||
def resource(self, name):
|
||||
""" Returns the :class:`~plexapi.myplex.MyPlexResource` that matches the name specified.
|
||||
|
||||
Parameters:
|
||||
name (str): Name to match against.
|
||||
"""
|
||||
for resource in self.resources():
|
||||
if resource.name.lower() == name.lower():
|
||||
return resource
|
||||
raise NotFound('Unable to find resource %s' % name)
|
||||
|
||||
def resources(self):
|
||||
""" Returns a list of all :class:`~plexapi.myplex.MyPlexResource` objects connected to the server. """
|
||||
data = self.query(MyPlexResource.key)
|
||||
return [MyPlexResource(self, elem) for elem in data]
|
||||
|
||||
def inviteFriend(self, user, server, sections=None, allowSync=False, allowCameraUpload=False,
|
||||
allowChannels=False, filterMovies=None, filterTelevision=None, filterMusic=None):
|
||||
""" Share library content with the specified user.
|
||||
|
||||
Parameters:
|
||||
user (str): MyPlexUser, username, email of the user to be added.
|
||||
server (PlexServer): PlexServer object or machineIdentifier containing the library sections to share.
|
||||
sections ([Section]): Library sections, names or ids to be shared (default None shares all sections).
|
||||
allowSync (Bool): Set True to allow user to sync content.
|
||||
allowCameraUpload (Bool): Set True to allow user to upload photos.
|
||||
allowChannels (Bool): Set True to allow user to utilize installed channels.
|
||||
filterMovies (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
|
||||
values to be filtered. ex: {'contentRating':['G'], 'label':['foo']}
|
||||
filterTelevision (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
|
||||
values to be filtered. ex: {'contentRating':['G'], 'label':['foo']}
|
||||
filterMusic (Dict): Dict containing key 'label' set to a list of values to be filtered.
|
||||
ex: {'label':['foo']}
|
||||
"""
|
||||
username = user.username if isinstance(user, MyPlexUser) else user
|
||||
machineId = server.machineIdentifier if isinstance(server, PlexServer) else server
|
||||
sectionIds = self._getSectionIds(machineId, sections)
|
||||
params = {
|
||||
'server_id': machineId,
|
||||
'shared_server': {'library_section_ids': sectionIds, 'invited_email': username},
|
||||
'sharing_settings': {
|
||||
'allowSync': ('1' if allowSync else '0'),
|
||||
'allowCameraUpload': ('1' if allowCameraUpload else '0'),
|
||||
'allowChannels': ('1' if allowChannels else '0'),
|
||||
'filterMovies': self._filterDictToStr(filterMovies or {}),
|
||||
'filterTelevision': self._filterDictToStr(filterTelevision or {}),
|
||||
'filterMusic': self._filterDictToStr(filterMusic or {}),
|
||||
},
|
||||
}
|
||||
headers = {'Content-Type': 'application/json'}
|
||||
url = self.FRIENDINVITE.format(machineId=machineId)
|
||||
return self.query(url, self._session.post, json=params, headers=headers)
|
||||
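# Example usage (illustrative sketch; the server name, email and filter values are placeholders):
#
#     plex = account.resource('myserver').connect()
#     account.inviteFriend('friend@example.com', plex, sections=['Movies', 'TV Shows'],
#                          allowSync=True, filterMovies={'contentRating': ['G', 'PG']})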
|
||||
def removeFriend(self, user):
|
||||
""" Remove the specified user from all sharing.
|
||||
|
||||
Parameters:
|
||||
user (str): MyPlexUser, username, email of the user to be added.
|
||||
"""
|
||||
user = self.user(user)
|
||||
url = self.FRIENDUPDATE if user.friend else self.REMOVEINVITE
|
||||
url = url.format(userId=user.id)
|
||||
return self.query(url, self._session.delete)
|
||||
|
||||
def updateFriend(self, user, server, sections=None, removeSections=False, allowSync=None, allowCameraUpload=None,
|
||||
allowChannels=None, filterMovies=None, filterTelevision=None, filterMusic=None):
|
||||
""" Update the specified user's share settings.
|
||||
|
||||
Parameters:
|
||||
user (str): MyPlexUser, username, email of the user to be added.
|
||||
server (PlexServer): PlexServer object or machineIdentifier containing the library sections to share.
|
||||
sections: ([Section]): Library sections, names or ids to be shared (default None shares all sections).
|
||||
removeSections (Bool): Set True to remove all shares. Supersedes sections.
|
||||
allowSync (Bool): Set True to allow user to sync content.
|
||||
allowCameraUpload (Bool): Set True to allow user to upload photos.
|
||||
allowChannels (Bool): Set True to allow user to utilize installed channels.
|
||||
filterMovies (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
|
||||
values to be filtered. ex: {'contentRating':['G'], 'label':['foo']}
|
||||
filterTelevision (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
|
||||
values to be filtered. ex: {'contentRating':['G'], 'label':['foo']}
|
||||
filterMusic (Dict): Dict containing key 'label' set to a list of values to be filtered.
|
||||
ex: {'label':['foo']}
|
||||
"""
|
||||
# Update friend servers
|
||||
response_filters = ''
|
||||
response_servers = ''
|
||||
user = self.user(user.username if isinstance(user, MyPlexUser) else user)
|
||||
machineId = server.machineIdentifier if isinstance(server, PlexServer) else server
|
||||
sectionIds = self._getSectionIds(machineId, sections)
|
||||
headers = {'Content-Type': 'application/json'}
|
||||
# Determine whether user has access to the shared server.
|
||||
user_servers = [s for s in user.servers if s.machineIdentifier == machineId]
|
||||
if user_servers and sectionIds:
|
||||
serverId = user_servers[0].id
|
||||
params = {'server_id': machineId, 'shared_server': {'library_section_ids': sectionIds}}
|
||||
url = self.FRIENDSERVERS.format(machineId=machineId, serverId=serverId)
|
||||
else:
|
||||
params = {'server_id': machineId, 'shared_server': {'library_section_ids': sectionIds,
|
||||
"invited_id": user.id}}
|
||||
url = self.FRIENDINVITE.format(machineId=machineId)
|
||||
# Remove share sections, add shares to user without shares, or update shares
|
||||
if sectionIds:
|
||||
if removeSections is True:
|
||||
response_servers = self.query(url, self._session.delete, json=params, headers=headers)
|
||||
elif 'invited_id' in params.get('shared_server', ''):
|
||||
response_servers = self.query(url, self._session.post, json=params, headers=headers)
|
||||
else:
|
||||
response_servers = self.query(url, self._session.put, json=params, headers=headers)
|
||||
else:
|
||||
log.warning('A section name, number or section object is required to change library sections')
|
||||
# Update friend filters
|
||||
url = self.FRIENDUPDATE.format(userId=user.id)
|
||||
params = {}
|
||||
if isinstance(allowSync, bool):
|
||||
params['allowSync'] = '1' if allowSync else '0'
|
||||
if isinstance(allowCameraUpload, bool):
|
||||
params['allowCameraUpload'] = '1' if allowCameraUpload else '0'
|
||||
if isinstance(allowChannels, bool):
|
||||
params['allowChannels'] = '1' if allowChannels else '0'
|
||||
if isinstance(filterMovies, dict):
|
||||
params['filterMovies'] = self._filterDictToStr(filterMovies or {})
|
||||
if isinstance(filterTelevision, dict):
|
||||
params['filterTelevision'] = self._filterDictToStr(filterTelevision or {})
|
||||
if isinstance(filterMusic, dict):
|
||||
params['filterMusic'] = self._filterDictToStr(filterMusic or {})
|
||||
if params:
|
||||
url += joinArgs(params)
|
||||
response_filters = self.query(url, self._session.put)
|
||||
return response_servers, response_filters
|
||||
|
||||
def user(self, username):
|
||||
""" Returns the :class:`~myplex.MyPlexUser` that matches the email or username specified.
|
||||
|
||||
Parameters:
|
||||
username (str): Username, email or id of the user to return.
|
||||
"""
|
||||
for user in self.users():
|
||||
# Home users don't have email, username etc.
|
||||
if username.lower() == user.title.lower():
|
||||
return user
|
||||
|
||||
elif (user.username and user.email and user.id and username.lower() in
|
||||
(user.username.lower(), user.email.lower(), str(user.id))):
|
||||
return user
|
||||
|
||||
raise NotFound('Unable to find user %s' % username)
|
||||
|
||||
def users(self):
|
||||
""" Returns a list of all :class:`~plexapi.myplex.MyPlexUser` objects connected to your account.
|
||||
This includes both friends and pending invites. You can reference the user.friend to
|
||||
distinguish between the two.
|
||||
"""
|
||||
friends = [MyPlexUser(self, elem) for elem in self.query(MyPlexUser.key)]
|
||||
requested = [MyPlexUser(self, elem, self.REQUESTED) for elem in self.query(self.REQUESTED)]
|
||||
return friends + requested
|
||||
|
||||
def _getSectionIds(self, server, sections):
|
||||
""" Converts a list of section objects or names to sectionIds needed for library sharing. """
|
||||
if not sections: return []
|
||||
# Get a list of all section ids for looking up each section.
|
||||
allSectionIds = {}
|
||||
machineIdentifier = server.machineIdentifier if isinstance(server, PlexServer) else server
|
||||
url = self.PLEXSERVERS.replace('{machineId}', machineIdentifier)
|
||||
data = self.query(url, self._session.get)
|
||||
for elem in data[0]:
|
||||
allSectionIds[elem.attrib.get('id', '').lower()] = elem.attrib.get('id')
|
||||
allSectionIds[elem.attrib.get('title', '').lower()] = elem.attrib.get('id')
|
||||
allSectionIds[elem.attrib.get('key', '').lower()] = elem.attrib.get('id')
|
||||
log.debug(allSectionIds)
|
||||
# Convert passed in section items to section ids from above lookup
|
||||
sectionIds = []
|
||||
for section in sections:
|
||||
sectionKey = section.key if isinstance(section, LibrarySection) else section
|
||||
sectionIds.append(allSectionIds[sectionKey.lower()])
|
||||
return sectionIds
|
||||
|
||||
def _filterDictToStr(self, filterDict):
|
||||
""" Converts friend filters to a string representation for transport. """
|
||||
values = []
|
||||
for key, vals in filterDict.items():
|
||||
if key not in ('contentRating', 'label'):
|
||||
raise BadRequest('Unknown filter key: %s', key)
|
||||
values.append('%s=%s' % (key, '%2C'.join(vals)))
|
||||
return '|'.join(values)
|
||||
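# For reference, a filter dict such as {'contentRating': ['G', 'PG'], 'label': ['foo']}
# serializes to 'contentRating=G%2CPG|label=foo' with the method above.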
|
||||
def addWebhook(self, url):
|
||||
# copy _webhooks and append url
|
||||
urls = self._webhooks[:] + [url]
|
||||
return self.setWebhooks(urls)
|
||||
|
||||
def deleteWebhook(self, url):
|
||||
urls = copy.copy(self._webhooks)
|
||||
if url not in urls:
|
||||
raise BadRequest('Webhook does not exist: %s' % url)
|
||||
urls.remove(url)
|
||||
return self.setWebhooks(urls)
|
||||
|
||||
def setWebhooks(self, urls):
|
||||
log.info('Setting webhooks: %s' % urls)
|
||||
data = self.query(self.WEBHOOKS, self._session.post, data={'urls[]': urls})
|
||||
self._webhooks = self.listAttrs(data, 'url', etag='webhook')
|
||||
return self._webhooks
|
||||
|
||||
def webhooks(self):
|
||||
data = self.query(self.WEBHOOKS)
|
||||
self._webhooks = self.listAttrs(data, 'url', etag='webhook')
|
||||
return self._webhooks
|
||||
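# Example usage (illustrative sketch; webhooks are a Plex Pass feature and the URL is a placeholder):
#
#     account.addWebhook('https://example.com/plex-hook')
#     print(account.webhooks())
#     account.deleteWebhook('https://example.com/plex-hook')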
|
||||
def optOut(self, playback=None, library=None):
|
||||
""" Opt in or out of sharing stuff with plex.
|
||||
See: https://www.plex.tv/about/privacy-legal/
|
||||
"""
|
||||
params = {}
|
||||
if playback is not None:
|
||||
params['optOutPlayback'] = int(playback)
|
||||
if library is not None:
|
||||
params['optOutLibraryStats'] = int(library)
|
||||
url = 'https://plex.tv/api/v2/user/privacy'
|
||||
return self.query(url, method=self._session.put, params=params)
|
||||
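# Example usage (illustrative sketch; presumably opts this account out of sharing playback data
# while leaving library stats sharing unchanged):
#
#     account.optOut(playback=True, library=None)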
|
||||
|
||||
class MyPlexUser(PlexObject):
|
||||
""" This object represents non-signed in users such as friends and linked
|
||||
accounts. NOTE: This should not be confused with the :class:`~myplex.MyPlexAccount`
|
||||
which is your specific account. The raw xml for the data presented here
|
||||
can be found at: https://plex.tv/api/users/
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'User'
|
||||
key (str): 'https://plex.tv/api/users/'
|
||||
allowCameraUpload (bool): True if this user can upload images.
|
||||
allowChannels (bool): True if this user has access to channels.
|
||||
allowSync (bool): True if this user can sync.
|
||||
email (str): User's email address (user@gmail.com).
|
||||
filterAll (str): Unknown.
|
||||
filterMovies (str): Unknown.
|
||||
filterMusic (str): Unknown.
|
||||
filterPhotos (str): Unknown.
|
||||
filterTelevision (str): Unknown.
|
||||
home (bool): Unknown.
|
||||
id (int): User's Plex account ID.
|
||||
protected (bool): Unknown (possibly SSL enabled?).
|
||||
recommendationsPlaylistId (str): Unknown.
|
||||
restricted (str): Unknown.
|
||||
thumb (str): Link to the users avatar.
|
||||
title (str): Seems to be an alias for username.
|
||||
username (str): User's username.
|
||||
"""
|
||||
TAG = 'User'
|
||||
key = 'https://plex.tv/api/users/'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.friend = self._initpath == self.key
|
||||
self.allowCameraUpload = utils.cast(bool, data.attrib.get('allowCameraUpload'))
|
||||
self.allowChannels = utils.cast(bool, data.attrib.get('allowChannels'))
|
||||
self.allowSync = utils.cast(bool, data.attrib.get('allowSync'))
|
||||
self.email = data.attrib.get('email')
|
||||
self.filterAll = data.attrib.get('filterAll')
|
||||
self.filterMovies = data.attrib.get('filterMovies')
|
||||
self.filterMusic = data.attrib.get('filterMusic')
|
||||
self.filterPhotos = data.attrib.get('filterPhotos')
|
||||
self.filterTelevision = data.attrib.get('filterTelevision')
|
||||
self.home = utils.cast(bool, data.attrib.get('home'))
|
||||
self.id = utils.cast(int, data.attrib.get('id'))
|
||||
self.protected = utils.cast(bool, data.attrib.get('protected'))
|
||||
self.recommendationsPlaylistId = data.attrib.get('recommendationsPlaylistId')
|
||||
self.restricted = data.attrib.get('restricted')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title', '')
|
||||
self.username = data.attrib.get('username', '')
|
||||
self.servers = self.findItems(data, MyPlexServerShare)
|
||||
|
||||
def get_token(self, machineIdentifier):
|
||||
try:
|
||||
for item in self._server.query(self._server.FRIENDINVITE.format(machineId=machineIdentifier)):
|
||||
if utils.cast(int, item.attrib.get('userID')) == self.id:
|
||||
return item.attrib.get('accessToken')
|
||||
except Exception:
|
||||
log.exception('Failed to get access token for %s' % self.title)
|
||||
|
||||
|
||||
class Section(PlexObject):
|
||||
""" This refers to a shared section. The raw xml for the data presented here
|
||||
can be found at: https://plex.tv/api/servers/{machineId}/shared_servers/{serverId}
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Section'
|
||||
id (int): shared section id
|
||||
sectionKey (str): what key we use for this section
|
||||
title (str): Title of the section
|
||||
sectionId (str): shared section id
|
||||
type (str): movie, tvshow, artist
|
||||
shared (bool): If this section is shared with the user
|
||||
|
||||
"""
|
||||
TAG = 'Section'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
# self.id = utils.cast(int, data.attrib.get('id')) # Haven't decided if this should be changed.
|
||||
self.sectionKey = data.attrib.get('key')
|
||||
self.title = data.attrib.get('title')
|
||||
self.sectionId = data.attrib.get('id')
|
||||
self.type = data.attrib.get('type')
|
||||
self.shared = utils.cast(bool, data.attrib.get('shared'))
|
||||
|
||||
|
||||
class MyPlexServerShare(PlexObject):
|
||||
""" Represents a single user's server reference. Used for library sharing.
|
||||
|
||||
Attributes:
|
||||
id (int): id for this share
|
||||
serverId (str): what id plex uses for this.
|
||||
machineIdentifier (str): The servers machineIdentifier
|
||||
name (str): The servers name
|
||||
lastSeenAt (datetime): Last connected to the server?
|
||||
numLibraries (int): Total number of libraries
|
||||
allLibraries (bool): True if all libraries are shared with this user.
|
||||
owned (bool): True if the server is owned by the user.
|
||||
pending (bool): True if the invite is pending.
|
||||
|
||||
"""
|
||||
TAG = 'Server'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.id = utils.cast(int, data.attrib.get('id'))
|
||||
self.serverId = utils.cast(int, data.attrib.get('serverId'))
|
||||
self.machineIdentifier = data.attrib.get('machineIdentifier')
|
||||
self.name = data.attrib.get('name')
|
||||
self.lastSeenAt = utils.toDatetime(data.attrib.get('lastSeenAt'))
|
||||
self.numLibraries = utils.cast(int, data.attrib.get('numLibraries'))
|
||||
self.allLibraries = utils.cast(bool, data.attrib.get('allLibraries'))
|
||||
self.owned = utils.cast(bool, data.attrib.get('owned'))
|
||||
self.pending = utils.cast(bool, data.attrib.get('pending'))
|
||||
|
||||
def sections(self):
|
||||
url = MyPlexAccount.FRIENDSERVERS.format(machineId=self.machineIdentifier, serverId=self.id)
|
||||
data = self._server.query(url)
|
||||
sections = []
|
||||
|
||||
for section in data.iter('Section'):
|
||||
if ElementTree.iselement(section):
|
||||
sections.append(Section(self, section, url))
|
||||
|
||||
return sections
|
||||
|
||||
|
||||
class MyPlexResource(PlexObject):
|
||||
""" This object represents resources connected to your Plex server that can provide
|
||||
content such as Plex Media Servers, iPhone or Android clients, etc. The raw xml
|
||||
for the data presented here can be found at:
|
||||
https://plex.tv/api/resources?includeHttps=1&includeRelay=1
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Device'
|
||||
key (str): 'https://plex.tv/api/resources?includeHttps=1&includeRelay=1'
|
||||
accessToken (str): This resource's access token.
|
||||
clientIdentifier (str): Unique ID for this resource.
|
||||
connections (list): List of :class:`~myplex.ResourceConnection` objects
|
||||
for this resource.
|
||||
createdAt (datetime): Timestamp this resource first connected to your server.
|
||||
device (str): Best guess on the type of device this is (PS, iPhone, Linux, etc).
|
||||
home (bool): Unknown
|
||||
lastSeenAt (datetime): Timestamp this resource last connected.
|
||||
name (str): Descriptive name of this resource.
|
||||
owned (bool): True if this resource is one of your own (you logged into it).
|
||||
platform (str): OS the resource is running (Linux, Windows, Chrome, etc.)
|
||||
platformVersion (str): Version of the platform.
|
||||
presence (bool): True if the resource is online
|
||||
product (str): Plex product (Plex Media Server, Plex for iOS, Plex Web, etc.)
|
||||
productVersion (str): Version of the product.
|
||||
provides (str): List of services this resource provides (client, server,
|
||||
player, pubsub-player, etc.)
|
||||
synced (bool): Unknown (possibly True if the resource has synced content?)
|
||||
"""
|
||||
TAG = 'Device'
|
||||
key = 'https://plex.tv/api/resources?includeHttps=1&includeRelay=1'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.name = data.attrib.get('name')
|
||||
self.accessToken = logfilter.add_secret(data.attrib.get('accessToken'))
|
||||
self.product = data.attrib.get('product')
|
||||
self.productVersion = data.attrib.get('productVersion')
|
||||
self.platform = data.attrib.get('platform')
|
||||
self.platformVersion = data.attrib.get('platformVersion')
|
||||
self.device = data.attrib.get('device')
|
||||
self.clientIdentifier = data.attrib.get('clientIdentifier')
|
||||
self.createdAt = utils.toDatetime(data.attrib.get('createdAt'))
|
||||
self.lastSeenAt = utils.toDatetime(data.attrib.get('lastSeenAt'))
|
||||
self.provides = data.attrib.get('provides')
|
||||
self.owned = utils.cast(bool, data.attrib.get('owned'))
|
||||
self.home = utils.cast(bool, data.attrib.get('home'))
|
||||
self.synced = utils.cast(bool, data.attrib.get('synced'))
|
||||
self.presence = utils.cast(bool, data.attrib.get('presence'))
|
||||
self.connections = self.findItems(data, ResourceConnection)
|
||||
self.publicAddressMatches = utils.cast(bool, data.attrib.get('publicAddressMatches'))
|
||||
# This seems to only be available if it's not your device (e.g. a shared server)
|
||||
self.httpsRequired = utils.cast(bool, data.attrib.get('httpsRequired'))
|
||||
self.ownerid = utils.cast(int, data.attrib.get('ownerId', 0))
|
||||
self.sourceTitle = data.attrib.get('sourceTitle') # owner's plex username.
|
||||
|
||||
def connect(self, ssl=None, timeout=None):
|
||||
""" Returns a new :class:`~server.PlexServer` or :class:`~client.PlexClient` object.
|
||||
Oftentimes there is more than one address specified for a server or client.
|
||||
This function will prioritize local connections before remote and HTTPS before HTTP.
|
||||
After trying to connect to all available addresses for this resource and
|
||||
assuming at least one connection was successful, the PlexServer object is built and returned.
|
||||
|
||||
Parameters:
|
||||
ssl (optional): Set True to only connect to HTTPS connections. Set False to
|
||||
only connect to HTTP connections. Set None (default) to connect to any
|
||||
HTTP or HTTPS connection.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.NotFound`: When unable to connect to any addresses for this resource.
|
||||
"""
|
||||
# Sort connections from (https, local) to (http, remote)
|
||||
# Only check non-local connections unless we own the resource
|
||||
connections = sorted(self.connections, key=lambda c: c.local, reverse=True)
|
||||
owned_or_unowned_non_local = lambda x: self.owned or (not self.owned and not x.local)
|
||||
https = [c.uri for c in connections if owned_or_unowned_non_local(c)]
|
||||
http = [c.httpuri for c in connections if owned_or_unowned_non_local(c)]
|
||||
cls = PlexServer if 'server' in self.provides else PlexClient
|
||||
# Force ssl, no ssl, or any (default)
|
||||
if ssl is True: connections = https
|
||||
elif ssl is False: connections = http
|
||||
else: connections = https + http
|
||||
# Try connecting to all known resource connections in parallel, but
|
||||
# only return the first server (in order) that provides a response.
|
||||
listargs = [[cls, url, self.accessToken, timeout] for url in connections]
|
||||
log.info('Testing %s resource connections..', len(listargs))
|
||||
results = utils.threaded(_connect, listargs)
|
||||
return _chooseConnection('Resource', self.name, results)
|
||||
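# Example usage (illustrative sketch; 'myserver' is a placeholder resource name):
#
#     resource = account.resource('myserver')
#     plex = resource.connect(ssl=True)        # HTTPS connections only
#     plex = resource.connect()                # any HTTPS or HTTP connection (default)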
|
||||
|
||||
class ResourceConnection(PlexObject):
|
||||
""" Represents a Resource Connection object found within the
|
||||
:class:`~myplex.MyPlexResource` objects.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Connection'
|
||||
address (str): Local IP address
|
||||
httpuri (str): Full local address
|
||||
local (bool): True if local
|
||||
port (int): 32400
|
||||
protocol (str): HTTP or HTTPS
|
||||
uri (str): External address
|
||||
"""
|
||||
TAG = 'Connection'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.protocol = data.attrib.get('protocol')
|
||||
self.address = data.attrib.get('address')
|
||||
self.port = utils.cast(int, data.attrib.get('port'))
|
||||
self.uri = data.attrib.get('uri')
|
||||
self.local = utils.cast(bool, data.attrib.get('local'))
|
||||
self.httpuri = 'http://%s:%s' % (self.address, self.port)
|
||||
self.relay = utils.cast(bool, data.attrib.get('relay'))
|
||||
|
||||
|
||||
class MyPlexDevice(PlexObject):
|
||||
""" This object represents resources connected to your Plex server that provide
|
||||
playback ability from your Plex Server, iPhone or Android clients, Plex Web,
|
||||
this API, etc. The raw xml for the data presented here can be found at:
|
||||
https://plex.tv/devices.xml
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Device'
|
||||
key (str): 'https://plex.tv/devices.xml'
|
||||
clientIdentifier (str): Unique ID for this resource.
|
||||
connections (list): List of connection URIs for the device.
|
||||
device (str): Best guess on the type of device this is (Linux, iPad, AFTB, etc).
|
||||
id (str): MyPlex ID of the device.
|
||||
model (str): Model of the device (bueller, Linux, x86_64, etc.)
|
||||
name (str): Hostname of the device.
|
||||
platform (str): OS the resource is running (Linux, Windows, Chrome, etc.)
|
||||
platformVersion (str): Version of the platform.
|
||||
product (str): Plex product (Plex Media Server, Plex for iOS, Plex Web, etc.)
|
||||
productVersion (string): Version of the product.
|
||||
provides (str): List of services this resource provides (client, controller,
|
||||
sync-target, player, pubsub-player).
|
||||
publicAddress (str): Public IP address.
|
||||
screenDensity (str): Unknown
|
||||
screenResolution (str): Screen resolution (750x1334, 1242x2208, etc.)
|
||||
token (str): Plex authentication token for the device.
|
||||
vendor (str): Device vendor (ubuntu, etc).
|
||||
version (str): Unknown (1, 2, 1.3.3.3148-b38628e, 1.3.15, etc.)
|
||||
"""
|
||||
TAG = 'Device'
|
||||
key = 'https://plex.tv/devices.xml'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.name = data.attrib.get('name')
|
||||
self.publicAddress = data.attrib.get('publicAddress')
|
||||
self.product = data.attrib.get('product')
|
||||
self.productVersion = data.attrib.get('productVersion')
|
||||
self.platform = data.attrib.get('platform')
|
||||
self.platformVersion = data.attrib.get('platformVersion')
|
||||
self.device = data.attrib.get('device')
|
||||
self.model = data.attrib.get('model')
|
||||
self.vendor = data.attrib.get('vendor')
|
||||
self.provides = data.attrib.get('provides')
|
||||
self.clientIdentifier = data.attrib.get('clientIdentifier')
|
||||
self.version = data.attrib.get('version')
|
||||
self.id = data.attrib.get('id')
|
||||
self.token = logfilter.add_secret(data.attrib.get('token'))
|
||||
self.screenResolution = data.attrib.get('screenResolution')
|
||||
self.screenDensity = data.attrib.get('screenDensity')
|
||||
self.createdAt = utils.toDatetime(data.attrib.get('createdAt'))
|
||||
self.lastSeenAt = utils.toDatetime(data.attrib.get('lastSeenAt'))
|
||||
self.connections = [connection.attrib.get('uri') for connection in data.iter('Connection')]
|
||||
|
||||
def connect(self, timeout=None):
|
||||
""" Returns a new :class:`~plexapi.client.PlexClient` or :class:`~plexapi.server.PlexServer`
|
||||
Sometimes there is more than one address specified for a server or client.
|
||||
After trying to connect to all available addresses for this client and assuming
|
||||
at least one connection was successful, the PlexClient object is built and returned.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.NotFound`: When unable to connect to any addresses for this device.
|
||||
"""
|
||||
cls = PlexServer if 'server' in self.provides else PlexClient
|
||||
listargs = [[cls, url, self.token, timeout] for url in self.connections]
|
||||
log.info('Testing %s device connections..', len(listargs))
|
||||
results = utils.threaded(_connect, listargs)
|
||||
return _chooseConnection('Device', self.name, results)
|
||||
|
||||
def delete(self):
|
||||
""" Remove this device from your account. """
|
||||
key = 'https://plex.tv/devices/%s.xml' % self.id
|
||||
self._server.query(key, self._server._session.delete)
|
||||
|
||||
|
||||
def _connect(cls, url, token, timeout, results, i):
|
||||
""" Connects to the specified cls with url and token. Stores the connection
|
||||
information to results[i] in a threadsafe way.
|
||||
"""
|
||||
starttime = time.time()
|
||||
try:
|
||||
device = cls(baseurl=url, token=token, timeout=timeout)
|
||||
runtime = int(time.time() - starttime)
|
||||
results[i] = (url, token, device, runtime)
|
||||
except Exception as err:
|
||||
runtime = int(time.time() - starttime)
|
||||
log.error('%s: %s', url, err)
|
||||
results[i] = (url, token, None, runtime)
|
||||
|
||||
|
||||
def _chooseConnection(ctype, name, results):
|
||||
""" Chooses the first (best) connection from the given _connect results. """
|
||||
# At this point we have a list of result tuples containing (url, token, PlexServer, runtime)
|
||||
# or (url, token, None, runtime) in the case a connection could not be established.
|
||||
for url, token, result, runtime in results:
|
||||
okerr = 'OK' if result else 'ERR'
|
||||
log.info('%s connection %s (%ss): %s?X-Plex-Token=%s', ctype, okerr, runtime, url, token)
|
||||
results = [r[2] for r in results if r and r[2] is not None]
|
||||
if results:
|
||||
log.info('Connecting to %s: %s?X-Plex-Token=%s', ctype, results[0]._baseurl, results[0]._token)
|
||||
return results[0]
|
||||
raise NotFound('Unable to connect to %s: %s' % (ctype.lower(), name))
|
||||
125 lib/plexapi/photo.py (new file)
@@ -0,0 +1,125 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import media, utils
|
||||
from plexapi.base import PlexPartialObject
|
||||
from plexapi.exceptions import NotFound
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Photoalbum(PlexPartialObject):
|
||||
""" Represents a photoalbum (collection of photos).
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'photo'
|
||||
addedAt (datetime): Datetime this item was added to the library.
|
||||
art (str): Photo art (/library/metadata/<ratingkey>/art/<artid>)
|
||||
composite (str): Unknown
|
||||
guid (str): Unknown (unique ID)
|
||||
index (int): Index number of this album.
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
librarySectionID (int): :class:`~plexapi.library.LibrarySection` ID.
|
||||
listType (str): Hardcoded as 'photo' (useful for search filters).
|
||||
ratingKey (int): Unique key identifying this item.
|
||||
summary (str): Summary of the photoalbum.
|
||||
thumb (str): URL to thumbnail image.
|
||||
title (str): Photoalbum title. (Trip to Disney World)
|
||||
type (str): Unknown
|
||||
updatedAt (datetime): Datetime this item was updated.
|
||||
"""
|
||||
TAG = 'Directory'
|
||||
TYPE = 'photo'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self.listType = 'photo'
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.art = data.attrib.get('art')
|
||||
self.composite = data.attrib.get('composite')
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.index = utils.cast(int, data.attrib.get('index'))
|
||||
self.key = data.attrib.get('key')
|
||||
self.librarySectionID = data.attrib.get('librarySectionID')
|
||||
self.ratingKey = data.attrib.get('ratingKey')
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
|
||||
def albums(self, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.photo.Photoalbum` objects in this album. """
|
||||
key = '/library/metadata/%s/children' % self.ratingKey
|
||||
return self.fetchItems(key, etag='Directory', **kwargs)
|
||||
|
||||
def album(self, title):
|
||||
""" Returns the :class:`~plexapi.photo.Photoalbum` that matches the specified title. """
|
||||
for album in self.albums():
|
||||
if album.title.lower() == title.lower():
|
||||
return album
|
||||
raise NotFound('Unable to find album: %s' % title)
|
||||
|
||||
def photos(self, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.photo.Photo` objects in this album. """
|
||||
key = '/library/metadata/%s/children' % self.ratingKey
|
||||
return self.fetchItems(key, etag='Photo', **kwargs)
|
||||
|
||||
def photo(self, title):
|
||||
""" Returns the :class:`~plexapi.photo.Photo` that matches the specified title. """
|
||||
for photo in self.photos():
|
||||
if photo.title.lower() == title.lower():
|
||||
return photo
|
||||
raise NotFound('Unable to find photo: %s' % title)
|
||||
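# Example usage (illustrative sketch; the section and album titles are placeholders and the
# get() lookup by title is assumed):
#
#     photosection = plex.library.section('Photos')
#     album = photosection.get('Trip to Disney World')
#     for photo in album.photos():
#         print(photo.title, photo.year)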
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Photo(PlexPartialObject):
|
||||
""" Represents a single photo.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Photo'
|
||||
TYPE (str): 'photo'
|
||||
addedAt (datetime): Datetime this item was added to the library.
|
||||
index (int): Index number of this photo.
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
listType (str): Hardcoded as 'photo' (useful for search filters).
|
||||
media (TYPE): Unknown
|
||||
originallyAvailableAt (datetime): Datetime this photo was added to Plex.
|
||||
parentKey (str): Photoalbum API URL.
|
||||
parentRatingKey (int): Unique key identifying the photoalbum.
|
||||
ratingKey (int): Unique key identifying this item.
|
||||
summary (str): Summary of the photo.
|
||||
thumb (str): URL to thumbnail image.
|
||||
title (str): Photo title.
|
||||
type (str): Unknown
|
||||
updatedAt (datetime): Datetime this item was updated.
|
||||
year (int): Year this photo was taken.
|
||||
"""
|
||||
TAG = 'Photo'
|
||||
TYPE = 'photo'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self.listType = 'photo'
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.index = utils.cast(int, data.attrib.get('index'))
|
||||
self.key = data.attrib.get('key')
|
||||
self.originallyAvailableAt = utils.toDatetime(
|
||||
data.attrib.get('originallyAvailableAt'), '%Y-%m-%d')
|
||||
self.parentKey = data.attrib.get('parentKey')
|
||||
self.parentRatingKey = data.attrib.get('parentRatingKey')
|
||||
self.ratingKey = data.attrib.get('ratingKey')
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.year = utils.cast(int, data.attrib.get('year'))
|
||||
self.media = self.findItems(data, media.Media)
|
||||
|
||||
def photoalbum(self):
|
||||
""" Return this photo's :class:`~plexapi.photo.Photoalbum`. """
|
||||
return self.fetchItem(self.parentKey)
|
||||
|
||||
def section(self):
|
||||
""" Returns the :class:`~plexapi.library.LibrarySection` this item belongs to. """
|
||||
return self._server.library.sectionByID(self.photoalbum().librarySectionID)
|
||||
134 lib/plexapi/playlist.py (new file)
@@ -0,0 +1,134 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import utils
|
||||
from plexapi.base import PlexPartialObject, Playable
|
||||
from plexapi.exceptions import BadRequest
|
||||
from plexapi.playqueue import PlayQueue
|
||||
from plexapi.utils import cast, toDatetime
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Playlist(PlexPartialObject, Playable):
|
||||
""" Represents a single Playlist object.
|
||||
# TODO: Document attributes
|
||||
"""
|
||||
TAG = 'Playlist'
|
||||
TYPE = 'playlist'
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Playable._loadData(self, data)
|
||||
self.addedAt = toDatetime(data.attrib.get('addedAt'))
|
||||
self.composite = data.attrib.get('composite') # url to thumbnail
|
||||
self.duration = cast(int, data.attrib.get('duration'))
|
||||
self.durationInSeconds = cast(int, data.attrib.get('durationInSeconds'))
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.key = data.attrib.get('key')
|
||||
self.key = self.key.replace('/items', '') if self.key else self.key # FIX_BUG_50
|
||||
self.leafCount = cast(int, data.attrib.get('leafCount'))
|
||||
self.playlistType = data.attrib.get('playlistType')
|
||||
self.ratingKey = cast(int, data.attrib.get('ratingKey'))
|
||||
self.smart = cast(bool, data.attrib.get('smart'))
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.title = data.attrib.get('title')
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = toDatetime(data.attrib.get('updatedAt'))
|
||||
self._items = None # cache for self.items
|
||||
|
||||
def __len__(self): # pragma: no cover
|
||||
return len(self.items())
|
||||
|
||||
def __contains__(self, other): # pragma: no cover
|
||||
return any(i.key == other.key for i in self.items())
|
||||
|
||||
def __getitem__(self, key): # pragma: no cover
|
||||
return self.items()[key]
|
||||
|
||||
def items(self):
|
||||
""" Returns a list of all items in the playlist. """
|
||||
if self._items is None:
|
||||
key = '%s/items' % self.key
|
||||
items = self.fetchItems(key)
|
||||
self._items = items
|
||||
return self._items
|
||||
|
||||
def addItems(self, items):
|
||||
""" Add items to a playlist. """
|
||||
if not isinstance(items, (list, tuple)):
|
||||
items = [items]
|
||||
ratingKeys = []
|
||||
for item in items:
|
||||
if item.listType != self.playlistType: # pragma: no cover
|
||||
raise BadRequest('Can not mix media types when building a playlist: %s and %s' %
|
||||
(self.playlistType, item.listType))
|
||||
ratingKeys.append(str(item.ratingKey))
|
||||
uuid = items[0].section().uuid
|
||||
ratingKeys = ','.join(ratingKeys)
|
||||
key = '%s/items%s' % (self.key, utils.joinArgs({
|
||||
'uri': 'library://%s/directory//library/metadata/%s' % (uuid, ratingKeys)
|
||||
}))
|
||||
result = self._server.query(key, method=self._server._session.put)
|
||||
self.reload()
|
||||
return result
|
||||
|
||||
def removeItem(self, item):
|
||||
""" Remove a file from a playlist. """
|
||||
key = '%s/items/%s' % (self.key, item.playlistItemID)
|
||||
result = self._server.query(key, method=self._server._session.delete)
|
||||
self.reload()
|
||||
return result
|
||||
|
||||
def moveItem(self, item, after=None):
|
||||
""" Move a to a new position in playlist. """
|
||||
key = '%s/items/%s/move' % (self.key, item.playlistItemID)
|
||||
if after:
|
||||
key += '?after=%s' % after.playlistItemID
|
||||
result = self._server.query(key, method=self._server._session.put)
|
||||
self.reload()
|
||||
return result
|
||||
|
||||
def edit(self, title=None, summary=None):
|
||||
""" Edit playlist. """
|
||||
key = '/library/metadata/%s%s' % (self.ratingKey, utils.joinArgs({'title': title, 'summary': summary}))
|
||||
result = self._server.query(key, method=self._server._session.put)
|
||||
self.reload()
|
||||
return result
|
||||
|
||||
def delete(self):
|
||||
""" Delete playlist. """
|
||||
return self._server.query(self.key, method=self._server._session.delete)
|
||||
|
||||
def playQueue(self, *args, **kwargs):
|
||||
""" Create a playqueue from this playlist. """
|
||||
return PlayQueue.create(self._server, self, *args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def create(cls, server, title, items):
|
||||
""" Create a playlist. """
|
||||
if not isinstance(items, (list, tuple)):
|
||||
items = [items]
|
||||
ratingKeys = []
|
||||
for item in items:
|
||||
if item.listType != items[0].listType: # pragma: no cover
|
||||
raise BadRequest('Can not mix media types when building a playlist')
|
||||
ratingKeys.append(str(item.ratingKey))
|
||||
ratingKeys = ','.join(ratingKeys)
|
||||
uuid = items[0].section().uuid
|
||||
key = '/playlists%s' % utils.joinArgs({
|
||||
'uri': 'library://%s/directory//library/metadata/%s' % (uuid, ratingKeys),
|
||||
'type': items[0].listType,
|
||||
'title': title,
|
||||
'smart': 0
|
||||
})
|
||||
data = server.query(key, method=server._session.post)[0]
|
||||
return cls(server, data, initpath=key)
|
||||
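# Example usage (illustrative sketch; the section and movie titles are placeholders and the
# get() lookups by title are assumed):
#
#     movies = plex.library.section('Movies')
#     items = [movies.get('Cars'), movies.get('Up')]
#     playlist = Playlist.create(plex, 'My Mix', items)
#     playlist.addItems(movies.get('Brave'))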
|
||||
def copyToUser(self, user):
|
||||
""" Copy playlist to another user account. """
|
||||
from plexapi.server import PlexServer
|
||||
myplex = self._server.myPlexAccount()
|
||||
user = myplex.user(user)
|
||||
# Get the token for your machine.
|
||||
token = user.get_token(self._server.machineIdentifier)
|
||||
# Login to your server using your friends credentials.
|
||||
user_server = PlexServer(self._server._baseurl, token)
|
||||
return self.create(user_server, self.title, self.items())
|
||||
75 lib/plexapi/playqueue.py (new file)
@@ -0,0 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import utils
|
||||
from plexapi.base import PlexObject
|
||||
|
||||
|
||||
class PlayQueue(PlexObject):
|
||||
""" Control a PlayQueue.
|
||||
|
||||
Attributes:
|
||||
key (str): This is only added to support playMedia
|
||||
identifier (str): com.plexapp.plugins.library
|
||||
initpath (str): Relative url where data was grabbed from.
|
||||
items (list): List of :class:`~plexapi.media.Media` or class:`~plexapi.playlist.Playlist`
|
||||
mediaTagPrefix (str): e.g. /system/bundle/media/flags/
|
||||
mediaTagVersion (str): e.g. 1485957738
|
||||
playQueueID (str): An id for the playqueue.
|
||||
playQueueSelectedItemID (str): playQueueSelectedItemID
|
||||
playQueueSelectedItemOffset (str): playQueueSelectedItemOffset
|
||||
playQueueSelectedMetadataItemID (<type 'str'>): 7
|
||||
playQueueShuffled (bool): True if shuffled
|
||||
playQueueSourceURI (str): e.g. library://150425c9-0d99-4242-821e-e5ab81cd2221/item//library/metadata/7
|
||||
playQueueTotalCount (str): How many items in the play queue.
|
||||
playQueueVersion (str): What version the playqueue is.
|
||||
server (:class:`~plexapi.server.PlexServer`): Server you are connected to.
|
||||
size (str): Seems to be an alias for playQueueTotalCount.
|
||||
"""
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.identifier = data.attrib.get('identifier')
|
||||
self.mediaTagPrefix = data.attrib.get('mediaTagPrefix')
|
||||
self.mediaTagVersion = data.attrib.get('mediaTagVersion')
|
||||
self.playQueueID = data.attrib.get('playQueueID')
|
||||
self.playQueueSelectedItemID = data.attrib.get('playQueueSelectedItemID')
|
||||
self.playQueueSelectedItemOffset = data.attrib.get('playQueueSelectedItemOffset')
|
||||
self.playQueueSelectedMetadataItemID = data.attrib.get('playQueueSelectedMetadataItemID')
|
||||
self.playQueueShuffled = utils.cast(bool, data.attrib.get('playQueueShuffled', 0))
|
||||
self.playQueueSourceURI = data.attrib.get('playQueueSourceURI')
|
||||
self.playQueueTotalCount = data.attrib.get('playQueueTotalCount')
|
||||
self.playQueueVersion = data.attrib.get('playQueueVersion')
|
||||
self.size = utils.cast(int, data.attrib.get('size', 0))
|
||||
self.items = self.findItems(data)
|
||||
|
||||
@classmethod
|
||||
def create(cls, server, item, shuffle=0, repeat=0, includeChapters=1, includeRelated=1):
|
||||
""" Create and returns a new :class:`~plexapi.playqueue.PlayQueue`.
|
||||
|
||||
Parameters:
|
||||
server (:class:`~plexapi.server.PlexServer`): Server you are connected to.
|
||||
item (:class:`~plexapi.media.Media` or class:`~plexapi.playlist.Playlist`): A media or Playlist.
|
||||
shuffle (int, optional): Start the playqueue shuffled.
|
||||
repeat (int, optional): Start the playqueue with repeat enabled.
|
||||
includeChapters (int, optional): include Chapters.
|
||||
includeRelated (int, optional): include Related.
|
||||
"""
|
||||
args = {}
|
||||
args['includeChapters'] = includeChapters
|
||||
args['includeRelated'] = includeRelated
|
||||
args['repeat'] = repeat
|
||||
args['shuffle'] = shuffle
|
||||
if item.type == 'playlist':
|
||||
args['playlistID'] = item.ratingKey
|
||||
args['type'] = item.playlistType
|
||||
else:
|
||||
uuid = item.section().uuid
|
||||
args['key'] = item.key
|
||||
args['type'] = item.listType
|
||||
args['uri'] = 'library://%s/item/%s' % (uuid, item.key)
|
||||
path = '/playQueues%s' % utils.joinArgs(args)
|
||||
data = server.query(path, method=server._session.post)
|
||||
c = cls(server, data, initpath=path)
|
||||
# we manually add a key so we can pass this to playMedia
|
||||
# since the data does not contain a key.
|
||||
c.key = item.key
|
||||
return c
|
||||
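# Example usage (illustrative sketch; the client name is a placeholder):
#
#     pq = PlayQueue.create(plex, playlist, shuffle=1)
#     client = plex.client('Living Room TV')
#     client.playMedia(pq)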
471 lib/plexapi/server.py (new file)
@@ -0,0 +1,471 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
from requests.status_codes import _codes as codes
|
||||
from plexapi import BASE_HEADERS, CONFIG, TIMEOUT
|
||||
from plexapi import log, logfilter, utils
|
||||
from plexapi.alert import AlertListener
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.client import PlexClient
|
||||
from plexapi.compat import ElementTree, urlencode
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.library import Library, Hub
|
||||
from plexapi.settings import Settings
|
||||
from plexapi.playlist import Playlist
|
||||
from plexapi.playqueue import PlayQueue
|
||||
from plexapi.utils import cast
|
||||
|
||||
# Need these imports to populate utils.PLEXOBJECTS
|
||||
from plexapi import (audio as _audio, video as _video, # noqa: F401
|
||||
photo as _photo, media as _media, playlist as _playlist) # noqa: F401
|
||||
|
||||
|
||||
class PlexServer(PlexObject):
|
||||
""" This is the main entry point to interacting with a Plex server. It allows you to
|
||||
list connected clients, browse your library sections and perform actions such as
|
||||
emptying trash. If you do not know the auth token required to access your Plex
|
||||
server, or simply want to access your server with your username and password, you
|
||||
can also create a PlexServer instance from :class:`~plexapi.myplex.MyPlexAccount`.
|
||||
|
||||
Parameters:
|
||||
baseurl (str): Base url to access the Plex Media Server (default: 'http://localhost:32400').
|
||||
token (str): Required Plex authentication token to access the server.
|
||||
session (requests.Session, optional): Use your own session object if you want to
|
||||
cache the http responses from PMS
|
||||
timeout (int): timeout in seconds on initial connect to server (default config.TIMEOUT).
|
||||
|
||||
Attributes:
|
||||
allowCameraUpload (bool): True if server allows camera upload.
|
||||
allowChannelAccess (bool): True if server allows channel access (iTunes?).
|
||||
allowMediaDeletion (bool): True if server allows media to be deleted.
|
||||
allowSharing (bool): True if server allows sharing.
|
||||
allowSync (bool): True if server allows sync.
|
||||
backgroundProcessing (bool): Unknown
|
||||
certificate (bool): True if server has an HTTPS certificate.
|
||||
companionProxy (bool): Unknown
|
||||
diagnostics (bool): Unknown
|
||||
eventStream (bool): Unknown
|
||||
friendlyName (str): Human friendly name for this server.
|
||||
hubSearch (bool): True if `Hub Search <https://www.plex.tv/blog
|
||||
/seek-plex-shall-find-leveling-web-app/>`_ is enabled. I believe this
|
||||
is enabled for everyone
|
||||
machineIdentifier (str): Unique ID for this server (looks like an md5).
|
||||
multiuser (bool): True if `multiusers <https://support.plex.tv/hc/en-us/articles
|
||||
/200250367-Multi-User-Support>`_ are enabled.
|
||||
myPlex (bool): Unknown (True if logged into myPlex?).
|
||||
myPlexMappingState (str): Unknown (ex: mapped).
|
||||
myPlexSigninState (str): Unknown (ex: ok).
|
||||
myPlexSubscription (bool): True if you have a myPlex subscription.
|
||||
myPlexUsername (str): Email address if signed into myPlex (user@example.com)
|
||||
ownerFeatures (list): List of features allowed by the server owner. This may be based
|
||||
on your PlexPass subscription. Features include: camera_upload, cloudsync,
|
||||
content_filter, dvr, hardware_transcoding, home, lyrics, music_videos, pass,
|
||||
photo_autotags, premium_music_metadata, session_bandwidth_restrictions, sync,
|
||||
trailers, webhooks (and maybe more).
|
||||
photoAutoTag (bool): True if photo `auto-tagging <https://support.plex.tv/hc/en-us
|
||||
/articles/234976627-Auto-Tagging-of-Photos>`_ is enabled.
|
||||
platform (str): Platform the server is hosted on (ex: Linux)
|
||||
platformVersion (str): Platform version (ex: '6.1 (Build 7601)', '4.4.0-59-generic').
|
||||
pluginHost (bool): Unknown
|
||||
readOnlyLibraries (bool): Unknown
|
||||
requestParametersInCookie (bool): Unknown
|
||||
streamingBrainVersion (bool): Current `Streaming Brain <https://www.plex.tv/blog
|
||||
/mcstreamy-brain-take-world-two-easy-steps/>`_ version.
|
||||
sync (bool): True if `syncing to a device <https://support.plex.tv/hc/en-us/articles
|
||||
/201053678-Sync-Media-to-a-Device>`_ is enabled.
|
||||
transcoderActiveVideoSessions (int): Number of active video transcoding sessions.
|
||||
transcoderAudio (bool): True if audio transcoding is available.
|
||||
transcoderLyrics (bool): True if lyrics transcoding is available.
|
||||
transcoderPhoto (bool): True if photo transcoding is available.
|
||||
transcoderSubtitles (bool): True if subtitle transcoding is available.
|
||||
transcoderVideo (bool): True if video transcoding is available.
|
||||
transcoderVideoBitrates (list): List of video bitrates.
|
||||
transcoderVideoQualities (list): List of video qualities.
|
||||
transcoderVideoResolutions (list): List of video resolutions.
|
||||
updatedAt (int): Datetime the server was updated.
|
||||
updater (bool): Unknown
|
||||
version (str): Current Plex version (ex: 1.3.2.3112-1751929)
|
||||
voiceSearch (bool): True if voice search is enabled. (is this Google Voice search?)
|
||||
_baseurl (str): HTTP address of the client.
|
||||
_token (str): Token used to access this client.
|
||||
_session (obj): Requests session object used to access this client.
|
||||
"""
|
||||
key = '/'
|
||||
|
||||
def __init__(self, baseurl=None, token=None, session=None, timeout=None):
|
||||
self._baseurl = baseurl or CONFIG.get('auth.server_baseurl', 'http://localhost:32400')
|
||||
self._token = logfilter.add_secret(token or CONFIG.get('auth.server_token'))
|
||||
self._showSecrets = CONFIG.get('log.show_secrets', '').lower() == 'true'
|
||||
self._session = session or requests.Session()
|
||||
self._library = None # cached library
|
||||
self._settings = None # cached settings
|
||||
self._myPlexAccount = None # cached myPlexAccount
|
||||
data = self.query(self.key, timeout=timeout)
|
||||
super(PlexServer, self).__init__(self, data, self.key)
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.allowCameraUpload = cast(bool, data.attrib.get('allowCameraUpload'))
|
||||
self.allowChannelAccess = cast(bool, data.attrib.get('allowChannelAccess'))
|
||||
self.allowMediaDeletion = cast(bool, data.attrib.get('allowMediaDeletion'))
|
||||
self.allowSharing = cast(bool, data.attrib.get('allowSharing'))
|
||||
self.allowSync = cast(bool, data.attrib.get('allowSync'))
|
||||
self.backgroundProcessing = cast(bool, data.attrib.get('backgroundProcessing'))
|
||||
self.certificate = cast(bool, data.attrib.get('certificate'))
|
||||
self.companionProxy = cast(bool, data.attrib.get('companionProxy'))
|
||||
self.diagnostics = utils.toList(data.attrib.get('diagnostics'))
|
||||
self.eventStream = cast(bool, data.attrib.get('eventStream'))
|
||||
self.friendlyName = data.attrib.get('friendlyName')
|
||||
self.hubSearch = cast(bool, data.attrib.get('hubSearch'))
|
||||
self.machineIdentifier = data.attrib.get('machineIdentifier')
|
||||
self.multiuser = cast(bool, data.attrib.get('multiuser'))
|
||||
self.myPlex = cast(bool, data.attrib.get('myPlex'))
|
||||
self.myPlexMappingState = data.attrib.get('myPlexMappingState')
|
||||
self.myPlexSigninState = data.attrib.get('myPlexSigninState')
|
||||
self.myPlexSubscription = cast(bool, data.attrib.get('myPlexSubscription'))
|
||||
self.myPlexUsername = data.attrib.get('myPlexUsername')
|
||||
self.ownerFeatures = utils.toList(data.attrib.get('ownerFeatures'))
|
||||
self.photoAutoTag = cast(bool, data.attrib.get('photoAutoTag'))
|
||||
self.platform = data.attrib.get('platform')
|
||||
self.platformVersion = data.attrib.get('platformVersion')
|
||||
self.pluginHost = cast(bool, data.attrib.get('pluginHost'))
|
||||
self.readOnlyLibraries = cast(int, data.attrib.get('readOnlyLibraries'))
|
||||
self.requestParametersInCookie = cast(bool, data.attrib.get('requestParametersInCookie'))
|
||||
self.streamingBrainVersion = data.attrib.get('streamingBrainVersion')
|
||||
self.sync = cast(bool, data.attrib.get('sync'))
|
||||
self.transcoderActiveVideoSessions = int(data.attrib.get('transcoderActiveVideoSessions', 0))
|
||||
self.transcoderAudio = cast(bool, data.attrib.get('transcoderAudio'))
|
||||
self.transcoderLyrics = cast(bool, data.attrib.get('transcoderLyrics'))
|
||||
self.transcoderPhoto = cast(bool, data.attrib.get('transcoderPhoto'))
|
||||
self.transcoderSubtitles = cast(bool, data.attrib.get('transcoderSubtitles'))
|
||||
self.transcoderVideo = cast(bool, data.attrib.get('transcoderVideo'))
|
||||
self.transcoderVideoBitrates = utils.toList(data.attrib.get('transcoderVideoBitrates'))
|
||||
self.transcoderVideoQualities = utils.toList(data.attrib.get('transcoderVideoQualities'))
|
||||
self.transcoderVideoResolutions = utils.toList(data.attrib.get('transcoderVideoResolutions'))
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.updater = cast(bool, data.attrib.get('updater'))
|
||||
self.version = data.attrib.get('version')
|
||||
self.voiceSearch = cast(bool, data.attrib.get('voiceSearch'))
|
||||
|
||||
def _headers(self, **kwargs):
|
||||
""" Returns dict containing base headers for all requests to the server. """
|
||||
headers = BASE_HEADERS.copy()
|
||||
if self._token:
|
||||
headers['X-Plex-Token'] = self._token
|
||||
headers.update(kwargs)
|
||||
return headers
|
||||
|
||||
@property
|
||||
def library(self):
|
||||
""" Library to browse or search your media. """
|
||||
if not self._library:
|
||||
try:
|
||||
data = self.query(Library.key)
|
||||
self._library = Library(self, data)
|
||||
except BadRequest:
|
||||
data = self.query('/library/sections/')
|
||||
# Only the owner has access to /library
|
||||
# so just return the library without the data.
|
||||
return Library(self, data)
|
||||
return self._library
|
||||
|
||||
@property
|
||||
def settings(self):
|
||||
""" Returns a list of all server settings. """
|
||||
if not self._settings:
|
||||
data = self.query(Settings.key)
|
||||
self._settings = Settings(self, data)
|
||||
return self._settings
|
||||
|
||||
def account(self):
|
||||
""" Returns the :class:`~plexapi.server.Account` object this server belongs to. """
|
||||
data = self.query(Account.key)
|
||||
return Account(self, data)
|
||||
|
||||
def myPlexAccount(self):
|
||||
""" Returns a :class:`~plexapi.myplex.MyPlexAccount` object using the same
|
||||
token to access this server. If you are not the owner of this PlexServer
|
||||
you're likely to receive an authentication error when calling this.
|
||||
"""
|
||||
if self._myPlexAccount is None:
|
||||
from plexapi.myplex import MyPlexAccount
|
||||
self._myPlexAccount = MyPlexAccount(token=self._token)
|
||||
return self._myPlexAccount
|
||||
|
||||
def _myPlexClientPorts(self):
|
||||
""" Sometimes the PlexServer does not properly advertise port numbers required
|
||||
to connect. This attempts to look up the device port numbers from plex.tv.
|
||||
See issue #126: Make PlexServer.clients() more user friendly.
|
||||
https://github.com/pkkid/python-plexapi/issues/126
|
||||
"""
|
||||
try:
|
||||
ports = {}
|
||||
account = self.myPlexAccount()
|
||||
for device in account.devices():
|
||||
if device.connections and ':' in device.connections[0][6:]:
|
||||
ports[device.clientIdentifier] = device.connections[0].split(':')[-1]
|
||||
return ports
|
||||
except Exception as err:
|
||||
log.warning('Unable to fetch client ports from myPlex: %s', err)
|
||||
return ports
|
||||
|
||||
def clients(self):
|
||||
""" Returns list of all :class:`~plexapi.client.PlexClient` objects connected to server. """
|
||||
items = []
|
||||
ports = None
|
||||
for elem in self.query('/clients'):
|
||||
port = elem.attrib.get('port')
|
||||
if not port:
|
||||
log.warning('%s did not advertise a port, checking plex.tv.', elem.attrib.get('name'))
|
||||
ports = self._myPlexClientPorts() if ports is None else ports
|
||||
port = ports.get(elem.attrib.get('machineIdentifier'))
|
||||
baseurl = 'http://%s:%s' % (elem.attrib['host'], port)
|
||||
items.append(PlexClient(baseurl=baseurl, server=self,
|
||||
token=self._token, data=elem, connect=False))
|
||||
|
||||
return items
|
||||
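For reference, a minimal usage sketch of `clients()` (the baseurl and token below are placeholders; substitute your own server address and auth token):

```
from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='<your-token>')  # placeholder values
for client in plex.clients():
    # Clients that do not advertise a port are looked up on plex.tv automatically.
    print(client.title)
```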
|
||||
def client(self, name):
|
||||
""" Returns the :class:`~plexapi.client.PlexClient` that matches the specified name.
|
||||
|
||||
Parameters:
|
||||
name (str): Name of the client to return.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.NotFound`: Unknown client name
|
||||
"""
|
||||
for client in self.clients():
|
||||
if client and client.title == name:
|
||||
return client
|
||||
|
||||
raise NotFound('Unknown client name: %s' % name)
|
||||
|
||||
def createPlaylist(self, title, items):
|
||||
""" Creates and returns a new :class:`~plexapi.playlist.Playlist`.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the playlist to be created.
|
||||
items (list<Media>): List of media items to include in the playlist.
|
||||
"""
|
||||
return Playlist.create(self, title, items)
|
||||
|
||||
def createPlayQueue(self, item, **kwargs):
|
||||
""" Creates and returns a new :class:`~plexapi.playqueue.PlayQueue`.
|
||||
|
||||
Parameters:
|
||||
item (Media or Playlist): Media or playlist to add to PlayQueue.
|
||||
kwargs (dict): See `~plexapi.playqueue.PlayQueue.create`.
|
||||
"""
|
||||
return PlayQueue.create(self, item, **kwargs)
|
||||
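A short sketch of building a playlist and play queue from search results (the titles are placeholders; `plex` is assumed to be a connected PlexServer instance):

```
# Collect some media items, then wrap them in a playlist and a play queue.
movies = plex.search('Cars', mediatype='movie')           # placeholder search term
playlist = plex.createPlaylist('Weekend Movies', movies)  # placeholder playlist title
queue = plex.createPlayQueue(playlist)
```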
|
||||
def downloadDatabases(self, savepath=None, unpack=False):
|
||||
""" Download databases.
|
||||
|
||||
Parameters:
|
||||
savepath (str): Defaults to current working dir.
|
||||
unpack (bool): Unpack the zip file.
|
||||
"""
|
||||
url = self.url('/diagnostics/databases')
|
||||
filepath = utils.download(url, self._token, None, savepath, self._session, unpack=unpack)
|
||||
return filepath
|
||||
|
||||
def downloadLogs(self, savepath=None, unpack=False):
|
||||
""" Download server logs.
|
||||
|
||||
Parameters:
|
||||
savepath (str): Defaults to current working dir.
|
||||
unpack (bool): Unpack the zip file.
|
||||
"""
|
||||
url = self.url('/diagnostics/logs')
|
||||
filepath = utils.download(url, self._token, None, savepath, self._session, unpack=unpack)
|
||||
return filepath
|
||||
|
||||
def check_for_update(self, force=True, download=False):
|
||||
""" Returns a :class:`~plexapi.base.Release` object containing release info.
|
||||
|
||||
Parameters:
|
||||
force (bool): Force server to check for new releases
|
||||
download (bool): Download if an update is available.
|
||||
"""
|
||||
part = '/updater/check?download=%s' % (1 if download else 0)
|
||||
if force:
|
||||
self.query(part, method=self._session.put)
|
||||
return self.fetchItem('/updater/status')
|
||||
|
||||
def isLatest(self):
|
||||
""" Check if the installed version of PMS is the latest. """
|
||||
release = self.check_for_update(force=True)
|
||||
return bool(release.version == self.version)
|
||||
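A hedged example of the update-check flow (assuming `plex` is a connected PlexServer instance):

```
release = plex.check_for_update(force=True, download=False)
if not plex.isLatest():
    print('Update available: %s (running %s)' % (release.version, plex.version))
```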
|
||||
def installUpdate(self):
|
||||
""" Install the newest version of Plex Media Server. """
|
||||
# We can add this but dunno how useful this is since it sometimes
|
||||
# requires user action using a gui.
|
||||
part = '/updater/apply'
|
||||
release = self.check_for_update(force=True, download=True)
|
||||
if release and release.version != self.version:
|
||||
# figure out what method this is..
|
||||
return self.query(part, method=self._session.put)
|
||||
|
||||
def history(self):
|
||||
""" Returns a list of media items from watched history. """
|
||||
return self.fetchItems('/status/sessions/history/all')
|
||||
|
||||
def playlists(self):
|
||||
""" Returns a list of all :class:`~plexapi.playlist.Playlist` objects saved on the server. """
|
||||
# TODO: Add sort and type options?
|
||||
# /playlists/all?type=15&sort=titleSort%3Aasc&playlistType=video&smart=0
|
||||
return self.fetchItems('/playlists')
|
||||
|
||||
def playlist(self, title):
|
||||
""" Returns the :class:`~plexapi.client.Playlist` that matches the specified title.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the playlist to return.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.NotFound`: Invalid playlist title
|
||||
"""
|
||||
return self.fetchItem('/playlists', title=title)
|
||||
|
||||
def query(self, key, method=None, headers=None, timeout=None, **kwargs):
|
||||
""" Main method used to handle HTTPS requests to the Plex server. This method helps
|
||||
by encoding the response to utf-8 and parsing the returned XML into an
|
||||
ElementTree object. Returns None if no data exists in the response.
|
||||
"""
|
||||
url = self.url(key)
|
||||
method = method or self._session.get
|
||||
timeout = timeout or TIMEOUT
|
||||
log.debug('%s %s', method.__name__.upper(), url)
|
||||
headers = self._headers(**headers or {})
|
||||
response = method(url, headers=headers, timeout=timeout, **kwargs)
|
||||
if response.status_code not in (200, 201):
|
||||
codename = codes.get(response.status_code)[0]
|
||||
errtext = response.text.replace('\n', ' ')
|
||||
log.warning('BadRequest (%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
|
||||
raise BadRequest('(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext))
|
||||
data = response.text.encode('utf8')
|
||||
return ElementTree.fromstring(data) if data.strip() else None
|
||||
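For illustration, a raw query against an arbitrary endpoint (assuming `plex` is a connected PlexServer instance); the return value is an ElementTree element, or None for an empty response:

```
xml = plex.query('/status/sessions')
if xml is not None:
    for elem in xml:
        print(elem.tag, elem.attrib.get('title'))
```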
|
||||
def search(self, query, mediatype=None, limit=None):
|
||||
""" Returns a list of media items or filter categories from the resulting
|
||||
`Hub Search <https://www.plex.tv/blog/seek-plex-shall-find-leveling-web-app/>`_
|
||||
against all items in your Plex library. This searches genres, actors, directors,
|
||||
playlists, as well as all the obvious media titles. It performs spell-checking
|
||||
against your search terms (because KUROSAWA is hard to spell). It also provides
|
||||
contextual search results. So for example, if you search for 'Pernice', it’ll
|
||||
return 'Pernice Brothers' as the artist result, but we’ll also go ahead and
|
||||
return your most-listened to albums and tracks from the artist. If you type
|
||||
'Arnold' you’ll get a result for the actor, but also the most recently added
|
||||
movies he’s in.
|
||||
|
||||
Parameters:
|
||||
query (str): Query to use when searching your library.
|
||||
mediatype (str): Optionally limit your search to the specified media type.
|
||||
limit (int): Optionally limit to the specified number of results per Hub.
|
||||
"""
|
||||
results = []
|
||||
params = {'query': query}
|
||||
if mediatype:
|
||||
params['section'] = utils.SEARCHTYPES[mediatype]
|
||||
if limit:
|
||||
params['limit'] = limit
|
||||
key = '/hubs/search?%s' % urlencode(params)
|
||||
for hub in self.fetchItems(key, Hub):
|
||||
results += hub.items
|
||||
return results
|
||||
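A minimal hub-search sketch (the search term is a placeholder; `plex` is a connected PlexServer instance):

```
for item in plex.search('Arnold', mediatype='movie', limit=5):
    print(item.title)
```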
|
||||
def sessions(self):
|
||||
""" Returns a list of all active session (currently playing) media objects. """
|
||||
return self.fetchItems('/status/sessions')
|
||||
|
||||
def startAlertListener(self, callback=None):
|
||||
""" Creates a websocket connection to the Plex Server to optionally recieve
|
||||
notifications. These often include messages from Plex about media scans
|
||||
as well as updates to currently running Transcode Sessions.
|
||||
|
||||
NOTE: You need websocket-client installed in order to use this feature.
|
||||
>> pip install websocket-client
|
||||
|
||||
Parameters:
|
||||
callback (func): Callback function to call on received messages.
|
||||
|
||||
Raises:
|
||||
:class:`~plexapi.exceptions.Unsupported`: Websocket-client not installed.
|
||||
"""
|
||||
notifier = AlertListener(self, callback)
|
||||
notifier.start()
|
||||
return notifier
|
||||
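A small sketch of wiring up a callback (requires websocket-client as noted above; `plex` is a connected PlexServer instance):

```
import time

def on_alert(data):
    # data is the notification payload pushed by the server.
    print(data)

notifier = plex.startAlertListener(callback=on_alert)
time.sleep(30)  # the listener runs in a background thread until the process exits
```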
|
||||
def transcodeImage(self, media, height, width, opacity=100, saturation=100):
|
||||
""" Returns the URL for a transcoded image from the specified media object.
|
||||
Returns None if no media specified (needed if user tries to pass thumb
|
||||
or art directly).
|
||||
|
||||
Parameters:
|
||||
height (int): Height to transcode the image to.
|
||||
width (int): Width to transcode the image to.
|
||||
opacity (int): Opacity of the resulting image (possibly deprecated).
|
||||
saturation (int): Saturation of the resulting image.
|
||||
"""
|
||||
if media:
|
||||
transcode_url = '/photo/:/transcode?height=%s&width=%s&opacity=%s&saturation=%s&url=%s' % (
|
||||
height, width, opacity, saturation, media)
|
||||
return self.url(transcode_url, includeToken=True)
|
||||
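For example, building a resized thumbnail URL for an item found via search (the title is a placeholder; `plex` is a connected PlexServer instance):

```
movie = plex.search('Cars', mediatype='movie')[0]
url = plex.transcodeImage(movie.thumb, height=300, width=300)
print(url)  # the URL includes the X-Plex-Token, since it is built with includeToken=True
```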
|
||||
def url(self, key, includeToken=None):
|
||||
""" Build a URL string with proper token argument. Token will be appended to the URL
|
||||
if either includeToken is True or CONFIG.log.show_secrets is 'true'.
|
||||
"""
|
||||
if self._token and (includeToken or self._showSecrets):
|
||||
delim = '&' if '?' in key else '?'
|
||||
return '%s%s%sX-Plex-Token=%s' % (self._baseurl, key, delim, self._token)
|
||||
return '%s%s' % (self._baseurl, key)
|
||||
|
||||
|
||||
class Account(PlexObject):
|
||||
""" Contains the locally cached MyPlex account information. The properties provided don't
|
||||
match the :class:`~plexapi.myplex.MyPlexAccount` object very well. I believe this exists
|
||||
because access to myplex is not required to get basic plex information. I can't imagine this
|
||||
object is terribly useful unless you need this information while offline.
|
||||
|
||||
Parameters:
|
||||
server (:class:`~plexapi.server.PlexServer`): PlexServer this account is connected to (optional)
|
||||
data (ElementTree): Response from PlexServer used to build this object (optional).
|
||||
|
||||
Attributes:
|
||||
authToken (str): Plex authentication token to access the server.
|
||||
mappingError (str): Unknown
|
||||
mappingErrorMessage (str): Unknown
|
||||
mappingState (str): Unknown
|
||||
privateAddress (str): Local IP address of the Plex server.
|
||||
privatePort (str): Local port of the Plex server.
|
||||
publicAddress (str): Public IP address of the Plex server.
|
||||
publicPort (str): Public port of the Plex server.
|
||||
signInState (str): Signin state for this account (ex: ok).
|
||||
subscriptionActive (bool): True if the account subscription is active.
|
||||
subscriptionFeatures (list): List of features allowed by the server for this account.
|
||||
This may be based on your PlexPass subscription. Features include: camera_upload,
|
||||
cloudsync, content_filter, dvr, hardware_transcoding, home, lyrics, music_videos,
|
||||
pass, photo_autotags, premium_music_metadata, session_bandwidth_restrictions,
|
||||
sync, trailers, webhooks (and maybe more).
|
||||
subscriptionState (str): 'Active' if this subscription is active.
|
||||
username (str): Plex account username (user@example.com).
|
||||
"""
|
||||
key = '/myplex/account'
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.authToken = data.attrib.get('authToken')
|
||||
self.username = data.attrib.get('username')
|
||||
self.mappingState = data.attrib.get('mappingState')
|
||||
self.mappingError = data.attrib.get('mappingError')
|
||||
self.mappingErrorMessage = data.attrib.get('mappingErrorMessage')
|
||||
self.signInState = data.attrib.get('signInState')
|
||||
self.publicAddress = data.attrib.get('publicAddress')
|
||||
self.publicPort = data.attrib.get('publicPort')
|
||||
self.privateAddress = data.attrib.get('privateAddress')
|
||||
self.privatePort = data.attrib.get('privatePort')
|
||||
self.subscriptionFeatures = utils.toList(data.attrib.get('subscriptionFeatures'))
|
||||
self.subscriptionActive = cast(bool, data.attrib.get('subscriptionActive'))
|
||||
self.subscriptionState = data.attrib.get('subscriptionState')
|
||||
156 lib/plexapi/settings.py (new file)
@@ -0,0 +1,156 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
|
||||
from plexapi import log, utils
|
||||
from plexapi.base import PlexObject
|
||||
from plexapi.compat import quote, string_type
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
|
||||
|
||||
class Settings(PlexObject):
|
||||
""" Container class for all settings. Allows getting and setting PlexServer settings.
|
||||
|
||||
Attributes:
|
||||
key (str): '/:/prefs'
|
||||
"""
|
||||
key = '/:/prefs'
|
||||
|
||||
def __init__(self, server, data, initpath=None):
|
||||
self._settings = {}
|
||||
super(Settings, self).__init__(server, data, initpath)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr.startswith('_'):
|
||||
return self.__dict__[attr]
|
||||
return self.get(attr).value
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
if not attr.startswith('_'):
|
||||
return self.get(attr).set(value)
|
||||
self.__dict__[attr] = value
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
for elem in data:
|
||||
id = utils.lowerFirst(elem.attrib['id'])
|
||||
if id in self._settings:
|
||||
self._settings[id]._loadData(elem)
|
||||
continue
|
||||
self._settings[id] = Setting(self._server, elem, self._initpath)
|
||||
|
||||
def all(self):
|
||||
""" Returns a list of all :class:`~plexapi.settings.Setting` objects available. """
|
||||
return list(v for id, v in sorted(self._settings.items()))
|
||||
|
||||
def get(self, id):
|
||||
""" Return the :class:`~plexapi.settings.Setting` object with the specified id. """
|
||||
id = utils.lowerFirst(id)
|
||||
if id in self._settings:
|
||||
return self._settings[id]
|
||||
raise NotFound('Invalid setting id: %s' % id)
|
||||
|
||||
def groups(self):
|
||||
""" Returns a dict of lists for all :class:`~plexapi.settings.Setting`
|
||||
objects grouped by setting group.
|
||||
"""
|
||||
groups = defaultdict(list)
|
||||
for setting in self.all():
|
||||
groups[setting.group].append(setting)
|
||||
return dict(groups)
|
||||
|
||||
def group(self, group):
|
||||
""" Return a list of all :class:`~plexapi.settings.Setting` objects in the specified group.
|
||||
|
||||
Parameters:
|
||||
group (str): Group to return all settings.
|
||||
"""
|
||||
return self.groups().get(group, [])
|
||||
|
||||
def save(self):
|
||||
""" Save any outstanding settnig changes to the :class:`~plexapi.server.PlexServer`. This
|
||||
performs a full reload() of Settings after complete.
|
||||
"""
|
||||
params = {}
|
||||
for setting in self.all():
|
||||
if setting._setValue:
|
||||
log.info('Saving PlexServer setting %s = %s' % (setting.id, setting._setValue))
|
||||
params[setting.id] = quote(setting._setValue)
|
||||
if not params:
|
||||
raise BadRequest('No settings have been modified.')
|
||||
querystr = '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
|
||||
url = '%s?%s' % (self.key, querystr)
|
||||
self._server.query(url, self._server._session.put)
|
||||
self.reload()
|
||||
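A sketch of reading and changing a server preference (this assumes the server exposes a 'friendlyName' setting; nothing is persisted until save() is called):

```
print(plex.settings.get('friendlyName').value)        # read the current value
plex.settings.get('friendlyName').set('Basement Plex')  # stage a new value
plex.settings.save()                                   # persist and reload
```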
|
||||
|
||||
class Setting(PlexObject):
|
||||
""" Represents a single Plex setting.
|
||||
|
||||
Attributes:
|
||||
id (str): Setting id (or name).
|
||||
label (str): Short description of what this setting is.
|
||||
summary (str): Long description of what this setting is.
|
||||
type (str): Setting type (text, int, double, bool).
|
||||
default (str): Default value for this setting.
|
||||
value (str,bool,int,float): Current value for this setting.
|
||||
hidden (bool): True if this is a hidden setting.
|
||||
advanced (bool): True if this is an advanced setting.
|
||||
group (str): Group name this setting is categorized as.
|
||||
enumValues (list,dict): List or dictionary of valid values for this setting.
|
||||
"""
|
||||
_bool_cast = lambda x: True if x == 'true' else False
|
||||
_bool_str = lambda x: str(x).lower()
|
||||
TYPES = {
|
||||
'bool': {'type': bool, 'cast': _bool_cast, 'tostr': _bool_str},
|
||||
'double': {'type': float, 'cast': float, 'tostr': string_type},
|
||||
'int': {'type': int, 'cast': int, 'tostr': string_type},
|
||||
'text': {'type': string_type, 'cast': string_type, 'tostr': string_type},
|
||||
}
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._setValue = None
|
||||
self.id = data.attrib.get('id')
|
||||
self.label = data.attrib.get('label')
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.type = data.attrib.get('type')
|
||||
self.default = self._cast(data.attrib.get('default'))
|
||||
self.value = self._cast(data.attrib.get('value'))
|
||||
self.hidden = utils.cast(bool, data.attrib.get('hidden'))
|
||||
self.advanced = utils.cast(bool, data.attrib.get('advanced'))
|
||||
self.group = data.attrib.get('group')
|
||||
self.enumValues = self._getEnumValues(data)
|
||||
|
||||
def _cast(self, value):
|
||||
""" Cast the specifief value to the type of this setting. """
|
||||
if self.type != 'text':
|
||||
value = utils.cast(self.TYPES.get(self.type)['cast'], value)
|
||||
return value
|
||||
|
||||
def _getEnumValues(self, data):
|
||||
""" Returns a list of dictionary of valis value for this setting. """
|
||||
enumstr = data.attrib.get('enumValues')
|
||||
if not enumstr:
|
||||
return None
|
||||
if ':' in enumstr:
|
||||
return {self._cast(k): v for k, v in [kv.split(':') for kv in enumstr.split('|')]}
|
||||
return enumstr.split('|')
|
||||
|
||||
def set(self, value):
|
||||
""" Set a new value for this setitng. NOTE: You must call plex.settings.save() for before
|
||||
any changes to setting values are persisted to the :class:`~plexapi.server.PlexServer`.
|
||||
"""
|
||||
# check a few things up front
|
||||
if not isinstance(value, self.TYPES[self.type]['type']):
|
||||
badtype = type(value).__name__
|
||||
raise BadRequest('Invalid value for %s: a %s is required, not %s' % (self.id, self.type, badtype))
|
||||
if self.enumValues and value not in self.enumValues:
|
||||
raise BadRequest('Invalid value for %s: %s not in %s' % (self.id, value, list(self.enumValues)))
|
||||
# store value off to the side until we call settings.save()
|
||||
tostr = self.TYPES[self.type]['tostr']
|
||||
self._setValue = tostr(value)
|
||||
|
||||
def toUrl(self):
|
||||
"""Helper for urls"""
|
||||
return '%s=%s' % (self.id, self._setValue or self.value)
|
||||
42 lib/plexapi/sync.py (new file)
@@ -0,0 +1,42 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
from plexapi import utils
|
||||
from plexapi.exceptions import NotFound
|
||||
|
||||
|
||||
class SyncItem(object):
|
||||
""" Sync Item. This doesn't current work. """
|
||||
def __init__(self, device, data, servers=None):
|
||||
self._device = device
|
||||
self._servers = servers
|
||||
self._loadData(data)
|
||||
|
||||
def _loadData(self, data):
|
||||
self._data = data
|
||||
self.id = utils.cast(int, data.attrib.get('id'))
|
||||
self.version = utils.cast(int, data.attrib.get('version'))
|
||||
self.rootTitle = data.attrib.get('rootTitle')
|
||||
self.title = data.attrib.get('title')
|
||||
self.metadataType = data.attrib.get('metadataType')
|
||||
self.machineIdentifier = data.find('Server').get('machineIdentifier')
|
||||
self.status = data.find('Status').attrib.copy()
|
||||
self.MediaSettings = data.find('MediaSettings').attrib.copy()
|
||||
self.policy = data.find('Policy').attrib.copy()
|
||||
self.location = data.find('Location').attrib.copy()
|
||||
|
||||
def server(self):
|
||||
server = list(filter(lambda x: x.machineIdentifier == self.machineIdentifier, self._servers))
|
||||
if 0 == len(server):
|
||||
raise NotFound('Unable to find server with uuid %s' % self.machineIdentifier)
|
||||
return server[0]
|
||||
|
||||
def getMedia(self):
|
||||
server = self.server().connect()
|
||||
key = '/sync/items/%s' % self.id
|
||||
return server.fetchItems(key)
|
||||
|
||||
def markAsDone(self, sync_id):
|
||||
server = self.server().connect()
|
||||
url = '/sync/%s/%s/files/%s/downloaded' % (
|
||||
self._device.clientIdentifier, server.machineIdentifier, sync_id)
|
||||
server.query(url, method=requests.put)
|
||||
363 lib/plexapi/utils.py (new file)
@@ -0,0 +1,363 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from getpass import getpass
|
||||
from threading import Thread
|
||||
from tqdm import tqdm
|
||||
from plexapi import compat
|
||||
from plexapi.exceptions import NotFound
|
||||
|
||||
# Search Types - Plex uses these to filter specific media types when searching.
|
||||
# Library Types - Populated at runtime
|
||||
SEARCHTYPES = {'movie': 1, 'show': 2, 'season': 3, 'episode': 4,
|
||||
'artist': 8, 'album': 9, 'track': 10, 'photo': 14}
|
||||
PLEXOBJECTS = {}
|
||||
|
||||
|
||||
class SecretsFilter(logging.Filter):
|
||||
""" Logging filter to hide secrets. """
|
||||
|
||||
def __init__(self, secrets=None):
|
||||
self.secrets = secrets or set()
|
||||
|
||||
def add_secret(self, secret):
|
||||
if secret is not None:
|
||||
self.secrets.add(secret)
|
||||
return secret
|
||||
|
||||
def filter(self, record):
|
||||
cleanargs = list(record.args)
|
||||
for i in range(len(cleanargs)):
|
||||
if isinstance(cleanargs[i], compat.string_type):
|
||||
for secret in self.secrets:
|
||||
cleanargs[i] = cleanargs[i].replace(secret, '<hidden>')
|
||||
record.args = tuple(cleanargs)
|
||||
return True
|
||||
|
||||
|
||||
def registerPlexObject(cls):
|
||||
""" Registry of library types we may come across when parsing XML. This allows us to
|
||||
define a few helper functions to dynamically convert the XML into objects. See
|
||||
buildItem() below for an example.
|
||||
"""
|
||||
etype = getattr(cls, 'STREAMTYPE', cls.TYPE)
|
||||
ehash = '%s.%s' % (cls.TAG, etype) if etype else cls.TAG
|
||||
if ehash in PLEXOBJECTS:
|
||||
raise Exception('Ambiguous PlexObject definition %s(tag=%s, type=%s) with %s' %
|
||||
(cls.__name__, cls.TAG, etype, PLEXOBJECTS[ehash].__name__))
|
||||
PLEXOBJECTS[ehash] = cls
|
||||
return cls
|
||||
|
||||
|
||||
def cast(func, value):
|
||||
""" Cast the specified value to the specified type (returned by func). Currently this
|
||||
only supports int, float and bool. Should be extended if needed.
|
||||
|
||||
Parameters:
|
||||
func (func): Callback function used to cast the value to type (int, bool, float).
|
||||
value (any): value to be cast and returned.
|
||||
"""
|
||||
if value is not None:
|
||||
if func == bool:
|
||||
return bool(int(value))
|
||||
elif func in (int, float):
|
||||
try:
|
||||
return func(value)
|
||||
except ValueError:
|
||||
return float('nan')
|
||||
return func(value)
|
||||
return value
|
||||
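A few illustrative calls, following the branches above:

```
from plexapi.utils import cast

cast(bool, '1')       # True
cast(int, '42')       # 42
cast(float, 'junk')   # nan (unparsable numbers fall back to NaN)
cast(int, None)       # None (missing attributes pass straight through)
```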
|
||||
|
||||
def joinArgs(args):
|
||||
""" Returns a query string (uses for HTTP URLs) where only the value is URL encoded.
|
||||
Example return value: '?genre=action&type=1337'.
|
||||
|
||||
Parameters:
|
||||
args (dict): Arguments to include in query string.
|
||||
"""
|
||||
if not args:
|
||||
return ''
|
||||
arglist = []
|
||||
for key in sorted(args, key=lambda x: x.lower()):
|
||||
value = compat.ustr(args[key])
|
||||
arglist.append('%s=%s' % (key, compat.quote(value)))
|
||||
return '?%s' % '&'.join(arglist)
|
||||
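For example, keys are sorted case-insensitively and only the values are encoded:

```
from plexapi.utils import joinArgs

joinArgs({'genre': 'action', 'title': 'The Big Lebowski'})
# -> '?genre=action&title=The%20Big%20Lebowski'
```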
|
||||
|
||||
def lowerFirst(s):
|
||||
return s[0].lower() + s[1:]
|
||||
|
||||
|
||||
def rget(obj, attrstr, default=None, delim='.'): # pragma: no cover
|
||||
""" Returns the value at the specified attrstr location within a nexted tree of
|
||||
dicts, lists, tuples, functions, classes, etc. The lookup is done recursivley
|
||||
for each key in attrstr (split by by the delimiter) This function is heavily
|
||||
influenced by the lookups used in Django templates.
|
||||
|
||||
Parameters:
|
||||
obj (any): Object to start the lookup in (dict, obj, list, tuple, etc).
|
||||
attrstr (str): String to lookup (ex: 'foo.bar.baz.value')
|
||||
default (any): Default value to return if not found.
|
||||
delim (str): Delimiter separating keys in attrstr.
|
||||
"""
|
||||
try:
|
||||
parts = attrstr.split(delim, 1)
|
||||
attr = parts[0]
|
||||
attrstr = parts[1] if len(parts) == 2 else None
|
||||
if isinstance(obj, dict):
|
||||
value = obj[attr]
|
||||
elif isinstance(obj, list):
|
||||
value = obj[int(attr)]
|
||||
elif isinstance(obj, tuple):
|
||||
value = obj[int(attr)]
|
||||
elif isinstance(obj, object):
|
||||
value = getattr(obj, attr)
|
||||
if attrstr:
|
||||
return rget(value, attrstr, default, delim)
|
||||
return value
|
||||
except: # noqa: E722
|
||||
return default
|
||||
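An illustrative lookup on a nested structure (list indexes are given as numeric keys):

```
from plexapi.utils import rget

data = {'media': [{'parts': [{'file': '/movies/cars.mkv'}]}]}
rget(data, 'media.0.parts.0.file')            # -> '/movies/cars.mkv'
rget(data, 'media.0.missing', default='n/a')  # -> 'n/a'
```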
|
||||
|
||||
def searchType(libtype):
|
||||
""" Returns the integer value of the library string type.
|
||||
|
||||
Parameters:
|
||||
libtype (str): LibType to lookup (movie, show, season, episode, artist, album, track)
|
||||
|
||||
Raises:
|
||||
NotFound: Unknown libtype
|
||||
"""
|
||||
libtype = compat.ustr(libtype)
|
||||
if libtype in [compat.ustr(v) for v in SEARCHTYPES.values()]:
|
||||
return libtype
|
||||
if SEARCHTYPES.get(libtype) is not None:
|
||||
return SEARCHTYPES[libtype]
|
||||
raise NotFound('Unknown libtype: %s' % libtype)
|
||||
|
||||
|
||||
def threaded(callback, listargs):
|
||||
""" Returns the result of <callback> for each set of \*args in listargs. Each call
|
||||
to <callback> is made concurrently in its own separate thread.
|
||||
|
||||
Parameters:
|
||||
callback (func): Callback function to apply to each set of \*args.
|
||||
listargs (list): List of lists; \*args to pass each thread.
|
||||
"""
|
||||
threads, results = [], []
|
||||
for args in listargs:
|
||||
args += [results, len(results)]
|
||||
results.append(None)
|
||||
threads.append(Thread(target=callback, args=args))
|
||||
threads[-1].setDaemon(True)
|
||||
threads[-1].start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
return results
|
||||
|
||||
|
||||
def toDatetime(value, format=None):
|
||||
""" Returns a datetime object from the specified value.
|
||||
|
||||
Parameters:
|
||||
value (str): value to return as a datetime
|
||||
format (str): Format to pass strftime (optional; if value is a str).
|
||||
"""
|
||||
if value and value is not None:
|
||||
if format:
|
||||
value = datetime.strptime(value, format)
|
||||
else:
|
||||
value = datetime.fromtimestamp(int(value))
|
||||
return value
|
||||
|
||||
|
||||
def toList(value, itemcast=None, delim=','):
|
||||
""" Returns a list of strings from the specified value.
|
||||
|
||||
Parameters:
|
||||
value (str): comma delimited string to convert to list.
|
||||
itemcast (func): Function to cast each list item to (default str).
|
||||
delim (str): string delimiter (optional; default ',').
|
||||
"""
|
||||
value = value or ''
|
||||
itemcast = itemcast or str
|
||||
return [itemcast(item) for item in value.split(delim) if item != '']
|
||||
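For example:

```
from plexapi.utils import toDatetime, toList

toList('a,b,c')                       # -> ['a', 'b', 'c']
toList('1,2,3', itemcast=int)         # -> [1, 2, 3]
toDatetime('2017-11-06', '%Y-%m-%d')  # parsed with strptime
toDatetime('1510000000')              # unix timestamp -> datetime
```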
|
||||
|
||||
def downloadSessionImages(server, filename=None, height=150, width=150,
|
||||
opacity=100, saturation=100): # pragma: no cover
|
||||
""" Helper to download a bif image or thumb.url from plex.server.sessions.
|
||||
|
||||
Parameters:
|
||||
filename (str): Defaults to None.
|
||||
height (int): Height of the image.
|
||||
width (int): width of the image.
|
||||
opacity (int): Opacity of the resulting image (possibly deprecated).
|
||||
saturation (int): Saturation of the resulting image.
|
||||
|
||||
Returns:
|
||||
{'hellowlol': {'filepath': '<filepath>', 'url': 'http://<url>'},
|
||||
{'<username>': {filepath, url}}, ...
|
||||
"""
|
||||
info = {}
|
||||
for media in server.sessions():
|
||||
url = None
|
||||
for part in media.iterParts():
|
||||
if media.thumb:
|
||||
url = media.thumb
|
||||
if part.indexes: # always use bif images if available.
|
||||
url = '/library/parts/%s/indexes/%s/%s' % (part.id, part.indexes.lower(), media.viewOffset)
|
||||
if url:
|
||||
if filename is None:
|
||||
prettyname = media._prettyfilename()
|
||||
filename = 'session_transcode_%s_%s_%s' % (media.usernames[0], prettyname, int(time.time()))
|
||||
url = server.transcodeImage(url, height, width, opacity, saturation)
|
||||
filepath = download(url, filename=filename)
|
||||
info[media.usernames[0]] = {'filepath': filepath, 'url': url}
|
||||
return info
|
||||
|
||||
|
||||
def download(url, token, filename=None, savepath=None, session=None, chunksize=4024,
|
||||
unpack=False, mocked=False, showstatus=False):
|
||||
""" Helper to download a thumb, videofile or other media item. Returns the local
|
||||
path to the downloaded file.
|
||||
|
||||
Parameters:
|
||||
url (str): URL where the content can be reached.
|
||||
token (str): Plex auth token to include in headers.
|
||||
filename (str): Filename of the downloaded file, default None.
|
||||
savepath (str): Defaults to current working dir.
|
||||
chunksize (int): Chunk size to read/write at a time.
|
||||
mocked (bool): Helper to do everything except write the file.
|
||||
unpack (bool): Unpack the zip file.
|
||||
showstatus (bool): Display a progress bar.
|
||||
|
||||
Example:
|
||||
>>> download(a_episode.getStreamURL(), a_episode.location)
|
||||
/path/to/file
|
||||
"""
|
||||
|
||||
from plexapi import log
|
||||
# fetch the data to be saved
|
||||
session = session or requests.Session()
|
||||
headers = {'X-Plex-Token': token}
|
||||
response = session.get(url, headers=headers, stream=True)
|
||||
# make sure the savepath directory exists
|
||||
savepath = savepath or os.getcwd()
|
||||
compat.makedirs(savepath, exist_ok=True)
|
||||
|
||||
# try getting filename from header if not specified in arguments (used for logs, db)
|
||||
if not filename and response.headers.get('Content-Disposition'):
|
||||
filename = re.findall(r'filename=\"(.+)\"', response.headers.get('Content-Disposition'))
|
||||
filename = filename[0] if filename[0] else None
|
||||
|
||||
filename = os.path.basename(filename)
|
||||
fullpath = os.path.join(savepath, filename)
|
||||
# append file.ext from content-type if not already there
|
||||
extension = os.path.splitext(fullpath)[-1]
|
||||
if not extension:
|
||||
contenttype = response.headers.get('content-type')
|
||||
if contenttype and 'image' in contenttype:
|
||||
fullpath += '.%s' % contenttype.split('/')[1]
|
||||
|
||||
# check this is a mocked download (testing)
|
||||
if mocked:
|
||||
log.debug('Mocked download %s', fullpath)
|
||||
return fullpath
|
||||
|
||||
# save the file to disk
|
||||
log.info('Downloading: %s', fullpath)
|
||||
if showstatus: # pragma: no cover
|
||||
total = int(response.headers.get('content-length', 0))
|
||||
bar = tqdm(unit='B', unit_scale=True, total=total, desc=filename)
|
||||
|
||||
with open(fullpath, 'wb') as handle:
|
||||
for chunk in response.iter_content(chunk_size=chunksize):
|
||||
handle.write(chunk)
|
||||
if showstatus:
|
||||
bar.update(len(chunk))
|
||||
|
||||
if showstatus: # pragma: no cover
|
||||
bar.close()
|
||||
# check we want to unzip the contents
|
||||
if fullpath.endswith('zip') and unpack:
|
||||
with zipfile.ZipFile(fullpath, 'r') as handle:
|
||||
handle.extractall(savepath)
|
||||
|
||||
return fullpath
|
||||
|
||||
|
||||
def tag_helper(tag, items, locked=True, remove=False):
|
||||
""" Simple tag helper for editing a object. """
|
||||
if not isinstance(items, list):
|
||||
items = [items]
|
||||
data = {}
|
||||
if not remove:
|
||||
for i, item in enumerate(items):
|
||||
tagname = '%s[%s].tag.tag' % (tag, i)
|
||||
data[tagname] = item
|
||||
if remove:
|
||||
tagname = '%s[].tag.tag-' % tag
|
||||
data[tagname] = ','.join(items)
|
||||
data['%s.locked' % tag] = 1 if locked else 0
|
||||
return data
|
||||
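The generated dict looks like this:

```
from plexapi.utils import tag_helper

tag_helper('genre', ['Action', 'Comedy'])
# -> {'genre[0].tag.tag': 'Action', 'genre[1].tag.tag': 'Comedy', 'genre.locked': 1}
tag_helper('genre', 'Action', remove=True)
# -> {'genre[].tag.tag-': 'Action', 'genre.locked': 1}
```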
|
||||
|
||||
def getMyPlexAccount(opts=None): # pragma: no cover
|
||||
""" Helper function tries to get a MyPlex Account instance by checking
|
||||
the following locations for a username and password. This is
|
||||
useful to create user-friendly command line tools.
|
||||
1. command-line options (opts).
|
||||
2. environment variables and config.ini
|
||||
3. Prompt on the command line.
|
||||
"""
|
||||
from plexapi import CONFIG
|
||||
from plexapi.myplex import MyPlexAccount
|
||||
# 1. Check command-line options
|
||||
if opts and opts.username and opts.password:
|
||||
print('Authenticating with Plex.tv as %s..' % opts.username)
|
||||
return MyPlexAccount(opts.username, opts.password)
|
||||
# 2. Check Plexconfig (environment variables and config.ini)
|
||||
config_username = CONFIG.get('auth.myplex_username')
|
||||
config_password = CONFIG.get('auth.myplex_password')
|
||||
if config_username and config_password:
|
||||
print('Authenticating with Plex.tv as %s..' % config_username)
|
||||
return MyPlexAccount(config_username, config_password)
|
||||
# 3. Prompt for username and password on the command line
|
||||
username = input('What is your plex.tv username: ')
|
||||
password = getpass('What is your plex.tv password: ')
|
||||
print('Authenticating with Plex.tv as %s..' % username)
|
||||
return MyPlexAccount(username, password)
|
||||
|
||||
|
||||
def choose(msg, items, attr): # pragma: no cover
|
||||
""" Command line helper to display a list of choices, asking the
|
||||
user to choose one of the options.
|
||||
"""
|
||||
# Return the first item if there is only one choice
|
||||
if len(items) == 1:
|
||||
return items[0]
|
||||
# Print all choices to the command line
|
||||
print()
|
||||
for index, i in enumerate(items):
|
||||
name = attr(i) if callable(attr) else getattr(i, attr)
|
||||
print(' %s: %s' % (index, name))
|
||||
print()
|
||||
# Request choice from the user
|
||||
while True:
|
||||
try:
|
||||
inp = input('%s: ' % msg)
|
||||
if any(s in inp for s in (':', '::', '-')):
|
||||
idx = slice(*map(lambda x: int(x.strip()) if x.strip() else None, inp.split(':')))
|
||||
return items[idx]
|
||||
else:
|
||||
return items[int(inp)]
|
||||
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
560 lib/plexapi/video.py (new file)
@@ -0,0 +1,560 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from plexapi import media, utils
|
||||
from plexapi.exceptions import BadRequest, NotFound
|
||||
from plexapi.base import Playable, PlexPartialObject
|
||||
|
||||
|
||||
class Video(PlexPartialObject):
|
||||
""" Base class for all video objects including :class:`~plexapi.video.Movie`,
|
||||
:class:`~plexapi.video.Show`, :class:`~plexapi.video.Season`,
|
||||
:class:`~plexapi.video.Episode`.
|
||||
|
||||
Attributes:
|
||||
addedAt (datetime): Datetime this item was added to the library.
|
||||
key (str): API URL (/library/metadata/<ratingkey>).
|
||||
lastViewedAt (datetime): Datetime item was last accessed.
|
||||
librarySectionID (int): :class:`~plexapi.library.LibrarySection` ID.
|
||||
listType (str): Hardcoded as 'video' (useful for search filters).
|
||||
ratingKey (int): Unique key identifying this item.
|
||||
summary (str): Summary of the movie, show, season or episode.
|
||||
thumb (str): URL to thumbnail image.
|
||||
title (str): Title of the movie, show, season or episode.
|
||||
titleSort (str): Title to use when sorting (defaults to title).
|
||||
type (str): 'movie', 'show', 'season', or 'episode'.
|
||||
updatedAt (datatime): Datetime this item was updated.
|
||||
viewCount (int): Count of times this item was accessed.
|
||||
"""
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
self._data = data
|
||||
self.listType = 'video'
|
||||
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
|
||||
self.key = data.attrib.get('key', '')
|
||||
self.lastViewedAt = utils.toDatetime(data.attrib.get('lastViewedAt'))
|
||||
self.librarySectionID = data.attrib.get('librarySectionID')
|
||||
self.ratingKey = utils.cast(int, data.attrib.get('ratingKey'))
|
||||
self.summary = data.attrib.get('summary')
|
||||
self.thumb = data.attrib.get('thumb')
|
||||
self.title = data.attrib.get('title')
|
||||
self.titleSort = data.attrib.get('titleSort', self.title)
|
||||
self.type = data.attrib.get('type')
|
||||
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
|
||||
self.viewCount = utils.cast(int, data.attrib.get('viewCount', 0))
|
||||
|
||||
@property
|
||||
def isWatched(self):
|
||||
""" Returns True if this video is watched. """
|
||||
return bool(self.viewCount > 0) if self.viewCount else False
|
||||
|
||||
@property
|
||||
def thumbUrl(self):
|
||||
""" Return the first first thumbnail url starting on
|
||||
the most specific thumbnail for that item.
|
||||
"""
|
||||
thumb = self.firstAttr('thumb', 'parentThumb', 'granparentThumb')
|
||||
return self._server.url(thumb, includeToken=True) if thumb else None
|
||||
|
||||
@property
|
||||
def artUrl(self):
|
||||
""" Return the first first art url starting on the most specific for that item."""
|
||||
art = self.firstAttr('art', 'grandparentArt')
|
||||
return self._server.url(art, includeToken=True) if art else None
|
||||
|
||||
def url(self, part):
|
||||
""" Returns the full url for something. Typically used for getting a specific image. """
|
||||
return self._server.url(part, includeToken=True) if part else None
|
||||
|
||||
def markWatched(self):
|
||||
""" Mark video as watched. """
|
||||
key = '/:/scrobble?key=%s&identifier=com.plexapp.plugins.library' % self.ratingKey
|
||||
self._server.query(key)
|
||||
self.reload()
|
||||
|
||||
def markUnwatched(self):
|
||||
""" Mark video unwatched. """
|
||||
key = '/:/unscrobble?key=%s&identifier=com.plexapp.plugins.library' % self.ratingKey
|
||||
self._server.query(key)
|
||||
self.reload()
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Movie(Playable, Video):
|
||||
""" Represents a single Movie.
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Video'
|
||||
TYPE (str): 'movie'
|
||||
art (str): Key to movie artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
audienceRating (float): Audience rating (usually from Rotten Tomatoes).
|
||||
audienceRatingImage (str): Key to audience rating image (rottentomatoes://image.rating.spilled)
|
||||
chapterSource (str): Chapter source (agent; media; mixed).
|
||||
contentRating (str): Content rating (PG-13; NR; TV-G).
|
||||
duration (int): Duration of movie in milliseconds.
|
||||
guid: Plex GUID (com.plexapp.agents.imdb://tt4302938?lang=en).
|
||||
originalTitle (str): Original title, often the foreign title (転々; 엽기적인 그녀).
|
||||
originallyAvailableAt (datetime): Datetime movie was released.
|
||||
primaryExtraKey (str): Primary extra key (/library/metadata/66351).
|
||||
rating (float): Movie rating (7.9; 9.8; 8.1).
|
||||
ratingImage (str): Key to rating image (rottentomatoes://image.rating.rotten).
|
||||
studio (str): Studio that created movie (Di Bonaventura Pictures; 21 Laps Entertainment).
|
||||
tagline (str): Movie tag line (Back 2 Work; Who says men can't change?).
|
||||
userRating (float): User rating (2.0; 8.0).
|
||||
viewOffset (int): View offset in milliseconds.
|
||||
year (int): Year movie was released.
|
||||
collections (List<:class:`~plexapi.media.Collection`>): List of collections this media belongs to.
|
||||
countries (List<:class:`~plexapi.media.Country`>): List of countries objects.
|
||||
directors (List<:class:`~plexapi.media.Director`>): List of director objects.
|
||||
fields (List<:class:`~plexapi.media.Field`>): List of field objects.
|
||||
genres (List<:class:`~plexapi.media.Genre`>): List of genre objects.
|
||||
media (List<:class:`~plexapi.media.Media`>): List of media objects.
|
||||
producers (List<:class:`~plexapi.media.Producer`>): List of producers objects.
|
||||
roles (List<:class:`~plexapi.media.Role`>): List of role objects.
|
||||
writers (List<:class:`~plexapi.media.Writer`>): List of writers objects.
|
||||
chapters (List<:class:`~plexapi.media.Chapter`>): List of Chapter objects.
|
||||
similar (List<:class:`~plexapi.media.Similar`>): List of Similar objects.
|
||||
"""
|
||||
TAG = 'Video'
|
||||
TYPE = 'movie'
|
||||
_include = ('?checkFiles=1&includeExtras=1&includeRelated=1'
|
||||
'&includeOnDeck=1&includeChapters=1&includePopularLeaves=1'
|
||||
'&includeConcerts=1&includePreferences=1')
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Video._loadData(self, data)
|
||||
Playable._loadData(self, data)
|
||||
|
||||
self._details_key = self.key + self._include
|
||||
self.art = data.attrib.get('art')
|
||||
self.audienceRating = utils.cast(float, data.attrib.get('audienceRating'))
|
||||
self.audienceRatingImage = data.attrib.get('audienceRatingImage')
|
||||
self.chapterSource = data.attrib.get('chapterSource')
|
||||
self.contentRating = data.attrib.get('contentRating')
|
||||
self.duration = utils.cast(int, data.attrib.get('duration'))
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.originalTitle = data.attrib.get('originalTitle')
|
||||
self.originallyAvailableAt = utils.toDatetime(
|
||||
data.attrib.get('originallyAvailableAt'), '%Y-%m-%d')
|
||||
self.primaryExtraKey = data.attrib.get('primaryExtraKey')
|
||||
self.rating = utils.cast(float, data.attrib.get('rating'))
|
||||
self.ratingImage = data.attrib.get('ratingImage')
|
||||
self.studio = data.attrib.get('studio')
|
||||
self.tagline = data.attrib.get('tagline')
|
||||
self.userRating = utils.cast(float, data.attrib.get('userRating'))
|
||||
self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0))
|
||||
self.year = utils.cast(int, data.attrib.get('year'))
|
||||
self.collections = self.findItems(data, media.Collection)
|
||||
self.countries = self.findItems(data, media.Country)
|
||||
self.directors = self.findItems(data, media.Director)
|
||||
self.fields = self.findItems(data, media.Field)
|
||||
self.genres = self.findItems(data, media.Genre)
|
||||
self.media = self.findItems(data, media.Media)
|
||||
self.producers = self.findItems(data, media.Producer)
|
||||
self.roles = self.findItems(data, media.Role)
|
||||
self.writers = self.findItems(data, media.Writer)
|
||||
self.labels = self.findItems(data, media.Label)
|
||||
self.chapters = self.findItems(data, media.Chapter)
|
||||
self.similar = self.findItems(data, media.Similar)
|
||||
|
||||
@property
|
||||
def actors(self):
|
||||
""" Alias to self.roles. """
|
||||
return self.roles
|
||||
|
||||
@property
|
||||
def locations(self):
|
||||
""" This does not exist in plex xml response but is added to have a common
|
||||
interface to get the location of the Movie/Show/Episode
|
||||
"""
|
||||
return [part.file for part in self.iterParts() if part]
|
||||
|
||||
def subtitleStreams(self):
|
||||
""" Returns a list of :class:`~plexapi.media.SubtitleStream` objects for all MediaParts. """
|
||||
streams = []
|
||||
for elem in self.media:
|
||||
for part in elem.parts:
|
||||
streams += part.subtitleStreams()
|
||||
return streams
|
||||
|
||||
def _prettyfilename(self):
|
||||
# This is just for compat.
|
||||
return self.title
|
||||
|
||||
def download(self, savepath=None, keep_orginal_name=False, **kwargs):
|
||||
""" Download video files to specified directory.
|
||||
|
||||
Parameters:
|
||||
savepath (str): Defaults to current working dir.
|
||||
keep_orginal_name (bool): True to keep the original file name otherwise
|
||||
a friendlier one is generated.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL()`.
|
||||
"""
|
||||
filepaths = []
|
||||
locations = [i for i in self.iterParts() if i]
|
||||
for location in locations:
|
||||
name = location.file
|
||||
if not keep_orginal_name:
|
||||
title = self.title.replace(' ', '.')
|
||||
name = '%s.%s' % (title, location.container)
|
||||
if kwargs:
|
||||
url = self.getStreamURL(**kwargs)
|
||||
else:
|
||||
url = self._server.url('%s?download=1' % location.key)
|
||||
filepath = utils.download(url, self._server._token, filename=name,
|
||||
savepath=savepath, session=self._server._session)
|
||||
if filepath:
|
||||
filepaths.append(filepath)
|
||||
return filepaths
|
||||
|
||||
|
||||
@utils.registerPlexObject
|
||||
class Show(Video):
|
||||
""" Represents a single Show (including all seasons and episodes).
|
||||
|
||||
Attributes:
|
||||
TAG (str): 'Directory'
|
||||
TYPE (str): 'show'
|
||||
art (str): Key to show artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
banner (str): Key to banner artwork (/library/metadata/<ratingkey>/art/<artid>)
|
||||
childCount (int): Unknown.
|
||||
contentRating (str): Content rating (PG-13; NR; TV-G).
|
||||
duration (int): Duration of show in milliseconds.
|
||||
guid (str): Plex GUID (com.plexapp.agents.imdb://tt4302938?lang=en).
|
||||
index (int): Plex index (?)
|
||||
leafCount (int): Unknown.
|
||||
locations (list<str>): List of locations paths.
|
||||
originallyAvailableAt (datetime): Datetime show was released.
|
||||
rating (float): Show rating (7.9; 9.8; 8.1).
|
||||
studio (str): Studio that created show (Di Bonaventura Pictures; 21 Laps Entertainment).
|
||||
theme (str): Key to theme resource (/library/metadata/<ratingkey>/theme/<themeid>)
|
||||
viewedLeafCount (int): Unknown.
|
||||
year (int): Year the show was released.
|
||||
genres (List<:class:`~plexapi.media.Genre`>): List of genre objects.
|
||||
roles (List<:class:`~plexapi.media.Role`>): List of role objects.
|
||||
similar (List<:class:`~plexapi.media.Similar`>): List of Similar objects.
|
||||
"""
|
||||
TAG = 'Directory'
|
||||
TYPE = 'show'
|
||||
|
||||
def __iter__(self):
|
||||
for season in self.seasons():
|
||||
yield season
|
||||
|
||||
def _loadData(self, data):
|
||||
""" Load attribute values from Plex XML response. """
|
||||
Video._loadData(self, data)
|
||||
# fix key if loaded from search
|
||||
self.key = self.key.replace('/children', '')
|
||||
self.art = data.attrib.get('art')
|
||||
self.banner = data.attrib.get('banner')
|
||||
self.childCount = utils.cast(int, data.attrib.get('childCount'))
|
||||
self.contentRating = data.attrib.get('contentRating')
|
||||
self.duration = utils.cast(int, data.attrib.get('duration'))
|
||||
self.guid = data.attrib.get('guid')
|
||||
self.index = data.attrib.get('index')
|
||||
self.leafCount = utils.cast(int, data.attrib.get('leafCount'))
|
||||
self.locations = self.listAttrs(data, 'path', etag='Location')
|
||||
self.originallyAvailableAt = utils.toDatetime(
|
||||
data.attrib.get('originallyAvailableAt'), '%Y-%m-%d')
|
||||
self.rating = utils.cast(float, data.attrib.get('rating'))
|
||||
self.studio = data.attrib.get('studio')
|
||||
self.theme = data.attrib.get('theme')
|
||||
self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))
|
||||
self.year = utils.cast(int, data.attrib.get('year'))
|
||||
self.genres = self.findItems(data, media.Genre)
|
||||
self.roles = self.findItems(data, media.Role)
|
||||
self.labels = self.findItems(data, media.Label)
|
||||
self.similar = self.findItems(data, media.Similar)
|
||||
|
||||
@property
|
||||
def actors(self):
|
||||
""" Alias to self.roles. """
|
||||
return self.roles
|
||||
|
||||
@property
|
||||
def isWatched(self):
|
||||
""" Returns True if this show is fully watched. """
|
||||
return bool(self.viewedLeafCount == self.leafCount)
|
||||
|
||||
def seasons(self, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.video.Season` objects. """
|
||||
key = '/library/metadata/%s/children' % self.ratingKey
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def season(self, title=None):
|
||||
""" Returns the season with the specified title or number.
|
||||
|
||||
Parameters:
|
||||
title (str or int): Title or Number of the season to return.
|
||||
"""
|
||||
if isinstance(title, int):
|
||||
title = 'Season %s' % title
|
||||
key = '/library/metadata/%s/children' % self.ratingKey
|
||||
return self.fetchItem(key, etag='Directory', title__iexact=title)
|
||||
|
||||
def episodes(self, **kwargs):
|
||||
""" Returns a list of :class:`~plexapi.video.Episode` objects. """
|
||||
key = '/library/metadata/%s/allLeaves' % self.ratingKey
|
||||
return self.fetchItems(key, **kwargs)
|
||||
|
||||
def episode(self, title=None, season=None, episode=None):
|
||||
""" Find a episode using a title or season and episode.
|
||||
|
||||
Parameters:
|
||||
title (str): Title of the episode to return
|
||||
season (int): Season number (default:None; required if title not specified).
|
||||
episode (int): Episode number (default:None; required if title not specified).
|
||||
|
||||
Raises:
|
||||
BadRequest: If season and episode is missing.
|
||||
NotFound: If the episode is missing.
|
||||
"""
|
||||
if title:
|
||||
key = '/library/metadata/%s/allLeaves' % self.ratingKey
|
||||
return self.fetchItem(key, title__iexact=title)
|
||||
elif season and episode:
|
||||
results = [i for i in self.episodes() if i.seasonNumber == season and i.index == episode]
|
||||
if results:
|
||||
return results[0]
|
||||
raise NotFound("Couldn't find %s S%s E%s" % (self.title, season, episode))
|
||||
raise BadRequest('Missing argument: title or season and episode are required')
|
||||
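A hedged lookup sketch (the show and episode titles are placeholders; `plex` is a connected PlexServer instance):

```
show = plex.search('The 100', mediatype='show')[0]
show.episode(season=1, episode=3)   # by season and episode number
show.episode('Earth Skills')        # or by title
```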
|
||||
def watched(self):
|
||||
""" Returns list of watched :class:`~plexapi.video.Episode` objects. """
|
||||
return self.episodes(viewCount__gt=0)
|
||||
|
||||
def unwatched(self):
|
||||
""" Returns list of unwatched :class:`~plexapi.video.Episode` objects. """
|
||||
return self.episodes(viewCount=0)
|
||||
|
||||
def get(self, title=None, season=None, episode=None):
|
||||
""" Alias to :func:`~plexapi.video.Show.episode()`. """
|
||||
return self.episode(title, season, episode)
|
||||
|
||||
def download(self, savepath=None, keep_orginal_name=False, **kwargs):
|
||||
""" Download video files to specified directory.
|
||||
|
||||
Parameters:
|
||||
savepath (str): Defaults to current working dir.
|
||||
keep_orginal_name (bool): True to keep the original file name otherwise
|
||||
a friendlier one is generated.
|
||||
**kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL()`.
|
||||
"""
|
||||
filepaths = []
|
||||
for episode in self.episodes():
|
||||
filepaths += episode.download(savepath, keep_orginal_name, **kwargs)
|
||||
return filepaths
|
||||
|
||||
|
||||
@utils.registerPlexObject
class Season(Video):
    """ Represents a single Show Season (including all episodes).

        Attributes:
            TAG (str): 'Directory'
            TYPE (str): 'season'
            leafCount (int): Number of episodes in season.
            index (int): Season number.
            parentKey (str): Key to this seasons :class:`~plexapi.video.Show`.
            parentRatingKey (int): Unique key for this seasons :class:`~plexapi.video.Show`.
            parentTitle (str): Title of this seasons :class:`~plexapi.video.Show`.
            viewedLeafCount (int): Number of watched episodes in season.
    """
    TAG = 'Directory'
    TYPE = 'season'

    def __iter__(self):
        for episode in self.episodes():
            yield episode

    def _loadData(self, data):
        """ Load attribute values from Plex XML response. """
        Video._loadData(self, data)
        # fix key if loaded from search
        self.key = self.key.replace('/children', '')
        self.leafCount = utils.cast(int, data.attrib.get('leafCount'))
        self.index = utils.cast(int, data.attrib.get('index'))
        self.parentKey = data.attrib.get('parentKey')
        self.parentRatingKey = utils.cast(int, data.attrib.get('parentRatingKey'))
        self.parentTitle = data.attrib.get('parentTitle')
        self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))

    def __repr__(self):
        return '<%s>' % ':'.join([p for p in [
            self.__class__.__name__,
            self.key.replace('/library/metadata/', '').replace('/children', ''),
            '%s-s%s' % (self.parentTitle.replace(' ', '-')[:20], self.seasonNumber),
        ] if p])

    @property
    def isWatched(self):
        """ Returns True if this season is fully watched. """
        return bool(self.viewedLeafCount == self.leafCount)

    @property
    def seasonNumber(self):
        """ Returns season number. """
        return self.index

    def episodes(self, **kwargs):
        """ Returns a list of :class:`~plexapi.video.Episode` objects. """
        key = '/library/metadata/%s/children' % self.ratingKey
        return self.fetchItems(key, **kwargs)

    def episode(self, title=None, episode=None):
        """ Returns the episode with the given title or number.

            Parameters:
                title (str): Title of the episode to return.
                episode (int): Episode number (default:None; required if title not specified).
        """
        if not title and not episode:
            raise BadRequest('Missing argument, you need to use title or episode.')
        key = '/library/metadata/%s/children' % self.ratingKey
        if title:
            return self.fetchItem(key, title=title)
        return self.fetchItem(key, seasonNumber=self.index, index=episode)

    def get(self, title=None, episode=None):
        """ Alias to :func:`~plexapi.video.Season.episode()`. """
        return self.episode(title, episode)

    def show(self):
        """ Return this seasons :func:`~plexapi.video.Show`.. """
        return self.fetchItem(self.parentKey)

    def watched(self):
        """ Returns list of watched :class:`~plexapi.video.Episode` objects. """
        return self.episodes(watched=True)

    def unwatched(self):
        """ Returns list of unwatched :class:`~plexapi.video.Episode` objects. """
        return self.episodes(watched=False)

    def download(self, savepath=None, keep_orginal_name=False, **kwargs):
        """ Download video files to specified directory.

            Parameters:
                savepath (str): Defaults to current working dir.
                keep_orginal_name (bool): True to keep the original file name otherwise
                    a friendlier is generated.
                **kwargs: Additional options passed into :func:`~plexapi.base.PlexObject.getStreamURL()`.
        """
        filepaths = []
        for episode in self.episodes():
            filepaths += episode.download(savepath, keep_orginal_name, **kwargs)
        return filepaths

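A Season from that show works the same way; a hedged continuation of the sketch above (titles and numbers are placeholders):

```python
season = show.season(1)                  # accepts an int or a title like 'Season 1'
print(season.seasonNumber, season.isWatched)
opener = season.episode(episode=1)       # or season.episode(title='Pilot')
remaining = season.unwatched()           # filtered via episodes(watched=False)
```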
@utils.registerPlexObject
class Episode(Playable, Video):
    """ Represents a single Shows Episode.

        Attributes:
            TAG (str): 'Video'
            TYPE (str): 'episode'
            art (str): Key to episode artwork (/library/metadata/<ratingkey>/art/<artid>)
            chapterSource (str): Unknown (media).
            contentRating (str) Content rating (PG-13; NR; TV-G).
            duration (int): Duration of episode in milliseconds.
            grandparentArt (str): Key to this episodes :class:`~plexapi.video.Show` artwork.
            grandparentKey (str): Key to this episodes :class:`~plexapi.video.Show`.
            grandparentRatingKey (str): Unique key for this episodes :class:`~plexapi.video.Show`.
            grandparentTheme (str): Key to this episodes :class:`~plexapi.video.Show` theme.
            grandparentThumb (str): Key to this episodes :class:`~plexapi.video.Show` thumb.
            grandparentTitle (str): Title of this episodes :class:`~plexapi.video.Show`.
            guid (str): Plex GUID (com.plexapp.agents.imdb://tt4302938?lang=en).
            index (int): Episode number.
            originallyAvailableAt (datetime): Datetime episode was released.
            parentIndex (str): Season number of episode.
            parentKey (str): Key to this episodes :class:`~plexapi.video.Season`.
            parentRatingKey (int): Unique key for this episodes :class:`~plexapi.video.Season`.
            parentThumb (str): Key to this episodes thumbnail.
            parentTitle (str): Name of this episode's season
            title (str): Name of this Episode
            rating (float): Movie rating (7.9; 9.8; 8.1).
            viewOffset (int): View offset in milliseconds.
            year (int): Year episode was released.
            directors (List<:class:`~plexapi.media.Director`>): List of director objects.
            media (List<:class:`~plexapi.media.Media`>): List of media objects.
            writers (List<:class:`~plexapi.media.Writer`>): List of writers objects.
    """
    TAG = 'Video'
    TYPE = 'episode'
    _include = ('?checkFiles=1&includeExtras=1&includeRelated=1'
                '&includeOnDeck=1&includeChapters=1&includePopularLeaves=1'
                '&includeConcerts=1&includePreferences=1')

    def _loadData(self, data):
        """ Load attribute values from Plex XML response. """
        Video._loadData(self, data)
        Playable._loadData(self, data)
        self._details_key = self.key + self._include
        self._seasonNumber = None # cached season number
        self.art = data.attrib.get('art')
        self.chapterSource = data.attrib.get('chapterSource')
        self.contentRating = data.attrib.get('contentRating')
        self.duration = utils.cast(int, data.attrib.get('duration'))
        self.grandparentArt = data.attrib.get('grandparentArt')
        self.grandparentKey = data.attrib.get('grandparentKey')
        self.grandparentRatingKey = utils.cast(int, data.attrib.get('grandparentRatingKey'))
        self.grandparentTheme = data.attrib.get('grandparentTheme')
        self.grandparentThumb = data.attrib.get('grandparentThumb')
        self.grandparentTitle = data.attrib.get('grandparentTitle')
        self.guid = data.attrib.get('guid')
        self.index = utils.cast(int, data.attrib.get('index'))
        self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d')
        self.parentIndex = data.attrib.get('parentIndex')
        self.parentKey = data.attrib.get('parentKey')
        self.parentRatingKey = utils.cast(int, data.attrib.get('parentRatingKey'))
        self.parentThumb = data.attrib.get('parentThumb')
        self.parentTitle = data.attrib.get('parentTitle')
        self.title = data.attrib.get('title')
        self.rating = utils.cast(float, data.attrib.get('rating'))
        self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0))
        self.year = utils.cast(int, data.attrib.get('year'))
        self.directors = self.findItems(data, media.Director)
        self.media = self.findItems(data, media.Media)
        self.writers = self.findItems(data, media.Writer)
        self.labels = self.findItems(data, media.Label)
        self.collections = self.findItems(data, media.Collection)
        self.chapters = self.findItems(data, media.Chapter)

    def __repr__(self):
        return '<%s>' % ':'.join([p for p in [
            self.__class__.__name__,
            self.key.replace('/library/metadata/', '').replace('/children', ''),
            '%s-%s' % (self.grandparentTitle.replace(' ', '-')[:20], self.seasonEpisode),
        ] if p])

    def _prettyfilename(self):
        """ Returns a human friendly filename. """
        return '%s.%s' % (self.grandparentTitle.replace(' ', '.'), self.seasonEpisode)

    @property
    def locations(self):
        """ This does not exist in plex xml response but is added to have a common
            interface to get the location of the Movie/Show
        """
        return [part.file for part in self.iterParts() if part]

    @property
    def seasonNumber(self):
        """ Returns this episodes season number. """
        if self._seasonNumber is None:
            self._seasonNumber = self.parentIndex if self.parentIndex else self.season().seasonNumber
        return utils.cast(int, self._seasonNumber)

    @property
    def seasonEpisode(self):
        """ Returns the s00e00 string containing the season and episode. """
        return 's%se%s' % (str(self.seasonNumber).zfill(2), str(self.index).zfill(2))

    def season(self):
        """" Return this episodes :func:`~plexapi.video.Season`.. """
        return self.fetchItem(self.parentKey)

    def show(self):
        """" Return this episodes :func:`~plexapi.video.Show`.. """
        return self.fetchItem(self.grandparentKey)
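At the Episode level, the cached season number and the s00e00 helper above are what produce the friendly download names; a hedged sketch (numbers are placeholders, and download() is inherited from Playable):

```python
ep = show.episode(season=2, episode=5)   # placeholder numbering
print(ep.seasonNumber)                   # 2, from parentIndex or ep.season().seasonNumber
print(ep.seasonEpisode)                  # 's02e05'
print(ep._prettyfilename())              # 'Some.Show.s02e05'
ep.download(savepath='/tmp')             # keep_orginal_name=False uses the friendly name
```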
@@ -1,59 +0,0 @@
from math import floor

import time
import sys
import threading
import functools


def clamp(value):
    '''
    Clamp integer between 1 and max

    There must be at least 1 method invocation
    made over the time period. Make sure the
    value passed is at least 1 and is not a
    fraction of an invocation.

    :param float value: The number of method invocations.
    :return: Clamped number of invocations.
    :rtype: int
    '''
    return max(1, min(sys.maxsize, floor(value)))


class RateLimitDecorator:
    def __init__(self, period=1, every=1.0):
        self.frequency = abs(every) / float(clamp(period))
        self.last_called = 0.0
        self.lock = threading.RLock()

    def __call__(self, func):
        '''
        Extend the behaviour of the following
        function, forwarding method invocations
        if the time window hes elapsed.

        :param function func: The function to decorate.
        :return: Decorated function.
        :rtype: function
        '''
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            '''Decorator wrapper function'''
            with self.lock:
                elapsed = time.time() - self.last_called
                left_to_wait = self.frequency - elapsed
                if left_to_wait > 0:
                    time.sleep(left_to_wait)
                self.last_called = time.time()
            return func(*args, **kwargs)
        return wrapper


rate_limited = RateLimitDecorator


__all__ = [
    'rate_limited'
]
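The removed decorator above throttles callers by enforcing a minimum gap of every / period seconds between invocations; a small hedged usage sketch:

```python
# With period=2 and every=1.0, frequency is 0.5 s, so calls are spaced >= 0.5 s apart.
@rate_limited(period=2, every=1.0)
def ping(n):
    print('call', n)

for i in range(4):
    ping(i)   # the first call runs immediately; later calls sleep as needed
```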
@@ -1,9 +0,0 @@
class Version(object):
    '''Version of the package'''

    def __setattr__(self, *args):
        raise TypeError('cannot modify immutable instance')
    __delattr__ = __setattr__

    def __init__(self, num):
        super(Version, self).__setattr__('number', num)
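The removed Version helper simply freezes its number attribute after construction, e.g.:

```python
v = Version('2.1.11')
print(v.number)      # '2.1.11'
v.number = '3.0.0'   # raises TypeError: cannot modify immutable instance
```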
421 lib/six.py
@@ -1,6 +1,4 @@
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
# Copyright (c) 2010-2014 Benjamin Peterson
|
||||
# Copyright (c) 2010-2017 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -20,17 +18,24 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.6.1"
|
||||
__version__ = "1.11.0"
|
||||
|
||||
|
||||
# Useful for very coarse version differentiation.
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
PY34 = sys.version_info[0:2] >= (3, 4)
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
@@ -53,6 +58,7 @@ else:
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
@@ -83,14 +89,14 @@ class _LazyDescr(object):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
try:
|
||||
result = self._resolve()
|
||||
except ImportError:
|
||||
# See the nice big comment in MovedModule.__getattr__.
|
||||
raise AttributeError("%s could not be imported " % self.name)
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(obj.__class__, self.name)
|
||||
# This is a bit ugly, but it avoids running this again by
|
||||
# removing this descriptor.
|
||||
delattr(obj.__class__, self.name)
|
||||
except AttributeError:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
@@ -109,22 +115,7 @@ class MovedModule(_LazyDescr):
|
||||
return _import_module(self.mod)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
# It turns out many Python frameworks like to traverse sys.modules and
|
||||
# try to load various attributes. This causes problems if this is a
|
||||
# platform-specific module on the wrong platform, like _winreg on
|
||||
# Unixes. Therefore, we silently pretend unimportable modules do not
|
||||
# have any attributes. See issues #51, #53, #56, and #63 for the full
|
||||
# tales of woe.
|
||||
#
|
||||
# First, if possible, avoid loading the module just to look at __file__,
|
||||
# __name__, or __path__.
|
||||
if (attr in ("__file__", "__name__", "__path__") and
|
||||
self.mod not in sys.modules):
|
||||
raise AttributeError(attr)
|
||||
try:
|
||||
_module = self._resolve()
|
||||
except ImportError:
|
||||
raise AttributeError(attr)
|
||||
_module = self._resolve()
|
||||
value = getattr(_module, attr)
|
||||
setattr(self, attr, value)
|
||||
return value
|
||||
@@ -170,9 +161,75 @@ class MovedAttribute(_LazyDescr):
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):
|
||||
|
||||
"""
|
||||
A meta path importer to import six.moves and its submodules.
|
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible
|
||||
with Python 2.5 and all existing versions of Python3
|
||||
"""
|
||||
|
||||
def __init__(self, six_module_name):
|
||||
self.name = six_module_name
|
||||
self.known_modules = {}
|
||||
|
||||
def _add_module(self, mod, *fullnames):
|
||||
for fullname in fullnames:
|
||||
self.known_modules[self.name + "." + fullname] = mod
|
||||
|
||||
def _get_module(self, fullname):
|
||||
return self.known_modules[self.name + "." + fullname]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname in self.known_modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def __get_module(self, fullname):
|
||||
try:
|
||||
return self.known_modules[fullname]
|
||||
except KeyError:
|
||||
raise ImportError("This loader does not know module " + fullname)
|
||||
|
||||
def load_module(self, fullname):
|
||||
try:
|
||||
# in case of a reload
|
||||
return sys.modules[fullname]
|
||||
except KeyError:
|
||||
pass
|
||||
mod = self.__get_module(fullname)
|
||||
if isinstance(mod, MovedModule):
|
||||
mod = mod._resolve()
|
||||
else:
|
||||
mod.__loader__ = self
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
|
||||
def is_package(self, fullname):
|
||||
"""
|
||||
Return true, if the named module is a package.
|
||||
|
||||
We need this method to get correct spec objects with
|
||||
Python 3.4 (see PEP451)
|
||||
"""
|
||||
return hasattr(self.__get_module(fullname), "__path__")
|
||||
|
||||
def get_code(self, fullname):
|
||||
"""Return None
|
||||
|
||||
Required, if is_package is implemented"""
|
||||
self.__get_module(fullname) # eventually raises ImportError
|
||||
return None
|
||||
get_source = get_code # same as get_code
|
||||
|
||||
_importer = _SixMetaPathImporter(__name__)
|
||||
|
||||
|
||||
class _MovedItems(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects"""
|
||||
__path__ = [] # mark as package
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
@@ -180,28 +237,37 @@ _moved_attributes = [
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("intern", "__builtin__", "sys"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
|
||||
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
|
||||
MovedAttribute("getoutput", "commands", "subprocess"),
|
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||
MovedAttribute("UserList", "UserList", "collections"),
|
||||
MovedAttribute("UserString", "UserString", "collections"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
@@ -233,21 +299,28 @@ _moved_attributes = [
|
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||
MovedModule("xmlrpc_server", "xmlrpclib", "xmlrpc.server"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||
]
|
||||
# Add windows specific modules.
|
||||
if sys.platform == "win32":
|
||||
_moved_attributes += [
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
if isinstance(attr, MovedModule):
|
||||
sys.modules[__name__ + ".moves." + attr.name] = attr
|
||||
_importer._add_module(attr, "moves." + attr.name)
|
||||
del attr
|
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes
|
||||
|
||||
moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
|
||||
moves = _MovedItems(__name__ + ".moves")
|
||||
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||
|
||||
|
||||
@@ -266,8 +339,17 @@ _urllib_parse_moved_attributes = [
|
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
|
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splittag", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splituser", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
|
||||
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
|
||||
]
|
||||
for attr in _urllib_parse_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||
@@ -275,10 +357,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||
"moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||
|
||||
|
||||
@@ -293,10 +377,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||
"moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||
|
||||
|
||||
@@ -334,6 +420,8 @@ _urllib_request_moved_attributes = [
|
||||
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
|
||||
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
|
||||
]
|
||||
for attr in _urllib_request_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||
@@ -341,10 +429,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||
"moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||
|
||||
|
||||
@@ -360,10 +450,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||
"moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||
|
||||
|
||||
@@ -376,22 +468,25 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):
|
||||
|
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||
parse = sys.modules[__name__ + ".moves.urllib_parse"]
|
||||
error = sys.modules[__name__ + ".moves.urllib_error"]
|
||||
request = sys.modules[__name__ + ".moves.urllib_request"]
|
||||
response = sys.modules[__name__ + ".moves.urllib_response"]
|
||||
robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
|
||||
__path__ = [] # mark as package
|
||||
parse = _importer._get_module("moves.urllib_parse")
|
||||
error = _importer._get_module("moves.urllib_error")
|
||||
request = _importer._get_module("moves.urllib_request")
|
||||
response = _importer._get_module("moves.urllib_response")
|
||||
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||
|
||||
def __dir__(self):
|
||||
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||
"moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
@@ -418,11 +513,6 @@ if PY3:
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
_func_globals = "__globals__"
|
||||
|
||||
_iterkeys = "keys"
|
||||
_itervalues = "values"
|
||||
_iteritems = "items"
|
||||
_iterlists = "lists"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
@@ -432,11 +522,6 @@ else:
|
||||
_func_defaults = "func_defaults"
|
||||
_func_globals = "func_globals"
|
||||
|
||||
_iterkeys = "iterkeys"
|
||||
_itervalues = "itervalues"
|
||||
_iteritems = "iteritems"
|
||||
_iterlists = "iterlists"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
@@ -459,6 +544,9 @@ if PY3:
|
||||
|
||||
create_bound_method = types.MethodType
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return func
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
@@ -467,6 +555,9 @@ else:
|
||||
def create_bound_method(func, obj):
|
||||
return types.MethodType(func, obj, obj.__class__)
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return types.MethodType(func, None, cls)
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
@@ -485,69 +576,124 @@ get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
def iterkeys(d, **kw):
|
||||
"""Return an iterator over the keys of a dictionary."""
|
||||
return iter(getattr(d, _iterkeys)(**kw))
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
"""Return an iterator over the values of a dictionary."""
|
||||
return iter(getattr(d, _itervalues)(**kw))
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
"""Return an iterator over the (key, value) pairs of a dictionary."""
|
||||
return iter(getattr(d, _iteritems)(**kw))
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
"""Return an iterator over the (key, [values]) pairs of a dictionary."""
|
||||
return iter(getattr(d, _iterlists)(**kw))
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
|
||||
viewkeys = operator.methodcaller("keys")
|
||||
|
||||
viewvalues = operator.methodcaller("values")
|
||||
|
||||
viewitems = operator.methodcaller("items")
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return d.iterkeys(**kw)
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return d.itervalues(**kw)
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return d.iteritems(**kw)
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return d.iterlists(**kw)
|
||||
|
||||
viewkeys = operator.methodcaller("viewkeys")
|
||||
|
||||
viewvalues = operator.methodcaller("viewvalues")
|
||||
|
||||
viewitems = operator.methodcaller("viewitems")
|
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||
_add_doc(iteritems,
|
||||
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||
_add_doc(iterlists,
|
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
|
||||
def u(s):
|
||||
return s
|
||||
unichr = chr
|
||||
if sys.version_info[1] <= 1:
|
||||
def int2byte(i):
|
||||
return bytes((i,))
|
||||
else:
|
||||
# This is about 2x faster than the implementation above on 3.2+
|
||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||
import struct
|
||||
int2byte = struct.Struct(">B").pack
|
||||
del struct
|
||||
byte2int = operator.itemgetter(0)
|
||||
indexbytes = operator.getitem
|
||||
iterbytes = iter
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
_assertCountEqual = "assertCountEqual"
|
||||
if sys.version_info[1] <= 1:
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
else:
|
||||
_assertRaisesRegex = "assertRaisesRegex"
|
||||
_assertRegex = "assertRegex"
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
# Workaround for standalone backslash
|
||||
|
||||
def u(s):
|
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||
unichr = unichr
|
||||
int2byte = chr
|
||||
|
||||
def byte2int(bs):
|
||||
return ord(bs[0])
|
||||
|
||||
def indexbytes(buf, i):
|
||||
return ord(buf[i])
|
||||
def iterbytes(buf):
|
||||
return (ord(byte) for byte in buf)
|
||||
iterbytes = functools.partial(itertools.imap, ord)
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_assertCountEqual = "assertItemsEqual"
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
def assertCountEqual(self, *args, **kwargs):
|
||||
return getattr(self, _assertCountEqual)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRaisesRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
if PY3:
|
||||
exec_ = getattr(moves.builtins, "exec")
|
||||
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
try:
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
finally:
|
||||
value = None
|
||||
tb = None
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
@@ -562,12 +708,35 @@ else:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
try:
|
||||
raise tp, value, tb
|
||||
finally:
|
||||
tb = None
|
||||
""")
|
||||
|
||||
|
||||
if sys.version_info[:2] == (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
if from_value is None:
|
||||
raise value
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
elif sys.version_info[:2] > (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
else:
|
||||
def raise_from(value, from_value):
|
||||
raise value
|
||||
|
||||
|
||||
print_ = getattr(moves.builtins, "print", None)
|
||||
if print_ is None:
|
||||
def print_(*args, **kwargs):
|
||||
@@ -575,13 +744,14 @@ if print_ is None:
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
# If the file has an encoding, encode unicode with it.
|
||||
if (isinstance(fp, file) and
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
errors = getattr(fp, "errors", None)
|
||||
if errors is None:
|
||||
errors = "strict"
|
||||
@@ -622,25 +792,100 @@ if print_ is None:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
if sys.version_info[:2] < (3, 3):
|
||||
_print = print_
|
||||
|
||||
def print_(*args, **kwargs):
|
||||
fp = kwargs.get("file", sys.stdout)
|
||||
flush = kwargs.pop("flush", False)
|
||||
_print(*args, **kwargs)
|
||||
if flush and fp is not None:
|
||||
fp.flush()
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
if sys.version_info[0:2] < (3, 4):
|
||||
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
|
||||
updated=functools.WRAPPER_UPDATES):
|
||||
def wrapper(f):
|
||||
f = functools.wraps(wrapped, assigned, updated)(f)
|
||||
f.__wrapped__ = wrapped
|
||||
return f
|
||||
return wrapper
|
||||
else:
|
||||
wraps = functools.wraps
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
return meta("NewBase", bases, {})
|
||||
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||
# metaclass for one level of class instantiation that replaces itself with
|
||||
# the actual metaclass.
|
||||
class metaclass(type):
|
||||
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
|
||||
@classmethod
|
||||
def __prepare__(cls, name, this_bases):
|
||||
return meta.__prepare__(name, bases)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
def add_metaclass(metaclass):
|
||||
"""Class decorator for creating a class with a metaclass."""
|
||||
def wrapper(cls):
|
||||
orig_vars = cls.__dict__.copy()
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
slots = orig_vars.get('__slots__')
|
||||
if slots is not None:
|
||||
if isinstance(slots, str):
|
||||
slots = [slots]
|
||||
for slots_var in slots:
|
||||
orig_vars.pop(slots_var)
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
return metaclass(cls.__name__, cls.__bases__, orig_vars)
|
||||
return wrapper
|
||||
|
||||
|
||||
def python_2_unicode_compatible(klass):
|
||||
"""
|
||||
A decorator that defines __unicode__ and __str__ methods under Python 2.
|
||||
Under Python 3 it does nothing.
|
||||
|
||||
To support Python 2 and 3 with a single code base, define a __str__ method
|
||||
returning text and apply this decorator to the class.
|
||||
"""
|
||||
if PY2:
|
||||
if '__str__' not in klass.__dict__:
|
||||
raise ValueError("@python_2_unicode_compatible cannot be applied "
|
||||
"to %s because it doesn't define __str__()." %
|
||||
klass.__name__)
|
||||
klass.__unicode__ = klass.__str__
|
||||
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
|
||||
return klass
|
||||
|
||||
|
||||
# Complete the moves implementation.
|
||||
# This code is at the end of this module to speed up module loading.
|
||||
# Turn this module into a package.
|
||||
__path__ = [] # required for PEP 302 and PEP 451
|
||||
__package__ = __name__ # see PEP 366 @ReservedAssignment
|
||||
if globals().get("__spec__") is not None:
|
||||
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
|
||||
# Remove other six meta path importers, since they cause problems. This can
|
||||
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
|
||||
# this for some reason.)
|
||||
if sys.meta_path:
|
||||
for i, importer in enumerate(sys.meta_path):
|
||||
# Here's some real nastiness: Another "instance" of the six module might
|
||||
# be floating around. Therefore, we can't use isinstance() to check for
|
||||
# the six meta path importer, since the other six instance will have
|
||||
# inserted an importer with different class.
|
||||
if (type(importer).__name__ == "_SixMetaPathImporter" and
|
||||
importer.name == __name__):
|
||||
del sys.meta_path[i]
|
||||
break
|
||||
del i, importer
|
||||
# Finally, add the importer to the meta path import hook.
|
||||
sys.meta_path.append(_importer)
|
||||
|
||||
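The bundled six shim above (bumped from 1.6.1 to 1.11.0 in this range) papers over Python 2/3 differences; a minimal hedged example of the helpers it exposes:

```python
import six

d = {'a': 1, 'b': 2}
for key, value in six.iteritems(d):   # dict.iteritems() on py2, dict.items() on py3
    print(key, value)

print(six.PY2, six.PY3)               # coarse version flags defined near the top of the module
data = six.b('bytes literal')         # byte string on both major versions
```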
34 lib/tqdm/__init__.py Normal file
@@ -0,0 +1,34 @@
from ._tqdm import tqdm
from ._tqdm import trange
from ._tqdm_gui import tqdm_gui
from ._tqdm_gui import tgrange
from ._tqdm_pandas import tqdm_pandas
from ._main import main
from ._monitor import TMonitor, TqdmSynchronisationWarning
from ._version import __version__ # NOQA
from ._tqdm import TqdmTypeError, TqdmKeyError, TqdmWarning, \
    TqdmDeprecationWarning, TqdmExperimentalWarning, \
    TqdmMonitorWarning

__all__ = ['tqdm', 'tqdm_gui', 'trange', 'tgrange', 'tqdm_pandas',
           'tqdm_notebook', 'tnrange', 'main', 'TMonitor',
           'TqdmTypeError', 'TqdmKeyError',
           'TqdmWarning', 'TqdmDeprecationWarning',
           'TqdmExperimentalWarning',
           'TqdmMonitorWarning', 'TqdmSynchronisationWarning',
           '__version__']


def tqdm_notebook(*args, **kwargs): # pragma: no cover
    """See tqdm._tqdm_notebook.tqdm_notebook for full documentation"""
    from ._tqdm_notebook import tqdm_notebook as _tqdm_notebook
    return _tqdm_notebook(*args, **kwargs)


def tnrange(*args, **kwargs): # pragma: no cover
    """
    A shortcut for tqdm_notebook(xrange(*args), **kwargs).
    On Python3+ range is used instead of xrange.
    """
    from ._tqdm_notebook import tnrange as _tnrange
    return _tnrange(*args, **kwargs)
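The newly vendored tqdm package above drives console progress bars; basic hedged usage of the exports listed in __all__:

```python
from tqdm import tqdm, trange
import time

for _ in trange(50, desc='work'):      # trange(n) is shorthand for tqdm(range(n))
    time.sleep(0.01)

items = ['a', 'b', 'c']
for item in tqdm(items, unit='item'):  # wraps any iterable and prints a bar to stderr
    time.sleep(0.1)
```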
2 lib/tqdm/__main__.py Normal file
@@ -0,0 +1,2 @@
from ._main import main
main()
207 lib/tqdm/_main.py Normal file
@@ -0,0 +1,207 @@
|
||||
from ._tqdm import tqdm, TqdmTypeError, TqdmKeyError
|
||||
from ._version import __version__ # NOQA
|
||||
import sys
|
||||
import re
|
||||
import logging
|
||||
__all__ = ["main"]
|
||||
|
||||
|
||||
def cast(val, typ):
|
||||
log = logging.getLogger(__name__)
|
||||
log.debug((val, typ))
|
||||
if " or " in typ:
|
||||
for t in typ.split(" or "):
|
||||
try:
|
||||
return cast(val, t)
|
||||
except TqdmTypeError:
|
||||
pass
|
||||
raise TqdmTypeError(val + ' : ' + typ)
|
||||
|
||||
# sys.stderr.write('\ndebug | `val:type`: `' + val + ':' + typ + '`.\n')
|
||||
if typ == 'bool':
|
||||
if (val == 'True') or (val == ''):
|
||||
return True
|
||||
elif val == 'False':
|
||||
return False
|
||||
else:
|
||||
raise TqdmTypeError(val + ' : ' + typ)
|
||||
try:
|
||||
return eval(typ + '("' + val + '")')
|
||||
except:
|
||||
if typ == 'chr':
|
||||
return chr(ord(eval('"' + val + '"')))
|
||||
else:
|
||||
raise TqdmTypeError(val + ' : ' + typ)
|
||||
|
||||
|
||||
def posix_pipe(fin, fout, delim='\n', buf_size=256,
|
||||
callback=lambda int: None # pragma: no cover
|
||||
):
|
||||
"""
|
||||
Params
|
||||
------
|
||||
fin : file with `read(buf_size : int)` method
|
||||
fout : file with `write` (and optionally `flush`) methods.
|
||||
callback : function(int), e.g.: `tqdm.update`
|
||||
"""
|
||||
fp_write = fout.write
|
||||
|
||||
# tmp = ''
|
||||
if not delim:
|
||||
while True:
|
||||
tmp = fin.read(buf_size)
|
||||
|
||||
# flush at EOF
|
||||
if not tmp:
|
||||
getattr(fout, 'flush', lambda: None)() # pragma: no cover
|
||||
return
|
||||
|
||||
fp_write(tmp)
|
||||
callback(len(tmp))
|
||||
# return
|
||||
|
||||
buf = ''
|
||||
# n = 0
|
||||
while True:
|
||||
tmp = fin.read(buf_size)
|
||||
|
||||
# flush at EOF
|
||||
if not tmp:
|
||||
if buf:
|
||||
fp_write(buf)
|
||||
callback(1 + buf.count(delim)) # n += 1 + buf.count(delim)
|
||||
getattr(fout, 'flush', lambda: None)() # pragma: no cover
|
||||
return # n
|
||||
|
||||
while True:
|
||||
try:
|
||||
i = tmp.index(delim)
|
||||
except ValueError:
|
||||
buf += tmp
|
||||
break
|
||||
else:
|
||||
fp_write(buf + tmp[:i + len(delim)])
|
||||
callback(1) # n += 1
|
||||
buf = ''
|
||||
tmp = tmp[i + len(delim):]
|
||||
|
||||
|
||||
# ((opt, type), ... )
|
||||
RE_OPTS = re.compile(r'\n {8}(\S+)\s{2,}:\s*([^,]+)')
|
||||
# better split method assuming no positional args
|
||||
RE_SHLEX = re.compile(r'\s*(?<!\S)--?([^\s=]+)(?:\s*|=|$)')
|
||||
|
||||
# TODO: add custom support for some of the following?
|
||||
UNSUPPORTED_OPTS = ('iterable', 'gui', 'out', 'file')
|
||||
|
||||
# The 8 leading spaces are required for consistency
|
||||
CLI_EXTRA_DOC = r"""
|
||||
Extra CLI Options
|
||||
-----------------
|
||||
name : type, optional
|
||||
TODO: find out why this is needed.
|
||||
delim : chr, optional
|
||||
Delimiting character [default: '\n']. Use '\0' for null.
|
||||
N.B.: on Windows systems, Python converts '\n' to '\r\n'.
|
||||
buf_size : int, optional
|
||||
String buffer size in bytes [default: 256]
|
||||
used when `delim` is specified.
|
||||
bytes : bool, optional
|
||||
If true, will count bytes, ignore `delim`, and default
|
||||
`unit_scale` to True, `unit_divisor` to 1024, and `unit` to 'B'.
|
||||
log : str, optional
|
||||
CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET.
|
||||
"""
|
||||
|
||||
|
||||
def main(fp=sys.stderr):
|
||||
"""
|
||||
Paramters (internal use only)
|
||||
---------
|
||||
fp : file-like object for tqdm
|
||||
"""
|
||||
try:
|
||||
log = sys.argv.index('--log')
|
||||
except ValueError:
|
||||
logLevel = 'INFO'
|
||||
else:
|
||||
# sys.argv.pop(log)
|
||||
# logLevel = sys.argv.pop(log)
|
||||
logLevel = sys.argv[log + 1]
|
||||
logging.basicConfig(level=getattr(logging, logLevel),
|
||||
format="%(levelname)s:%(module)s:%(lineno)d:%(message)s")
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
d = tqdm.__init__.__doc__ + CLI_EXTRA_DOC
|
||||
|
||||
opt_types = dict(RE_OPTS.findall(d))
|
||||
# opt_types['delim'] = 'chr'
|
||||
|
||||
for o in UNSUPPORTED_OPTS:
|
||||
opt_types.pop(o)
|
||||
|
||||
log.debug(sorted(opt_types.items()))
|
||||
|
||||
# d = RE_OPTS.sub(r' --\1=<\1> : \2', d)
|
||||
split = RE_OPTS.split(d)
|
||||
opt_types_desc = zip(split[1::3], split[2::3], split[3::3])
|
||||
d = ''.join('\n --{0}=<{0}> : {1}{2}'.format(*otd)
|
||||
for otd in opt_types_desc if otd[0] not in UNSUPPORTED_OPTS)
|
||||
|
||||
d = """Usage:
|
||||
tqdm [--help | options]
|
||||
|
||||
Options:
|
||||
-h, --help Print this help and exit
|
||||
-v, --version Print version and exit
|
||||
|
||||
""" + d.strip('\n') + '\n'
|
||||
|
||||
# opts = docopt(d, version=__version__)
|
||||
if any(v in sys.argv for v in ('-v', '--version')):
|
||||
sys.stdout.write(__version__ + '\n')
|
||||
sys.exit(0)
|
||||
elif any(v in sys.argv for v in ('-h', '--help')):
|
||||
sys.stdout.write(d + '\n')
|
||||
sys.exit(0)
|
||||
|
||||
argv = RE_SHLEX.split(' '.join(["tqdm"] + sys.argv[1:]))
|
||||
opts = dict(zip(argv[1::2], argv[2::2]))
|
||||
|
||||
log.debug(opts)
|
||||
opts.pop('log', True)
|
||||
|
||||
tqdm_args = {'file': fp}
|
||||
try:
|
||||
for (o, v) in opts.items():
|
||||
try:
|
||||
tqdm_args[o] = cast(v, opt_types[o])
|
||||
except KeyError as e:
|
||||
raise TqdmKeyError(str(e))
|
||||
log.debug('args:' + str(tqdm_args))
|
||||
except:
|
||||
fp.write('\nError:\nUsage:\n tqdm [--help | options]\n')
|
||||
for i in sys.stdin:
|
||||
sys.stdout.write(i)
|
||||
raise
|
||||
else:
|
||||
buf_size = tqdm_args.pop('buf_size', 256)
|
||||
delim = tqdm_args.pop('delim', '\n')
|
||||
delim_per_char = tqdm_args.pop('bytes', False)
|
||||
if delim_per_char:
|
||||
tqdm_args.setdefault('unit', 'B')
|
||||
tqdm_args.setdefault('unit_scale', True)
|
||||
tqdm_args.setdefault('unit_divisor', 1024)
|
||||
log.debug(tqdm_args)
|
||||
with tqdm(**tqdm_args) as t:
|
||||
posix_pipe(sys.stdin, sys.stdout,
|
||||
'', buf_size, t.update)
|
||||
elif delim == '\n':
|
||||
log.debug(tqdm_args)
|
||||
for i in tqdm(sys.stdin, **tqdm_args):
|
||||
sys.stdout.write(i)
|
||||
else:
|
||||
log.debug(tqdm_args)
|
||||
with tqdm(**tqdm_args) as t:
|
||||
posix_pipe(sys.stdin, sys.stdout,
|
||||
delim, buf_size, t.update)
|
||||
93 lib/tqdm/_monitor.py Normal file
@@ -0,0 +1,93 @@
|
||||
from threading import Event, Thread
|
||||
from time import time
|
||||
from warnings import warn
|
||||
__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
|
||||
|
||||
|
||||
class TqdmSynchronisationWarning(RuntimeWarning):
|
||||
"""tqdm multi-thread/-process errors which may cause incorrect nesting
|
||||
but otherwise no adverse effects"""
|
||||
pass
|
||||
|
||||
|
||||
class TMonitor(Thread):
|
||||
"""
|
||||
Monitoring thread for tqdm bars.
|
||||
Monitors if tqdm bars are taking too much time to display
|
||||
and readjusts miniters automatically if necessary.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tqdm_cls : class
|
||||
tqdm class to use (can be core tqdm or a submodule).
|
||||
sleep_interval : fload
|
||||
Time to sleep between monitoring checks.
|
||||
"""
|
||||
|
||||
# internal vars for unit testing
|
||||
_time = None
|
||||
_event = None
|
||||
|
||||
def __init__(self, tqdm_cls, sleep_interval):
|
||||
Thread.__init__(self)
|
||||
self.daemon = True # kill thread when main killed (KeyboardInterrupt)
|
||||
self.was_killed = Event()
|
||||
self.woken = 0 # last time woken up, to sync with monitor
|
||||
self.tqdm_cls = tqdm_cls
|
||||
self.sleep_interval = sleep_interval
|
||||
if TMonitor._time is not None:
|
||||
self._time = TMonitor._time
|
||||
else:
|
||||
self._time = time
|
||||
if TMonitor._event is not None:
|
||||
self._event = TMonitor._event
|
||||
else:
|
||||
self._event = Event
|
||||
self.start()
|
||||
|
||||
def exit(self):
|
||||
self.was_killed.set()
|
||||
self.join()
|
||||
return self.report()
|
||||
|
||||
def run(self):
|
||||
cur_t = self._time()
|
||||
while True:
|
||||
# After processing and before sleeping, notify that we woke
|
||||
# Need to be done just before sleeping
|
||||
self.woken = cur_t
|
||||
# Sleep some time...
|
||||
self.was_killed.wait(self.sleep_interval)
|
||||
# Quit if killed
|
||||
if self.was_killed.is_set():
|
||||
return
|
||||
# Then monitor!
|
||||
# Acquire lock (to access _instances)
|
||||
with self.tqdm_cls.get_lock():
|
||||
cur_t = self._time()
|
||||
# Check tqdm instances are waiting too long to print
|
||||
instances = self.tqdm_cls._instances.copy()
|
||||
for instance in instances:
|
||||
# Check event in loop to reduce blocking time on exit
|
||||
if self.was_killed.is_set():
|
||||
return
|
||||
# Avoid race by checking that the instance started
|
||||
if not hasattr(instance, 'start_t'): # pragma: nocover
|
||||
continue
|
||||
# Only if mininterval > 1 (else iterations are just slow)
|
||||
# and last refresh exceeded maxinterval
|
||||
if instance.miniters > 1 and \
|
||||
(cur_t - instance.last_print_t) >= \
|
||||
instance.maxinterval:
|
||||
# force bypassing miniters on next iteration
|
||||
# (dynamic_miniters adjusts mininterval automatically)
|
||||
instance.miniters = 1
|
||||
# Refresh now! (works only for manual tqdm)
|
||||
instance.refresh(nolock=True)
|
||||
if instances != self.tqdm_cls._instances: # pragma: nocover
|
||||
warn("Set changed size during iteration" +
|
||||
" (see https://github.com/tqdm/tqdm/issues/481)",
|
||||
TqdmSynchronisationWarning)
|
||||
|
||||
def report(self):
|
||||
return not self.was_killed.is_set()
|
||||
1223 lib/tqdm/_tqdm.py Normal file
File diff suppressed because it is too large
351 lib/tqdm/_tqdm_gui.py Normal file
@@ -0,0 +1,351 @@
|
||||
"""
|
||||
GUI progressbar decorator for iterators.
|
||||
Includes a default (x)range iterator printing to stderr.
|
||||
|
||||
Usage:
|
||||
>>> from tqdm_gui import tgrange[, tqdm_gui]
|
||||
>>> for i in tgrange(10): #same as: for i in tqdm_gui(xrange(10))
|
||||
... ...
|
||||
"""
|
||||
# future division is important to divide integers and get as
|
||||
# a result precise floating numbers (instead of truncated int)
|
||||
from __future__ import division, absolute_import
|
||||
# import compatibility functions and utilities
|
||||
# import sys
|
||||
from time import time
|
||||
from ._utils import _range
|
||||
# to inherit from the tqdm class
|
||||
from ._tqdm import tqdm, TqdmExperimentalWarning
|
||||
from warnings import warn
|
||||
|
||||
|
||||
__author__ = {"github.com/": ["casperdcl", "lrq3000"]}
|
||||
__all__ = ['tqdm_gui', 'tgrange']
|
||||
|
||||
|
||||
class tqdm_gui(tqdm): # pragma: no cover
|
||||
"""
|
||||
Experimental GUI version of tqdm!
|
||||
"""
|
||||
|
||||
# TODO: @classmethod: write() on GUI?
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
import matplotlib as mpl
|
||||
import matplotlib.pyplot as plt
|
||||
from collections import deque
|
||||
kwargs['gui'] = True
|
||||
|
||||
super(tqdm_gui, self).__init__(*args, **kwargs)
|
||||
|
||||
# Initialize the GUI display
|
||||
if self.disable or not kwargs['gui']:
|
||||
return
|
||||
|
||||
warn('GUI is experimental/alpha', TqdmExperimentalWarning)
|
||||
self.mpl = mpl
|
||||
self.plt = plt
|
||||
self.sp = None
|
||||
|
||||
# Remember if external environment uses toolbars
|
||||
self.toolbar = self.mpl.rcParams['toolbar']
|
||||
self.mpl.rcParams['toolbar'] = 'None'
|
||||
|
||||
self.mininterval = max(self.mininterval, 0.5)
|
||||
self.fig, ax = plt.subplots(figsize=(9, 2.2))
|
||||
# self.fig.subplots_adjust(bottom=0.2)
|
||||
if self.total:
|
||||
self.xdata = []
|
||||
self.ydata = []
|
||||
self.zdata = []
|
||||
else:
|
||||
self.xdata = deque([])
|
||||
self.ydata = deque([])
|
||||
self.zdata = deque([])
|
||||
self.line1, = ax.plot(self.xdata, self.ydata, color='b')
|
||||
self.line2, = ax.plot(self.xdata, self.zdata, color='k')
|
||||
ax.set_ylim(0, 0.001)
|
||||
if self.total:
|
||||
ax.set_xlim(0, 100)
|
||||
ax.set_xlabel('percent')
|
||||
self.fig.legend((self.line1, self.line2), ('cur', 'est'),
|
||||
loc='center right')
|
||||
# progressbar
|
||||
self.hspan = plt.axhspan(0, 0.001,
|
||||
xmin=0, xmax=0, color='g')
|
||||
else:
|
||||
# ax.set_xlim(-60, 0)
|
||||
ax.set_xlim(0, 60)
|
||||
ax.invert_xaxis()
|
||||
ax.set_xlabel('seconds')
|
||||
ax.legend(('cur', 'est'), loc='lower left')
|
||||
ax.grid()
|
||||
# ax.set_xlabel('seconds')
|
||||
ax.set_ylabel((self.unit if self.unit else 'it') + '/s')
|
||||
if self.unit_scale:
|
||||
plt.ticklabel_format(style='sci', axis='y',
|
||||
scilimits=(0, 0))
|
||||
ax.yaxis.get_offset_text().set_x(-0.15)
|
||||
|
||||
# Remember if external environment is interactive
|
||||
self.wasion = plt.isinteractive()
|
||||
plt.ion()
|
||||
self.ax = ax
|
||||
|
||||
def __iter__(self):
|
||||
# TODO: somehow allow the following:
|
||||
# if not self.gui:
|
||||
# return super(tqdm_gui, self).__iter__()
|
||||
iterable = self.iterable
|
||||
if self.disable:
|
||||
for obj in iterable:
|
||||
yield obj
|
||||
return
|
||||
|
||||
# ncols = self.ncols
|
||||
mininterval = self.mininterval
|
||||
maxinterval = self.maxinterval
|
||||
miniters = self.miniters
|
||||
dynamic_miniters = self.dynamic_miniters
|
||||
unit = self.unit
|
||||
unit_scale = self.unit_scale
|
||||
ascii = self.ascii
|
||||
start_t = self.start_t
|
||||
last_print_t = self.last_print_t
|
||||
last_print_n = self.last_print_n
|
||||
n = self.n
|
||||
# dynamic_ncols = self.dynamic_ncols
|
||||
smoothing = self.smoothing
|
||||
avg_time = self.avg_time
|
||||
bar_format = self.bar_format
|
||||
|
||||
plt = self.plt
|
||||
ax = self.ax
|
||||
xdata = self.xdata
|
||||
ydata = self.ydata
|
||||
zdata = self.zdata
|
||||
line1 = self.line1
|
||||
line2 = self.line2
|
||||
|
||||
for obj in iterable:
|
||||
yield obj
|
||||
# Update and print the progressbar.
|
||||
# Note: does not call self.update(1) for speed optimisation.
|
||||
n += 1
|
||||
delta_it = n - last_print_n
|
||||
# check the counter first (avoid calls to time())
|
||||
if delta_it >= miniters:
|
||||
cur_t = time()
|
||||
delta_t = cur_t - last_print_t
|
||||
if delta_t >= mininterval:
|
||||
elapsed = cur_t - start_t
|
||||
# EMA (not just overall average)
|
||||
if smoothing and delta_t:
|
||||
avg_time = delta_t / delta_it \
|
||||
if avg_time is None \
|
||||
else smoothing * delta_t / delta_it + \
|
||||
(1 - smoothing) * avg_time
|
||||
|
||||
# Inline due to multiple calls
|
||||
total = self.total
|
||||
# instantaneous rate
|
||||
y = delta_it / delta_t
|
||||
# overall rate
|
||||
z = n / elapsed
|
||||
# update line data
|
||||
xdata.append(n * 100.0 / total if total else cur_t)
|
||||
ydata.append(y)
|
||||
zdata.append(z)
|
||||
|
||||
# Discard old values
|
||||
# xmin, xmax = ax.get_xlim()
|
||||
# if (not total) and elapsed > xmin * 1.1:
|
||||
if (not total) and elapsed > 66:
|
||||
xdata.popleft()
|
||||
ydata.popleft()
|
||||
zdata.popleft()
|
||||
|
||||
ymin, ymax = ax.get_ylim()
|
||||
if y > ymax or z > ymax:
|
||||
ymax = 1.1 * y
|
||||
ax.set_ylim(ymin, ymax)
|
||||
ax.figure.canvas.draw()
|
||||
|
||||
if total:
|
||||
line1.set_data(xdata, ydata)
|
||||
line2.set_data(xdata, zdata)
|
||||
try:
|
||||
poly_lims = self.hspan.get_xy()
|
||||
except AttributeError:
|
||||
self.hspan = plt.axhspan(0, 0.001, xmin=0,
|
||||
xmax=0, color='g')
|
||||
poly_lims = self.hspan.get_xy()
|
||||
poly_lims[0, 1] = ymin
|
||||
poly_lims[1, 1] = ymax
|
||||
poly_lims[2] = [n / total, ymax]
|
||||
poly_lims[3] = [poly_lims[2, 0], ymin]
|
||||
if len(poly_lims) > 4:
|
||||
poly_lims[4, 1] = ymin
|
||||
self.hspan.set_xy(poly_lims)
|
||||
else:
|
||||
t_ago = [cur_t - i for i in xdata]
|
||||
line1.set_data(t_ago, ydata)
|
||||
line2.set_data(t_ago, zdata)
|
||||
|
||||
ax.set_title(self.format_meter(
|
||||
n, total, elapsed, 0,
|
||||
self.desc, ascii, unit, unit_scale,
|
||||
1 / avg_time if avg_time else None, bar_format),
|
||||
fontname="DejaVu Sans Mono", fontsize=11)
|
||||
plt.pause(1e-9)
|
||||
|
||||
# If no `miniters` was specified, adjust automatically
|
||||
# to the maximum iteration rate seen so far.
|
||||
if dynamic_miniters:
|
||||
if maxinterval and delta_t > maxinterval:
|
||||
# Set miniters to correspond to maxinterval
|
||||
miniters = delta_it * maxinterval / delta_t
|
||||
elif mininterval and delta_t:
|
||||
# EMA-weight miniters to converge
|
||||
# towards the timeframe of mininterval
|
||||
miniters = smoothing * delta_it * mininterval \
|
||||
/ delta_t + (1 - smoothing) * miniters
|
||||
else:
|
||||
miniters = smoothing * delta_it + \
|
||||
(1 - smoothing) * miniters
|
||||
|
||||
# Store old values for next call
|
||||
last_print_n = n
|
||||
last_print_t = cur_t
|
||||
|
||||
# Closing the progress bar.
|
||||
# Update some internal variables for close().
|
||||
self.last_print_n = last_print_n
|
||||
self.n = n
|
||||
self.close()
|
||||
|
||||
def update(self, n=1):
|
||||
# if not self.gui:
|
||||
# return super(tqdm_gui, self).close()
|
||||
if self.disable:
|
||||
return
|
||||
|
||||
if n < 0:
|
||||
n = 1
|
||||
self.n += n
|
||||
|
||||
delta_it = self.n - self.last_print_n # should be n?
|
||||
if delta_it >= self.miniters:
|
||||
# We check the counter first, to reduce the overhead of time()
|
||||
cur_t = time()
|
||||
delta_t = cur_t - self.last_print_t
|
||||
if delta_t >= self.mininterval:
|
||||
elapsed = cur_t - self.start_t
|
||||
# EMA (not just overall average)
|
||||
if self.smoothing and delta_t:
|
||||
self.avg_time = delta_t / delta_it \
|
||||
if self.avg_time is None \
|
||||
else self.smoothing * delta_t / delta_it + \
|
||||
(1 - self.smoothing) * self.avg_time
|
||||
|
||||
# Inline due to multiple calls
|
||||
total = self.total
|
||||
ax = self.ax
|
||||
|
||||
# instantaneous rate
|
||||
y = delta_it / delta_t
|
||||
# overall rate
|
||||
z = self.n / elapsed
|
||||
# update line data
|
||||
self.xdata.append(self.n * 100.0 / total
|
||||
if total else cur_t)
|
||||
self.ydata.append(y)
|
||||
self.zdata.append(z)
|
||||
|
||||
# Discard old values
|
||||
if (not total) and elapsed > 66:
|
||||
self.xdata.popleft()
|
||||
self.ydata.popleft()
|
||||
self.zdata.popleft()
|
||||
|
||||
ymin, ymax = ax.get_ylim()
|
||||
if y > ymax or z > ymax:
|
||||
ymax = 1.1 * y
|
||||
ax.set_ylim(ymin, ymax)
|
||||
ax.figure.canvas.draw()
|
||||
|
||||
if total:
|
||||
self.line1.set_data(self.xdata, self.ydata)
|
||||
self.line2.set_data(self.xdata, self.zdata)
|
||||
try:
|
||||
poly_lims = self.hspan.get_xy()
|
||||
except AttributeError:
|
||||
self.hspan = self.plt.axhspan(0, 0.001, xmin=0,
|
||||
xmax=0, color='g')
|
||||
poly_lims = self.hspan.get_xy()
|
||||
poly_lims[0, 1] = ymin
|
||||
poly_lims[1, 1] = ymax
|
||||
poly_lims[2] = [self.n / total, ymax]
|
||||
poly_lims[3] = [poly_lims[2, 0], ymin]
|
||||
if len(poly_lims) > 4:
|
||||
poly_lims[4, 1] = ymin
|
||||
self.hspan.set_xy(poly_lims)
|
||||
else:
|
||||
t_ago = [cur_t - i for i in self.xdata]
|
||||
self.line1.set_data(t_ago, self.ydata)
|
||||
self.line2.set_data(t_ago, self.zdata)
|
||||
|
||||
ax.set_title(self.format_meter(
|
||||
self.n, total, elapsed, 0,
|
||||
self.desc, self.ascii, self.unit, self.unit_scale,
|
||||
1 / self.avg_time if self.avg_time else None,
|
||||
self.bar_format),
|
||||
fontname="DejaVu Sans Mono", fontsize=11)
|
||||
self.plt.pause(1e-9)
|
||||
|
||||
# If no `miniters` was specified, adjust automatically to the
|
||||
# maximum iteration rate seen so far.
|
||||
# e.g.: After running `tqdm.update(5)`, subsequent
|
||||
# calls to `tqdm.update()` will only cause an update after
|
||||
# at least 5 more iterations.
|
||||
if self.dynamic_miniters:
|
||||
if self.maxinterval and delta_t > self.maxinterval:
|
||||
self.miniters = self.miniters * self.maxinterval \
|
||||
/ delta_t
|
||||
elif self.mininterval and delta_t:
|
||||
self.miniters = self.smoothing * delta_it \
|
||||
* self.mininterval / delta_t + \
|
||||
(1 - self.smoothing) * self.miniters
|
||||
else:
|
||||
self.miniters = self.smoothing * delta_it + \
|
||||
(1 - self.smoothing) * self.miniters
|
||||
|
||||
# Store old values for next call
|
||||
self.last_print_n = self.n
|
||||
self.last_print_t = cur_t
|
||||
|
||||
def close(self):
|
||||
# if not self.gui:
|
||||
# return super(tqdm_gui, self).close()
|
||||
if self.disable:
|
||||
return
|
||||
|
||||
self.disable = True
|
||||
|
||||
self._instances.remove(self)
|
||||
|
||||
# Restore toolbars
|
||||
self.mpl.rcParams['toolbar'] = self.toolbar
|
||||
# Return to non-interactive mode
|
||||
if not self.wasion:
|
||||
self.plt.ioff()
|
||||
if not self.leave:
|
||||
self.plt.close(self.fig)
|
||||
|
||||
|
||||
def tgrange(*args, **kwargs):
|
||||
"""
|
||||
A shortcut for tqdm_gui(xrange(*args), **kwargs).
|
||||
On Python3+ range is used instead of xrange.
|
||||
"""
|
||||
return tqdm_gui(_range(*args), **kwargs)
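The class above is only shown as source here, so a minimal usage sketch may help; it assumes matplotlib is installed and that the top-level package re-exports tqdm_gui and tgrange (as upstream tqdm does) — names and parameters are illustrative only.
from time import sleep
from tqdm import tqdm_gui, tgrange  # assumed re-exports; requires matplotlib

# wrap any iterable to get a live matplotlib rate plot instead of a text bar
for _ in tqdm_gui(range(500), mininterval=0.5):
    sleep(0.01)

# tgrange(n) is shorthand for tqdm_gui(range(n))
for _ in tgrange(500):
    sleep(0.01)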
|
||||
lib/tqdm/_tqdm_notebook.py (new file, 236 lines)
@@ -0,0 +1,236 @@
|
||||
"""
|
||||
IPython/Jupyter Notebook progressbar decorator for iterators.
|
||||
Includes a default (x)range iterator printing to stderr.
|
||||
|
||||
Usage:
|
||||
>>> from tqdm_notebook import tnrange[, tqdm_notebook]
|
||||
>>> for i in tnrange(10): #same as: for i in tqdm_notebook(xrange(10))
|
||||
... ...
|
||||
"""
|
||||
# future division is important to divide integers and get as
|
||||
# a result precise floating numbers (instead of truncated int)
|
||||
from __future__ import division, absolute_import
|
||||
# import compatibility functions and utilities
|
||||
import sys
|
||||
from ._utils import _range
|
||||
# to inherit from the tqdm class
|
||||
from ._tqdm import tqdm
|
||||
|
||||
|
||||
if True: # pragma: no cover
|
||||
# import IPython/Jupyter base widget and display utilities
|
||||
try: # IPython 4.x
|
||||
import ipywidgets
|
||||
IPY = 4
|
||||
except ImportError: # IPython 3.x / 2.x
|
||||
IPY = 32
|
||||
import warnings
|
||||
with warnings.catch_warnings():
|
||||
ipy_deprecation_msg = "The `IPython.html` package" \
|
||||
" has been deprecated"
|
||||
warnings.filterwarnings('error',
|
||||
message=".*" + ipy_deprecation_msg + ".*")
|
||||
try:
|
||||
import IPython.html.widgets as ipywidgets
|
||||
except Warning as e:
|
||||
if ipy_deprecation_msg not in str(e):
|
||||
raise
|
||||
warnings.simplefilter('ignore')
|
||||
try:
|
||||
import IPython.html.widgets as ipywidgets # NOQA
|
||||
except ImportError:
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try: # IPython 4.x / 3.x
|
||||
if IPY == 32:
|
||||
from IPython.html.widgets import IntProgress, HBox, HTML
|
||||
IPY = 3
|
||||
else:
|
||||
from ipywidgets import IntProgress, HBox, HTML
|
||||
except ImportError:
|
||||
try: # IPython 2.x
|
||||
from IPython.html.widgets import IntProgressWidget as IntProgress
|
||||
from IPython.html.widgets import ContainerWidget as HBox
|
||||
from IPython.html.widgets import HTML
|
||||
IPY = 2
|
||||
except ImportError:
|
||||
IPY = 0
|
||||
|
||||
try:
|
||||
from IPython.display import display # , clear_output
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# HTML encoding
|
||||
try: # Py3
|
||||
from html import escape
|
||||
except ImportError: # Py2
|
||||
from cgi import escape
|
||||
|
||||
|
||||
__author__ = {"github.com/": ["lrq3000", "casperdcl", "alexanderkuk"]}
|
||||
__all__ = ['tqdm_notebook', 'tnrange']
|
||||
|
||||
|
||||
class tqdm_notebook(tqdm):
|
||||
"""
|
||||
Experimental IPython/Jupyter Notebook widget using tqdm!
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def status_printer(_, total=None, desc=None):
|
||||
"""
|
||||
Manage the printing of an IPython/Jupyter Notebook progress bar widget.
|
||||
"""
|
||||
# Fallback to text bar if there's no total
|
||||
# DEPRECATED: replaced with an 'info' style bar
|
||||
# if not total:
|
||||
# return super(tqdm_notebook, tqdm_notebook).status_printer(file)
|
||||
|
||||
# fp = file
|
||||
|
||||
# Prepare IPython progress bar
|
||||
if total:
|
||||
pbar = IntProgress(min=0, max=total)
|
||||
else: # No total? Show info style bar with no progress tqdm status
|
||||
pbar = IntProgress(min=0, max=1)
|
||||
pbar.value = 1
|
||||
pbar.bar_style = 'info'
|
||||
if desc:
|
||||
pbar.description = desc
|
||||
# Prepare status text
|
||||
ptext = HTML()
|
||||
# Only way to place text to the right of the bar is to use a container
|
||||
container = HBox(children=[pbar, ptext])
|
||||
display(container)
|
||||
|
||||
def print_status(s='', close=False, bar_style=None, desc=None):
|
||||
# Note: contrary to native tqdm, s='' does NOT clear bar
|
||||
# the goal is to keep all info if an error happens, so the user knows
|
||||
# at which iteration the loop failed.
|
||||
|
||||
# Clear previous output (really necessary?)
|
||||
# clear_output(wait=1)
|
||||
|
||||
# Get current iteration value from format_meter string
|
||||
if total:
|
||||
# n = None
|
||||
if s:
|
||||
npos = s.find(r'/|/')  # because we use bar_format=r'{n}/|/...'
|
||||
# Check that n can be found in s (else n > total)
|
||||
if npos >= 0:
|
||||
n = int(s[:npos]) # get n from string
|
||||
s = s[npos + 3:] # remove from string
|
||||
|
||||
# Update bar with current n value
|
||||
if n is not None:
|
||||
pbar.value = n
|
||||
|
||||
# Print stats
|
||||
if s: # never clear the bar (signal: s='')
|
||||
s = s.replace('||', '')  # remove unaesthetic pipes
|
||||
s = escape(s) # html escape special characters (like '?')
|
||||
ptext.value = s
|
||||
|
||||
# Change bar style
|
||||
if bar_style:
|
||||
# Hack-ish way to avoid the danger of bar_style being overridden by
|
||||
# success because the bar gets closed after the error...
|
||||
if not (pbar.bar_style == 'danger' and bar_style == 'success'):
|
||||
pbar.bar_style = bar_style
|
||||
|
||||
# Special signal to close the bar
|
||||
if close and pbar.bar_style != 'danger': # hide only if no error
|
||||
try:
|
||||
container.close()
|
||||
except AttributeError:
|
||||
container.visible = False
|
||||
|
||||
# Update description
|
||||
if desc:
|
||||
pbar.description = desc
|
||||
|
||||
return print_status
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Setup default output
|
||||
if kwargs.get('file', sys.stderr) is sys.stderr:
|
||||
kwargs['file'] = sys.stdout # avoid the red block in IPython
|
||||
|
||||
# Remove the bar from the printed string, only print stats
|
||||
if not kwargs.get('bar_format', None):
|
||||
kwargs['bar_format'] = r'{n}/|/{l_bar}{r_bar}'
|
||||
|
||||
# Initialize parent class + avoid printing by using gui=True
|
||||
kwargs['gui'] = True
|
||||
super(tqdm_notebook, self).__init__(*args, **kwargs)
|
||||
if self.disable or not kwargs['gui']:
|
||||
return
|
||||
|
||||
# Delete first pbar generated from super() (wrong total and text)
|
||||
# DEPRECATED by using gui=True
|
||||
# self.sp('', close=True)
|
||||
# Replace with IPython progress bar display (with correct total)
|
||||
self.sp = self.status_printer(self.fp, self.total, self.desc)
|
||||
self.desc = None # trick to place description before the bar
|
||||
|
||||
# Print initial bar state
|
||||
if not self.disable:
|
||||
self.sp(self.__repr__()) # same as self.refresh without clearing
|
||||
|
||||
def __iter__(self, *args, **kwargs):
|
||||
try:
|
||||
for obj in super(tqdm_notebook, self).__iter__(*args, **kwargs):
|
||||
# return super(tqdm...) will not catch exception
|
||||
yield obj
|
||||
# NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
|
||||
except:
|
||||
self.sp(bar_style='danger')
|
||||
raise
|
||||
|
||||
def update(self, *args, **kwargs):
|
||||
try:
|
||||
super(tqdm_notebook, self).update(*args, **kwargs)
|
||||
except Exception as exc:
|
||||
# cannot catch KeyboardInterrupt when using manual tqdm
|
||||
# as the interrupt will most likely happen on another statement
|
||||
self.sp(bar_style='danger')
|
||||
raise exc
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
super(tqdm_notebook, self).close(*args, **kwargs)
|
||||
# If it was not run in a notebook, sp is not assigned, check for it
|
||||
if hasattr(self, 'sp'):
|
||||
# Try to detect if there was an error or KeyboardInterrupt
|
||||
# in manual mode: if n < total, things probably went wrong
|
||||
if self.total and self.n < self.total:
|
||||
self.sp(bar_style='danger')
|
||||
else:
|
||||
if self.leave:
|
||||
self.sp(bar_style='success')
|
||||
else:
|
||||
self.sp(close=True)
|
||||
|
||||
def moveto(self, *args, **kwargs):
|
||||
# void -> avoid extraneous `\n` in IPython output cell
|
||||
return
|
||||
|
||||
def set_description(self, desc=None, **_):
|
||||
"""
|
||||
Set/modify description of the progress bar.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
desc : str, optional
|
||||
"""
|
||||
self.sp(desc=desc)
|
||||
|
||||
|
||||
def tnrange(*args, **kwargs):
|
||||
"""
|
||||
A shortcut for tqdm_notebook(xrange(*args), **kwargs).
|
||||
On Python3+ range is used instead of xrange.
|
||||
"""
|
||||
return tqdm_notebook(_range(*args), **kwargs)
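As a quick orientation for the widget class above, here is a minimal notebook-side sketch; it assumes ipywidgets (or the older IPython.html.widgets) is available, that the code runs inside a Jupyter notebook, and that the top-level package re-exports tqdm_notebook and tnrange.
from time import sleep
from tqdm import tqdm_notebook, tnrange  # assumed re-exports; run inside Jupyter

for _ in tqdm_notebook(range(100), desc='outer'):
    for _ in tnrange(10, desc='inner', leave=False):
        sleep(0.01)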
|
||||
lib/tqdm/_tqdm_pandas.py (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
import sys
|
||||
|
||||
__author__ = "github.com/casperdcl"
|
||||
__all__ = ['tqdm_pandas']
|
||||
|
||||
|
||||
def tqdm_pandas(tclass, *targs, **tkwargs):
|
||||
"""
|
||||
Registers the given `tqdm` instance with
|
||||
`pandas.core.groupby.DataFrameGroupBy.progress_apply`.
|
||||
It will even close() the `tqdm` instance upon completion.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tclass : tqdm class you want to use (e.g. tqdm, tqdm_notebook, etc.)
|
||||
targs and tkwargs : arguments for the tqdm instance
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import pandas as pd
|
||||
>>> import numpy as np
|
||||
>>> from tqdm import tqdm, tqdm_pandas
|
||||
>>>
|
||||
>>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
|
||||
>>> tqdm_pandas(tqdm, leave=True) # can use tqdm_gui, optional kwargs, etc
|
||||
>>> # Now you can use `progress_apply` instead of `apply`
|
||||
>>> df.groupby(0).progress_apply(lambda x: x**2)
|
||||
|
||||
References
|
||||
----------
|
||||
https://stackoverflow.com/questions/18603270/
|
||||
progress-indicator-during-pandas-operations-python
|
||||
"""
|
||||
from tqdm import TqdmDeprecationWarning
|
||||
|
||||
if isinstance(tclass, type) or (getattr(tclass, '__name__', '').startswith(
|
||||
'tqdm_')): # delayed adapter case
|
||||
TqdmDeprecationWarning("""\
|
||||
Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm, ...)`.
|
||||
""", fp_write=getattr(tkwargs.get('file', None), 'write', sys.stderr.write))
|
||||
tclass.pandas(*targs, **tkwargs)
|
||||
else:
|
||||
TqdmDeprecationWarning("""\
|
||||
Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm(...))`.
|
||||
""", fp_write=getattr(tclass.fp, 'write', sys.stderr.write))
|
||||
type(tclass).pandas(deprecated_t=tclass)
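The deprecation messages above point at the replacement API; a minimal sketch of the recommended form (assuming pandas and numpy are installed):
import numpy as np
import pandas as pd
from tqdm import tqdm

tqdm.pandas(desc="apply")  # registers progress_apply / progress_map on pandas objects
df = pd.DataFrame(np.random.randint(0, 100, (10000, 6)))
df.groupby(0).progress_apply(lambda g: g.sum())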
|
||||
lib/tqdm/_utils.py (new file, 215 lines)
@@ -0,0 +1,215 @@
|
||||
import os
|
||||
import subprocess
|
||||
from platform import system as _curos
|
||||
CUR_OS = _curos()
|
||||
IS_WIN = CUR_OS in ['Windows', 'cli']
|
||||
IS_NIX = (not IS_WIN) and any(
|
||||
CUR_OS.startswith(i) for i in
|
||||
['CYGWIN', 'MSYS', 'Linux', 'Darwin', 'SunOS', 'FreeBSD', 'NetBSD'])
|
||||
|
||||
|
||||
# Py2/3 compat. Empty conditional to avoid coverage
|
||||
if True: # pragma: no cover
|
||||
try:
|
||||
_range = xrange
|
||||
except NameError:
|
||||
_range = range
|
||||
|
||||
try:
|
||||
_unich = unichr
|
||||
except NameError:
|
||||
_unich = chr
|
||||
|
||||
try:
|
||||
_unicode = unicode
|
||||
except NameError:
|
||||
_unicode = str
|
||||
|
||||
try:
|
||||
if IS_WIN:
|
||||
import colorama
|
||||
colorama.init()
|
||||
else:
|
||||
colorama = None
|
||||
except ImportError:
|
||||
colorama = None
|
||||
|
||||
try:
|
||||
from weakref import WeakSet
|
||||
except ImportError:
|
||||
WeakSet = set
|
||||
|
||||
try:
|
||||
_basestring = basestring
|
||||
except NameError:
|
||||
_basestring = str
|
||||
|
||||
try: # py>=2.7,>=3.1
|
||||
from collections import OrderedDict as _OrderedDict
|
||||
except ImportError:
|
||||
try: # older Python versions with backported ordereddict lib
|
||||
from ordereddict import OrderedDict as _OrderedDict
|
||||
except ImportError: # older Python versions without ordereddict lib
|
||||
# Py2.6,3.0 compat, from PEP 372
|
||||
from collections import MutableMapping
|
||||
|
||||
class _OrderedDict(dict, MutableMapping):
|
||||
# Methods with direct access to underlying attributes
|
||||
def __init__(self, *args, **kwds):
|
||||
if len(args) > 1:
|
||||
raise TypeError('expected at most 1 argument, got %d' %
|
||||
len(args))
|
||||
if not hasattr(self, '_keys'):
|
||||
self._keys = []
|
||||
self.update(*args, **kwds)
|
||||
|
||||
def clear(self):
|
||||
del self._keys[:]
|
||||
dict.clear(self)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if key not in self:
|
||||
self._keys.append(key)
|
||||
dict.__setitem__(self, key, value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
dict.__delitem__(self, key)
|
||||
self._keys.remove(key)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._keys)
|
||||
|
||||
def __reversed__(self):
|
||||
return reversed(self._keys)
|
||||
|
||||
def popitem(self):
|
||||
if not self:
|
||||
raise KeyError
|
||||
key = self._keys.pop()
|
||||
value = dict.pop(self, key)
|
||||
return key, value
|
||||
|
||||
def __reduce__(self):
|
||||
items = [[k, self[k]] for k in self]
|
||||
inst_dict = vars(self).copy()
|
||||
inst_dict.pop('_keys', None)
|
||||
return self.__class__, (items,), inst_dict
|
||||
|
||||
# Methods with indirect access via the above methods
|
||||
setdefault = MutableMapping.setdefault
|
||||
update = MutableMapping.update
|
||||
pop = MutableMapping.pop
|
||||
keys = MutableMapping.keys
|
||||
values = MutableMapping.values
|
||||
items = MutableMapping.items
|
||||
|
||||
def __repr__(self):
|
||||
pairs = ', '.join(map('%r: %r'.__mod__, self.items()))
|
||||
return '%s({%s})' % (self.__class__.__name__, pairs)
|
||||
|
||||
def copy(self):
|
||||
return self.__class__(self)
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, value=None):
|
||||
d = cls()
|
||||
for key in iterable:
|
||||
d[key] = value
|
||||
return d
|
||||
|
||||
|
||||
def _is_utf(encoding):
|
||||
try:
|
||||
u'\u2588\u2589'.encode(encoding)
|
||||
except UnicodeEncodeError: # pragma: no cover
|
||||
return False
|
||||
except Exception: # pragma: no cover
|
||||
try:
|
||||
return encoding.lower().startswith('utf-') or ('U8' == encoding)
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def _supports_unicode(fp):
|
||||
try:
|
||||
return _is_utf(fp.encoding)
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
|
||||
def _environ_cols_wrapper(): # pragma: no cover
|
||||
"""
|
||||
Return a function which gets width and height of console
|
||||
(linux,osx,windows,cygwin).
|
||||
"""
|
||||
_environ_cols = None
|
||||
if IS_WIN:
|
||||
_environ_cols = _environ_cols_windows
|
||||
if _environ_cols is None:
|
||||
_environ_cols = _environ_cols_tput
|
||||
if IS_NIX:
|
||||
_environ_cols = _environ_cols_linux
|
||||
return _environ_cols
|
||||
|
||||
|
||||
def _environ_cols_windows(fp): # pragma: no cover
|
||||
try:
|
||||
from ctypes import windll, create_string_buffer
|
||||
import struct
|
||||
from sys import stdin, stdout
|
||||
|
||||
io_handle = -12 # assume stderr
|
||||
if fp == stdin:
|
||||
io_handle = -10
|
||||
elif fp == stdout:
|
||||
io_handle = -11
|
||||
|
||||
h = windll.kernel32.GetStdHandle(io_handle)
|
||||
csbi = create_string_buffer(22)
|
||||
res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
|
||||
if res:
|
||||
(_bufx, _bufy, _curx, _cury, _wattr, left, _top, right, _bottom,
|
||||
_maxx, _maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
|
||||
# nlines = bottom - top + 1
|
||||
return right - left # +1
|
||||
except:
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def _environ_cols_tput(*_): # pragma: no cover
|
||||
"""cygwin xterm (windows)"""
|
||||
try:
|
||||
import shlex
|
||||
cols = int(subprocess.check_output(shlex.split('tput cols')))
|
||||
# rows = int(subprocess.check_call(shlex.split('tput lines')))
|
||||
return cols
|
||||
except:
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def _environ_cols_linux(fp): # pragma: no cover
|
||||
|
||||
try:
|
||||
from termios import TIOCGWINSZ
|
||||
from fcntl import ioctl
|
||||
from array import array
|
||||
except ImportError:
|
||||
return None
|
||||
else:
|
||||
try:
|
||||
return array('h', ioctl(fp, TIOCGWINSZ, '\0' * 8))[1]
|
||||
except:
|
||||
try:
|
||||
from os import environ
|
||||
except ImportError:
|
||||
return None
|
||||
else:
|
||||
return int(environ.get('COLUMNS', 1)) - 1
|
||||
|
||||
|
||||
def _term_move_up(): # pragma: no cover
|
||||
return '' if (os.name == 'nt') and (colorama is None) else '\x1b[A'
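To illustrate how the console-width helpers above are meant to be combined, here is a minimal sketch; the import path is hypothetical and simply mirrors this vendored layout.
import sys
from lib.tqdm._utils import _environ_cols_wrapper, _supports_unicode  # hypothetical path

get_cols = _environ_cols_wrapper()                    # picks the backend for this OS
ncols = get_cols(sys.stderr) if get_cols else None    # None when width cannot be found
ascii_only = not _supports_unicode(sys.stderr)        # fall back to '#' bars if needed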
|
||||
lib/tqdm/_version.py (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
# Definition of the version number
|
||||
import os
|
||||
from io import open as io_open
|
||||
|
||||
__all__ = ["__version__"]
|
||||
|
||||
# major, minor, patch, -extra
|
||||
version_info = 4, 21, 0
|
||||
|
||||
# Nice string for the version
|
||||
__version__ = '.'.join(map(str, version_info))
|
||||
|
||||
|
||||
# auto -extra based on commit hash (if not tagged as release)
|
||||
scriptdir = os.path.dirname(__file__)
|
||||
gitdir = os.path.abspath(os.path.join(scriptdir, "..", ".git"))
|
||||
if os.path.isdir(gitdir): # pragma: nocover
|
||||
extra = None
|
||||
# Open config file to check if we are in tqdm project
|
||||
with io_open(os.path.join(gitdir, "config"), 'r') as fh_config:
|
||||
if 'tqdm' in fh_config.read():
|
||||
# Open the HEAD file
|
||||
with io_open(os.path.join(gitdir, "HEAD"), 'r') as fh_head:
|
||||
extra = fh_head.readline().strip()
|
||||
# in a branch => HEAD points to file containing last commit
|
||||
if 'ref:' in extra:
|
||||
# reference file path
|
||||
ref_file = extra[5:]
|
||||
branch_name = ref_file.rsplit('/', 1)[-1]
|
||||
|
||||
ref_file_path = os.path.abspath(os.path.join(gitdir, ref_file))
|
||||
# check that we are in git folder
|
||||
# (by stripping the git folder from the ref file path)
|
||||
if os.path.relpath(
|
||||
ref_file_path, gitdir).replace('\\', '/') != ref_file:
|
||||
# out of git folder
|
||||
extra = None
|
||||
else:
|
||||
# open the ref file
|
||||
with io_open(ref_file_path, 'r') as fh_branch:
|
||||
commit_hash = fh_branch.readline().strip()
|
||||
extra = commit_hash[:8]
|
||||
if branch_name != "master":
|
||||
extra += '.' + branch_name
|
||||
|
||||
# detached HEAD mode, already have commit hash
|
||||
else:
|
||||
extra = extra[:8]
|
||||
|
||||
# Append commit hash (and branch) to version string if not tagged
|
||||
if extra is not None:
|
||||
try:
|
||||
with io_open(os.path.join(gitdir, "refs", "tags",
|
||||
'v' + __version__)) as fdv:
|
||||
if fdv.readline().strip()[:8] != extra[:8]:
|
||||
__version__ += '-' + extra
|
||||
except Exception as e:
|
||||
if "No such file" not in str(e):
|
||||
raise
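For reference, the logic above yields version strings of the following shape (the commit hash, branch name, and import path are illustrative only):
from lib.tqdm._version import __version__  # hypothetical vendored path

# '4.21.0'                      tagged release (or no .git directory found)
# '4.21.0-1a2b3c4d'             untagged commit on master
# '4.21.0-1a2b3c4d.my-branch'   untagged commit on another branch
print(__version__)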
|
||||
lib/tqdm/tests/tests_main.py (new file, 94 lines)
@@ -0,0 +1,94 @@
|
||||
import sys
|
||||
import subprocess
|
||||
from tqdm import main, TqdmKeyError, TqdmTypeError
|
||||
|
||||
from tests_tqdm import with_setup, pretest, posttest, _range, closing, \
|
||||
UnicodeIO, StringIO
|
||||
|
||||
|
||||
def _sh(*cmd, **kwargs):
|
||||
return subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
**kwargs).communicate()[0].decode('utf-8')
|
||||
|
||||
|
||||
# WARNING: this should be the last test as it messes with sys.stdin, argv
|
||||
@with_setup(pretest, posttest)
|
||||
def test_main():
|
||||
"""Test command line pipes"""
|
||||
ls_out = _sh('ls').replace('\r\n', '\n')
|
||||
ls = subprocess.Popen('ls', stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT)
|
||||
res = _sh(sys.executable, '-c', 'from tqdm import main; main()',
|
||||
stdin=ls.stdout, stderr=subprocess.STDOUT)
|
||||
ls.wait()
|
||||
|
||||
# actual test:
|
||||
|
||||
assert (ls_out in res.replace('\r\n', '\n'))
|
||||
|
||||
# semi-fake test which gets coverage:
|
||||
_SYS = sys.stdin, sys.argv
|
||||
|
||||
with closing(StringIO()) as sys.stdin:
|
||||
sys.argv = ['', '--desc', 'Test CLI-delims',
|
||||
'--ascii', 'True', '--delim', r'\0', '--buf_size', '64']
|
||||
sys.stdin.write('\0'.join(map(str, _range(int(1e3)))))
|
||||
sys.stdin.seek(0)
|
||||
main()
|
||||
|
||||
IN_DATA_LIST = map(str, _range(int(1e3)))
|
||||
sys.stdin = IN_DATA_LIST
|
||||
sys.argv = ['', '--desc', 'Test CLI pipes',
|
||||
'--ascii', 'True', '--unit_scale', 'True']
|
||||
import tqdm.__main__ # NOQA
|
||||
|
||||
IN_DATA = '\0'.join(IN_DATA_LIST)
|
||||
with closing(StringIO()) as sys.stdin:
|
||||
sys.stdin.write(IN_DATA)
|
||||
sys.stdin.seek(0)
|
||||
sys.argv = ['', '--ascii', '--bytes', '--unit_scale', 'False']
|
||||
with closing(UnicodeIO()) as fp:
|
||||
main(fp=fp)
|
||||
assert (str(len(IN_DATA)) in fp.getvalue())
|
||||
|
||||
sys.stdin = IN_DATA_LIST
|
||||
sys.argv = ['', '-ascii', '--unit_scale', 'False',
|
||||
'--desc', 'Test CLI errors']
|
||||
main()
|
||||
|
||||
sys.argv = ['', '-ascii', '-unit_scale', '--bad_arg_u_ment', 'foo']
|
||||
try:
|
||||
main()
|
||||
except TqdmKeyError as e:
|
||||
if 'bad_arg_u_ment' not in str(e):
|
||||
raise
|
||||
else:
|
||||
raise TqdmKeyError('bad_arg_u_ment')
|
||||
|
||||
sys.argv = ['', '-ascii', '-unit_scale', 'invalid_bool_value']
|
||||
try:
|
||||
main()
|
||||
except TqdmTypeError as e:
|
||||
if 'invalid_bool_value' not in str(e):
|
||||
raise
|
||||
else:
|
||||
raise TqdmTypeError('invalid_bool_value')
|
||||
|
||||
sys.argv = ['', '-ascii', '--total', 'invalid_int_value']
|
||||
try:
|
||||
main()
|
||||
except TqdmTypeError as e:
|
||||
if 'invalid_int_value' not in str(e):
|
||||
raise
|
||||
else:
|
||||
raise TqdmTypeError('invalid_int_value')
|
||||
|
||||
for i in ('-h', '--help', '-v', '--version'):
|
||||
sys.argv = ['', i]
|
||||
try:
|
||||
main()
|
||||
except SystemExit:
|
||||
pass
|
||||
|
||||
# clean up
|
||||
sys.stdin, sys.argv = _SYS
|
||||
lib/tqdm/tests/tests_pandas.py (new file, 207 lines)
@@ -0,0 +1,207 @@
|
||||
from nose.plugins.skip import SkipTest
|
||||
|
||||
from tqdm import tqdm
|
||||
from tests_tqdm import with_setup, pretest, posttest, StringIO, closing
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_series():
|
||||
"""Test pandas.Series.progress_apply and .progress_map"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
|
||||
series = pd.Series(randint(0, 50, (123,)))
|
||||
res1 = series.progress_apply(lambda x: x + 10)
|
||||
res2 = series.apply(lambda x: x + 10)
|
||||
assert res1.equals(res2)
|
||||
|
||||
res3 = series.progress_map(lambda x: x + 10)
|
||||
res4 = series.map(lambda x: x + 10)
|
||||
assert res3.equals(res4)
|
||||
|
||||
expects = ['100%', '123/123']
|
||||
for exres in expects:
|
||||
our_file.seek(0)
|
||||
if our_file.getvalue().count(exres) < 2:
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:\n{1}\n".format(
|
||||
exres + " at least twice.", our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_data_frame():
|
||||
"""Test pandas.DataFrame.progress_apply and .progress_applymap"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
df = pd.DataFrame(randint(0, 50, (100, 200)))
|
||||
|
||||
def task_func(x):
|
||||
return x + 1
|
||||
|
||||
# applymap
|
||||
res1 = df.progress_applymap(task_func)
|
||||
res2 = df.applymap(task_func)
|
||||
assert res1.equals(res2)
|
||||
|
||||
# apply
|
||||
for axis in [0, 1]:
|
||||
res3 = df.progress_apply(task_func, axis=axis)
|
||||
res4 = df.apply(task_func, axis=axis)
|
||||
assert res3.equals(res4)
|
||||
|
||||
our_file.seek(0)
|
||||
if our_file.read().count('100%') < 3:
|
||||
our_file.seek(0)
|
||||
raise AssertionError("\nExpected:\n{0}\nIn:\n{1}\n".format(
|
||||
'100% at least three times', our_file.read()))
|
||||
|
||||
# apply_map, apply axis=0, apply axis=1
|
||||
expects = ['20000/20000', '200/200', '100/100']
|
||||
for exres in expects:
|
||||
our_file.seek(0)
|
||||
if our_file.getvalue().count(exres) < 1:
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:\n {1}\n".format(
|
||||
exres + " at least once.", our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_groupby_apply():
|
||||
"""Test pandas.DataFrame.groupby(...).progress_apply"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=False, ascii=True)
|
||||
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
|
||||
dfs = pd.DataFrame(randint(0, 50, (500, 3)), columns=list('abc'))
|
||||
dfs.groupby(['a']).progress_apply(lambda x: None)
|
||||
|
||||
our_file.seek(0)
|
||||
|
||||
# don't expect final output since no `leave` and
|
||||
# high dynamic `miniters`
|
||||
nexres = '100%|##########|'
|
||||
if nexres in our_file.read():
|
||||
our_file.seek(0)
|
||||
raise AssertionError("\nDid not expect:\n{0}\nIn:{1}\n".format(
|
||||
nexres, our_file.read()))
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
|
||||
dfs = pd.DataFrame(randint(0, 50, (500, 3)), columns=list('abc'))
|
||||
dfs.loc[0] = [2, 1, 1]
|
||||
dfs['d'] = 100
|
||||
|
||||
expects = ['500/500', '1/1', '4/4', '2/2']
|
||||
dfs.groupby(dfs.index).progress_apply(lambda x: None)
|
||||
dfs.groupby('d').progress_apply(lambda x: None)
|
||||
dfs.groupby(dfs.columns, axis=1).progress_apply(lambda x: None)
|
||||
dfs.groupby([2, 2, 1, 1], axis=1).progress_apply(lambda x: None)
|
||||
|
||||
our_file.seek(0)
|
||||
if our_file.read().count('100%') < 4:
|
||||
our_file.seek(0)
|
||||
raise AssertionError("\nExpected:\n{0}\nIn:\n{1}\n".format(
|
||||
'100% at least four times', our_file.read()))
|
||||
|
||||
for exres in expects:
|
||||
our_file.seek(0)
|
||||
if our_file.getvalue().count(exres) < 1:
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:\n {1}\n".format(
|
||||
exres + " at least once.", our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_leave():
|
||||
"""Test pandas with `leave=True`"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
df = pd.DataFrame(randint(0, 100, (1000, 6)))
|
||||
tqdm.pandas(file=our_file, leave=True, ascii=True)
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
|
||||
our_file.seek(0)
|
||||
|
||||
exres = '100%|##########| 100/100'
|
||||
if exres not in our_file.read():
|
||||
our_file.seek(0)
|
||||
raise AssertionError(
|
||||
"\nExpected:\n{0}\nIn:{1}\n".format(exres, our_file.read()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_apply_args_deprecation():
|
||||
"""Test warning info in
|
||||
`pandas.Dataframe(Series).progress_apply(func, *args)`"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
from tqdm import tqdm_pandas
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm_pandas(tqdm(file=our_file, leave=False, ascii=True, ncols=20))
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.progress_apply(lambda x: None, 1) # 1 shall cause a warning
|
||||
# Check deprecation message
|
||||
res = our_file.getvalue()
|
||||
assert all([i in res for i in (
|
||||
"TqdmDeprecationWarning", "not supported",
|
||||
"keyword arguments instead")])
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_pandas_deprecation():
|
||||
"""Test bar object instance as argument deprecation"""
|
||||
try:
|
||||
from numpy.random import randint
|
||||
from tqdm import tqdm_pandas
|
||||
import pandas as pd
|
||||
except ImportError:
|
||||
raise SkipTest
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm_pandas(tqdm(file=our_file, leave=False, ascii=True, ncols=20))
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
# Check deprecation message
|
||||
assert "TqdmDeprecationWarning" in our_file.getvalue()
|
||||
assert "instead of `tqdm_pandas(tqdm(...))`" in our_file.getvalue()
|
||||
|
||||
with closing(StringIO()) as our_file:
|
||||
tqdm_pandas(tqdm, file=our_file, leave=False, ascii=True, ncols=20)
|
||||
df = pd.DataFrame(randint(0, 50, (500, 3)))
|
||||
df.groupby(0).progress_apply(lambda x: None)
|
||||
# Check deprecation message
|
||||
assert "TqdmDeprecationWarning" in our_file.getvalue()
|
||||
assert "instead of `tqdm_pandas(tqdm, ...)`" in our_file.getvalue()
|
||||
lib/tqdm/tests/tests_perf.py (new file, 336 lines)
@@ -0,0 +1,336 @@
|
||||
from __future__ import print_function, division
|
||||
|
||||
from nose.plugins.skip import SkipTest
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
import sys
|
||||
from time import sleep, time
|
||||
|
||||
from tqdm import trange
|
||||
from tqdm import tqdm
|
||||
|
||||
from tests_tqdm import with_setup, pretest, posttest, StringIO, closing, _range
|
||||
|
||||
# Use relative/cpu timer to have reliable timings when there is a sudden load
|
||||
try:
|
||||
from time import process_time
|
||||
except ImportError:
|
||||
from time import clock
|
||||
process_time = clock
|
||||
|
||||
|
||||
def get_relative_time(prevtime=0):
|
||||
return process_time() - prevtime
|
||||
|
||||
|
||||
def cpu_sleep(t):
|
||||
"""Sleep the given amount of cpu time"""
|
||||
start = process_time()
|
||||
while (process_time() - start) < t:
|
||||
pass
|
||||
|
||||
|
||||
def checkCpuTime(sleeptime=0.2):
|
||||
"""Check if cpu time works correctly"""
|
||||
if checkCpuTime.passed:
|
||||
return True
|
||||
# First test that sleeping does not consume cputime
|
||||
start1 = process_time()
|
||||
sleep(sleeptime)
|
||||
t1 = process_time() - start1
|
||||
|
||||
# Second, check by comparing to cpu_sleep (where we actually do something)
|
||||
start2 = process_time()
|
||||
cpu_sleep(sleeptime)
|
||||
t2 = process_time() - start2
|
||||
|
||||
if abs(t1) < 0.0001 and (t1 < t2 / 10):
|
||||
return True
|
||||
raise SkipTest
|
||||
|
||||
|
||||
checkCpuTime.passed = False
|
||||
|
||||
|
||||
@contextmanager
|
||||
def relative_timer():
|
||||
start = process_time()
|
||||
|
||||
def elapser():
|
||||
return process_time() - start
|
||||
|
||||
yield lambda: elapser()
|
||||
spent = process_time() - start
|
||||
|
||||
def elapser(): # NOQA
|
||||
return spent
|
||||
|
||||
|
||||
def retry_on_except(n=3):
|
||||
def wrapper(fn):
|
||||
def test_inner():
|
||||
for i in range(1, n + 1):
|
||||
try:
|
||||
checkCpuTime()
|
||||
fn()
|
||||
except SkipTest:
|
||||
if i >= n:
|
||||
raise
|
||||
else:
|
||||
return
|
||||
|
||||
test_inner.__doc__ = fn.__doc__
|
||||
return test_inner
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class MockIO(StringIO):
|
||||
"""Wraps StringIO to mock a file with no I/O"""
|
||||
|
||||
def write(self, data):
|
||||
return
|
||||
|
||||
|
||||
def simple_progress(iterable=None, total=None, file=sys.stdout, desc='',
|
||||
leave=False, miniters=1, mininterval=0.1, width=60):
|
||||
"""Simple progress bar reproducing tqdm's major features"""
|
||||
n = [0] # use a closure
|
||||
start_t = [time()]
|
||||
last_n = [0]
|
||||
last_t = [0]
|
||||
if iterable is not None:
|
||||
total = len(iterable)
|
||||
|
||||
def format_interval(t):
|
||||
mins, s = divmod(int(t), 60)
|
||||
h, m = divmod(mins, 60)
|
||||
if h:
|
||||
return '{0:d}:{1:02d}:{2:02d}'.format(h, m, s)
|
||||
else:
|
||||
return '{0:02d}:{1:02d}'.format(m, s)
|
||||
|
||||
def update_and_print(i=1):
|
||||
n[0] += i
|
||||
if (n[0] - last_n[0]) >= miniters:
|
||||
last_n[0] = n[0]
|
||||
|
||||
if (time() - last_t[0]) >= mininterval:
|
||||
last_t[0] = time() # last_t[0] == current time
|
||||
|
||||
spent = last_t[0] - start_t[0]
|
||||
spent_fmt = format_interval(spent)
|
||||
rate = n[0] / spent if spent > 0 else 0
|
||||
if 0.0 < rate < 1.0:
|
||||
rate_fmt = "%.2fs/it" % (1.0 / rate)
|
||||
else:
|
||||
rate_fmt = "%.2fit/s" % rate
|
||||
|
||||
frac = n[0] / total
|
||||
percentage = int(frac * 100)
|
||||
eta = (total - n[0]) / rate if rate > 0 else 0
|
||||
eta_fmt = format_interval(eta)
|
||||
|
||||
# bar = "#" * int(frac * width)
|
||||
barfill = " " * int((1.0 - frac) * width)
|
||||
bar_length, frac_bar_length = divmod(int(frac * width * 10), 10)
|
||||
bar = '#' * bar_length
|
||||
frac_bar = chr(48 + frac_bar_length) if frac_bar_length \
|
||||
else ' '
|
||||
|
||||
file.write("\r%s %i%%|%s%s%s| %i/%i [%s<%s, %s]" %
|
||||
(desc, percentage, bar, frac_bar, barfill, n[0],
|
||||
total, spent_fmt, eta_fmt, rate_fmt))
|
||||
|
||||
if n[0] == total and leave:
|
||||
file.write("\n")
|
||||
file.flush()
|
||||
|
||||
def update_and_yield():
|
||||
for elt in iterable:
|
||||
yield elt
|
||||
update_and_print()
|
||||
|
||||
update_and_print(0)
|
||||
if iterable is not None:
|
||||
return update_and_yield()
|
||||
else:
|
||||
return update_and_print
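A short sketch of how this benchmark helper is driven in both of its modes (using only the names defined above; file and leave values are illustrative):
import sys

# iterator mode: wrap an iterable and let the generator print as it is consumed
for _ in simple_progress(range(1000), file=sys.stderr, leave=True):
    pass

# manual mode: pass total up front and call the returned updater yourself
update = simple_progress(total=1000, file=sys.stderr, leave=True)
for _ in range(100):
    update(10)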
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_iter_overhead():
|
||||
"""Test overhead of iteration based tqdm"""
|
||||
|
||||
total = int(1e6)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
a = 0
|
||||
with trange(total, file=our_file) as t:
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in t:
|
||||
a += i
|
||||
assert (a == (total * total - total) / 2.0)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(a)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
if time_tqdm() > 9 * time_bench():
|
||||
raise AssertionError('trange(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_manual_overhead():
|
||||
"""Test overhead of manual tqdm"""
|
||||
|
||||
total = int(1e6)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
with tqdm(total=total * 10, file=our_file, leave=True) as t:
|
||||
a = 0
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
t.update(10)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(a)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
if time_tqdm() > 10 * time_bench():
|
||||
raise AssertionError('tqdm(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_iter_overhead_hard():
|
||||
"""Test overhead of iteration based tqdm (hard)"""
|
||||
|
||||
total = int(1e5)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
a = 0
|
||||
with trange(total, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0) as t:
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in t:
|
||||
a += i
|
||||
assert (a == (total * total - total) / 2.0)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(("%i" % a) * 40)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 60 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('trange(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_manual_overhead_hard():
|
||||
"""Test overhead of manual tqdm (hard)"""
|
||||
|
||||
total = int(1e5)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
t = tqdm(total=total * 10, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0)
|
||||
a = 0
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
t.update(10)
|
||||
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
our_file.write(("%i" % a) * 40)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 100 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('tqdm(%g): %f, range(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_iter_overhead_simplebar_hard():
|
||||
"""Test overhead of iteration based tqdm vs simple progress bar (hard)"""
|
||||
|
||||
total = int(1e4)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
a = 0
|
||||
with trange(total, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0) as t:
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in t:
|
||||
a += i
|
||||
assert (a == (total * total - total) / 2.0)
|
||||
|
||||
a = 0
|
||||
s = simple_progress(_range(total), file=our_file, leave=True,
|
||||
miniters=1, mininterval=0)
|
||||
with relative_timer() as time_bench:
|
||||
for i in s:
|
||||
a += i
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 2.5 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('trange(%g): %f, simple_progress(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
@retry_on_except()
|
||||
def test_manual_overhead_simplebar_hard():
|
||||
"""Test overhead of manual tqdm vs simple progress bar (hard)"""
|
||||
|
||||
total = int(1e4)
|
||||
|
||||
with closing(MockIO()) as our_file:
|
||||
t = tqdm(total=total * 10, file=our_file, leave=True, miniters=1,
|
||||
mininterval=0, maxinterval=0)
|
||||
a = 0
|
||||
with relative_timer() as time_tqdm:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
t.update(10)
|
||||
|
||||
simplebar_update = simple_progress(
|
||||
total=total, file=our_file, leave=True, miniters=1, mininterval=0)
|
||||
a = 0
|
||||
with relative_timer() as time_bench:
|
||||
for i in _range(total):
|
||||
a += i
|
||||
simplebar_update(10)
|
||||
|
||||
# Compute relative overhead of tqdm against native range()
|
||||
try:
|
||||
assert (time_tqdm() < 2.5 * time_bench())
|
||||
except AssertionError:
|
||||
raise AssertionError('tqdm(%g): %f, simple_progress(%g): %f' %
|
||||
(total, time_tqdm(), total, time_bench()))
|
||||
lib/tqdm/tests/tests_synchronisation.py (new file, 164 lines)
@@ -0,0 +1,164 @@
|
||||
from __future__ import division
|
||||
from tqdm import tqdm
|
||||
from tests_tqdm import with_setup, pretest, posttest, StringIO, closing
|
||||
from tests_tqdm import DiscreteTimer, cpu_timify
|
||||
|
||||
from time import sleep
|
||||
from threading import Event
|
||||
from tqdm import TMonitor
|
||||
|
||||
|
||||
class FakeSleep(object):
|
||||
"""Wait until the discrete timer reached the required time"""
|
||||
def __init__(self, dtimer):
|
||||
self.dtimer = dtimer
|
||||
|
||||
def sleep(self, t):
|
||||
end = t + self.dtimer.t
|
||||
while self.dtimer.t < end:
|
||||
sleep(0.0000001) # sleep a bit to interrupt (instead of pass)
|
||||
|
||||
|
||||
class FakeTqdm(object):
|
||||
_instances = []
|
||||
|
||||
|
||||
def make_create_fake_sleep_event(sleep):
|
||||
def wait(self, timeout=None):
|
||||
if timeout is not None:
|
||||
sleep(timeout)
|
||||
return self.is_set()
|
||||
|
||||
def create_fake_sleep_event():
|
||||
event = Event()
|
||||
event.wait = wait
|
||||
return event
|
||||
|
||||
return create_fake_sleep_event
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_monitor_thread():
|
||||
"""Test dummy monitoring thread"""
|
||||
maxinterval = 10
|
||||
|
||||
# Setup a discrete timer
|
||||
timer = DiscreteTimer()
|
||||
TMonitor._time = timer.time
|
||||
# And a fake sleeper
|
||||
sleeper = FakeSleep(timer)
|
||||
TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
|
||||
|
||||
# Instantiate the monitor
|
||||
monitor = TMonitor(FakeTqdm, maxinterval)
|
||||
# Test if alive, then killed
|
||||
assert monitor.report()
|
||||
monitor.exit()
|
||||
timer.sleep(maxinterval * 2) # need to go out of the sleep to die
|
||||
assert not monitor.report()
|
||||
# assert not monitor.is_alive()  # not working, unclear why (thread not killed)
|
||||
del monitor
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_monitoring_and_cleanup():
|
||||
"""Test for stalled tqdm instance and monitor deletion"""
|
||||
# Note: should fix miniters for these tests, else with dynamic_miniters
|
||||
# it's too complicated to handle with monitoring update and maxinterval...
|
||||
maxinterval = 2
|
||||
|
||||
total = 1000
|
||||
# Setup a discrete timer
|
||||
timer = DiscreteTimer()
|
||||
# And a fake sleeper
|
||||
sleeper = FakeSleep(timer)
|
||||
# Setup TMonitor to use the timer
|
||||
TMonitor._time = timer.time
|
||||
TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
|
||||
# Set monitor interval
|
||||
tqdm.monitor_interval = maxinterval
|
||||
with closing(StringIO()) as our_file:
|
||||
with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
|
||||
maxinterval=maxinterval) as t:
|
||||
cpu_timify(t, timer)
|
||||
# Do a lot of iterations in a small timeframe
|
||||
# (smaller than monitor interval)
|
||||
timer.sleep(maxinterval / 2) # monitor won't wake up
|
||||
t.update(500)
|
||||
# check that our fixed miniters is still there
|
||||
assert t.miniters == 500
|
||||
# Then do 1 it after monitor interval, so that monitor kicks in
|
||||
timer.sleep(maxinterval * 2)
|
||||
t.update(1)
|
||||
# Wait for the monitor to get out of sleep's loop and update tqdm..
|
||||
timeend = timer.time()
|
||||
while not (t.monitor.woken >= timeend and t.miniters == 1):
|
||||
timer.sleep(1)  # Force monitor to wake up if it woke up too soon
|
||||
sleep(0.000001) # sleep to allow interrupt (instead of pass)
|
||||
assert t.miniters == 1 # check that monitor corrected miniters
|
||||
# Note: at this point, there may be a race condition: monitor saved
|
||||
# current woken time but timer.sleep() happens just before monitor
|
||||
# sleep. To fix that, either sleep here or increase time in a loop
|
||||
# to ensure that monitor wakes up at some point.
|
||||
|
||||
# Try again but already at miniters = 1 so nothing will be done
|
||||
timer.sleep(maxinterval * 2)
|
||||
t.update(2)
|
||||
timeend = timer.time()
|
||||
while not (t.monitor.woken >= timeend):
|
||||
timer.sleep(1)  # Force monitor to wake up if it woke up too soon
|
||||
sleep(0.000001)
|
||||
# Wait for the monitor to get out of sleep's loop and update tqdm..
|
||||
assert t.miniters == 1 # check that monitor corrected miniters
|
||||
|
||||
# Check that class var monitor is deleted if no instance left
|
||||
tqdm.monitor_interval = 10
|
||||
assert tqdm.monitor is None
|
||||
|
||||
|
||||
@with_setup(pretest, posttest)
|
||||
def test_monitoring_multi():
|
||||
"""Test on multiple bars, one not needing miniters adjustment"""
|
||||
# Note: should fix miniters for these tests, else with dynamic_miniters
|
||||
# it's too complicated to handle with monitoring update and maxinterval...
|
||||
maxinterval = 2
|
||||
|
||||
total = 1000
|
||||
# Setup a discrete timer
|
||||
timer = DiscreteTimer()
|
||||
# And a fake sleeper
|
||||
sleeper = FakeSleep(timer)
|
||||
# Setup TMonitor to use the timer
|
||||
TMonitor._time = timer.time
|
||||
TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
|
||||
# Set monitor interval
|
||||
tqdm.monitor_interval = maxinterval
|
||||
with closing(StringIO()) as our_file:
|
||||
with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
|
||||
maxinterval=maxinterval) as t1:
|
||||
# Set high maxinterval for t2 so monitor does not need to adjust it
|
||||
with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
|
||||
maxinterval=1E5) as t2:
|
||||
cpu_timify(t1, timer)
|
||||
cpu_timify(t2, timer)
|
||||
# Do a lot of iterations in a small timeframe
|
||||
timer.sleep(maxinterval / 2)
|
||||
t1.update(500)
|
||||
t2.update(500)
|
||||
assert t1.miniters == 500
|
||||
assert t2.miniters == 500
|
||||
# Then do 1 it after monitor interval, so that monitor kicks in
|
||||
timer.sleep(maxinterval * 2)
|
||||
t1.update(1)
|
||||
t2.update(1)
|
||||
# Wait for the monitor to get out of sleep and update tqdm
|
||||
timeend = timer.time()
|
||||
while not (t1.monitor.woken >= timeend and t1.miniters == 1):
|
||||
timer.sleep(1)
|
||||
sleep(0.000001)
|
||||
assert t1.miniters == 1 # check that monitor corrected miniters
|
||||
assert t2.miniters == 500 # check that t2 was not adjusted
|
||||
|
||||
# Check that class var monitor is deleted if no instance left
|
||||
tqdm.monitor_interval = 10
|
||||
assert tqdm.monitor is None
|
||||
lib/tqdm/tests/tests_tqdm.py (new file, 1541 lines; diff suppressed because it is too large)
lib/tqdm/tests/tests_version.py (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
import re
|
||||
|
||||
|
||||
def test_version():
|
||||
"""Test version string"""
|
||||
from tqdm import __version__
|
||||
version_parts = re.split('[.-]', __version__)
|
||||
assert 3 <= len(version_parts) # must have at least Major.minor.patch
|
||||
try:
|
||||
map(int, version_parts[:3])
|
||||
except ValueError:
|
||||
raise TypeError('Version Major.minor.patch must be 3 integers')
|
||||
@@ -23,7 +23,7 @@ __author__ = 'The Python-Twitter Developers'
|
||||
__email__ = 'python-twitter@googlegroups.com'
|
||||
__copyright__ = 'Copyright (c) 2007-2016 The Python-Twitter Developers'
|
||||
__license__ = 'Apache License 2.0'
|
||||
__version__ = '3.0rc1'
|
||||
__version__ = '3.4.1'
|
||||
__url__ = 'https://github.com/bear/python-twitter'
|
||||
__download_url__ = 'https://pypi.python.org/pypi/python-twitter'
|
||||
__description__ = 'A Python wrapper around the Twitter API'
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
from hashlib import md5
|
||||
@@ -47,7 +46,7 @@ class _FileCache(object):
|
||||
path = self._GetPath(key)
|
||||
if not path.startswith(self._root_directory):
|
||||
raise _FileCacheError('%s does not appear to live under %s' %
|
||||
(path, self._root_directory ))
|
||||
(path, self._root_directory))
|
||||
if os.path.exists(path):
|
||||
os.remove(path)
|
||||
|
||||
@@ -101,61 +100,3 @@ class _FileCache(object):
|
||||
|
||||
def _GetPrefix(self, hashed_key):
|
||||
return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
|
||||
|
||||
|
||||
class ParseTweet(object):
|
||||
# compile once on import
|
||||
regexp = {"RT": "^RT", "MT": r"^MT", "ALNUM": r"(@[a-zA-Z0-9_]+)",
|
||||
"HASHTAG": r"(#[\w\d]+)", "URL": r"([http://]?[a-zA-Z\d\/]+[\.]+[a-zA-Z\d\/\.]+)"}
|
||||
regexp = dict((key, re.compile(value)) for key, value in list(regexp.items()))
|
||||
|
||||
def __init__(self, timeline_owner, tweet):
|
||||
""" timeline_owner : twitter handle of user account. tweet - 140 chars from feed; object does all computation on construction
|
||||
properties:
|
||||
RT, MT - boolean
|
||||
URLs - list of URL
|
||||
Hashtags - list of tags
|
||||
"""
|
||||
self.Owner = timeline_owner
|
||||
self.tweet = tweet
|
||||
self.UserHandles = ParseTweet.getUserHandles(tweet)
|
||||
self.Hashtags = ParseTweet.getHashtags(tweet)
|
||||
self.URLs = ParseTweet.getURLs(tweet)
|
||||
self.RT = ParseTweet.getAttributeRT(tweet)
|
||||
self.MT = ParseTweet.getAttributeMT(tweet)
|
||||
|
||||
# additional intelligence
|
||||
if ( self.RT and len(self.UserHandles) > 0 ): # change the owner of tweet?
|
||||
self.Owner = self.UserHandles[0]
|
||||
return
|
||||
|
||||
def __str__(self):
|
||||
""" for display method """
|
||||
return "owner %s, urls: %d, hashtags %d, user_handles %d, len_tweet %d, RT = %s, MT = %s" % (
|
||||
self.Owner, len(self.URLs), len(self.Hashtags), len(self.UserHandles),
|
||||
len(self.tweet), self.RT, self.MT)
|
||||
|
||||
@staticmethod
|
||||
def getAttributeRT(tweet):
|
||||
""" see if tweet is a RT """
|
||||
return re.search(ParseTweet.regexp["RT"], tweet.strip()) is not None
|
||||
|
||||
@staticmethod
|
||||
def getAttributeMT(tweet):
|
||||
""" see if tweet is a MT """
|
||||
return re.search(ParseTweet.regexp["MT"], tweet.strip()) is not None
|
||||
|
||||
@staticmethod
|
||||
def getUserHandles(tweet):
|
||||
""" given a tweet we try and extract all user handles in order of occurrence"""
|
||||
return re.findall(ParseTweet.regexp["ALNUM"], tweet)
|
||||
|
||||
@staticmethod
|
||||
def getHashtags(tweet):
|
||||
""" return all hashtags"""
|
||||
return re.findall(ParseTweet.regexp["HASHTAG"], tweet)
|
||||
|
||||
@staticmethod
|
||||
def getURLs(tweet):
|
||||
""" URL : [http://]?[\w\.?/]+"""
|
||||
return re.findall(ParseTweet.regexp["URL"], tweet)
|
||||
|
||||
lib/twitter/api.py (2183 lines changed; diff suppressed because it is too large)
@@ -8,3 +8,18 @@ class TwitterError(Exception):
|
||||
def message(self):
|
||||
'''Returns the first argument used to construct this error.'''
|
||||
return self.args[0]
|
||||
|
||||
|
||||
class PythonTwitterDeprecationWarning(DeprecationWarning):
|
||||
"""Base class for python-twitter deprecation warnings"""
|
||||
pass
|
||||
|
||||
|
||||
class PythonTwitterDeprecationWarning330(PythonTwitterDeprecationWarning):
|
||||
"""Warning for features to be removed in version 3.3.0"""
|
||||
pass
|
||||
|
||||
|
||||
class PythonTwitterDeprecationWarning340(PythonTwitterDeprecationWarning):
|
||||
"""Warning for features to be removed in version 3.4.0"""
|
||||
pass
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.