Compare commits
236 Commits
v2.0.22
...
v2.1.5-bet
Author | SHA1 | Date | |
---|---|---|---|
![]() |
8e3fe7bfa2 | ||
![]() |
6f22c823be | ||
![]() |
34d7c67813 | ||
![]() |
862ed5ce9f | ||
![]() |
84406e6797 | ||
![]() |
19cf567366 | ||
![]() |
8af697a157 | ||
![]() |
76122bea5d | ||
![]() |
1a12422908 | ||
![]() |
2df9f0b48b | ||
![]() |
8540b80e57 | ||
![]() |
8ad565a444 | ||
![]() |
f91b6481b3 | ||
![]() |
826db082c9 | ||
![]() |
f3d64a7886 | ||
![]() |
031d078bc2 | ||
![]() |
04fcd78102 | ||
![]() |
53d1e0f541 | ||
![]() |
9719f0b25b | ||
![]() |
6d1d5bc822 | ||
![]() |
0d7bbe044d | ||
![]() |
b1dc5816a4 | ||
![]() |
476011a783 | ||
![]() |
e038c57c4c | ||
![]() |
a989a53750 | ||
![]() |
d8cfdea704 | ||
![]() |
ed4722c4ce | ||
![]() |
17ab5f05ed | ||
![]() |
71ab2248d7 | ||
![]() |
4fb4410552 | ||
![]() |
a915d2333f | ||
![]() |
aaf5a18251 | ||
![]() |
b90026801b | ||
![]() |
e9676e3651 | ||
![]() |
c16d3288d8 | ||
![]() |
0d7ade8ca4 | ||
![]() |
87b1118e98 | ||
![]() |
9f6422cc8d | ||
![]() |
df1a42a4ee | ||
![]() |
6554136a8f | ||
![]() |
81e04269fd | ||
![]() |
b6c6590a12 | ||
![]() |
136260a822 | ||
![]() |
5710bcb43c | ||
![]() |
30bc3f8a66 | ||
![]() |
e0e7d68df2 | ||
![]() |
cf73639281 | ||
![]() |
008e04d5cf | ||
![]() |
5f7991665c | ||
![]() |
5e000162c6 | ||
![]() |
ea1aba2c87 | ||
![]() |
f321bb869c | ||
![]() |
abe496668a | ||
![]() |
9cefc7f701 | ||
![]() |
1d3cd431eb | ||
![]() |
8f8318da6d | ||
![]() |
36ce751875 | ||
![]() |
858ea33680 | ||
![]() |
eee759d0d0 | ||
![]() |
dbe3b492fd | ||
![]() |
4e4fde2e9a | ||
![]() |
5283126608 | ||
![]() |
df72ecebf5 | ||
![]() |
d316aa34e2 | ||
![]() |
405aec8bb8 | ||
![]() |
4a62f8c395 | ||
![]() |
eabea2deeb | ||
![]() |
3742021dcc | ||
![]() |
9c4219b42e | ||
![]() |
f624908302 | ||
![]() |
ab9132cdd4 | ||
![]() |
0186363753 | ||
![]() |
653ad36f17 | ||
![]() |
5073f82d53 | ||
![]() |
833937eced | ||
![]() |
32df79bb83 | ||
![]() |
fabced9942 | ||
![]() |
8aa34321c9 | ||
![]() |
b144ded87b | ||
![]() |
ef8c91ee56 | ||
![]() |
d76ded3ebe | ||
![]() |
c4fc94ea34 | ||
![]() |
ad61e23d92 | ||
![]() |
fcd7593764 | ||
![]() |
8465df5095 | ||
![]() |
95697a3367 | ||
![]() |
978ae7d8cb | ||
![]() |
366e8514b6 | ||
![]() |
45c646c062 | ||
![]() |
4b482938a1 | ||
![]() |
9699129a38 | ||
![]() |
5ef8947532 | ||
![]() |
f335ffa8d5 | ||
![]() |
793665d62a | ||
![]() |
7da5730c73 | ||
![]() |
1f587ed698 | ||
![]() |
1032fdfe7a | ||
![]() |
35e3f7dccc | ||
![]() |
909cbc90df | ||
![]() |
77ed94bbef | ||
![]() |
c260543586 | ||
![]() |
a4de63095f | ||
![]() |
817335b42e | ||
![]() |
818e7723ff | ||
![]() |
a69008e179 | ||
![]() |
91c647f9ae | ||
![]() |
36b80aa6d3 | ||
![]() |
c35fcc727c | ||
![]() |
749e1fcebe | ||
![]() |
80506b8541 | ||
![]() |
80df2b0fad | ||
![]() |
084732706d | ||
![]() |
2aff7713cd | ||
![]() |
683a782723 | ||
![]() |
5108e1bb09 | ||
![]() |
d8298a12eb | ||
![]() |
dec5931fd4 | ||
![]() |
71d79266f6 | ||
![]() |
d3f6812178 | ||
![]() |
042b48c1fd | ||
![]() |
38613f24fe | ||
![]() |
e23b1a0603 | ||
![]() |
90f3d597dc | ||
![]() |
d166b77ea9 | ||
![]() |
feb74b157f | ||
![]() |
4aeafdae2d | ||
![]() |
f12de78370 | ||
![]() |
d2415c92ea | ||
![]() |
646ca1d9fa | ||
![]() |
c8c93c69ab | ||
![]() |
2c8c20af02 | ||
![]() |
a877da3de8 | ||
![]() |
1b7cfd7f8a | ||
![]() |
3f7edc3635 | ||
![]() |
8fac54aa71 | ||
![]() |
244008d539 | ||
![]() |
502b807e45 | ||
![]() |
35914b9a48 | ||
![]() |
24ac34d5e2 | ||
![]() |
e1035a49fd | ||
![]() |
511f4a916b | ||
![]() |
1f10668838 | ||
![]() |
a9a08a959c | ||
![]() |
341f4040ff | ||
![]() |
e9a1b2ea38 | ||
![]() |
7f67213ff7 | ||
![]() |
e9bdbb863c | ||
![]() |
04641c7c63 | ||
![]() |
15cc96a005 | ||
![]() |
b712874ed2 | ||
![]() |
5b1ff402bc | ||
![]() |
eda0e73eb6 | ||
![]() |
a5807f21b4 | ||
![]() |
e3b71a729e | ||
![]() |
f810f50ea9 | ||
![]() |
2b0f83e036 | ||
![]() |
4977b3def1 | ||
![]() |
1cb5f0b635 | ||
![]() |
7e11af1fd0 | ||
![]() |
6f6fb485fe | ||
![]() |
964f24d6ab | ||
![]() |
1474f144fe | ||
![]() |
8d25b0c973 | ||
![]() |
50b37d6b3a | ||
![]() |
b9b82b23f7 | ||
![]() |
b6bd305694 | ||
![]() |
ebb287e1ee | ||
![]() |
bd3497b2bf | ||
![]() |
034f3ee308 | ||
![]() |
a946879fc1 | ||
![]() |
9f964b5a87 | ||
![]() |
2245e38d40 | ||
![]() |
c9618322c2 | ||
![]() |
960e147e10 | ||
![]() |
bbca0b3b42 | ||
![]() |
ed0b41cd19 | ||
![]() |
dc87591992 | ||
![]() |
1f7be7a4d5 | ||
![]() |
d05e80e573 | ||
![]() |
003e890844 | ||
![]() |
afa16cd656 | ||
![]() |
9aff61f670 | ||
![]() |
8b1c7df3ce | ||
![]() |
25355f29ce | ||
![]() |
09ea81ccd2 | ||
![]() |
28efaf73c7 | ||
![]() |
0057481efb | ||
![]() |
827b012978 | ||
![]() |
0e419695cf | ||
![]() |
46f26cc307 | ||
![]() |
46f7a92c97 | ||
![]() |
2a24ea4cdf | ||
![]() |
8e13bf4f93 | ||
![]() |
aa844b76fc | ||
![]() |
0e5bb7b188 | ||
![]() |
49a6cf8809 | ||
![]() |
2adad24684 | ||
![]() |
d4d5ff9de7 | ||
![]() |
33c2315384 | ||
![]() |
4577704f19 | ||
![]() |
a13d93f239 | ||
![]() |
5ac5b3cd29 | ||
![]() |
d104ec216c | ||
![]() |
32645c374e | ||
![]() |
d1f982847b | ||
![]() |
7770431b67 | ||
![]() |
edeb6ae4e4 | ||
![]() |
af3501a6a6 | ||
![]() |
0f39201774 | ||
![]() |
b73d2ff1f7 | ||
![]() |
6009fb24b6 | ||
![]() |
522684b2ab | ||
![]() |
feab16b351 | ||
![]() |
ee041db63d | ||
![]() |
2479533d07 | ||
![]() |
d045fd5834 | ||
![]() |
8407f27fed | ||
![]() |
b505286caf | ||
![]() |
feb762ce8b | ||
![]() |
8acdb5af83 | ||
![]() |
5af1294f71 | ||
![]() |
87d2d273d3 | ||
![]() |
b5c52ac71e | ||
![]() |
efe9a15f72 | ||
![]() |
525f1e4b0b | ||
![]() |
d18820b832 | ||
![]() |
7e024fd736 | ||
![]() |
c9c5989474 | ||
![]() |
ce9f96d3be | ||
![]() |
7362dd0bf4 | ||
![]() |
9905ebc144 | ||
![]() |
8f8010884b | ||
![]() |
37afd141be | ||
![]() |
a3643b4302 | ||
![]() |
02cfd8d9b7 | ||
![]() |
941ce439b4 |
2
.gitignore
vendored
@@ -15,7 +15,9 @@
|
||||
release.lock
|
||||
version.lock
|
||||
logs/*
|
||||
backups/*
|
||||
cache/*
|
||||
newsletters/*
|
||||
*.mmdb
|
||||
|
||||
# HTTPS Cert/Key #
|
||||
|
3
API.md
@@ -1674,7 +1674,8 @@ Optional parameters:
|
||||
remote (int): 0 or 1
|
||||
|
||||
Returns:
|
||||
string: The unique PMS identifier
|
||||
json:
|
||||
{'identifier': '08u2phnlkdshf890bhdlksghnljsahgleikjfg9t'}
|
||||
```
|
||||
|
||||
|
||||
|
138
CHANGELOG.md
@@ -1,5 +1,143 @@
|
||||
# Changelog
|
||||
|
||||
## v2.1.5-beta (2018-05-07)
|
||||
|
||||
* Newsletters:
|
||||
* New: Added setting for a custom newsletter template folder.
|
||||
* New: Added option to enable static newsletter URLs to retrieve the last sent scheduled newsletter.
|
||||
* New: Added ability to change the newsletter output directory and filenames.
|
||||
* New: Added option to save the newsletter file without sending it to a notification agent.
|
||||
* Fix: Check for disabled image hosting setting.
|
||||
* Fix: Cache newsletter images when refreshing the page.
|
||||
* Fix: Refresh image from the Plex server when uploading to image hosting.
|
||||
* Change: Allow all image hosting options with self-hosted newsletters.
|
||||
* UI:
|
||||
* Change: Don't retrieve recently added on the homepage if the Plex Cloud server is sleeping.
|
||||
* Other:
|
||||
* Fix: Imgur database upgrade migration.
|
||||
|
||||
|
||||
## v2.1.4 (2018-05-05)
|
||||
|
||||
* Newsletters:
|
||||
* Fix: Newsletter URL without an HTTP root.
|
||||
|
||||
|
||||
## v2.1.3-beta (2018-05-04)
|
||||
|
||||
* Newsletters:
|
||||
* Fix: HTTP root doubled in newsletter URL.
|
||||
* Fix: Configuration would not open with failed hostname resolution.
|
||||
* Fix: Schedule one day off when using weekday names in cron.
|
||||
* Fix: Images not refreshing when changed in Plex.
|
||||
* Fix: Cloudinary upload with non-ASCII image titles.
|
||||
* Other:
|
||||
* Fix: Potential XSS vulnerability in search.
|
||||
|
||||
|
||||
## v2.1.2-beta (2018-05-01)
|
||||
|
||||
* Newsletters:
|
||||
* New: Added Cloudinary option for image hosting.
|
||||
* Notifications:
|
||||
* New: Added Message-ID to Email header (Thanks @Dam64)
|
||||
* Fix: Posters not showing up on Twitter with self-hosted images.
|
||||
* Fix: Incorrect action parameter for new device notifications.
|
||||
* Change: Hardcode Pushover sound list instead of fetching the list every time.
|
||||
* API:
|
||||
* Fix: Success result for empty response data.
|
||||
* Change: Do not send notification when checking for Tautulli updates via the API.
|
||||
|
||||
|
||||
## v2.1.1-beta (2018-04-11)
|
||||
|
||||
* Monitoring:
|
||||
* Fix: Live TV transcoding showing incorrectly as direct play.
|
||||
* Newsletters:
|
||||
* New: Added week number as parameter. (Thanks @samip5)
|
||||
* Fix: Fallback to cover art on the newsletter cards.
|
||||
* Change: Option to set newsletter time frame by calendar days or hours.
|
||||
* Notifications:
|
||||
* New: Added week number as parameter. (Thanks @samip5)
|
||||
* Other:
|
||||
* New: Added plexapi library for custom scripts.
|
||||
|
||||
|
||||
## v2.1.0-beta (2018-04-07)
|
||||
|
||||
* Newsletters:
|
||||
* New: A completely new scheduled newsletter system.
|
||||
* Beautiful HTML formatted newsletter for recently added movies, TV shows, or music.
|
||||
* Send newsletters on a daily, weekly, or monthly schedule to your users.
|
||||
* Customize the number of days of recently added content and the libraries to include on the newsletter.
|
||||
* Add a custom message to be included on the newsletter.
|
||||
* Option to either send an HTML formatted email, or a link to a self-hosted newsletter on your own domain to any notification agent.
|
||||
* Notifications:
|
||||
* New: Ability to use self-hosted images on your own domain instead of using Imgur.
|
||||
|
||||
|
||||
## v2.0.28 (2018-04-02)
|
||||
|
||||
* Monitoring:
|
||||
* Fix: Homepage activity header text.
|
||||
|
||||
|
||||
## v2.0.27 (2018-04-02)
|
||||
|
||||
* Monitoring:
|
||||
* Change: Move activity refresh interval setting to the settings page.
|
||||
|
||||
|
||||
## v2.0.26-beta (2018-03-30)
|
||||
|
||||
* Monitoring:
|
||||
* New: Setting to change the refresh interval on the homepage.
|
||||
* Fix: Identify extras correctly on the activity cards.
|
||||
* Notifications:
|
||||
* Change: Send Telegram image and text separately if the caption is longer than 200 characters.
|
||||
* UI:
|
||||
* Fix: Error when clicking on synced playlist links.
|
||||
|
||||
|
||||
## v2.0.25 (2018-03-22)
|
||||
|
||||
* Monitoring:
|
||||
* Fix: Websocket not reconnecting causing activity monitoring and notifications to not work.
|
||||
* Fix: Error checking for synced streams without Plex Pass.
|
||||
|
||||
|
||||
## v2.0.24 (2018-03-18)
|
||||
|
||||
* Monitoring:
|
||||
* Fix: Fix stream data not showing for history recorded before v2.
|
||||
* Notifications:
|
||||
* Fix: Set all environment variables for scripts.
|
||||
* Change: Moved all notification agent instructions to the wiki.
|
||||
* Change: XBMC notification agent renamed to Kodi.
|
||||
* Change: OSX Notify notification agent renamed to macOS Notification Center.
|
||||
|
||||
|
||||
## v2.0.23-beta (2018-03-16)
|
||||
|
||||
* Monitoring:
|
||||
* Fix: Certain transcode stream showing incorrectly as direct play in history. Fix is not retroactive.
|
||||
* Notifications:
|
||||
* New: Added season/episode/album/track count to notification parameters.
|
||||
* New: Added "Value 3" setting for IFTTT notifications.
|
||||
* New: Set PLEX_URL, PLEX_TOKEN, TAUTULLI_URL, and TAUTULLI_APIKEY environment variables for scripts.
|
||||
* Fix: Notifications failing to send with invalid custom conditions json.
|
||||
* Fix: Email notifications failing with unicode username/passwords.
|
||||
* Change: Facebook Graph API version updated to v2.12.
|
||||
* UI:
|
||||
* New: Show the Plex Server URL in the settings.
|
||||
* Fix: Incorrect info displayed in the Tautulli login logs.
|
||||
* API:
|
||||
* Fix: API returning empty data if a message was in the original data.
|
||||
* Change: get_server_id command returns json instead of string.
|
||||
* Other:
|
||||
* Fix: Forgot git pull when changing branches in the web UI.
|
||||
|
||||
|
||||
## v2.0.22 (2018-03-10)
|
||||
|
||||
* Tautulli v2 release!
|
||||
|
@@ -188,7 +188,10 @@ def main():
|
||||
# Move 'plexpy.db' to 'tautulli.db'
|
||||
if os.path.isfile(os.path.join(plexpy.DATA_DIR, 'plexpy.db')) and \
|
||||
not os.path.isfile(os.path.join(plexpy.DATA_DIR, plexpy.DB_FILE)):
|
||||
os.rename(os.path.join(plexpy.DATA_DIR, 'plexpy.db'), plexpy.DB_FILE)
|
||||
try:
|
||||
os.rename(os.path.join(plexpy.DATA_DIR, 'plexpy.db'), plexpy.DB_FILE)
|
||||
except OSError as e:
|
||||
raise SystemExit("Unable to rename plexpy.db to tautulli.db: %s", e)
|
||||
|
||||
if plexpy.DAEMON:
|
||||
plexpy.daemonize()
|
||||
|
@@ -49,6 +49,10 @@ DOCUMENTATION :: END
|
||||
<td>Cache Directory:</td>
|
||||
<td>${plexpy.CONFIG.CACHE_DIR}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Newsletter Directory:</td>
|
||||
<td>${plexpy.CONFIG.NEWSLETTER_DIR}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>GeoLite2 Database:</td>
|
||||
% if plexpy.CONFIG.GEOIP_DB:
|
||||
|
@@ -125,8 +125,10 @@ div.form-control .selectize-input {
|
||||
padding-bottom: 2px !important;
|
||||
transition: background-color .3s;
|
||||
}
|
||||
.react-selectize.root-node .simple-value span {
|
||||
.react-selectize.root-node .simple-value span,
|
||||
.selectize-control.multi .selectize-input > div {
|
||||
padding-bottom: 2px !important;
|
||||
padding-left: 5px !important;
|
||||
}
|
||||
.react-selectize.root-node .react-selectize-control .react-selectize-search-field-and-selected-values .value-wrapper:not(:first-child):before {
|
||||
content: "or";
|
||||
@@ -134,9 +136,6 @@ div.form-control .selectize-input {
|
||||
text-transform: uppercase;
|
||||
font-size: 10px;
|
||||
}
|
||||
.react-selectize.root-node .react-selectize-control .react-selectize-search-field-and-selected-values.negative-operator .value-wrapper:not(:first-child):before {
|
||||
content: "and" !important;
|
||||
}
|
||||
.react-selectize.root-node .react-selectize-control .react-selectize-search-field-and-selected-values .resizable-input {
|
||||
padding-top: 3px !important;
|
||||
padding-bottom: 3px !important;
|
||||
@@ -467,6 +466,18 @@ fieldset[disabled] .btn-bright.active {
|
||||
.btn-group select {
|
||||
margin-top: 0;
|
||||
}
|
||||
.input-group-addon-form {
|
||||
display: inline-block;
|
||||
line-height: 1.42857143;
|
||||
color: #e5e5e5;
|
||||
background-color: #3B3B3B;
|
||||
border: 1px solid transparent;
|
||||
border-top-right-radius: 3px !important;
|
||||
border-bottom-right-radius: 3px !important;
|
||||
height: 32px;
|
||||
width: 100%;
|
||||
margin-top: 5px;
|
||||
}
|
||||
#user-selection label {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
@@ -745,7 +756,10 @@ a .users-poster-face:hover {
|
||||
transition: all .2s ease-in-out;
|
||||
overflow: hidden;
|
||||
}
|
||||
.dashboard-activity-background-overlay {
|
||||
.dashboard-activity-background {
|
||||
background-color: #282828;
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
display: -webkit-flex;
|
||||
display: flex;
|
||||
-webkit-flex-wrap: nowrap;
|
||||
@@ -754,30 +768,13 @@ a .users-poster-face:hover {
|
||||
width: 100%;
|
||||
padding: 5px;
|
||||
overflow: hidden;
|
||||
-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.dashboard-activity-background {
|
||||
background-color: #282828;
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
height: 235px;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
opacity: 0.40;
|
||||
-webkit-filter: blur(3px);
|
||||
-moz-filter: blur(3px);
|
||||
filter: blur(3px);
|
||||
-webkit-transition: background 1s linear;
|
||||
transition: background 1s linear;
|
||||
-webkit-backface-visibility: hidden;
|
||||
backface-visibility: hidden;
|
||||
z-index: -1;
|
||||
-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.dashboard-activity-poster-container {
|
||||
background-color: #282828;
|
||||
@@ -808,14 +805,14 @@ a .users-poster-face:hover {
|
||||
background-size: cover;
|
||||
height: 225px;
|
||||
width: 150px;
|
||||
-webkit-transition: background .2s ease-in-out;
|
||||
transition: background .2s ease-in-out;
|
||||
-webkit-backface-visibility: hidden;
|
||||
backface-visibility: hidden;
|
||||
opacity: 0.60;
|
||||
-webkit-filter: blur(3px);
|
||||
-moz-filter: blur(3px);
|
||||
filter: blur(3px);
|
||||
-webkit-transition: background .2s ease-in-out;
|
||||
transition: background .2s ease-in-out;
|
||||
-webkit-backface-visibility: hidden;
|
||||
backface-visibility: hidden;
|
||||
z-index: 2;
|
||||
}
|
||||
.dashboard-activity-cover {
|
||||
@@ -1162,7 +1159,10 @@ a .dashboard-activity-metadata-user-thumb:hover {
|
||||
transition: all .2s ease-in-out;
|
||||
overflow: hidden;
|
||||
}
|
||||
.dashboard-stats-background-overlay {
|
||||
.dashboard-stats-background {
|
||||
background-color: #282828;
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
display: -webkit-flex;
|
||||
display: flex;
|
||||
-webkit-flex-wrap: nowrap;
|
||||
@@ -1171,30 +1171,13 @@ a .dashboard-activity-metadata-user-thumb:hover {
|
||||
width: 100%;
|
||||
padding: 5px;
|
||||
overflow: hidden;
|
||||
-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.dashboard-stats-background {
|
||||
background-color: #282828;
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
height: 160px;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
opacity: 0.40;
|
||||
-webkit-filter: blur(3px);
|
||||
-moz-filter: blur(3px);
|
||||
filter: blur(3px);
|
||||
-webkit-transition: background .2s ease-in-out;
|
||||
transition: background .2s ease-in-out;
|
||||
-webkit-backface-visibility: hidden;
|
||||
backface-visibility: hidden;
|
||||
z-index: -1;
|
||||
-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3), inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.dashboard-stats-background.flat {
|
||||
opacity: 1;
|
||||
@@ -1214,17 +1197,6 @@ a .dashboard-activity-metadata-user-thumb:hover {
|
||||
z-index: 1;
|
||||
}
|
||||
.dashboard-stats-poster {
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
height: 150px;
|
||||
width: 100px;
|
||||
-webkit-transition: background .2s ease-in-out;
|
||||
transition: background .2s ease-in-out;
|
||||
-webkit-backface-visibility: hidden;
|
||||
backface-visibility: hidden;
|
||||
z-index: 2;
|
||||
}
|
||||
.dashboard-stats-poster-blur {
|
||||
background-color: #282828;
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
@@ -1234,10 +1206,6 @@ a .dashboard-activity-metadata-user-thumb:hover {
|
||||
transition: background .2s ease-in-out;
|
||||
-webkit-backface-visibility: hidden;
|
||||
backface-visibility: hidden;
|
||||
opacity: 0.60;
|
||||
-webkit-filter: blur(3px);
|
||||
-moz-filter: blur(3px);
|
||||
filter: blur(3px);
|
||||
z-index: 2;
|
||||
}
|
||||
.dashboard-stats-cover {
|
||||
@@ -2160,6 +2128,12 @@ a:hover .item-children-poster {
|
||||
top: 5px;
|
||||
left: 12px;
|
||||
}
|
||||
.settings-warning {
|
||||
color: #eb8600;
|
||||
}
|
||||
span.settings-warning {
|
||||
padding-left: 10px;
|
||||
}
|
||||
#menu_link_show_advanced_settings.active {
|
||||
color: #fff;
|
||||
background-color: #cc7b19;
|
||||
@@ -2973,6 +2947,9 @@ a .home-platforms-list-cover-face:hover
|
||||
.stacked-configs > li.new-notification-agent,
|
||||
.stacked-configs > li.notification-agent,
|
||||
.stacked-configs > li.add-notification-agent,
|
||||
.stacked-configs > li.new-newsletter-agent,
|
||||
.stacked-configs > li.newsletter-agent,
|
||||
.stacked-configs > li.add-newsletter-agent,
|
||||
.stacked-configs > li.mobile-device,
|
||||
.stacked-configs > li.add-mobile-device {
|
||||
cursor: pointer;
|
||||
@@ -3657,43 +3634,77 @@ a:hover .overlay-refresh-image:hover {
|
||||
}
|
||||
#plexpy-notifiers-table .friendly_name,
|
||||
#notifier-config-modal span.notifier_id,
|
||||
#plexpy-newsletters-table .friendly_name,
|
||||
#newsletter-config-modal span.newsletter_id,
|
||||
#plexpy-mobile-devices-table .friendly_name,
|
||||
#mobile-device-config-modal span.notifier_id {
|
||||
color: #777;
|
||||
}
|
||||
#notifier-config-modal .nav-tabs {
|
||||
#notifier-config-modal .nav-tabs,
|
||||
#newsletter-config-modal .nav-tabs {
|
||||
margin-bottom: 10px;
|
||||
padding-left: 15px;
|
||||
border-bottom: 1px solid #444;
|
||||
}
|
||||
#notifier-config-modal .nav-tabs > li {
|
||||
#notifier-config-modal .nav-tabs > li,
|
||||
#newsletter-config-modal .nav-tabs > li {
|
||||
margin: 0 0 -1px 0;
|
||||
}
|
||||
#notifier-config-modal .nav-tabs > li > a {
|
||||
#notifier-config-modal .nav-tabs > li > a,
|
||||
#newsletter-config-modal .nav-tabs > li > a {
|
||||
padding: 5px 10px;
|
||||
color: #737373;
|
||||
}
|
||||
#notifier-config-modal .nav-tabs > li > a:hover {
|
||||
#notifier-config-modal .nav-tabs > li > a:hover,
|
||||
#newsletter-config-modal .nav-tabs > li > a:hover {
|
||||
border-color: #444;
|
||||
background: #222;
|
||||
}
|
||||
#notifier-config-modal .nav-tabs > li.active > a,
|
||||
#notifier-config-modal .nav-tabs > li.active > a:hover,
|
||||
#notifier-config-modal .nav-tabs > li.active > a:focus {
|
||||
#notifier-config-modal .nav-tabs > li.active > a:focus,
|
||||
#newsletter-config-modal .nav-tabs > li.active > a,
|
||||
#newsletter-config-modal .nav-tabs > li.active > a:hover,
|
||||
#newsletter-config-modal .nav-tabs > li.active > a:focus {
|
||||
color: #fff;
|
||||
background: #222;
|
||||
}
|
||||
#notifier-config-modal .nav-tabs > li.active > a,
|
||||
#notifier-config-modal .nav-tabs > li.active > a:hover,
|
||||
#notifier-config-modal .nav-tabs > li.active > a:focus {
|
||||
#notifier-config-modal .nav-tabs > li.active > a:focus,
|
||||
#newsletter-config-modal .nav-tabs > li.active > a,
|
||||
#newsletter-config-modal .nav-tabs > li.active > a:hover,
|
||||
#newsletter-config-modal .nav-tabs > li.active > a:focus {
|
||||
border: 1px solid #444;
|
||||
border-bottom-color: transparent;
|
||||
}
|
||||
#newsletter-config-modal #custom_cron {
|
||||
display: inline-block;
|
||||
width: initial;
|
||||
height: 32px;
|
||||
margin-right: 5px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
#newsletter-config-modal #cron-widget {
|
||||
display: inline-block;
|
||||
margin-top: 1px;
|
||||
}
|
||||
#newsletter-config-modal #cron-widget select.cron-select {
|
||||
width: initial;
|
||||
display: inline;
|
||||
height: 32px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
#newsletter-config-modal #cron-widget select.cron-select[name=cron-period] option[value=minute],
|
||||
#newsletter-config-modal #cron-widget select.cron-select[name=cron-period] option[value=hour] {
|
||||
display: none !important;
|
||||
}
|
||||
.git-group input.form-control {
|
||||
width: 50%;
|
||||
}
|
||||
.git-group select.form-control {
|
||||
width: 50%;
|
||||
height: 32px;
|
||||
}
|
||||
#changelog-modal .modal-body > h2 {
|
||||
margin-bottom: 10px;
|
||||
@@ -3840,6 +3851,90 @@ a:hover .overlay-refresh-image:hover {
|
||||
background-color: #107c10;
|
||||
background-image: url(../images/platforms/xbox.svg);
|
||||
}
|
||||
.platform-android-rgba {
|
||||
background-color: rgba(164, 202, 57, 0.40);
|
||||
}
|
||||
.platform-atv-rgba {
|
||||
background-color: rgba(133, 132, 135, 0.40);
|
||||
}
|
||||
.platform-chrome-rgba {
|
||||
background-color: rgba(237, 94, 80, 0.40);
|
||||
}
|
||||
.platform-chromecast-rgba {
|
||||
background-color: rgba(16, 164, 232, 0.40);
|
||||
}
|
||||
.platform-default-rgba {
|
||||
background-color: rgba(229, 160, 13, 0.40);
|
||||
}
|
||||
.platform-dlna-rgba {
|
||||
background-color: rgba(12, 177, 75, 0.40);
|
||||
}
|
||||
.platform-firefox-rgba {
|
||||
background-color: rgba(230, 120, 23, 0.40);
|
||||
}
|
||||
.platform-gtv-rgba {
|
||||
background-color: rgba(0, 139, 207, 0.40);
|
||||
}
|
||||
.platform-ie-rgba {
|
||||
background-color: rgba(0, 89, 158, 0.40);
|
||||
}
|
||||
.platform-ios-rgba {
|
||||
background-color: rgba(133, 132, 135, 0.40);
|
||||
}
|
||||
.platform-kodi-rgba {
|
||||
background-color: rgba(49, 175, 225, 0.40);
|
||||
}
|
||||
.platform-linux-rgba {
|
||||
background-color: rgba(23, 147, 208, 0.40);
|
||||
}
|
||||
.platform-macos-rgba {
|
||||
background-color: rgba(133, 132, 135, 0.40);
|
||||
}
|
||||
.platform-msedge-rgba {
|
||||
background-color: rgba(0, 120, 215, 0.40);
|
||||
}
|
||||
.platform-opera-rgba {
|
||||
background-color: rgba(255, 27, 45, 0.40);
|
||||
}
|
||||
.platform-playstation-rgba {
|
||||
background-color: rgba(3, 77, 162, 0.40);
|
||||
}
|
||||
.platform-plex-rgba {
|
||||
background-color: rgba(229, 160, 13, 0.40);
|
||||
}
|
||||
.platform-plexamp-rgba {
|
||||
background-color: rgba(229, 160, 13, 0.40);
|
||||
}
|
||||
.platform-roku-rgba {
|
||||
background-color: rgba(109, 60, 151, 0.40);
|
||||
}
|
||||
.platform-safari-rgba {
|
||||
background-color: rgba(0, 169, 236, 0.40);
|
||||
}
|
||||
.platform-samsung-rgba {
|
||||
background-color: rgba(3, 78, 162, 0.40);
|
||||
}
|
||||
.platform-synclounge-rgba {
|
||||
background-color: rgba(21, 25, 36, 0.40);
|
||||
}
|
||||
.platform-tivo-rgba {
|
||||
background-color: rgba(0, 167, 225, 0.40);
|
||||
}
|
||||
.platform-wiiu-rgba {
|
||||
background-color: rgba(3, 169, 244, 0.40);
|
||||
}
|
||||
.platform-windows-rgba {
|
||||
background-color: rgba(47, 192, 245, 0.40);
|
||||
}
|
||||
.platform-wp-rgba {
|
||||
background-color: rgba(104, 33, 122, 0.40);
|
||||
}
|
||||
.platform-xbmc-rgba {
|
||||
background-color: rgba(59, 72, 114, 0.40);
|
||||
}
|
||||
.platform-xbox-rgba {
|
||||
background-color: rgba(16, 124, 16, 0.40);
|
||||
}
|
||||
.library-movie {
|
||||
background-image: url(../images/libraries/movie.svg);
|
||||
}
|
||||
@@ -3951,3 +4046,50 @@ a:hover .overlay-refresh-image:hover {
|
||||
-webkit-appearance: none;
|
||||
margin: 0;
|
||||
}
|
||||
.newsletter-time_frame .input-group-addon {
|
||||
height: 32px;
|
||||
width: 52px;
|
||||
margin-top: 5px;
|
||||
line-height: 1.42857143;
|
||||
}
|
||||
.newsletter-time_frame input.form-control {
|
||||
width: calc(50% - 37px);
|
||||
}
|
||||
.newsletter-time_frame select.form-control {
|
||||
width: calc(50% - 15px);
|
||||
height: 32px;
|
||||
}
|
||||
.newsletter-loader-container {
|
||||
font-family: 'Open Sans', Arial, sans-serif;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
}
|
||||
.newsletter-loader-message {
|
||||
color: #282A2D;
|
||||
text-align: center;
|
||||
position: absolute;
|
||||
left: 50%;
|
||||
top: 25%;
|
||||
transform: translate(-50%, -50%);
|
||||
}
|
||||
.newsletter-loader {
|
||||
border: 5px solid #ccc;
|
||||
-webkit-animation: spin 1s linear infinite;
|
||||
animation: spin 1s linear infinite;
|
||||
border-top: 5px solid #282A2D;
|
||||
border-radius: 50%;
|
||||
width: 50px;
|
||||
height: 50px;
|
||||
position: relative;
|
||||
left: calc(50% - 25px);
|
||||
}
|
||||
@keyframes spin {
|
||||
0% { transform: rotate(0deg); }
|
||||
100% { transform: rotate(360deg); }
|
||||
}
|
||||
a[data-tab-destination] {
|
||||
cursor: pointer;
|
||||
}
|
@@ -64,7 +64,7 @@ DOCUMENTATION :: END
|
||||
from collections import defaultdict
|
||||
from urllib import quote
|
||||
from plexpy import helpers
|
||||
from plexpy.common import VIDEO_RESOLUTION_OVERRIDES, AUDIO_CODEC_OVERRIDES
|
||||
from plexpy.common import VIDEO_RESOLUTION_OVERRIDES, AUDIO_CODEC_OVERRIDES, EXTRA_TYPES
|
||||
import plexpy
|
||||
%>
|
||||
<%
|
||||
@@ -79,20 +79,19 @@ DOCUMENTATION :: END
|
||||
<div class="dashboard-activity-instance" id="activity-instance-${sk}" data-key="${sk}" data-id="${data['session_id']}"
|
||||
data-rating_key="${data['rating_key']}" data-parent_rating_key="${data['parent_rating_key']}" data-grandparent_rating_key="${data['grandparent_rating_key']}">
|
||||
<div class="dashboard-activity-container">
|
||||
<div class="dashboard-activity-background-overlay">
|
||||
% if data['channel_stream'] == 0:
|
||||
<div id="background-${sk}" class="dashboard-activity-background" style="background-image: url(pms_image_proxy?img=${data['art']}&width=500&height=280&fallback=art&refresh=true);"></div>
|
||||
% else:
|
||||
% if (data['art'] and data['art'].startswith('http')) or (data['thumb'] and data['thumb'].startswith('http')):
|
||||
<div id="background-${sk}" class="dashboard-activity-background" style="background-image: url(${data['art']});"></div>
|
||||
% else:
|
||||
<!--Hacky solution to escape the image url until I come up with something better-->
|
||||
<div id="background-${sk}" class="dashboard-activity-background" style="background-image: url(pms_image_proxy?img=${quote(data['art'] or data['thumb'])}&width=500&height=280&fallback=art&refresh=true&clip=true);"></div>
|
||||
% endif
|
||||
% endif
|
||||
<%
|
||||
if data['channel_stream'] == 0:
|
||||
background_url = 'pms_image_proxy?img=' + data['art'] + '&width=500&height=280&opacity=40&background=282828&blur=3&fallback=art&refresh=true'
|
||||
else:
|
||||
if (data['art'] and data['art'].startswith('http')) or (data['thumb'] and data['thumb'].startswith('http')):
|
||||
background_url = data['art']
|
||||
else:
|
||||
background_url = 'pms_image_proxy?img=' + quote(data['art'] or data['thumb']) + '&width=500&height=280&fallback=art&refresh=true&clip=true'
|
||||
%>
|
||||
<div id="background-${sk}" class="dashboard-activity-background" style="background-image: url(${background_url});">
|
||||
<div class="dashboard-activity-poster-container hidden-xs">
|
||||
% if data['media_type'] == 'track':
|
||||
<div id="poster-${sk}-bg" class="dashboard-activity-poster-blur" style="background-image: url(pms_image_proxy?img=${data['parent_thumb']}&width=300&height=300&fallback=cover&refresh=true);"></div>
|
||||
<div id="poster-${sk}-bg" class="dashboard-activity-poster" style="background-image: url(pms_image_proxy?img=${data['parent_thumb']}&width=300&height=300&opacity=60&background=282828&blur=3&fallback=cover&refresh=true);"></div>
|
||||
% endif
|
||||
% if data['channel_stream'] == 0:
|
||||
% if data['media_type'] == 'movie':
|
||||
@@ -108,7 +107,11 @@ DOCUMENTATION :: END
|
||||
<div id="poster-${sk}" class="dashboard-activity-cover" style="background-image: url(pms_image_proxy?img=${data['parent_thumb']}&width=300&height=300&fallback=cover&refresh=true);"></div>
|
||||
</a>
|
||||
% elif data['media_type'] in ('photo', 'clip'):
|
||||
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(pms_image_proxy?img=${data['parent_thumb']}&width=300&height=450&fallback=poster&refresh=true);"></div>
|
||||
% if data['extra_type']:
|
||||
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(pms_image_proxy?img=${data['art'].replace('/art', '/thumb') or data['thumb']}&width=300&height=450&fallback=poster&refresh=true);"></div>
|
||||
% else:
|
||||
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(pms_image_proxy?img=${data['parent_thumb'] or data['thumb']}&width=300&height=450&fallback=poster&refresh=true);"></div>
|
||||
% endif
|
||||
% else:
|
||||
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(images/art.png);"></div>
|
||||
% endif
|
||||
@@ -117,7 +120,7 @@ DOCUMENTATION :: END
|
||||
<div id="poster-${sk}" class="dashboard-activity-poster-blur" style="background-image: url(${data['channel_icon']});"></div>
|
||||
<div id="poster-${sk}" class="dashboard-activity-cover" style="background-image: url(${data['channel_icon']});"></div>
|
||||
% else:
|
||||
<div id="poster-${sk}" class="dashboard-activity-poster-blur" style="background-image: url(pms_image_proxy?img=${data['channel_icon']}&width=300&height=300&fallback=cover&refresh=true);"></div>
|
||||
<div id="poster-${sk}" class="dashboard-activity-poster" style="background-image: url(pms_image_proxy?img=${data['channel_icon']}&width=300&height=300&opacity=60&background=282828&blur=3&fallback=cover&refresh=true);"></div>
|
||||
<div id="poster-${sk}" class="dashboard-activity-cover" style="background-image: url(pms_image_proxy?img=${data['channel_icon']}&width=300&height=300&fallback=cover&refresh=true);"></div>
|
||||
% endif
|
||||
% endif
|
||||
@@ -301,14 +304,13 @@ DOCUMENTATION :: END
|
||||
<li class="dashboard-activity-info-item">
|
||||
<div class="sub-heading">Bandwidth</div>
|
||||
<div class="sub-value time-right">
|
||||
% if data['media_type'] != 'photo' and helpers.cast_to_int(data['bandwidth']):
|
||||
% if data['media_type'] != 'photo' and data['bandwidth'] != 'Unknown':
|
||||
<%
|
||||
bw = helpers.cast_to_int(data['bandwidth'])
|
||||
if bw != "Unknown":
|
||||
if bw > 1000:
|
||||
bw = str(round(bw / 1000.0, 1)) + ' Mbps'
|
||||
else:
|
||||
bw = str(bw) + ' kbps'
|
||||
if bw > 1000:
|
||||
bw = str(round(bw / 1000.0, 1)) + ' Mbps'
|
||||
else:
|
||||
bw = str(bw) + ' kbps'
|
||||
%>
|
||||
<span id="stream-bandwidth-${sk}">${bw}</span>
|
||||
<span id="streaming-brain-${sk}" data-toggle="tooltip" title="Streaming Brain Estimate (Required Bandwidth)"><i class="fa fa-info-circle"></i></span>
|
||||
@@ -440,7 +442,12 @@ DOCUMENTATION :: END
|
||||
% elif data['media_type'] == 'photo':
|
||||
<span title="${data['title']}" class="sub-heading">${data['title']}</span>
|
||||
% else:
|
||||
<span title="${data['year']}" class="sub-heading">${data['year']}</span>
|
||||
% if data['extra_type']:
|
||||
<% extra_type = EXTRA_TYPES.get(data['extra_type'], data['sub_type'].capitalize()) %>
|
||||
<span title="${data['year']} (${extra_type})" class="sub-heading">${data['year']} (${extra_type})</span>
|
||||
% else:
|
||||
<span title="${data['year']}" class="sub-heading">${data['year']}</span>
|
||||
% endif
|
||||
% endif
|
||||
% elif data['channel_title']:
|
||||
<span title="${data['channel_title']}" class="sub-heading">${data['channel_title']}</span>
|
||||
|
@@ -113,7 +113,7 @@
|
||||
// Load user ids and names (for the selector)
|
||||
$.ajax({
|
||||
url: 'get_user_names',
|
||||
type: 'get',
|
||||
type: 'GET',
|
||||
dataType: 'json',
|
||||
success: function (data) {
|
||||
var select = $('#history-user');
|
||||
@@ -130,6 +130,7 @@
|
||||
function loadHistoryTable(media_type, selected_user_id) {
|
||||
history_table_options.ajax = {
|
||||
url: 'get_history',
|
||||
type: 'POST',
|
||||
data: function (d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d),
|
||||
@@ -163,7 +164,7 @@
|
||||
}
|
||||
|
||||
var media_type = null;
|
||||
var selected_user_id = "${_session['user_id']}" == "None" ? null : "${_session['user_id']}";
|
||||
var selected_user_id = "${_session['user_group']}" == "admin" ? null : "${_session['user_id']}";
|
||||
loadHistoryTable(media_type, selected_user_id);
|
||||
|
||||
% if _session['user_group'] == 'admin':
|
||||
|
@@ -71,22 +71,21 @@ DOCUMENTATION :: END
|
||||
%>
|
||||
<div class="dashboard-stats-instance" id="stats-instance-${stat_id}" data-stat_id="${stat_id}">
|
||||
<div class="dashboard-stats-container">
|
||||
<div class="dashboard-stats-background-overlay">
|
||||
% if stat_id in ('top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music', 'popular_music', 'last_watched'):
|
||||
% if row0['art']:
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background" style="background-image: url(pms_image_proxy?img=${row0['art']}&width=500&height=280&fallback=art);"></div>
|
||||
% else:
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background" style="background-image: url(images/art.png);"></div>
|
||||
% endif
|
||||
% elif stat_id == 'top_platforms':
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background platform-${row0['platform_name']} no-image"></div>
|
||||
% else:
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background flat"></div>
|
||||
% endif
|
||||
% if stat_id in ('top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music', 'popular_music', 'last_watched'):
|
||||
% if row0['art']:
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background" style="background-image: url(pms_image_proxy?img=${row0['art']}&width=500&height=280&opacity=40&background=282828&blur=3&fallback=art);">
|
||||
% else:
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background" style="background-image: url(images/art.png);">
|
||||
% endif
|
||||
% elif stat_id == 'top_platforms':
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background platform-${row0['platform_name']}-rgba no-image">
|
||||
% else:
|
||||
<div id="stats-background-${stat_id}" class="dashboard-stats-background flat">
|
||||
% endif
|
||||
% if stat_id in ('top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music', 'popular_music', 'last_watched'):
|
||||
<div class="dashboard-stats-poster-container hidden-xs">
|
||||
% if stat_id in ('top_music', 'popular_music'):
|
||||
<div id="stats-thumb-${stat_id}-bg" class="dashboard-stats-poster-blur" style="background-image: url(pms_image_proxy?img=${row0['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
<div id="stats-thumb-${stat_id}-bg" class="dashboard-stats-poster" style="background-image: url(pms_image_proxy?img=${row0['thumb']}&width=300&height=300&opacity=60&background=282828&blur=3&fallback=cover);"></div>
|
||||
% endif
|
||||
<% height, type = ('300', 'cover') if stat_id in ('top_music', 'popular_music') else ('450', 'poster') %>
|
||||
<% href = 'info?rating_key={}'.format(row0['rating_key']) if row0['rating_key'] else '#' %>
|
||||
@@ -200,7 +199,7 @@ DOCUMENTATION :: END
|
||||
}).addClass('platform-' + $(elem).data('platform'));
|
||||
$('#stats-background-' + stat_id).removeClass(function (index, className) {
|
||||
return (className.match (/(^|\s)platform-\S+/g) || []).join(' ');
|
||||
}).addClass('platform-' + $(elem).data('platform'));
|
||||
}).addClass('platform-' + $(elem).data('platform') + '-rgba');
|
||||
} else {
|
||||
if (rating_key) {
|
||||
href = 'info?rating_key=' + rating_key;
|
||||
@@ -209,13 +208,13 @@ DOCUMENTATION :: END
|
||||
}
|
||||
$('#stats-thumb-url-' + stat_id).attr('href', href).prop('title', $(elem).data('title'));
|
||||
if (art) {
|
||||
$('#stats-background-' + stat_id).css('background-image', 'url(pms_image_proxy?img=' + art + '&width=500&height=280&fallback=art)');
|
||||
$('#stats-background-' + stat_id).css('background-image', 'url(pms_image_proxy?img=' + art + '&width=500&height=280&opacity=40&background=282828&blur=3&fallback=art)');
|
||||
} else {
|
||||
$('#stats-background-' + stat_id).css('background-image', 'url(images/art.png)');
|
||||
}
|
||||
if (thumb) {
|
||||
$('#stats-thumb-' + stat_id).css('background-image', 'url(pms_image_proxy?img=' + thumb + '&width=300&height=' + height + '&fallback=' + fallback + ')');
|
||||
$('#stats-thumb-' + stat_id + '-bg').css('background-image', 'url(pms_image_proxy?img=' + thumb + '&width=300&height=' + height + '&fallback=' + fallback + ')');
|
||||
$('#stats-thumb-' + stat_id + '-bg').css('background-image', 'url(pms_image_proxy?img=' + thumb + '&width=300&height=' + height + '&opacity=60&background=282828&blur=3&fallback=' + fallback + ')');
|
||||
} else {
|
||||
$('#stats-thumb-' + stat_id).css('background-image', 'url(images/' + fallback + '.png)');
|
||||
$('#stats-thumb-' + stat_id + '-bg').css('background-image', 'url(images/' + fallback + '.png)');
|
||||
|
BIN
data/interfaces/default/images/libraries/artist.png
Normal file
After Width: | Height: | Size: 4.4 KiB |
BIN
data/interfaces/default/images/libraries/movie.png
Normal file
After Width: | Height: | Size: 1.7 KiB |
BIN
data/interfaces/default/images/libraries/photo.png
Normal file
After Width: | Height: | Size: 5.3 KiB |
BIN
data/interfaces/default/images/libraries/playlist.png
Normal file
After Width: | Height: | Size: 1.1 KiB |
BIN
data/interfaces/default/images/libraries/show.png
Normal file
After Width: | Height: | Size: 1.1 KiB |
BIN
data/interfaces/default/images/libraries/video.png
Normal file
After Width: | Height: | Size: 2.3 KiB |
BIN
data/interfaces/default/images/logo-tautulli-newsletter.png
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
data/interfaces/default/images/newsletter/newsletter-header.png
Normal file
After Width: | Height: | Size: 33 KiB |
BIN
data/interfaces/default/images/newsletter/view-on-plex-cover.png
Normal file
After Width: | Height: | Size: 4.8 KiB |
BIN
data/interfaces/default/images/newsletter/view-on-plex-flat.png
Normal file
After Width: | Height: | Size: 5.4 KiB |
After Width: | Height: | Size: 5.1 KiB |
@@ -5,6 +5,7 @@
|
||||
</%def>
|
||||
|
||||
<%def name="body()">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
<div class="container-fluid">
|
||||
% for section in config['home_sections']:
|
||||
% if section == 'current_activity':
|
||||
@@ -22,9 +23,10 @@
|
||||
</h3>
|
||||
</div>
|
||||
<div id="currentActivity">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted" id="dashboard-checking-activity"><i class="fa fa-refresh fa-spin"></i> Checking for activity...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div id="dashboard-no-activity" class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% else:
|
||||
<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
@@ -123,7 +125,7 @@
|
||||
</label>
|
||||
</div>
|
||||
<div class="input-group pull-left" style="width: 1px;" id="recently-added-count-selection">
|
||||
<input type="number" class="form-control number-input" name="recently-added-count" id="recently-added-count" value="${config['home_stats_recently_added_count']}" min="1" max="100" data-default="50" data-toggle="tooltip" title="Min: 1 item<br>Max: 100 items" />
|
||||
<input type="number" class="form-control number-input" name="recently-added-count" id="recently-added-count" value="${config['home_stats_recently_added_count']}" min="1" max="50" data-default="50" data-toggle="tooltip" title="Min: 1 item<br>Max: 50 items" />
|
||||
<span class="input-group-addon btn-dark inactive">items</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -133,7 +135,17 @@
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div id="recentlyAdded" style="margin-right: -15px;">
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% else:
|
||||
<div class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
Check the <a href="logs">logs</a> and verify your server connection in the <a href="settings#tab_tabs-plex_media_server">settings</a>.
|
||||
% endif
|
||||
</div>
|
||||
% endif
|
||||
<br>
|
||||
</div>
|
||||
</div>
|
||||
@@ -220,6 +232,7 @@
|
||||
</%def>
|
||||
|
||||
<%def name="javascriptIncludes()">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
<script src="${http_root}js/moment-with-locale.js"></script>
|
||||
<script src="${http_root}js/jquery.scrollbar.min.js"></script>
|
||||
<script src="${http_root}js/jquery.mousewheel.min.js"></script>
|
||||
@@ -252,7 +265,6 @@
|
||||
});
|
||||
}
|
||||
</script>
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
% if 'current_activity' in config['home_sections'] and PLEX_SERVER_UP:
|
||||
<script>
|
||||
var defaultHandler = {
|
||||
@@ -375,7 +387,7 @@
|
||||
if (s.media_type === 'track') {
|
||||
// Update if artist changed
|
||||
if (s.grandparent_rating_key !== instance.data('grandparent_rating_key')) {
|
||||
$('#background-' + key).css('background-image', 'url(pms_image_proxy?img=' + s.art + '&width=500&height=280&fallback=art&refresh=true)');
|
||||
$('#background-' + key).css('background-image', 'url(pms_image_proxy?img=' + s.art + '&width=500&height=280&opacity=40&background=282828&blur=3&fallback=art&refresh=true)');
|
||||
$('#metadata-grandparent_title-' + key)
|
||||
.attr('href', 'info?rating_key=' + s.grandparent_rating_key)
|
||||
.attr('title', s.grandparent_title)
|
||||
@@ -384,7 +396,7 @@
|
||||
// Update cover if album changed
|
||||
if (s.parent_rating_key !== instance.data('parent_rating_key')) {
|
||||
$('#poster-' + key).css('background-image', 'url(pms_image_proxy?img=' + s.parent_thumb + '&width=300&height=300&fallback=poster&refresh=true)');
|
||||
$('#poster-' + key + '-bg').css('background-image', 'url(pms_image_proxy?img=' + s.parent_thumb + '&width=300&height=300&fallback=poster&refresh=true)');
|
||||
$('#poster-' + key + '-bg').css('background-image', 'url(pms_image_proxy?img=' + s.parent_thumb + '&width=300&height=300&opacity=60&background=282828&blur=3&fallback=poster&refresh=true)');
|
||||
$('#poster-url-' + key)
|
||||
.attr('href', 'info?rating_key=' + s.parent_rating_key)
|
||||
.attr('title', s.parent_title);
|
||||
@@ -507,17 +519,15 @@
|
||||
|
||||
$('#location-' + key).html(s.location.toUpperCase());
|
||||
|
||||
if (s.media_type !== 'photo' && parseInt(s.bandwidth)) {
|
||||
var bw = parseInt(s.bandwidth);
|
||||
if (bw !== "Unknown") {
|
||||
if (bw > 1000) {
|
||||
bw = (bw / 1000).toFixed(1) + ' Mbps';
|
||||
} else {
|
||||
bw = bw + ' kbps'
|
||||
}
|
||||
if (s.media_type !== 'photo' && s.bandwidth !== 'Unknown') {
|
||||
var bw = parseInt(s.bandwidth) || 0;
|
||||
if (bw > 1000) {
|
||||
bw = (bw / 1000).toFixed(1) + ' Mbps';
|
||||
} else {
|
||||
bw = bw + ' kbps'
|
||||
}
|
||||
$('#stream-bandwidth-' + key).html(bw);
|
||||
}
|
||||
};
|
||||
|
||||
// Update the stream progress times
|
||||
$('#stream-eta-' + key).html(moment().add(parseInt(s.duration) - parseInt(s.view_offset), 'milliseconds').format(time_format));
|
||||
@@ -589,7 +599,7 @@
|
||||
if (!(create_instances.length) && activity_ready) {
|
||||
getCurrentActivity();
|
||||
}
|
||||
}, 2000);
|
||||
}, ${config['home_refresh_interval'] * 1000});
|
||||
|
||||
setInterval(function(){
|
||||
$('.progress_time_offset').each(function () {
|
||||
@@ -604,7 +614,7 @@
|
||||
if ($(this).data('state') === 'playing' && $(this).data('view_offset') >= 0) {
|
||||
var view_offset = parseInt($(this).data('view_offset'));
|
||||
var stream_duration = parseInt($(this).data('stream_duration'));
|
||||
var progress_percent = Math.min(Math.trunc(view_offset / stream_duration * 100), 100)
|
||||
var progress_percent = Math.min(Math.trunc(view_offset / stream_duration * 100), 100);
|
||||
$(this).width(progress_percent - 3 + '%').html(progress_percent + '%')
|
||||
.attr('data-original-title', 'Stream Progress ' + progress_percent + '%')
|
||||
.data('view_offset', Math.min(view_offset + 1000, stream_duration));
|
||||
@@ -746,7 +756,7 @@
|
||||
getLibraryStats();
|
||||
</script>
|
||||
% endif
|
||||
% if 'recently_added' in config['home_sections']:
|
||||
% if 'recently_added' in config['home_sections'] and PLEX_SERVER_UP:
|
||||
<script>
|
||||
function recentlyAdded(recently_added_count, recently_added_type) {
|
||||
showMsg("Loading recently added items...", true, false, 0);
|
||||
|
@@ -400,14 +400,14 @@ DOCUMENTATION :: END
|
||||
% if data.get('poster_url'):
|
||||
<div class="btn-group">
|
||||
% if data['media_type'] == 'artist' or data['media_type'] == 'album' or data['media_type'] == 'track':
|
||||
<span class="imgur-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="80" data-width="80" style="display: inline-flex;">
|
||||
<span class="hosted-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="80" data-width="80" style="display: inline-flex;">
|
||||
% else:
|
||||
<span class="imgur-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="120" data-width="80" style="display: inline-flex;">
|
||||
<span class="hosted-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="120" data-width="80" style="display: inline-flex;">
|
||||
% endif
|
||||
<button class="btn btn-danger btn-edit" data-toggle="modal" aria-pressed="false" autocomplete="off" id="delete-imgur-poster"
|
||||
<button class="btn btn-danger btn-edit" data-toggle="modal" aria-pressed="false" autocomplete="off" id="delete-hosted-poster"
|
||||
data-id="${data['parent_rating_key'] if data['media_type'] in ('episode', 'track') else data['rating_key']}"
|
||||
data-title="${data["poster_title"]}">
|
||||
<i class="fa fa-picture-o"></i> Delete Imgur Poster
|
||||
<i class="fa fa-picture-o"></i> Delete ${data['img_service']} Poster
|
||||
</button>
|
||||
</span>
|
||||
</div>
|
||||
@@ -547,12 +547,12 @@ DOCUMENTATION :: END
|
||||
function get_history() {
|
||||
history_table_options.ajax = {
|
||||
url: 'get_history',
|
||||
type: 'post',
|
||||
type: 'POST',
|
||||
data: function ( d ) {
|
||||
return {
|
||||
json_data: JSON.stringify( d ),
|
||||
grandparent_rating_key: "${data['rating_key']}",
|
||||
user_id: "${_session['user_id']}" == "None" ? null : "${_session['user_id']}"
|
||||
user_id: "${_session['user_group']}" == "admin" ? null : "${_session['user_id']}"
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -563,12 +563,12 @@ DOCUMENTATION :: END
|
||||
function get_history() {
|
||||
history_table_options.ajax = {
|
||||
url: 'get_history',
|
||||
type: 'post',
|
||||
type: 'POST',
|
||||
data: function ( d ) {
|
||||
return {
|
||||
json_data: JSON.stringify( d ),
|
||||
parent_rating_key: "${data['rating_key']}",
|
||||
user_id: "${_session['user_id']}" == "None" ? null : "${_session['user_id']}"
|
||||
user_id: "${_session['user_group']}" == "admin" ? null : "${_session['user_id']}"
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -579,12 +579,12 @@ DOCUMENTATION :: END
|
||||
function get_history() {
|
||||
history_table_options.ajax = {
|
||||
url: 'get_history',
|
||||
type: 'post',
|
||||
type: 'POST',
|
||||
data: function ( d ) {
|
||||
return {
|
||||
json_data: JSON.stringify( d ),
|
||||
rating_key: "${data['rating_key']}",
|
||||
user_id: "${_session['user_id']}" == "None" ? null : "${_session['user_id']}"
|
||||
user_id: "${_session['user_group']}" == "admin" ? null : "${_session['user_id']}"
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -705,7 +705,7 @@ DOCUMENTATION :: END
|
||||
</script>
|
||||
% if data.get('poster_url'):
|
||||
<script>
|
||||
$('.imgur-poster-tooltip').popover({
|
||||
$('.hosted-poster-tooltip').popover({
|
||||
html: true,
|
||||
container: 'body',
|
||||
trigger: 'hover',
|
||||
@@ -716,14 +716,14 @@ DOCUMENTATION :: END
|
||||
}
|
||||
});
|
||||
|
||||
$('#delete-imgur-poster').on('click', function () {
|
||||
var msg = 'Are you sure you want to delete the Imgur poster for <strong>' + $(this).data('title') + '</strong>?<br><br>' +
|
||||
$('#delete-hosted-poster').on('click', function () {
|
||||
var msg = 'Are you sure you want to delete the ${data['img_service']} poster for <strong>' + $(this).data('title') + '</strong>?<br><br>' +
|
||||
'All previous links to this image will no longer work.';
|
||||
var url = 'delete_imgur_poster';
|
||||
var url = 'delete_hosted_images';
|
||||
var data = { rating_key: $(this).data('id') };
|
||||
var callback = function () {
|
||||
$('.imgur-poster-tooltip').popover('destroy');
|
||||
$('#delete-imgur-poster').closest('.btn-group').remove();
|
||||
$('.hosted-poster-tooltip').popover('destroy');
|
||||
$('#delete-hosted-poster').closest('.btn-group').remove();
|
||||
};
|
||||
confirmAjaxCall(url, msg, data, false, callback);
|
||||
});
|
||||
|
1
data/interfaces/default/js/jquery-cron-min.js
vendored
Normal file
146
data/interfaces/default/js/tables/newsletter_logs.js
Normal file
@@ -0,0 +1,146 @@
|
||||
newsletter_log_table_options = {
|
||||
"destroy": true,
|
||||
"serverSide": true,
|
||||
"processing": false,
|
||||
"pagingType": "full_numbers",
|
||||
"order": [ 0, 'desc'],
|
||||
"pageLength": 50,
|
||||
"stateSave": true,
|
||||
"language": {
|
||||
"search":"Search: ",
|
||||
"lengthMenu": "Show _MENU_ lines per page",
|
||||
"emptyTable": "No log information available",
|
||||
"info" :"Showing _START_ to _END_ of _TOTAL_ lines",
|
||||
"infoEmpty": "Showing 0 to 0 of 0 lines",
|
||||
"infoFiltered": "(filtered from _MAX_ total lines)",
|
||||
"loadingRecords": '<i class="fa fa-refresh fa-spin"></i> Loading items...</div>'
|
||||
},
|
||||
"autoWidth": false,
|
||||
"scrollX": true,
|
||||
"columnDefs": [
|
||||
{
|
||||
"targets": [0],
|
||||
"data": "timestamp",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(moment(cellData, "X").format('YYYY-MM-DD HH:mm:ss'));
|
||||
}
|
||||
},
|
||||
"width": "10%",
|
||||
"className": "no-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [1],
|
||||
"data": "newsletter_id",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "5%",
|
||||
"className": "no-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [2],
|
||||
"data": "agent_name",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "5%",
|
||||
"className": "no-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [3],
|
||||
"data": "notify_action",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "5%",
|
||||
"className": "no-wrap"
|
||||
},
|
||||
{
|
||||
"targets": [4],
|
||||
"data": "subject_text",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "23%"
|
||||
},
|
||||
{
|
||||
"targets": [5],
|
||||
"data": "body_text",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "35%"
|
||||
},
|
||||
{
|
||||
"targets": [6],
|
||||
"data": "start_date",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "5%"
|
||||
},
|
||||
{
|
||||
"targets": [7],
|
||||
"data": "end_date",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html(cellData);
|
||||
}
|
||||
},
|
||||
"width": "5%"
|
||||
},
|
||||
{
|
||||
"targets": [8],
|
||||
"data": "uuid",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
$(td).html('<a href="newsletter/' + rowData['uuid'] + '" target="_blank">' + cellData + '</a>');
|
||||
}
|
||||
},
|
||||
"width": "5%"
|
||||
},
|
||||
{
|
||||
"targets": [9],
|
||||
"data": "success",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData === 1) {
|
||||
$(td).html('<span class="success-tooltip" data-toggle="tooltip" title="Newsletter Sent"><i class="fa fa-lg fa-fw fa-check"></i></span>');
|
||||
} else {
|
||||
$(td).html('<span class="success-tooltip" data-toggle="tooltip" title="Newsletter Failed"><i class="fa fa-lg fa-fw fa-times"></i></span>');
|
||||
}
|
||||
},
|
||||
"searchable": false,
|
||||
"orderable": false,
|
||||
"className": "no-wrap",
|
||||
"width": "2%"
|
||||
},
|
||||
],
|
||||
"drawCallback": function (settings) {
|
||||
// Jump to top of page
|
||||
//$('html,body').scrollTop(0);
|
||||
$('#ajaxMsg').fadeOut();
|
||||
|
||||
// Create the tooltips.
|
||||
$('body').tooltip({
|
||||
selector: '[data-toggle="tooltip"]',
|
||||
container: 'body'
|
||||
});
|
||||
},
|
||||
"preDrawCallback": function(settings) {
|
||||
var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
|
||||
showMsg(msg, false, false, 0)
|
||||
}
|
||||
};
|
@@ -86,7 +86,7 @@ notification_log_table_options = {
|
||||
"targets": [6],
|
||||
"data": "success",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData == 1) {
|
||||
if (cellData === 1) {
|
||||
$(td).html('<span class="success-tooltip" data-toggle="tooltip" title="Notification Sent"><i class="fa fa-lg fa-fw fa-check"></i></span>');
|
||||
} else {
|
||||
$(td).html('<span class="success-tooltip" data-toggle="tooltip" title="Notification Failed"><i class="fa fa-lg fa-fw fa-times"></i></span>');
|
||||
@@ -113,4 +113,4 @@ notification_log_table_options = {
|
||||
var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
|
||||
showMsg(msg, false, false, 0)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -37,7 +37,6 @@ sync_table_options = {
|
||||
"data": "state",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData === 'pending') {
|
||||
$(td).addClass('currentlyWatching');
|
||||
$(td).html('Pending...');
|
||||
} else {
|
||||
$(td).html(cellData.toProperCase());
|
||||
@@ -66,7 +65,7 @@ sync_table_options = {
|
||||
"data": "sync_title",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
if (rowData['metadata_type'] !== '') {
|
||||
if (rowData['rating_key']) {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
} else {
|
||||
$(td).html(cellData);
|
||||
@@ -74,7 +73,7 @@ sync_table_options = {
|
||||
}
|
||||
},
|
||||
"className": "datatable-wrap"
|
||||
},
|
||||
},
|
||||
{
|
||||
"targets": [4],
|
||||
"data": "metadata_type",
|
||||
@@ -150,6 +149,11 @@ sync_table_options = {
|
||||
"preDrawCallback": function (settings) {
|
||||
var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
|
||||
showMsg(msg, false, false, 0)
|
||||
},
|
||||
"rowCallback": function (row, rowData, rowIndex) {
|
||||
if (rowData['state'] === 'pending') {
|
||||
$(row).addClass('current-activity-row');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
@@ -91,7 +91,7 @@
|
||||
json_data: JSON.stringify(d)
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
libraries_list_table = $('#libraries_list_table').DataTable(libraries_list_table_options);
|
||||
var colvis = new $.fn.dataTable.ColVis(libraries_list_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 1] });
|
||||
|
@@ -374,12 +374,12 @@ DOCUMENTATION :: END
|
||||
// Build watch history table
|
||||
history_table_options.ajax = {
|
||||
url: 'get_history',
|
||||
type: 'post',
|
||||
type: 'POST',
|
||||
data: function ( d ) {
|
||||
return {
|
||||
json_data: JSON.stringify( d ),
|
||||
section_id: section_id,
|
||||
user_id: "${_session['user_id']}" == "None" ? null : "${_session['user_id']}"
|
||||
user_id: "${_session['user_group']}" == "admin" ? null : "${_session['user_id']}"
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -406,7 +406,7 @@ DOCUMENTATION :: END
|
||||
// Build media info table
|
||||
media_info_table_options.ajax = {
|
||||
url: 'get_library_media_info',
|
||||
type: 'post',
|
||||
type: 'POST',
|
||||
data: function ( d ) {
|
||||
return {
|
||||
json_data: JSON.stringify( d ),
|
||||
|
@@ -35,8 +35,7 @@ DOCUMENTATION :: END
|
||||
% if section_type in data:
|
||||
<div class="dashboard-stats-instance" id="library-stats-instance-${section_type}" data-section_type="${section_type}">
|
||||
<div class="dashboard-stats-container">
|
||||
<div class="dashboard-stats-background-overlay">
|
||||
<div id="library-stats-background-${section_type}" class="dashboard-stats-background" style="background-image: url(pms_image_proxy?img=/:/resources/${section_type}-fanart.jpg&width=500&height=280&fallback=art);"></div>
|
||||
<div id="library-stats-background-${section_type}" class="dashboard-stats-background" style="background-image: url(pms_image_proxy?img=/:/resources/${section_type}-fanart.jpg&width=500&height=280&opacity=40&background=282828&blur=3&fallback=art);">
|
||||
<div id="library-stats-thumb-${section_type}" class="dashboard-stats-flat svg-icon library-${section_type} hidden-xs"></div>
|
||||
<div class="dashboard-stats-info-container">
|
||||
<div id="library-stats-title-${section_type}" class="dashboard-stats-info-title">
|
||||
|
@@ -85,7 +85,7 @@
|
||||
dataType: 'json',
|
||||
statusCode: {
|
||||
200: function() {
|
||||
window.location = "${http_root}";
|
||||
window.location = "${redirect_uri or http_root}";
|
||||
},
|
||||
401: function() {
|
||||
$('#incorrect-login').show();
|
||||
|
@@ -50,6 +50,7 @@
|
||||
<button class="btn btn-dark" id="download-plexscannerlog" style="display: none;"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="clear-logs"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-notify-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-newsletter-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-login-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
</div>
|
||||
</div>
|
||||
@@ -62,6 +63,7 @@
|
||||
<li role="presentation"><a id="plex-scanner-logs-btn" href="#tabs-plex_scanner_log" aria-controls="tabs-plex_scanner_log" role="tab" data-toggle="tab">Plex Media Scanner Logs</a></li>
|
||||
<li role="presentation"><a id="plex-websocket-logs-btn" href="#tabs-plex_websocket_log" aria-controls="tabs-plex_websocket_log" role="tab" data-toggle="tab">Plex Websocket Logs</a></li>
|
||||
<li role="presentation"><a id="notification-logs-btn" href="#tabs-notification_log" aria-controls="tabs-notification_log" role="tab" data-toggle="tab">Notification Logs</a></li>
|
||||
<li role="presentation"><a id="newsletter-logs-btn" href="#tabs-newsletter_log" aria-controls="tabs-newsletter_log" role="tab" data-toggle="tab">Newsletter Logs</a></li>
|
||||
<li role="presentation"><a id="login-logs-btn" href="#tabs-login_log" aria-controls="tabs-login_log" role="tab" data-toggle="tab">Login Logs</a></li>
|
||||
</ul>
|
||||
<div class="tab-content">
|
||||
@@ -141,6 +143,25 @@
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_log">
|
||||
<table class="display" id="newsletter_log_table" width="100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th align="left" id="newsletter_timestamp">Timestamp</th>
|
||||
<th align="left" id="newsletter_newsletter_id">Newsletter ID</th>
|
||||
<th align="left" id="newsletter_agent_name">Agent</th>
|
||||
<th align="left" id="newsletter_notify_action">Action</th>
|
||||
<th align="left" id="newsletter_subject_text">Subject Text</th>
|
||||
<th align="left" id="newsletter_body_text">Body Text</th>
|
||||
<th align="left" id="newsletter_start_date">Start Date</th>
|
||||
<th align="left" id="newsletter_end_date">End Date</th>
|
||||
<th align="left" id="newsletter_uuid">UUID</th>
|
||||
<th align="left" id="newsletter_success"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-login_log">
|
||||
<table class="display login_log_table" id="login_log_table" width="100%">
|
||||
<thead>
|
||||
@@ -191,6 +212,7 @@
|
||||
<script src="${http_root}js/tables/logs.js${cache_param}"></script>
|
||||
<script src="${http_root}js/tables/plex_logs.js${cache_param}"></script>
|
||||
<script src="${http_root}js/tables/notification_logs.js${cache_param}"></script>
|
||||
<script src="${http_root}js/tables/newsletter_logs.js${cache_param}"></script>
|
||||
<script src="${http_root}js/tables/login_logs.js${cache_param}"></script>
|
||||
<script>
|
||||
|
||||
@@ -229,8 +251,8 @@
|
||||
var selected_log_level = null;
|
||||
function loadtautullilogs(logfile, selected_log_level) {
|
||||
log_table_options.ajax = {
|
||||
url: "get_log",
|
||||
type: 'post',
|
||||
url: 'get_log',
|
||||
type: 'POST',
|
||||
data: function (d) {
|
||||
return {
|
||||
logfile: logfile,
|
||||
@@ -249,7 +271,8 @@
|
||||
|
||||
function loadPlexLogs() {
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=server"
|
||||
url: 'get_plex_log?log_type=server',
|
||||
type: 'POST'
|
||||
};
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_log_table = $('#plex_log_table').DataTable(plex_log_table_options);
|
||||
@@ -257,7 +280,8 @@
|
||||
|
||||
function loadPlexScannerLogs() {
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=scanner"
|
||||
url: 'get_plex_log?log_type=scanner',
|
||||
type: 'POST'
|
||||
};
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_scanner_log_table = $('#plex_scanner_log_table').DataTable(plex_log_table_options);
|
||||
@@ -265,7 +289,8 @@
|
||||
|
||||
function loadNotificationLogs() {
|
||||
notification_log_table_options.ajax = {
|
||||
url: "get_notification_log",
|
||||
url: 'get_notification_log',
|
||||
type: 'POST',
|
||||
data: function (d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d)
|
||||
@@ -275,10 +300,23 @@
|
||||
notification_log_table = $('#notification_log_table').DataTable(notification_log_table_options);
|
||||
}
|
||||
|
||||
function loadNewsletterLogs() {
|
||||
newsletter_log_table_options.ajax = {
|
||||
url: "get_newsletter_log",
|
||||
data: function (d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d)
|
||||
};
|
||||
}
|
||||
};
|
||||
newsletter_log_table = $('#newsletter_log_table').DataTable(newsletter_log_table_options);
|
||||
}
|
||||
|
||||
function loadLoginLogs() {
|
||||
login_log_table_options.pageLength = 50;
|
||||
login_log_table_options.ajax = {
|
||||
url: "get_user_logins",
|
||||
url: 'get_user_logins',
|
||||
type: 'POST',
|
||||
data: function (d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d)
|
||||
@@ -296,6 +334,7 @@
|
||||
$("#download-plexserverlog").hide();
|
||||
$("#download-plexscannerlog").hide();
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-newsletter-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadtautullilogs('tautulli', selected_log_level);
|
||||
clearSearchButton('tautulli_log_table', log_table);
|
||||
@@ -309,7 +348,8 @@
|
||||
$("#download-plexserverlog").hide();
|
||||
$("#download-plexscannerlog").hide();
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
$("#clear-newsletter-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadtautullilogs('tautulli_api', selected_log_level);
|
||||
clearSearchButton('tautulli_api_log_table', log_table);
|
||||
});
|
||||
@@ -322,6 +362,7 @@
|
||||
$("#download-plexserverlog").hide();
|
||||
$("#download-plexscannerlog").hide();
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-newsletter-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadtautullilogs('plex_websocket', selected_log_level);
|
||||
clearSearchButton('plex_websocket_log_table', log_table);
|
||||
@@ -335,6 +376,7 @@
|
||||
$("#download-plexserverlog").show();
|
||||
$("#download-plexscannerlog").hide();
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-newsletter-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexLogs();
|
||||
clearSearchButton('plex_log_table', plex_log_table);
|
||||
@@ -348,6 +390,7 @@
|
||||
$("#download-plexserverlog").hide();
|
||||
$("#download-plexscannerlog").show();
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-newsletter-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexScannerLogs();
|
||||
clearSearchButton('plex_scanner_log_table', plex_scanner_log_table);
|
||||
@@ -361,11 +404,26 @@
|
||||
$("#download-plexserverlog").hide();
|
||||
$("#download-plexscannerlog").hide();
|
||||
$("#clear-notify-logs").show();
|
||||
$("#clear-newsletter-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadNotificationLogs();
|
||||
clearSearchButton('notification_log_table', notification_log_table);
|
||||
});
|
||||
|
||||
$("#newsletter-logs-btn").click(function () {
|
||||
$("#tautulli-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-tautullilog").hide();
|
||||
$("#download-plexserverlog").hide();
|
||||
$("#download-plexscannerlog").hide();
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-newsletter-logs").show();
|
||||
$("#clear-login-logs").hide();
|
||||
loadNewsletterLogs();
|
||||
clearSearchButton('newsletter_log_table', newsletter_log_table);
|
||||
});
|
||||
|
||||
$("#login-logs-btn").click(function () {
|
||||
$("#tautulli-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
@@ -374,6 +432,7 @@
|
||||
$("#download-plexserverlog").hide();
|
||||
$("#download-plexscannerlog").hide();
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-newsletter-logs").hide();
|
||||
$("#clear-login-logs").show();
|
||||
loadLoginLogs();
|
||||
clearSearchButton('login_log_table', notification_log_table);
|
||||
@@ -442,6 +501,27 @@
|
||||
});
|
||||
});
|
||||
|
||||
$("#clear-newsletter-logs").click(function () {
|
||||
$("#confirm-message").text("Are you sure you want to clear the Tautulli Newsletter Logs?");
|
||||
$('#confirm-modal').modal();
|
||||
$('#confirm-modal').one('click', '#confirm-button', function () {
|
||||
$.ajax({
|
||||
url: 'delete_newsletter_log',
|
||||
type: 'POST',
|
||||
complete: function (xhr, status) {
|
||||
result = $.parseJSON(xhr.responseText);
|
||||
msg = result.message;
|
||||
if (result.result === 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
|
||||
}
|
||||
newsletter_log_table.draw();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
$("#clear-login-logs").click(function () {
|
||||
$("#confirm-message").text("Are you sure you want to clear the Tautulli Login Logs?");
|
||||
$('#confirm-modal').modal();
|
||||
|
@@ -55,7 +55,7 @@ DOCUMENTATION :: END
|
||||
})
|
||||
}
|
||||
return deferred;
|
||||
}
|
||||
};
|
||||
|
||||
function checkQRAddress(url) {
|
||||
var parser = document.createElement('a');
|
||||
@@ -82,7 +82,7 @@ DOCUMENTATION :: END
|
||||
verifiedDevice = false;
|
||||
|
||||
getPlexPyURL().then(function (url) {
|
||||
checkQRAddress(url)
|
||||
checkQRAddress(url);
|
||||
|
||||
$.get('generate_api_key', { device: true }).then(function (token) {
|
||||
$('#api_qr_address').val(url);
|
||||
@@ -120,7 +120,7 @@ DOCUMENTATION :: END
|
||||
|
||||
$('#api_qr_address').change(function () {
|
||||
var url = $(this).val();
|
||||
checkQRAddress(url)
|
||||
checkQRAddress(url);
|
||||
|
||||
$('#api_qr_code').empty().qrcode({
|
||||
text: url + '|' + $('#api_qr_token').val()
|
||||
|
766
data/interfaces/default/newsletter_config.html
Normal file
@@ -0,0 +1,766 @@
|
||||
% if newsletter:
|
||||
<%!
|
||||
import json
|
||||
from plexpy import notifiers
|
||||
from plexpy.helpers import anon_url, checked
|
||||
|
||||
all_notifiers = sorted(notifiers.get_notifiers(), key=lambda k: (k['agent_label'].lower(), k['friendly_name'], k['id']))
|
||||
email_notifiers = [n for n in all_notifiers if n['agent_name'] == 'email']
|
||||
email_notifiers = [{'id': 0, 'agent_label': 'New Email Configuration', 'friendly_name': ''}] + email_notifiers
|
||||
other_notifiers = [{'id': 0, 'agent_label': 'Select a Notification Agent', 'friendly_name': ''}] + all_notifiers
|
||||
%>
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']})</span></small></h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<ul class="nav nav-tabs list-unstyled" role="tablist">
|
||||
<li role="presentation" class="active"><a href="#tabs-newsletter_config" aria-controls="tabs-newsletter_config" role="tab" data-toggle="tab">Configuration</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_agent" aria-controls="tabs-newsletter_agent" role="tab" data-toggle="tab">Notification Agent</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_text" aria-controls="tabs-newsletter_text" role="tab" data-toggle="tab">Newsletter Text</a></li>
|
||||
<li role="presentation"><a href="#tabs-test_newsletter" aria-controls="tabs-test_newsletter" role="tab" data-toggle="tab">Test Newsletter</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<form action="set_newsletter_config" method="post" class="form" id="set_newsletter_config" data-parsley-validate>
|
||||
<div class="tab-content">
|
||||
<div role="tabpanel" class="tab-pane active" id="tabs-newsletter_config">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div class="checkbox" style="margin-bottom: 20px;">
|
||||
<label>
|
||||
<input type="checkbox" data-id="active_value" class="checkboxes" value="1" ${checked(newsletter['active'])}> Enable the Newsletter
|
||||
</label>
|
||||
<input type="hidden" id="active_value" name="active" value="${newsletter['active']}">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="custom_cron">Schedule</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="custom_cron" name="newsletter_config_custom_cron">
|
||||
<option value="0" ${'selected' if newsletter['config']['custom_cron'] == 0 else ''}>Simple</option>
|
||||
<option value="1" ${'selected' if newsletter['config']['custom_cron'] == 1 else ''}>Custom</option>
|
||||
</select>
|
||||
<input type="text" id="cron_value" name="cron" value="${newsletter['cron']}" />
|
||||
<div id="cron-widget"></div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
<span id="simple_cron_message">Set the schedule for the newsletter.</span>
|
||||
<span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank">custom crontab</a>. Only standard cron values are valid.</span>
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="time_frame">Time Frame</label>
|
||||
<div class="row">
|
||||
<div class="col-md-4">
|
||||
<div class="input-group newsletter-time_frame">
|
||||
<span class="input-group-addon form-control btn-dark inactive">Last</span>
|
||||
<input type="number" class="form-control" id="newsletter_config_time_frame" name="newsletter_config_time_frame" value="${newsletter['config']['time_frame']}">
|
||||
<select class="form-control" id="newsletter_config_time_frame_units" name="newsletter_config_time_frame_units">
|
||||
<option value="days" ${'selected' if newsletter['config']['time_frame_units'] == 'days' else ''}>days</option>
|
||||
<option value="hours" ${'selected' if newsletter['config']['time_frame_units'] == 'hours' else ''}>hours</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Set the time frame to include in the newsletter. Note: Days uses calendar days (i.e. since midnight).</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-12" style="padding-top: 10px; border-top: 1px solid #444;">
|
||||
<input type="hidden" id="newsletter_id" name="newsletter_id" value="${newsletter['id']}" />
|
||||
<input type="hidden" id="agent_id" name="agent_id" value="${newsletter['agent_id']}" />
|
||||
% for item in newsletter['config_options']:
|
||||
% if item['input_type'] == 'help':
|
||||
<div class="form-group">
|
||||
<label>${item['label']}</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'text' or item['input_type'] == 'password':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'number':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-3">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'button':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'checkbox':
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
|
||||
</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
% elif item['input_type'] == 'select':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
% for key, value in sorted(item['select_options'].iteritems()):
|
||||
% if key == item['value']:
|
||||
<option value="${key}" selected>${value}</option>
|
||||
% else:
|
||||
<option value="${key}">${value}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'selectize':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
<option value="select-all">Select All</option>
|
||||
<option value="remove-all">Remove All</option>
|
||||
% if isinstance(item['select_options'], dict):
|
||||
% for section, options in item['select_options'].iteritems():
|
||||
<optgroup label="${section}">
|
||||
% for option in sorted(options, key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
</optgroup>
|
||||
% endfor
|
||||
% else:
|
||||
<option value="border-all"></option>
|
||||
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
% endif
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% endif
|
||||
% endfor
|
||||
</div>
|
||||
<div class="col-md-12" style="margin-top: 10px; padding-top: 10px; border-top: 1px solid #444;">
|
||||
<div class="form-group">
|
||||
<label for="newsletter_config_filename">Filename</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="text" class="form-control" id="newsletter_config_filename" name="newsletter_config_filename" value="${newsletter['config']['filename']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">The filename to use when saving the newsletter (ending with <span class="inline-pre">.html</span>). You may use any of the <a href="#newsletter-text-sub-modal" data-toggle="modal">newsletter text parameters</a>. Leave blank for default.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="friendly_name">Description</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="text" class="form-control" id="friendly_name" name="friendly_name" value="${newsletter['friendly_name']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter a description to help identify this newsletter in the newsletters list.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_agent">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_config_save_only_checkbox" data-id="newsletter_config_save_only" class="checkboxes" value="1" ${checked(newsletter['config']['save_only'])}> Save Newsletter File Only
|
||||
</label>
|
||||
<p class="help-block">Enable to save the newsletter file without sending it to any notification agent.</p>
|
||||
<input type="hidden" id="newsletter_config_save_only" name="newsletter_config_save_only" value="${newsletter['config']['save_only']}">
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_config_formatted_checkbox" data-id="newsletter_config_formatted" class="checkboxes" value="1" ${checked(newsletter['config']['formatted'])}> Send Newsletter as an HTML Formatted Email
|
||||
</label>
|
||||
<p class="help-block">Enable to send the newsletter as an HTML formatted Email. Disable to only send a subject and body message to a different notification agent.</p>
|
||||
<input type="hidden" id="newsletter_config_formatted" name="newsletter_config_formatted" value="${newsletter['config']['formatted']}">
|
||||
</div>
|
||||
<div class="form-group" id="email_notifier_select">
|
||||
<label for="newsletter_email_notifier_id">Email Notification Agent</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="newsletter_email_notifier_id" name="newsletter_email_notifier_id">
|
||||
% for notifier in email_notifiers:
|
||||
<% selected = 'selected' if notifier['id'] == newsletter['email_config']['notifier_id'] else '' %>
|
||||
% if notifier['friendly_name']:
|
||||
<option value="${notifier['id']}" ${selected}>${notifier['agent_label']} (${notifier['id']} - ${notifier['friendly_name']})</option>
|
||||
% elif notifier['id']:
|
||||
<option value="${notifier['id']}" ${selected}>${notifier['agent_label']} (${notifier['id']})</option>
|
||||
% else:
|
||||
<option value="${notifier['id']}" ${selected}>${notifier['agent_label']}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Select an existing Email notification agent or enter a new configuration below.<br>
|
||||
Note: Make sure HTML support is enabled for the Email notification agent.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group" id="other_notifier_select">
|
||||
<label for="newsletter_config_notifier_id">Notification Agent</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="newsletter_config_notifier_id" name="newsletter_config_notifier_id">
|
||||
% for notifier in other_notifiers:
|
||||
<% selected = 'selected' if notifier['id'] == newsletter['config']['notifier_id'] else '' %>
|
||||
% if notifier['friendly_name']:
|
||||
<option value="${notifier['id']}" ${selected}>${notifier['agent_label']} (${notifier['id']} - ${notifier['friendly_name']})</option>
|
||||
% elif notifier['id']:
|
||||
<option value="${notifier['id']}" ${selected}>${notifier['agent_label']} (${notifier['id']})</option>
|
||||
% else:
|
||||
<option value="${notifier['id']}" ${selected}>${notifier['agent_label']}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Select an existing notification agent where the subject and body text will be sent.<br>
|
||||
Note: Self-hosted newsletters must be enabled under <a data-tab-destination="tabs-notifications" data-dismiss="modal" data-target="#newsletter_self_hosted">Newsletters</a> to include a link to the newsletter.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div id="newsletter-email-config" class="col-md-12" style="padding-top: 10px; border-top: 1px solid #444;">
|
||||
% for item in newsletter['email_config_options']:
|
||||
% if item['input_type'] == 'help':
|
||||
<div class="form-group">
|
||||
<label>${item['label']}</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'text' or item['input_type'] == 'password':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'number':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-3">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'button':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'checkbox' and item['name'] != 'newsletter_email_html_support':
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
|
||||
</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
% elif item['input_type'] == 'select':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
% for key, value in sorted(item['select_options'].iteritems()):
|
||||
% if key == item['value']:
|
||||
<option value="${key}" selected>${value}</option>
|
||||
% else:
|
||||
<option value="${key}">${value}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'selectize':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
<option value="select-all">Select All</option>
|
||||
<option value="remove-all">Remove All</option>
|
||||
% if isinstance(item['select_options'], dict):
|
||||
% for section, options in item['select_options'].iteritems():
|
||||
<optgroup label="${section}">
|
||||
% for option in sorted(options, key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
</optgroup>
|
||||
% endfor
|
||||
% else:
|
||||
<option value="border-all"></option>
|
||||
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
% endif
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% endif
|
||||
% endfor
|
||||
<input type="hidden" id="newsletter_email_html_support" name="newsletter_email_html_support" value="1">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_text">
|
||||
<label>Newsletter Text</label>
|
||||
<p class="help-block">
|
||||
Set the custom formatted text for each type of notification.
|
||||
<a href="#newsletter-text-sub-modal" data-toggle="modal">Click here</a> for a list of available parameters which can be used.
|
||||
</p>
|
||||
<p class="help-block">
|
||||
You can also add text modifiers to change the case or slice parameters with a list of items.
|
||||
<a href="#notify-text-modifiers-modal" data-toggle="modal">Click here</a> to view usage information.
|
||||
</p>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div class="form-group">
|
||||
<label for="subject">Subject</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="text" class="form-control" id="subject" name="subject" value="${newsletter['subject']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter a custom subject line for the newsletter. Leave blank for default.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group" id="newsletter_body">
|
||||
<label for="body">Body</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<textarea class="form-control" id="body" name="body" data-autoresize>${newsletter['body']}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter a custom body line for the newsletter notification. Leave blank for default.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="message">Message</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<textarea class="form-control" id="message" name="message" data-autoresize>${newsletter['message']}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter a custom message to include on the newsletter.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-test_newsletter">
|
||||
<label>Preview Newsletter</label>
|
||||
<p class="help-block">
|
||||
Preview the ${newsletter['agent_label']} newsletter.
|
||||
</p>
|
||||
<div class="form-group">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="preview_newsletter" name="preview_newsletter" value="Preview ${newsletter['agent_label']} Newsletter">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<label>Test Newsletter</label>
|
||||
<p class="help-block">
|
||||
Test if the ${newsletter['agent_label']} newsletter is working. Check the <a href="logs">logs</a> for troubleshooting.
|
||||
</p>
|
||||
<p class="help-block">
|
||||
Warning: This will send an actual newsletter to your notification agent!
|
||||
</p>
|
||||
<div class="form-group">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="test_newsletter" name="test_newsletter" value="Test ${newsletter['agent_label']} Newsletter">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<input type="button" id="delete-newsletter-item" class="btn btn-danger btn-edit" style="float:left;" value="Delete">
|
||||
<input type="button" id="duplicate-newsletter-item" class="btn btn-dark btn-edit" style="float:left;" value="Duplicate">
|
||||
<input type="button" id="save-newsletter-item" class="btn btn-bright" value="Save">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="${http_root}js/jquery-cron-min.js"></script>
|
||||
<script>
|
||||
|
||||
$('#newsletter-config-modal').unbind('hidden.bs.modal');
|
||||
|
||||
var cron_widget = $('#cron-widget').cron({
|
||||
initial: '0 0 * * 0',
|
||||
classes: 'form-control cron-select',
|
||||
onChange: function() {
|
||||
$("#cron_value").val($(this).cron('value'));
|
||||
}
|
||||
});
|
||||
|
||||
if (${newsletter['config']['custom_cron']}) {
|
||||
$('#cron_value').val('${newsletter['cron']}');
|
||||
} else {
|
||||
try {
|
||||
cron_widget.cron('value', '${newsletter['cron']}');
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
function toggleCustomCron() {
|
||||
if ($('#custom_cron').val() === '1'){
|
||||
$('#cron-widget').hide();
|
||||
$('#cron_value').show();
|
||||
$('#simple_cron_message').hide();
|
||||
$('#custom_cron_message').show();
|
||||
} else {
|
||||
$('#cron-widget').show();
|
||||
$('#cron_value').hide();
|
||||
$('#simple_cron_message').show();
|
||||
$('#custom_cron_message').hide();
|
||||
}
|
||||
}
|
||||
toggleCustomCron();
|
||||
|
||||
$('#custom_cron').change(function () {
|
||||
toggleCustomCron();
|
||||
});
|
||||
|
||||
function validateFilename() {
|
||||
var filename = $('#newsletter_config_filename').val();
|
||||
if (filename !== '' && !(filename.endsWith('.html'))) {
|
||||
showMsg('<i class="fa fa-times"></i> Failed to save newsletter. Invalid filename.', false, true, 5000, true);
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
var $incl_libraries = $('#newsletter_config_incl_libraries').selectize({
|
||||
plugins: ['remove_button'],
|
||||
maxItems: null,
|
||||
render: {
|
||||
option: function(item) {
|
||||
if (item.value.endsWith('-all')) {
|
||||
return '<div class="' + item.value + '">' + item.text + '</div>'
|
||||
}
|
||||
return '<div>' + item.text + '</div>';
|
||||
}
|
||||
},
|
||||
onItemAdd: function(value) {
|
||||
if (value === 'select-all') {
|
||||
var all_keys = $.map(this.options, function(option){
|
||||
return option.value.endsWith('-all') ? null : option.value;
|
||||
});
|
||||
this.setValue(all_keys);
|
||||
} else if (value === 'remove-all') {
|
||||
this.clear();
|
||||
this.refreshOptions();
|
||||
this.positionDropdown();
|
||||
}
|
||||
}
|
||||
});
|
||||
var incl_libraries = $incl_libraries[0].selectize;
|
||||
incl_libraries.setValue(${json.dumps(next((c['value'] for c in newsletter['config_options'] if c['name'] == 'newsletter_config_incl_libraries'), [])) | n});
|
||||
|
||||
function toggleEmailSelect () {
|
||||
if ($('#newsletter_config_formatted_checkbox').is(':checked')) {
|
||||
$('#newsletter_body').hide();
|
||||
$('#email_notifier_select').show();
|
||||
$('#other_notifier_select').hide();
|
||||
toggleNewEmailConfig();
|
||||
} else {
|
||||
$('#newsletter_body').show();
|
||||
$('#email_notifier_select').hide();
|
||||
$('#other_notifier_select').show();
|
||||
$('#newsletter-email-config').hide();
|
||||
}
|
||||
}
|
||||
toggleEmailSelect();
|
||||
|
||||
$('#newsletter_config_formatted_checkbox').change(function () {
|
||||
toggleEmailSelect();
|
||||
});
|
||||
|
||||
function toggleNewEmailConfig () {
|
||||
if ($('#newsletter_config_formatted_checkbox').is(':checked') && $('#newsletter_email_notifier_id').val() === '0') {
|
||||
$('#newsletter-email-config').show();
|
||||
} else {
|
||||
$('#newsletter-email-config').hide();
|
||||
}
|
||||
}
|
||||
toggleNewEmailConfig();
|
||||
|
||||
$('#newsletter_email_notifier_id').change(function () {
|
||||
toggleNewEmailConfig();
|
||||
});
|
||||
|
||||
var REGEX_EMAIL = '([a-z0-9!#$%&\'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&\'*+/=?^_`{|}~-]+)*@' +
|
||||
'(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)';
|
||||
var $email_selectors = $('#newsletter_email_to, #newsletter_email_cc, #newsletter_email_bcc').selectize({
|
||||
plugins: ['remove_button'],
|
||||
maxItems: null,
|
||||
render: {
|
||||
item: function(item, escape) {
|
||||
return '<div>' +
|
||||
(item.text ? '<span class="item-text">' + escape(item.text) + '</span>' : '') +
|
||||
(item.value ? '<span class="item-value">' + escape(item.value) + '</span>' : '') +
|
||||
'</div>';
|
||||
},
|
||||
option: function(item, escape) {
|
||||
var label = item.text || item.value;
|
||||
var caption = item.text ? item.value : null;
|
||||
if (item.value.endsWith('-all')) {
|
||||
return '<div class="' + item.value + '">' + escape(label) + '</div>'
|
||||
}
|
||||
return '<div>' +
|
||||
escape(label) +
|
||||
(caption ? '<span class="caption">' + escape(caption) + '</span>' : '') +
|
||||
'</div>';
|
||||
}
|
||||
},
|
||||
onItemAdd: function(value) {
|
||||
if (value === 'select-all') {
|
||||
var all_keys = $.map(this.options, function(option){
|
||||
return option.value.endsWith('-all') ? null : option.value;
|
||||
});
|
||||
this.setValue(all_keys);
|
||||
} else if (value === 'remove-all') {
|
||||
this.clear();
|
||||
this.refreshOptions();
|
||||
this.positionDropdown();
|
||||
}
|
||||
},
|
||||
createFilter: function(input) {
|
||||
var match, regex;
|
||||
|
||||
// email@address.com
|
||||
regex = new RegExp('^' + REGEX_EMAIL + '$', 'i');
|
||||
match = input.match(regex);
|
||||
if (match) return !this.options.hasOwnProperty(match[0]);
|
||||
|
||||
// user <email@address.com>
|
||||
regex = new RegExp('^([^<]*)\<' + REGEX_EMAIL + '\>$', 'i');
|
||||
match = input.match(regex);
|
||||
if (match) return !this.options.hasOwnProperty(match[2]);
|
||||
|
||||
return false;
|
||||
},
|
||||
create: function(input) {
|
||||
if ((new RegExp('^' + REGEX_EMAIL + '$', 'i')).test(input)) {
|
||||
return {value: input};
|
||||
}
|
||||
var match = input.match(new RegExp('^([^<]*)\<' + REGEX_EMAIL + '\>$', 'i'));
|
||||
if (match) {
|
||||
return {
|
||||
value : match[2],
|
||||
text : $.trim(match[1])
|
||||
};
|
||||
}
|
||||
return false;
|
||||
}
|
||||
});
|
||||
var email_to = $email_selectors[0].selectize;
|
||||
var email_cc = $email_selectors[1].selectize;
|
||||
var email_bcc = $email_selectors[2].selectize;
|
||||
email_to.setValue(${json.dumps(next((c['value'] for c in newsletter['email_config_options'] if c['name'] == 'newsletter_email_to'), [])) | n});
|
||||
email_cc.setValue(${json.dumps(next((c['value'] for c in newsletter['email_config_options'] if c['name'] == 'newsletter_email_cc'), [])) | n});
|
||||
email_bcc.setValue(${json.dumps(next((c['value'] for c in newsletter['email_config_options'] if c['name'] == 'newsletter_email_bcc'), [])) | n});
|
||||
|
||||
function reloadModal() {
|
||||
$.ajax({
|
||||
url: 'get_newsletter_config_modal',
|
||||
data: { newsletter_id: '${newsletter["id"]}' },
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
$('#newsletter-config-modal').html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function saveCallback(jqXHR) {
|
||||
if (jqXHR) {
|
||||
var result = $.parseJSON(jqXHR.responseText);
|
||||
var msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
|
||||
}
|
||||
}
|
||||
|
||||
getNewslettersTable();
|
||||
}
|
||||
|
||||
function deleteCallback() {
|
||||
$('#newsletter-config-modal').modal('hide');
|
||||
getNewslettersTable();
|
||||
}
|
||||
|
||||
function duplicateCallback(result) {
|
||||
// Set new newsletter id
|
||||
$('#newsletter_id').val(result.newsletter_id);
|
||||
// Clear friendly name
|
||||
$('#friendly_name').val("");
|
||||
|
||||
saveNewsletter();
|
||||
|
||||
$('#newsletter-config-modal').on('hidden.bs.modal', function () {
|
||||
loadNewsletterConfig(result.newsletter_id);
|
||||
});
|
||||
$('#newsletter-config-modal').modal('hide');
|
||||
}
|
||||
|
||||
function saveNewsletter() {
|
||||
// Trim all text inputs before saving
|
||||
$('input[type=text]').val(function(_, value) {
|
||||
return $.trim(value);
|
||||
});
|
||||
// Make sure simple cron value is set
|
||||
if ($('#custom_cron').val() === '0'){
|
||||
$("#cron_value").val(cron_widget.cron('value'));
|
||||
}
|
||||
if (validateFilename()){
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, true, saveCallback);
|
||||
}
|
||||
}
|
||||
|
||||
$('#delete-newsletter-item').click(function () {
|
||||
var msg = 'Are you sure you want to delete this <strong>${newsletter["agent_label"]}</strong> newsletter?';
|
||||
var url = 'delete_newsletter';
|
||||
confirmAjaxCall(url, msg, { newsletter_id: '${newsletter["id"]}' }, null, deleteCallback);
|
||||
});
|
||||
|
||||
$('#duplicate-newsletter-item').click(function() {
|
||||
var msg = 'Are you sure you want to duplicate this <strong>${newsletter["agent_label"]}</strong> newsletter?';
|
||||
var url = 'add_newsletter_config';
|
||||
confirmAjaxCall(url, msg, { agent_id: '${newsletter["agent_id"]}' }, null, duplicateCallback);
|
||||
});
|
||||
|
||||
$('#save-newsletter-item').click(function () {
|
||||
saveNewsletter();
|
||||
});
|
||||
|
||||
$('#preview_newsletter').click(function () {
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, false, previewNewsletter);
|
||||
});
|
||||
|
||||
$('#test_newsletter').click(function () {
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, false, sendTestNewsletter);
|
||||
});
|
||||
|
||||
function previewNewsletter() {
|
||||
showMsg('<i class="fa fa-check"></i> Check pop-up blocker if no response.', false, true, 2000);
|
||||
window.open('newsletter_preview?newsletter_id=' + $('#newsletter_id').val());
|
||||
}
|
||||
|
||||
function sendTestNewsletter() {
|
||||
showMsg('<i class="fa fa-refresh fa-spin"></i> Sending Newsletter', false);
|
||||
$.ajax({
|
||||
url: 'send_newsletter',
|
||||
data: {
|
||||
newsletter_id: $('#newsletter_id').val(),
|
||||
notify_action: 'test'
|
||||
},
|
||||
cache: false,
|
||||
async: true,
|
||||
success: function (data) {
|
||||
if (data.result === 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + data.message, false, true, 5000);
|
||||
} else {
|
||||
showMsg('<i class="fa fa-exclamation-circle"></i> ' + data.message, false, true, 5000, true);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
$("${', '.join(['#' + c['name'] for c in newsletter['config_options'] if c.get('refresh')])}").on('change', function () {
|
||||
// Reload modal to update certain fields
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, false, reloadModal);
|
||||
return false;
|
||||
});
|
||||
|
||||
// Never send checkbox values directly, always substitute value in hidden input.
|
||||
$('.checkboxes').click(function () {
|
||||
var configToggle = $(this).data('id');
|
||||
if ($(this).is(':checked')) {
|
||||
$('#'+configToggle).val(1);
|
||||
} else {
|
||||
$('#'+configToggle).val(0);
|
||||
}
|
||||
});
|
||||
|
||||
// auto resizing textarea for custom notification message body
|
||||
$('textarea[data-autoresize]').each(function () {
|
||||
var offset = this.offsetHeight - this.clientHeight;
|
||||
var resizeTextarea = function (el) {
|
||||
$(el).css('height', 'auto').css('height', el.scrollHeight + offset);
|
||||
};
|
||||
$(this).on('focus keyup input', function () { resizeTextarea(this); }).removeAttr('data-autoresize');
|
||||
});
|
||||
</script>
|
||||
% else:
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title" id="newsletter-config-modal-header">Error</h4>
|
||||
</div>
|
||||
<div class="modal-body" style="text-align: center">
|
||||
<strong>
|
||||
<i class="fa fa-exclamation-circle"></i> Failed to retrieve newsletter configuration. Check the <a href="logs">logs</a> for more info.
|
||||
</strong>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% endif
|
48
data/interfaces/default/newsletter_preview.html
Normal file
@@ -0,0 +1,48 @@
|
||||
<%
|
||||
import urllib
|
||||
%>
|
||||
<!doctype html>
|
||||
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Tautulli - ${title} | ${server_name}</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link href="${http_root}css/tautulli.css${cache_param}" rel="stylesheet">
|
||||
<style>
|
||||
* {
|
||||
padding: 0 !important;
|
||||
margin: 0 !important;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="loader" class="newsletter-loader-container">
|
||||
<div class="newsletter-loader-message">
|
||||
<div class="newsletter-loader"></div>
|
||||
<br>
|
||||
Generating Newsletter
|
||||
<br>
|
||||
Please wait, this may take a few minutes...
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="${http_root}js/jquery-2.1.4.min.js"></script>
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
var frame = $('<iframe></iframe>', {
|
||||
src: 'real_newsletter?${urllib.urlencode(kwargs) | n}',
|
||||
frameborder: '0',
|
||||
style: 'display: none; height: 100vh; width: 100vw;'
|
||||
});
|
||||
frame.on('load', function (e) {
|
||||
$(e.target).fadeIn();
|
||||
$('#loader').fadeOut();
|
||||
});
|
||||
$('body').append(frame);
|
||||
});
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
51
data/interfaces/default/newsletters_table.html
Normal file
@@ -0,0 +1,51 @@
|
||||
<%doc>
|
||||
USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE
|
||||
|
||||
For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/
|
||||
|
||||
Filename: newsletters_table.html
|
||||
Version: 0.1
|
||||
|
||||
DOCUMENTATION :: END
|
||||
</%doc>
|
||||
|
||||
<% from plexpy.newsletter_handler import NEWSLETTER_SCHED %>
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for newsletter in sorted(newsletters_list, key=lambda k: (k['agent_label'], k['friendly_name'], k['id'])):
|
||||
<li class="newsletter-agent" data-id="${newsletter['id']}">
|
||||
<span>
|
||||
<span class="toggle-left trigger-tooltip ${'active' if newsletter['active'] else ''}" data-toggle="tooltip" data-placement="top" title="Newsletter ${'active' if newsletter['active'] else 'inactive'}"><i class="fa fa-lg fa-newspaper-o"></i></span>
|
||||
% if newsletter['friendly_name']:
|
||||
${newsletter['agent_label']} <span class="friendly_name">(${newsletter['id']} - ${newsletter['friendly_name']})</span>
|
||||
% else:
|
||||
${newsletter['agent_label']} <span class="friendly_name">(${newsletter['id']})</span>
|
||||
% endif
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-cog"></i></span>
|
||||
<span class="toggle-right friendly_name" id="newsletter-next_run-${newsletter['id']}">
|
||||
% if NEWSLETTER_SCHED.get_job('newsletter-{}'.format(newsletter['id'])):
|
||||
<% job = NEWSLETTER_SCHED.get_job('newsletter-{}'.format(newsletter['id'])) %>
|
||||
<script>
|
||||
$("#newsletter-next_run-${newsletter['id']}").text(moment("${job.next_run_time}", "YYYY-MM-DD HH:mm:ssZ").fromNow())
|
||||
</script>
|
||||
% endif
|
||||
</span>
|
||||
</span>
|
||||
</li>
|
||||
% endfor
|
||||
<li class="add-newsletter-agent" id="add-newsletter-agent" data-target="#add-newsletter-modal" data-toggle="modal">
|
||||
<span>
|
||||
<span class="toggle-left"><i class="fa fa-lg fa-newspaper-o"></i></span> Add a new newsletter agent
|
||||
<span class="toggle-right"><i class="fa fa-lg fa-plus"></i></span>
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<script>
|
||||
// Load newsletter config modal
|
||||
$(".newsletter-agent").click(function () {
|
||||
var newsletter_id = $(this).data('id');
|
||||
loadNewsletterConfig(newsletter_id);
|
||||
});
|
||||
|
||||
$('.trigger-tooltip').tooltip();
|
||||
</script>
|
@@ -1,3 +1,4 @@
|
||||
% if notifier:
|
||||
<%!
|
||||
import json
|
||||
from plexpy import helpers, notifiers, users
|
||||
@@ -6,9 +7,6 @@
|
||||
user_emails = [{'user': u['friendly_name'] or u['username'], 'email': u['email']} for u in users.Users().get_users() if u['email']]
|
||||
sorted(user_emails, key=lambda u: u['user'])
|
||||
%>
|
||||
% if notifier:
|
||||
<link href="${http_root}css/selectize.bootstrap3.css" rel="stylesheet" />
|
||||
<link href="${http_root}css/selectize.min.css" rel="stylesheet" />
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
@@ -19,7 +17,7 @@
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<ul class="nav nav-tabs list-unstyled" role="tablist">
|
||||
<li role="presentation" class="active"><a href="#tabs-config" aria-controls="tabs-config" role="tab" data-toggle="tab">Configuration</a></li>
|
||||
<li role="presentation" class="active"><a href="#tabs-notifier_config" aria-controls="tabs-notifier_config" role="tab" data-toggle="tab">Configuration</a></li>
|
||||
<li role="presentation"><a href="#tabs-notify_triggers" aria-controls="tabs-notify_triggers" role="tab" data-toggle="tab">Triggers</a></li>
|
||||
<li role="presentation"><a href="#tabs-notify_conditions" aria-controls="tabs-notify_conditions" role="tab" data-toggle="tab">Conditions</a></li>
|
||||
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">${'Arguments' if notifier['agent_name'] == 'scripts' else 'Text'}</a></li>
|
||||
@@ -28,7 +26,7 @@
|
||||
</div>
|
||||
<form action="set_notifier_config" method="post" class="form" id="set_notifier_config" data-parsley-validate>
|
||||
<div class="tab-content">
|
||||
<div role="tabpanel" class="tab-pane active" id="tabs-config">
|
||||
<div role="tabpanel" class="tab-pane active" id="tabs-notifier_config">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="hidden" id="notifier_id" name="notifier_id" value="${notifier['id']}" />
|
||||
@@ -45,9 +43,6 @@
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
% if item['name'] == 'osx_notify_app':
|
||||
<a href="javascript:void(0)" id="osxnotifyregister">Register</a>
|
||||
% endif
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
@@ -151,7 +146,7 @@
|
||||
% for action in available_notification_actions:
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${action['name']}" class="checkboxes" value="1" ${helpers.checked(notifier['actions'][action['name']])}> Notify on ${action['label']}
|
||||
<input type="checkbox" data-id="${action['name']}" class="checkboxes" value="1" ${helpers.checked(notifier['actions'][action['name']])}> ${action['label']}
|
||||
</label>
|
||||
<p class="help-block">${action['description'] | n}</p>
|
||||
<input type="hidden" id="${action['name']}" name="${action['name']}" value="${notifier['actions'][action['name']]}">
|
||||
@@ -167,11 +162,11 @@
|
||||
<a href="#notify-text-sub-modal" data-toggle="modal">Click here</a> for a description of all the parameters.
|
||||
</p>
|
||||
<div id="condition-widget"></div>
|
||||
<input type="hidden" name="custom_conditions" id="custom_conditions" />
|
||||
<input type="hidden" id="custom_conditions" name="custom_conditions" />
|
||||
|
||||
<div class="form-group">
|
||||
<label for="custom_conditions_logic">Condition Logic</label>
|
||||
<input type="text" class="form-control" name="custom_conditions_logic" id="custom_conditions_logic" value="${notifier['custom_conditions_logic']}" required />
|
||||
<input type="text" class="form-control" name="custom_conditions_logic" id="custom_conditions_logic" value="${notifier['custom_conditions_logic']}" />
|
||||
<div id="custom_conditions_logic_error" class="alert alert-danger" role="alert" style="padding-top: 5px; padding-bottom: 5px; margin: 0; display: none;"><i class="fa fa-exclamation-triangle" style="color: #a94442;"></i> <span></span></div>
|
||||
<p class="help-block">
|
||||
Optional: Enter custom logic to use when evaluating the conditions (e.g. <span class="inline-pre">{1} and ({2} or {3})</span>).
|
||||
@@ -333,31 +328,16 @@
|
||||
$('#notifier-config-modal').unbind('hidden.bs.modal');
|
||||
|
||||
// Need this for setting conditions since conditions contain the character "
|
||||
$('#custom_conditions').val(${json.dumps(notifier["custom_conditions"]) | n});
|
||||
$('#custom_conditions').val(JSON.stringify(${json.dumps(notifier["custom_conditions"]) | n}));
|
||||
|
||||
$('#condition-widget').filterer({
|
||||
parameters: ${parameters | n},
|
||||
conditions: ${notifier["custom_conditions"] | n},
|
||||
parameters: ${json.dumps(parameters) | n},
|
||||
conditions: ${json.dumps(notifier["custom_conditions"]) | n},
|
||||
updateConditions: function(newConditions){
|
||||
$('#custom_conditions').val(JSON.stringify(newConditions));
|
||||
}
|
||||
});
|
||||
|
||||
function setNegativeOperator(select) {
|
||||
if (select.val() === 'does not contain' || select.val() === 'is not') {
|
||||
select.closest('.form-group').find('.react-selectize-search-field-and-selected-values').addClass('negative-operator');
|
||||
} else {
|
||||
select.closest('.form-group').find('.react-selectize-search-field-and-selected-values').removeClass('negative-operator');
|
||||
}
|
||||
}
|
||||
|
||||
$('#condition-widget select[name=operator]').each(function () {
|
||||
setNegativeOperator($(this));
|
||||
});
|
||||
$('#condition-widget').on('change', 'select[name=operator]', function () {
|
||||
setNegativeOperator($(this));
|
||||
});
|
||||
|
||||
function reloadModal() {
|
||||
$.ajax({
|
||||
url: 'get_notifier_config_modal',
|
||||
@@ -425,7 +405,7 @@
|
||||
$('#duplicate-notifier-item').click(function() {
|
||||
var msg = 'Are you sure you want to duplicate this <strong>${notifier["agent_label"]}</strong> notification agent?';
|
||||
var url = 'add_notifier_config';
|
||||
confirmAjaxCall(url, msg, { agent_id: "${notifier['agent_id']}" }, null, duplicateCallback);
|
||||
confirmAjaxCall(url, msg, { agent_id: '${notifier["agent_id"]}' }, null, duplicateCallback);
|
||||
});
|
||||
|
||||
$('#save-notifier-item').click(function () {
|
||||
@@ -433,16 +413,30 @@
|
||||
});
|
||||
|
||||
% if notifier['agent_name'] == 'facebook':
|
||||
if (location.protocol !== 'https:') {
|
||||
$('#tabs-config .form-group:first').prepend(
|
||||
'<div class="form-group">' +
|
||||
'<label>Warning</label>' +
|
||||
'<p class="help-block" style="color: #eb8600;">Facebook requires HTTPS for authorization. ' +
|
||||
'Please enable HTTPS for Tautulli under <a data-tab-destination="tabs-web_interface" data-dismiss="modal" data-target="#enable_https">Web Interface</a>.</p>' +
|
||||
'</div>'
|
||||
);
|
||||
$('#facebook_redirect_uri').val('HTTPS not enabled');
|
||||
|
||||
} else {
|
||||
$('#facebook_redirect_uri').val(location.href.split('/settings')[0] + '/facebook_redirect');
|
||||
}
|
||||
|
||||
function disableFacebookRequest() {
|
||||
if ($('#facebook_app_id').val() !== '' && $('#facebook_app_secret').val() !== '') { $('#facebook_facebookStep1').prop('disabled', false); }
|
||||
else { $('#facebook_facebookStep1').prop('disabled', true); }
|
||||
if ($('#facebook_app_id').val() !== '' && $('#facebook_app_secret').val() !== '') { $('#facebook_facebook_auth').prop('disabled', false); }
|
||||
else { $('#facebook_facebook_auth').prop('disabled', true); }
|
||||
}
|
||||
disableFacebookRequest();
|
||||
$('#facebook_app_id, #facebook_app_secret').on('change', function () {
|
||||
disableFacebookRequest();
|
||||
});
|
||||
|
||||
$('#facebook_facebookStep1').click(function () {
|
||||
$('#facebook_facebook_auth').click(function () {
|
||||
// Remove trailing '/' from Facebook redirect URI
|
||||
if ($('#facebook_redirect_uri') && $('#facebook_redirect_uri').val().endsWith('/')) {
|
||||
$('#facebook_redirect_uri').val($('#facebook_redirect_uri').val().slice(0, -1));
|
||||
@@ -450,7 +444,7 @@
|
||||
|
||||
var facebook_token;
|
||||
$.ajax({
|
||||
url: 'facebookStep1',
|
||||
url: 'facebook_auth',
|
||||
data: {
|
||||
app_id: $('#facebook_app_id').val(),
|
||||
app_secret: $('#facebook_app_secret').val(),
|
||||
@@ -508,7 +502,7 @@
|
||||
});
|
||||
|
||||
% elif notifier['agent_name'] == 'osx':
|
||||
$('#osxnotifyregister').click(function () {
|
||||
$('#osx_notify_register').click(function () {
|
||||
var osx_notify_app = $('#osx_notify_app').val();
|
||||
$.get('osxnotifyregister', { 'app': osx_notify_app }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
|
||||
});
|
||||
@@ -752,11 +746,12 @@
|
||||
});
|
||||
|
||||
function sendTestNotification() {
|
||||
showMsg('<i class="fa fa-refresh fa-spin"></i> Sending Notification', false);
|
||||
if ('${notifier["agent_name"]}' !== 'browser') {
|
||||
$.ajax({
|
||||
url: 'send_notification',
|
||||
data: {
|
||||
notifier_id: '${notifier["id"]}',
|
||||
notifier_id: $('#notifier_id').val(),
|
||||
subject: $('#test_subject').val(),
|
||||
body: $('#test_body').val(),
|
||||
script: $('#test_script').val(),
|
||||
@@ -765,13 +760,11 @@
|
||||
},
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
if (xhr.responseText.indexOf('sent') > -1) {
|
||||
msg = '<i class="fa fa-check"></i> ' + xhr.responseText;
|
||||
showMsg(msg, false, true, 2000);
|
||||
success: function (data) {
|
||||
if (data.result === 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + data.message, false, true, 5000);
|
||||
} else {
|
||||
msg = '<i class="fa fa-times"></i> ' + xhr.responseText;
|
||||
showMsg(msg, false, true, 2000, true);
|
||||
showMsg('<i class="fa fa-exclamation-circle"></i> ' + data.message, false, true, 5000, true);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@@ -10,7 +10,7 @@ DOCUMENTATION :: END
|
||||
</%doc>
|
||||
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for notifier in sorted(notifiers_list, key=lambda k: (k['agent_label'], k['friendly_name'], k['id'])):
|
||||
% for notifier in sorted(notifiers_list, key=lambda k: (k['agent_label'].lower(), k['friendly_name'], k['id'])):
|
||||
<li class="notification-agent" data-id="${notifier['id']}">
|
||||
<span>
|
||||
<span class="toggle-left trigger-tooltip ${'active' if notifier['active'] else ''}" data-toggle="tooltip" data-placement="top" title="Triggers ${'active' if notifier['active'] else 'inactive'}"><i class="fa fa-lg fa-bell"></i></span>
|
||||
|
@@ -28,15 +28,17 @@
|
||||
|
||||
<%def name="javascriptIncludes()">
|
||||
<script>
|
||||
var query_string = "${query.replace('"','\\"').replace('/','\\/') | n}";
|
||||
|
||||
$('#search_button').removeClass('btn-inactive');
|
||||
$('#query').val("${query.replace('"','\\"') | n}").css({ right: '0', width: '250px' }).addClass('active');
|
||||
$('#query').val(query_string).css({ right: '0', width: '250px' }).addClass('active');
|
||||
|
||||
$.ajax({
|
||||
url: 'get_search_results_children',
|
||||
type: "GET",
|
||||
type: "POST",
|
||||
async: true,
|
||||
data: {
|
||||
query: "${query.replace('"','\\"') | n}",
|
||||
query: query_string,
|
||||
limit: 30
|
||||
},
|
||||
complete: function (xhr, status) {
|
||||
|
@@ -4,10 +4,11 @@
|
||||
import sys
|
||||
|
||||
import plexpy
|
||||
from plexpy import common, notifiers
|
||||
from plexpy import common, notifiers, newsletters
|
||||
from plexpy.helpers import anon_url, checked
|
||||
|
||||
available_notification_agents = sorted(notifiers.available_notification_agents(), key=lambda k: k['label'])
|
||||
available_notification_agents = sorted(notifiers.available_notification_agents(), key=lambda k: k['label'].lower())
|
||||
available_newsletter_agents = sorted(newsletters.available_newsletter_agents(), key=lambda k: k['label'].lower())
|
||||
%>
|
||||
<%def name="headIncludes()">
|
||||
</%def>
|
||||
@@ -49,8 +50,9 @@
|
||||
<li role="presentation"><a href="#tabs-homepage" aria-controls="tabs-homepage" role="tab" data-toggle="tab">Homepage</a></li>
|
||||
<li role="presentation"><a href="#tabs-web_interface" aria-controls="tabs-web_interface" role="tab" data-toggle="tab">Web Interface</a></li>
|
||||
<li role="presentation"><a href="#tabs-plex_media_server" aria-controls="tabs-plex_media_server" role="tab" data-toggle="tab">Plex Media Server</a></li>
|
||||
<li role="presentation"><a href="#tabs-notifications" aria-controls="tabs-notifications" role="tab" data-toggle="tab">Notifications</a></li>
|
||||
<li role="presentation"><a href="#tabs-notifications" aria-controls="tabs-notifications" role="tab" data-toggle="tab">Notifications & Newsletters</a></li>
|
||||
<li role="presentation"><a href="#tabs-notification_agents" aria-controls="tabs-notification_agents" role="tab" data-toggle="tab">Notification Agents</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_agents" aria-controls="tabs-newsletter_agents" role="tab" data-toggle="tab">Newsletter Agents</a></li>
|
||||
<li role="presentation"><a href="#tabs-import_backups" aria-controls="tabs-import_backups" role="tab" data-toggle="tab">Import & Backups</a></li>
|
||||
<li role="presentation"><a href="#tabs-android_app" aria-controls="tabs-android_app" role="tab" data-toggle="tab">Tautulli Remote Android App <sup><small>beta</small></sup></a></li>
|
||||
</ul>
|
||||
@@ -267,6 +269,21 @@
|
||||
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-homepage">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Activity</h3>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="home_refresh_interval">Activity Refresh Interval</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="home_refresh_interval" name="home_refresh_interval" value="${config['home_refresh_interval']}" size="5" data-parsley-min="2" data-parsley-trigger="change" data-parsley-errors-container="#home_refresh_interval_error" required>
|
||||
</div>
|
||||
<div id="home_refresh_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">Set the interval (in seconds) to refresh the current activity on the homepage. Minimum 2.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Sections</h3>
|
||||
</div>
|
||||
@@ -438,6 +455,18 @@
|
||||
</div>
|
||||
<p class="help-block">Port to bind web server to. Note that ports below 1024 may require root.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="http_base_url">Public Tautulli Domain</label>
|
||||
<div class="row">
|
||||
<div class="col-md-8">
|
||||
<input type="text" class="form-control" id="http_base_url" name="http_base_url" value="${config['http_base_url']}" placeholder="http://mydomain.com" data-parsley-trigger="change" data-parsley-pattern="^https?:\/\/\S+$" data-parsley-errors-container="#http_base_url_error" data-parsley-error-message="Invalid URL">
|
||||
</div>
|
||||
<div id=http_base_url_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Set your public Tautulli domain for self-hosted notification images and newsletters. (e.g. http://mydomain.com)
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="http_root">HTTP Root</label>
|
||||
<div class="row">
|
||||
@@ -554,7 +583,7 @@
|
||||
<label>
|
||||
<input type="checkbox" name="http_hash_password" id="http_hash_password" value="1" ${config['http_hash_password']} data-parsley-trigger="change"> Hash Password in the Config File
|
||||
</label>
|
||||
<span id="hashPasswordCheck" style="color: #eb8600; padding-left: 10px;"></span>
|
||||
<span id="hashPasswordCheck" class="settings-warning"></span>
|
||||
<p class="help-block">Store a hashed password in the config file.<br />Warning: Your password cannot be recovered if forgotten!</p>
|
||||
</div>
|
||||
<input type="text" id="http_hashed_password" name="http_hashed_password" value="${config['http_hashed_password']}" style="display: none;" data-parsley-trigger="change" data-parsley-type="integer" data-parsley-range="[0, 1]"
|
||||
@@ -572,14 +601,14 @@
|
||||
<label>
|
||||
<input type="checkbox" class="auth-settings" name="http_plex_admin" id="http_plex_admin" value="1" ${config['http_plex_admin']} data-parsley-trigger="change"> Allow Plex Admin
|
||||
</label>
|
||||
<span id="allowPlexCheck" style="color: #eb8600; padding-left: 10px;"></span>
|
||||
<span id="allowPlexCheck" class="settings-warning"></span>
|
||||
<p class="help-block">Allow the Plex server admin to login as a Tautulli admin using their Plex.tv account.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="allow_guest_access" name="allow_guest_access" value="1" ${config['allow_guest_access']}> Allow Guest Access to Tautulli
|
||||
</label>
|
||||
<span id="allowGuestCheck" style="color: #eb8600; padding-left: 10px;"></span>
|
||||
<span id="allowGuestCheck" class="settings-warning"></span>
|
||||
<p class="help-block">Allow shared users to login to Tautulli using their Plex.tv account. Individual user access needs to be enabled from Users > Edit Mode.</p>
|
||||
</div>
|
||||
|
||||
@@ -642,7 +671,7 @@
|
||||
<label for="pms_port">Plex Port</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input data-parsley-type="integer" class="pms-settings form-control" type="text" id="pms_port" name="pms_port" value="${config['pms_port']}" size="30" data-parsley-trigger="change" data-parsley-errors-container="#pms_port_error" required>
|
||||
<input data-parsley-type="integer" class="form-control pms-settings" type="text" id="pms_port" name="pms_port" value="${config['pms_port']}" size="30" data-parsley-trigger="change" data-parsley-errors-container="#pms_port_error" required>
|
||||
</div>
|
||||
<div id="pms_port_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
@@ -650,31 +679,53 @@
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle" data-id="pms_is_remote" value="1" ${checked(config['pms_is_remote'])}> Remote Server
|
||||
<input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${checked(config['pms_is_remote'])}> Remote Server
|
||||
<input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
|
||||
</label>
|
||||
<p class="help-block">Check this if your Plex Server is not on the same local network as Tautulli.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle" data-id="pms_ssl" value="1" ${checked(config['pms_ssl'])}> Use SSL
|
||||
<input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle pms-settings" data-id="pms_ssl" value="1" ${checked(config['pms_ssl'])}> Use SSL
|
||||
<input type="hidden" id="pms_ssl" name="pms_ssl" value="${config['pms_ssl']}">
|
||||
</label>
|
||||
<p class="help-block">If you have secure connections enabled on your Plex Server, communicate with it securely.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="pms_url">Plex Server URL</label>
|
||||
<div class="row">
|
||||
<div class="col-md-9">
|
||||
<input type="text" class="form-control" id="pms_url" name="pms_url" value="${config['pms_url']}" size="30" readonly>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
The server URL that Tautulli will use to connect to your Plex server. Retrieved automatically.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="pms_url">Plex Server Identifier</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}" size="30" readonly>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
The unique identifier for your Plex server. Retrieved automatically.
|
||||
</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" id="pms_url_manual" name="pms_url_manual" value="1" ${config['pms_url_manual']}> Manual Connection
|
||||
<input type="checkbox" class="pms-settings" id="pms_url_manual" name="pms_url_manual" value="1" ${config['pms_url_manual']}> Manual Connection
|
||||
</label>
|
||||
<span id="cloudManualConnection" style="display: none; color: #eb8600; padding-left: 10px;"> Not available for Plex Cloud servers.</span>
|
||||
<p class="help-block">Use the user defined connection details. Do not retrieve the server connection URL automatically.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="pms_logs_folder">Plex Web URL</label>
|
||||
<label for="pms_web_url">Plex Web URL</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<div class="col-md-9">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control" id="pms_web_url" name="pms_web_url" value="${config['pms_web_url']}" size="30" data-parsley-trigger="change" data-parsley-pattern="^https?:\/\/\S+$|^https:\/\/app.plex.tv\/desktop$" data-parsley-errors-container="#pms_web_url_error" data-parsley-error-message="Invalid Plex Web URL.">
|
||||
<input type="text" class="form-control" id="pms_web_url" name="pms_web_url" value="${config['pms_web_url']}" size="30" data-parsley-trigger="change" data-parsley-pattern="^https?:\/\/\S+$|^https:\/\/app.plex.tv\/desktop$" data-parsley-errors-container="#pms_web_url_error" data-parsley-error-message="Invalid Plex Web URL">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="test_pms_web_button">Test URL</button>
|
||||
</span>
|
||||
@@ -688,7 +739,6 @@
|
||||
</div>
|
||||
|
||||
<input type="hidden" id="pms_is_cloud" name="pms_is_cloud" value="${config['pms_is_cloud']}">
|
||||
<input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
<input type="checkbox" name="server_changed" id="server_changed" value="1" style="display: none;">
|
||||
|
||||
<div class="form-group advanced-setting">
|
||||
@@ -750,7 +800,7 @@
|
||||
<input type="checkbox" id="monitor_remote_access" name="monitor_remote_access" value="1" ${config['monitor_remote_access']}> Monitor Plex Remote Access
|
||||
</label>
|
||||
<span id="cloudMonitorRemoteAccess" style="display: none; color: #eb8600; padding-left: 10px;"> Not available for Plex Cloud servers.</span>
|
||||
<span id="remoteAccessCheck" style="color: #eb8600; padding-left: 10px;"></span>
|
||||
<span id="remoteAccessCheck" class="settings-warning"></span>
|
||||
<p class="help-block">Enable to have Tautulli check if remote access to the Plex Media Server goes down.</p>
|
||||
</div>
|
||||
|
||||
@@ -896,7 +946,7 @@
|
||||
</div>
|
||||
<!--<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_recently_added_upgrade" id="notify_recently_added_upgrade" value="1" ${config['notify_recently_added_upgrade']}> Send a Notification for New Versions <span style="color: #eb8600; padding-left: 10px;">[Not working]</span>
|
||||
<input type="checkbox" name="notify_recently_added_upgrade" id="notify_recently_added_upgrade" value="1" ${config['notify_recently_added_upgrade']}> Send a Notification for New Versions <span class="settings-warning">[Not working]</span>
|
||||
</label>
|
||||
<p class="help-block">
|
||||
Enable to send another recently added notification when adding a new version of existing media.<br />
|
||||
@@ -905,16 +955,71 @@
|
||||
</div>-->
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>3rd Party APIs</h3>
|
||||
<h3>Newsletters</h3>
|
||||
</div>
|
||||
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_upload_posters" id="notify_upload_posters" value="1" ${config['notify_upload_posters']}> Upload Posters to Imgur for Notifications
|
||||
<input type="checkbox" id="newsletter_self_hosted" name="newsletter_self_hosted" value="1" ${config['newsletter_self_hosted']}> Self-Hosted Newsletters
|
||||
</label>
|
||||
<p class="help-block">Enable to upload Plex posters to Imgur for notifications. Disable if posters are not being used to save bandwidth.</p>
|
||||
<p class="help-block">Enable to host newsletters on your own domain. This will generate a link to an HTML page where you can view the newsletter.</p>
|
||||
</div>
|
||||
<div id="imgur_upload_options">
|
||||
<div id="self_host_newsletter_options" style="overlfow: hidden; display: ${'block' if config['newsletter_self_hosted'] == 'checked' else 'none'}">
|
||||
<p class="help-block" id="self_host_newsletter_message">
|
||||
Note: The <span class="inline-pre">${http_root}newsletter</span> endpoint on your domain must be publicly accessible from the internet.
|
||||
</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_static_url" name="newsletter_static_url" value="1" ${config['newsletter_static_url']}> Enable Static Newsletter URL
|
||||
</label>
|
||||
<p class="help-block">Enable static newsletter URLs to the last sent scheduled newsletter at <span class="inline-pre">${http_root}newsletter/id/<newsletter_id></span>.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Custom Newsletter Templates Folder</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_custom_dir" name="newsletter_custom_dir" value="${config['newsletter_custom_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter the full path to your custom newsletter templates folder. Leave blank for default.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Newsletter Output Directory</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_dir" name="newsletter_dir" value="${config['newsletter_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Enter the full path to where newsletter files will be saved.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>3rd Party APIs</h3>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="notify_upload_posters">Image Hosting</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<select class="form-control" id="notify_upload_posters" name="notify_upload_posters">
|
||||
<option value="0" ${'selected' if config['notify_upload_posters'] == 0 else ''}>Disabled</option>
|
||||
<option value="1" ${'selected' if config['notify_upload_posters'] == 1 else ''}>Imgur</option>
|
||||
<option value="3" ${'selected' if config['notify_upload_posters'] == 3 else ''}>Cloudinary</option>
|
||||
<option value="2" ${'selected' if config['notify_upload_posters'] == 2 else ''}>Self-hosted on public Tautulli domain</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Select where to host Plex images for notifications and newsletters.</p>
|
||||
</div>
|
||||
<div id="imgur_upload_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 1 else 'block'}">
|
||||
<p class="help-block" id="imgur_upload_message">
|
||||
You can register a new Imgur application <a href="${anon_url('https://api.imgur.com/oauth2/addclient')}" target="_blank">here</a>.<br>
|
||||
Warning: Imgur uploads are rate-limited and newsletters may exceed the limit. Please use Cloudinary for newsletters instead.
|
||||
</p>
|
||||
<div class="form-group">
|
||||
<label for="imgur_client_id">Imgur Client ID</label>
|
||||
<div class="row">
|
||||
@@ -922,9 +1027,49 @@
|
||||
<input type="text" class="form-control" id="imgur_client_id" name="imgur_client_id" value="${config['imgur_client_id']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Enter your Imgur API Client ID.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div id="self_host_image_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 2 else 'block'}">
|
||||
<p class="help-block" id="self_host_image_message">Note: The <span class="inline-pre">${http_root}image</span> endpoint on your domain must be publicly accessible from the internet.</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
</div>
|
||||
<div id="cloudinary_upload_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 3 else 'block'}">
|
||||
<p class="help-block" id="imgur_upload_message">
|
||||
You can sign up for Cloudinary <a href="${anon_url('https://cloudinary.com')}" target="_blank">here</a>.<br>
|
||||
</p>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_cloud_name">Cloudinary Cloud Name</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_cloud_name" name="cloudinary_cloud_name" value="${config['cloudinary_cloud_name']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Imgur API client ID in order to upload posters.
|
||||
You can register a new application <a href="${anon_url('https://api.imgur.com/oauth2/addclient')}" target="_blank">here</a>.<br />
|
||||
Enter your Cloudinary Cloud Name.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_api_key">Cloudinary API Key</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_api_key" name="cloudinary_api_key" value="${config['cloudinary_api_key']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary API Key.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_api_secret">Cloudinary API Secret</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_api_secret" name="cloudinary_api_secret" value="${config['cloudinary_api_secret']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary API Secret.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
@@ -953,6 +1098,9 @@
|
||||
<p class="help-block">
|
||||
Add a new notification agent, or configure an existing notification agent by clicking the settings icon on the right.
|
||||
</p>
|
||||
<p class="help-block">
|
||||
Please see the <a target='_blank' href='${anon_url('https://github.com/%s/%s-Wiki/wiki/Notification-Agents-Guide' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO))}'>Notification Agents Guide</a> for instructions on setting up each notification agent.
|
||||
</p>
|
||||
<br />
|
||||
<div id="plexpy-notifiers-table">
|
||||
<div class='text-muted'><i class="fa fa-refresh fa-spin"></i> Loading notification agents...</div>
|
||||
@@ -961,13 +1109,33 @@
|
||||
|
||||
</div>
|
||||
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_agents">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Newsletter Agents</h3>
|
||||
</div>
|
||||
|
||||
<p class="help-block">
|
||||
Add a new newsletter agent, or configure an existing newsletter agent by clicking the settings icon on the right.
|
||||
</p>
|
||||
<p class="help-block settings-warning" id="newsletter_upload_warning">
|
||||
Warning: <a data-tab-destination="tabs-notifications" data-target="#notify_upload_posters">Image Hosting</a> must be enabled for images to display on the newsletter.</span>
|
||||
</p>
|
||||
<br/>
|
||||
<div id="plexpy-newsletters-table">
|
||||
<div class='text-muted'><i class="fa fa-refresh fa-spin"></i> Loading newsletter agents...</div>
|
||||
<br>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-import_backups">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Database Import</h3>
|
||||
</div>
|
||||
|
||||
<p class="help-block">Click a button below to import an exisiting database from another app.</p>
|
||||
<p class="help-block">Click a button below to import an existing database from another app.</p>
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="plexwatch">PlexWatch</button>
|
||||
<button class="btn btn-form toggle-app-import-modal" type="button" data-target="#app-import-modal" data-toggle="modal" data-app="plexivity">Plexivity</button>
|
||||
@@ -1005,6 +1173,17 @@
|
||||
<h3>Directories</h3>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="log_dir">Log Directory</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control directory-settings" id="log_dir" name="log_dir" value="${config['log_dir']}">
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form" type="button" id="clear_logs">Clear Logs</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="backup_dir">Backup Directory</label>
|
||||
<div class="row">
|
||||
@@ -1029,17 +1208,6 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="log_dir">Log Directory</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control directory-settings" id="log_dir" name="log_dir" value="${config['log_dir']}">
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form" type="button" id="clear_logs">Clear Logs</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
|
||||
|
||||
@@ -1062,8 +1230,8 @@
|
||||
</div>
|
||||
<p class="form-group">
|
||||
<label>Registered Devices</label>
|
||||
<p class="help-block">Register a new device, or configure an existing device by clicking the settings icon on the right.</p>
|
||||
<p id="app_api_msg" style="color: #eb8600;">The API must be enabled under <a data-tab-destination="tabs-access_control" style="cursor: pointer;">Access Control</a> to use the app.</p>
|
||||
<p class="help-block">Register a new device using a QR code, or configure an existing device by clicking the settings icon on the right.</p>
|
||||
<p id="app_api_msg" style="color: #eb8600;">The API must be enabled under <a data-tab-destination="tabs-web_interface" data-target="#api_enabled">Web Interface</a> to use the app.</p>
|
||||
<div class="row">
|
||||
<div id="plexpy-mobile-devices-table" class="col-md-12">
|
||||
<div class='text-muted'><i class="fa fa-refresh fa-spin"></i> Loading registered devices...</div>
|
||||
@@ -1214,7 +1382,7 @@
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for agent in available_notification_agents:
|
||||
% for agent in sorted(available_notification_agents, key=lambda k: k['label'].lower()):
|
||||
<li class="new-notification-agent" data-id="${agent['id']}">
|
||||
<span>${agent['label']}</span>
|
||||
</li>
|
||||
@@ -1230,7 +1398,36 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="add-newsletter-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="add-newsletter-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">Add a Newsletter Agent</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<ul class="stacked-configs list-unstyled">
|
||||
% for agent in available_newsletter_agents:
|
||||
<li class="new-newsletter-agent" data-id="${agent['id']}">
|
||||
<span>${agent['label']}</span>
|
||||
</li>
|
||||
% endfor
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<input type="button" class="btn btn-bright" data-dismiss="modal" value="Cancel">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="notifier-config-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="notifier-config-modal"></div>
|
||||
<div id="newsletter-config-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="newsletter-config-modal"></div>
|
||||
<div id="notify-text-sub-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="notify-text-sub-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
@@ -1386,6 +1583,53 @@
|
||||
</div>
|
||||
<div id="notifier-text-preview-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="notifier-text-preview-modal">
|
||||
</div>
|
||||
<div id="newsletter-text-sub-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="newsletter-text-sub-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">
|
||||
<i class="fa fa-remove"></i>
|
||||
</button>
|
||||
<h4 class="modal-title">Newsletter Parameters</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div>
|
||||
<p class="help-block">
|
||||
If the value for a selected parameter cannot be provided, it will display as blank.
|
||||
</p>
|
||||
% for category in common.NEWSLETTER_PARAMETERS:
|
||||
<table class="notification-params">
|
||||
<thead>
|
||||
<tr>
|
||||
<th colspan="2">
|
||||
${category['category']}
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
% for parameter in category['parameters']:
|
||||
<tr>
|
||||
<td><strong>{${parameter['value']}}</strong></td>
|
||||
<td>
|
||||
${parameter['description']}
|
||||
% if parameter.get('example'):
|
||||
<span class="small-muted">(${parameter['example']})</span>
|
||||
% endif
|
||||
% if parameter.get('help_text'):
|
||||
<p class="small-muted">(${parameter['help_text']})</p>
|
||||
% endif
|
||||
</td>
|
||||
</tr>
|
||||
% endfor
|
||||
</tbody>
|
||||
</table>
|
||||
% endfor
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="changelog-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="changelog-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
@@ -1520,6 +1764,29 @@
|
||||
});
|
||||
}
|
||||
|
||||
function getNewslettersTable() {
|
||||
$.ajax({
|
||||
url: 'get_newsletters_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-newsletters-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function loadNewsletterConfig(newsletter_id) {
|
||||
$.ajax({
|
||||
url: 'get_newsletter_config_modal',
|
||||
data: { newsletter_id: newsletter_id },
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
$("#newsletter-config-modal").html(xhr.responseText).modal('show');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getMobileDevicesTable() {
|
||||
$.ajax({
|
||||
url: 'get_mobile_devices_table',
|
||||
@@ -1573,7 +1840,7 @@ $(document).ready(function() {
|
||||
}
|
||||
|
||||
function preSaveChecks(_callback) {
|
||||
if ($("#pms_identifier").val() == "") {
|
||||
if (serverChanged) {
|
||||
verifyServer();
|
||||
}
|
||||
verifyPMSWebURL();
|
||||
@@ -1585,13 +1852,14 @@ $(document).ready(function() {
|
||||
|
||||
// Alert the user that their changes require a restart.
|
||||
function postSaveChecks() {
|
||||
if (serverChanged || authChanged || httpChanged || directoryChanged) {
|
||||
if (authChanged || httpChanged || directoryChanged) {
|
||||
$('#restart-modal').modal('show');
|
||||
}
|
||||
$("#http_hashed_password").val($("#http_hash_password").is(":checked") ? 1 : 0);
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
getNotifiersTable();
|
||||
getNewslettersTable();
|
||||
getMobileDevicesTable();
|
||||
loadUpdateDistros();
|
||||
settingsChanged = false;
|
||||
@@ -1628,9 +1896,9 @@ $(document).ready(function() {
|
||||
initConfigCheckbox('#enable_https');
|
||||
initConfigCheckbox('#https_create_cert');
|
||||
initConfigCheckbox('#check_github');
|
||||
initConfigCheckbox('#notify_upload_posters');
|
||||
initConfigCheckbox('#monitor_pms_updates');
|
||||
|
||||
initConfigCheckbox('#newsletter_self_hosted');
|
||||
|
||||
$('#menu_link_shutdown').click(function() {
|
||||
$('#confirm-message').text("Are you sure you want to shutdown Tautulli?");
|
||||
$('#confirm-modal').modal();
|
||||
@@ -1675,6 +1943,7 @@ $(document).ready(function() {
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
getNotifiersTable();
|
||||
getNewslettersTable();
|
||||
getMobileDevicesTable();
|
||||
|
||||
$('#changelog-modal-link').on('click', function (e) {
|
||||
@@ -1769,9 +2038,8 @@ $(document).ready(function() {
|
||||
|
||||
$( ".pms-settings" ).change(function() {
|
||||
serverChanged = true;
|
||||
$("#pms_identifier").val("");
|
||||
$("#server_changed").prop('checked', true);
|
||||
verifyServer();
|
||||
$("#pms_verify").hide();
|
||||
});
|
||||
|
||||
$('.checkbox-toggle').click(function () {
|
||||
@@ -1841,7 +2109,11 @@ $(document).ready(function() {
|
||||
$('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
|
||||
$('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);
|
||||
$('#pms_url_manual').prop('checked', false);
|
||||
$('#pms_url').val('Please verify your server above to retrieve the URL');
|
||||
PMSCloudCheck();
|
||||
},
|
||||
onDropdownOpen: function() {
|
||||
this.clear();
|
||||
}
|
||||
});
|
||||
var select_pms = $select_pms[0].selectize;
|
||||
@@ -1906,6 +2178,7 @@ $(document).ready(function() {
|
||||
var pms_identifier = $("#pms_identifier").val();
|
||||
var pms_ssl = $("#pms_ssl").val();
|
||||
var pms_is_remote = $("#pms_is_remote").val();
|
||||
var pms_url_manual = $("#pms_url_manual").is(':checked') ? 1 : 0;
|
||||
|
||||
if (($("#pms_ip").val() !== '') || ($("#pms_port").val() !== '')) {
|
||||
$("#pms_verify").html('<i class="fa fa-refresh fa-spin"></i>').fadeIn('fast');
|
||||
@@ -1914,9 +2187,11 @@ $(document).ready(function() {
|
||||
data: {
|
||||
hostname: pms_ip,
|
||||
port: pms_port,
|
||||
identifier: pms_identifier,
|
||||
ssl: pms_ssl,
|
||||
remote: pms_is_remote
|
||||
remote: pms_is_remote,
|
||||
manual: pms_url_manual,
|
||||
get_url: true,
|
||||
test_websocket: true
|
||||
},
|
||||
cache: true,
|
||||
async: true,
|
||||
@@ -1925,12 +2200,27 @@ $(document).ready(function() {
|
||||
$("#pms_verify").html('<i class="fa fa-close"></i>').fadeIn('fast');
|
||||
$("#pms_ip_group").addClass("has-error");
|
||||
},
|
||||
success: function (json) {
|
||||
var machine_identifier = json;
|
||||
if (machine_identifier) {
|
||||
$("#pms_identifier").val(machine_identifier);
|
||||
$("#pms_verify").html('<i class="fa fa-check"></i>').fadeIn('fast');
|
||||
$("#pms_ip_group").removeClass("has-error");
|
||||
success: function(xhr, status) {
|
||||
var result = xhr;
|
||||
var identifier = result.identifier;
|
||||
var url = result.url;
|
||||
var ws = result.ws;
|
||||
if (identifier) {
|
||||
$("#pms_identifier").val(identifier);
|
||||
|
||||
if (url) {
|
||||
$("#pms_url").val(url);
|
||||
}
|
||||
|
||||
if (ws === false) {
|
||||
$("#pms_verify").html('<i class="fa fa-close"></i>').fadeIn('fast');
|
||||
$("#pms_ip_group").addClass("has-error");
|
||||
showMsg('<i class="fa fa-exclamation-circle"></i> Server found but unable to connect websocket.<br>Check the <a href="logs">logs</a> for errors.', false, true, 5000, true)
|
||||
} else {
|
||||
$("#pms_verify").html('<i class="fa fa-check"></i>').fadeIn('fast');
|
||||
$("#pms_ip_group").removeClass("has-error");
|
||||
serverChanged = false;
|
||||
}
|
||||
|
||||
if (_callback) {
|
||||
_callback();
|
||||
@@ -1950,7 +2240,6 @@ $(document).ready(function() {
|
||||
}
|
||||
|
||||
$('#verify_server_button').on('click', function(){
|
||||
$("#pms_identifier").val("");
|
||||
verifyServer();
|
||||
});
|
||||
|
||||
@@ -2272,6 +2561,32 @@ $(document).ready(function() {
|
||||
});
|
||||
});
|
||||
|
||||
// Add a new newsletter agent
|
||||
$('.new-newsletter-agent').click(function () {
|
||||
$.ajax({
|
||||
url: 'add_newsletter_config',
|
||||
data: { agent_id: $(this).data('id') },
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
$('#add-newsletter-modal').modal('hide');
|
||||
if (result.result == 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000);
|
||||
loadNewsletterConfig(result.newsletter_id);
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true);
|
||||
}
|
||||
getNewslettersTable();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
$('#http_base_url').change(function () {
|
||||
$(this).val($(this).val().replace(/\/*$/, ''));
|
||||
});
|
||||
|
||||
function apiEnabled() {
|
||||
var api_enabled = $('#api_enabled').prop('checked');
|
||||
$('#app_api_msg').toggle(!(api_enabled));
|
||||
@@ -2281,9 +2596,67 @@ $(document).ready(function() {
|
||||
apiEnabled();
|
||||
});
|
||||
|
||||
function imageUpload() {
|
||||
var upload_val = $('#notify_upload_posters').val();
|
||||
if (upload_val === '1') {
|
||||
$('#imgur_upload_options').slideDown();
|
||||
} else {
|
||||
$('#imgur_upload_options').slideUp();
|
||||
}
|
||||
if (upload_val === '2') {
|
||||
$('#self_host_image_options').slideDown();
|
||||
} else {
|
||||
$('#self_host_image_options').slideUp();
|
||||
}
|
||||
if (upload_val === '3') {
|
||||
$('#cloudinary_upload_options').slideDown();
|
||||
} else {
|
||||
$('#cloudinary_upload_options').slideUp();
|
||||
}
|
||||
}
|
||||
$('#notify_upload_posters').change(function () {
|
||||
imageUpload();
|
||||
});
|
||||
|
||||
function baseURLSet() {
|
||||
if ($('#http_base_url').val()) {
|
||||
$('.base-url-warning').hide();
|
||||
} else {
|
||||
$('.base-url-warning').show();
|
||||
}
|
||||
}
|
||||
baseURLSet();
|
||||
|
||||
$('#http_base_url').change(function () {
|
||||
baseURLSet();
|
||||
});
|
||||
|
||||
function newsletterUploadEnabled() {
|
||||
if ($('#notify_upload_posters').val() === '0') {
|
||||
$('#newsletter_upload_warning').show();
|
||||
} else {
|
||||
$('#newsletter_upload_warning').hide();
|
||||
}
|
||||
}
|
||||
newsletterUploadEnabled();
|
||||
|
||||
$('#notify_upload_posters, #newsletter_self_hosted').change(function () {
|
||||
baseURLSet();
|
||||
newsletterUploadEnabled();
|
||||
});
|
||||
|
||||
$('body').on('click', 'a[data-tab-destination]', function () {
|
||||
var tab = $(this).data('tab-destination');
|
||||
$("a[href=#" + tab + "]").click();
|
||||
var scroll_destination = $(this).data('target');
|
||||
if (scroll_destination) {
|
||||
if ($(scroll_destination).closest('.advanced-setting').length && !$('#menu_link_show_advanced_settings').hasClass('active')) {
|
||||
$('#menu_link_show_advanced_settings').click()
|
||||
}
|
||||
var body_container = $('.body-container')
|
||||
var scroll_pos = scroll_destination ? body_container.scrollTop() + $(scroll_destination).offset().top - 100 : 0;
|
||||
body_container.animate({scrollTop: scroll_pos});
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
@@ -58,6 +58,10 @@ DOCUMENTATION :: END
|
||||
<div class="col-sm-12 text-muted stream-info-current">
|
||||
<i class="fa fa-exclamation-circle"></i> Current session. Updated stream details below may be delayed.
|
||||
</div>
|
||||
% elif data['pre_tautulli']:
|
||||
<div class="col-sm-12 text-muted stream-info-current">
|
||||
<i class="fa fa-exclamation-circle"></i> Pre-Tautulli history. Stream details below may be incorrect.
|
||||
</div>
|
||||
% endif
|
||||
<table class="stream-info" style="margin-top: 0;">
|
||||
<thead>
|
||||
@@ -84,8 +88,8 @@ DOCUMENTATION :: END
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Bitrate</td>
|
||||
<td>${data['stream_bitrate']} kbps</td>
|
||||
<td>${data['bitrate']} kbps</td>
|
||||
<td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
|
||||
<td>${data['bitrate']} ${'kbps' if data['bitrate'] else ''}</td>
|
||||
</tr>
|
||||
% if data['media_type'] != 'track':
|
||||
<tr>
|
||||
@@ -154,8 +158,8 @@ DOCUMENTATION :: END
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Bitrate</td>
|
||||
<td>${data['stream_video_bitrate']} kbps</td>
|
||||
<td>${data['video_bitrate']} kbps</td>
|
||||
<td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
|
||||
<td>${data['video_bitrate']} ${'kbps' if data['video_bitrate'] else ''}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Width</td>
|
||||
@@ -199,8 +203,8 @@ DOCUMENTATION :: END
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Bitrate</td>
|
||||
<td>${data['stream_audio_bitrate']} kbps</td>
|
||||
<td>${data['audio_bitrate']} kbps</td>
|
||||
<td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
|
||||
<td>${data['audio_bitrate']} ${'kbps' if data['audio_bitrate'] else ''}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Channels</td>
|
||||
|
@@ -100,7 +100,7 @@
|
||||
// Load user ids and names (for the selector)
|
||||
$.ajax({
|
||||
url: 'get_user_names',
|
||||
type: 'get',
|
||||
type: 'GET',
|
||||
dataType: 'json',
|
||||
success: function (data) {
|
||||
var select = $('#sync-user');
|
||||
@@ -116,7 +116,8 @@
|
||||
|
||||
function loadSyncTable(selected_user_id) {
|
||||
sync_table_options.ajax = {
|
||||
url: 'get_sync?user_id=' + selected_user_id
|
||||
url: 'get_sync?user_id=' + selected_user_id,
|
||||
type: 'POST'
|
||||
};
|
||||
sync_table = $('#sync_table').DataTable(sync_table_options);
|
||||
var colvis = new $.fn.dataTable.ColVis(sync_table, {
|
||||
@@ -134,7 +135,7 @@
|
||||
});
|
||||
}
|
||||
|
||||
var selected_user_id = "${_session['user_id']}" == "None" ? null : "${_session['user_id']}";
|
||||
var selected_user_id = "${_session['user_group']}" == "admin" ? null : "${_session['user_id']}";
|
||||
loadSyncTable(selected_user_id);
|
||||
|
||||
% if _session['user_group'] == 'admin':
|
||||
|
@@ -96,7 +96,7 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
<div class='table-card-back'>
|
||||
<div id="search-results-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
|
||||
<div id="search-results-list" class="children-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -188,7 +188,7 @@ DOCUMENTATION :: END
|
||||
},
|
||||
complete: function (xhr, status) {
|
||||
$('#search-results-list').html(xhr.responseText);
|
||||
$('#update_query_title').html(query_string)
|
||||
$('#update_query_title').text(query_string)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@@ -413,7 +413,7 @@ DOCUMENTATION :: END
|
||||
// Build watch history table
|
||||
history_table_options.ajax = {
|
||||
url: 'get_history',
|
||||
type: 'post',
|
||||
type: 'POST',
|
||||
data: function ( d ) {
|
||||
return {
|
||||
json_data: JSON.stringify( d ),
|
||||
@@ -442,7 +442,8 @@ DOCUMENTATION :: END
|
||||
function loadSyncTable() {
|
||||
// Build user sync table
|
||||
sync_table_options.ajax = {
|
||||
url: 'get_sync?user_id=' + user_id
|
||||
url: 'get_sync?user_id=' + user_id,
|
||||
type: 'POST'
|
||||
};
|
||||
sync_table = $('#sync_table-UID-${data["user_id"]}').DataTable(sync_table_options);
|
||||
sync_table.column(2).visible(false);
|
||||
@@ -457,7 +458,7 @@ DOCUMENTATION :: END
|
||||
// Build user IP table
|
||||
user_ip_table_options.ajax = {
|
||||
url: 'get_user_ips',
|
||||
type: 'post',
|
||||
type: 'POST',
|
||||
data: function ( d ) {
|
||||
return {
|
||||
json_data: JSON.stringify( d ),
|
||||
@@ -474,6 +475,7 @@ DOCUMENTATION :: END
|
||||
// Build user login table
|
||||
login_log_table_options.ajax = {
|
||||
url: 'get_user_logins',
|
||||
type: 'POST',
|
||||
data: function(d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d),
|
||||
|
@@ -94,7 +94,7 @@
|
||||
json_data: JSON.stringify(d)
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
users_list_table = $('#users_list_table').DataTable(users_list_table_options);
|
||||
var colvis = new $.fn.dataTable.ColVis(users_list_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 1] });
|
||||
|
@@ -94,7 +94,7 @@
|
||||
<label for="pms_ip">Plex IP or Hostname</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-12">
|
||||
<select class="form-control selectize-pms-ip" id="pms_ip" name="pms_ip">
|
||||
<select class="form-control pms-settings selectize-pms-ip" id="pms_ip" name="pms_ip">
|
||||
<option value="${config['pms_ip']}" selected>${config['pms_ip']}</option>
|
||||
</select>
|
||||
</div>
|
||||
@@ -104,12 +104,12 @@
|
||||
<label for="pms_port">Plex Port</label>
|
||||
<div class="row">
|
||||
<div class="col-xs-3">
|
||||
<input type="text" class="form-control pms_settings" name="pms_port" id="pms_port" placeholder="32400" value="${config['pms_port']}" required>
|
||||
<input type="text" class="form-control pms-settings" name="pms_port" id="pms_port" placeholder="32400" value="${config['pms_port']}" required>
|
||||
</div>
|
||||
<div class="col-xs-4">
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle" data-id="pms_ssl" value="1" ${helpers.checked(config['pms_ssl'])}> Use SSL
|
||||
<input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle pms-settings" data-id="pms_ssl" value="1" ${helpers.checked(config['pms_ssl'])}> Use SSL
|
||||
<input type="hidden" id="pms_ssl" name="pms_ssl" value="${config['pms_ssl']}">
|
||||
</label>
|
||||
</div>
|
||||
@@ -117,16 +117,16 @@
|
||||
<div class="col-xs-4">
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle" data-id="pms_is_remote" value="1" ${helpers.checked(config['pms_is_remote'])}> Remote Server
|
||||
<input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${helpers.checked(config['pms_is_remote'])}> Remote Server
|
||||
<input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<input type="hidden" class="form-control pms-settings" id="pms_valid" data-validate="validatePMSip" value="">
|
||||
<input type="hidden" id="pms_valid" data-validate="validatePMSip" value="">
|
||||
<input type="hidden" id="pms_is_cloud" name="pms_is_cloud" value="${config['pms_is_cloud']}">
|
||||
<input type="hidden" class="form-control pms-settings" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
<input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
<a class="btn btn-dark" id="verify-plex-server" href="#" role="button">Verify</a><span style="margin-left: 10px; display: none;" id="pms-verify-status"></span>
|
||||
</div>
|
||||
|
||||
@@ -374,6 +374,9 @@ $(document).ready(function() {
|
||||
$('#pms_is_remote_checkbox').prop('disabled', false);
|
||||
$('#pms_ssl_checkbox').prop('disabled', false);
|
||||
}
|
||||
},
|
||||
onDropdownOpen: function() {
|
||||
this.clear();
|
||||
}
|
||||
});
|
||||
var select_pms = $select_pms[0].selectize;
|
||||
@@ -419,7 +422,8 @@ $(document).ready(function() {
|
||||
port: pms_port,
|
||||
identifier: pms_identifier,
|
||||
ssl: pms_ssl,
|
||||
remote: pms_is_remote },
|
||||
remote: pms_is_remote
|
||||
},
|
||||
cache: true,
|
||||
async: true,
|
||||
timeout: 5000,
|
||||
@@ -427,10 +431,11 @@ $(document).ready(function() {
|
||||
$("#pms-verify-status").html('<i class="fa fa-exclamation-circle"></i> This is not a Plex Server!');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
},
|
||||
success: function (json) {
|
||||
var machine_identifier = json;
|
||||
if (machine_identifier) {
|
||||
$("#pms_identifier").val(machine_identifier);
|
||||
success: function(xhr, status) {
|
||||
var result = xhr;
|
||||
var identifier = result.identifier;
|
||||
if (identifier) {
|
||||
$("#pms_identifier").val(identifier);
|
||||
$("#pms-verify-status").html('<i class="fa fa-check"></i> Server found!');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
pms_verified = true;
|
||||
|
1085
data/interfaces/newsletters/recently_added.html
Normal file
1085
data/interfaces/newsletters/recently_added_master.html
Normal file
@@ -1,5 +1,10 @@
|
||||
version_info = (3, 0, 1)
|
||||
version = '3.0.1'
|
||||
release = '3.0.1'
|
||||
from pkg_resources import get_distribution, DistributionNotFound
|
||||
|
||||
__version__ = release # PEP 396
|
||||
try:
|
||||
release = get_distribution('APScheduler').version.split('-')[0]
|
||||
except DistributionNotFound:
|
||||
release = '3.5.0'
|
||||
|
||||
version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.'))
|
||||
version = __version__ = '.'.join(str(x) for x in version_info[:3])
|
||||
del get_distribution, DistributionNotFound
|
||||
|
@@ -1,25 +1,33 @@
|
||||
__all__ = ('EVENT_SCHEDULER_START', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_EXECUTOR_ADDED', 'EVENT_EXECUTOR_REMOVED',
|
||||
'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', 'EVENT_ALL_JOBS_REMOVED', 'EVENT_JOB_ADDED',
|
||||
'EVENT_JOB_REMOVED', 'EVENT_JOB_MODIFIED', 'EVENT_JOB_EXECUTED', 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED',
|
||||
__all__ = ('EVENT_SCHEDULER_STARTED', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_SCHEDULER_PAUSED',
|
||||
'EVENT_SCHEDULER_RESUMED', 'EVENT_EXECUTOR_ADDED', 'EVENT_EXECUTOR_REMOVED',
|
||||
'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', 'EVENT_ALL_JOBS_REMOVED',
|
||||
'EVENT_JOB_ADDED', 'EVENT_JOB_REMOVED', 'EVENT_JOB_MODIFIED', 'EVENT_JOB_EXECUTED',
|
||||
'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', 'EVENT_JOB_SUBMITTED', 'EVENT_JOB_MAX_INSTANCES',
|
||||
'SchedulerEvent', 'JobEvent', 'JobExecutionEvent')
|
||||
|
||||
|
||||
EVENT_SCHEDULER_START = 1
|
||||
EVENT_SCHEDULER_SHUTDOWN = 2
|
||||
EVENT_EXECUTOR_ADDED = 4
|
||||
EVENT_EXECUTOR_REMOVED = 8
|
||||
EVENT_JOBSTORE_ADDED = 16
|
||||
EVENT_JOBSTORE_REMOVED = 32
|
||||
EVENT_ALL_JOBS_REMOVED = 64
|
||||
EVENT_JOB_ADDED = 128
|
||||
EVENT_JOB_REMOVED = 256
|
||||
EVENT_JOB_MODIFIED = 512
|
||||
EVENT_JOB_EXECUTED = 1024
|
||||
EVENT_JOB_ERROR = 2048
|
||||
EVENT_JOB_MISSED = 4096
|
||||
EVENT_ALL = (EVENT_SCHEDULER_START | EVENT_SCHEDULER_SHUTDOWN | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED |
|
||||
EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0
|
||||
EVENT_SCHEDULER_SHUTDOWN = 2 ** 1
|
||||
EVENT_SCHEDULER_PAUSED = 2 ** 2
|
||||
EVENT_SCHEDULER_RESUMED = 2 ** 3
|
||||
EVENT_EXECUTOR_ADDED = 2 ** 4
|
||||
EVENT_EXECUTOR_REMOVED = 2 ** 5
|
||||
EVENT_JOBSTORE_ADDED = 2 ** 6
|
||||
EVENT_JOBSTORE_REMOVED = 2 ** 7
|
||||
EVENT_ALL_JOBS_REMOVED = 2 ** 8
|
||||
EVENT_JOB_ADDED = 2 ** 9
|
||||
EVENT_JOB_REMOVED = 2 ** 10
|
||||
EVENT_JOB_MODIFIED = 2 ** 11
|
||||
EVENT_JOB_EXECUTED = 2 ** 12
|
||||
EVENT_JOB_ERROR = 2 ** 13
|
||||
EVENT_JOB_MISSED = 2 ** 14
|
||||
EVENT_JOB_SUBMITTED = 2 ** 15
|
||||
EVENT_JOB_MAX_INSTANCES = 2 ** 16
|
||||
EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED |
|
||||
EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED |
|
||||
EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED |
|
||||
EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED |
|
||||
EVENT_JOB_ERROR | EVENT_JOB_MISSED)
|
||||
EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES)
|
||||
|
||||
|
||||
class SchedulerEvent(object):
|
||||
@@ -55,9 +63,21 @@ class JobEvent(SchedulerEvent):
|
||||
self.jobstore = jobstore
|
||||
|
||||
|
||||
class JobSubmissionEvent(JobEvent):
|
||||
"""
|
||||
An event that concerns the submission of a job to its executor.
|
||||
|
||||
:ivar scheduled_run_times: a list of datetimes when the job was intended to run
|
||||
"""
|
||||
|
||||
def __init__(self, code, job_id, jobstore, scheduled_run_times):
|
||||
super(JobSubmissionEvent, self).__init__(code, job_id, jobstore)
|
||||
self.scheduled_run_times = scheduled_run_times
|
||||
|
||||
|
||||
class JobExecutionEvent(JobEvent):
|
||||
"""
|
||||
An event that concerns the execution of individual jobs.
|
||||
An event that concerns the running of a job within its executor.
|
||||
|
||||
:ivar scheduled_run_time: the time when the job was scheduled to be run
|
||||
:ivar retval: the return value of the successfully executed job
|
||||
@@ -65,7 +85,8 @@ class JobExecutionEvent(JobEvent):
|
||||
:ivar traceback: a formatted traceback for the exception
|
||||
"""
|
||||
|
||||
def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None, traceback=None):
|
||||
def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None,
|
||||
traceback=None):
|
||||
super(JobExecutionEvent, self).__init__(code, job_id, jobstore)
|
||||
self.scheduled_run_time = scheduled_run_time
|
||||
self.retval = retval
|
||||
|
@@ -1,28 +1,60 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
|
||||
from apscheduler.executors.base import BaseExecutor, run_job
|
||||
|
||||
try:
|
||||
from asyncio import iscoroutinefunction
|
||||
from apscheduler.executors.base_py3 import run_coroutine_job
|
||||
except ImportError:
|
||||
from trollius import iscoroutinefunction
|
||||
run_coroutine_job = None
|
||||
|
||||
|
||||
class AsyncIOExecutor(BaseExecutor):
|
||||
"""
|
||||
Runs jobs in the default executor of the event loop.
|
||||
|
||||
If the job function is a native coroutine function, it is scheduled to be run directly in the
|
||||
event loop as soon as possible. All other functions are run in the event loop's default
|
||||
executor which is usually a thread pool.
|
||||
|
||||
Plugin alias: ``asyncio``
|
||||
"""
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
super(AsyncIOExecutor, self).start(scheduler, alias)
|
||||
self._eventloop = scheduler._eventloop
|
||||
self._pending_futures = set()
|
||||
|
||||
def shutdown(self, wait=True):
|
||||
# There is no way to honor wait=True without converting this method into a coroutine method
|
||||
for f in self._pending_futures:
|
||||
if not f.done():
|
||||
f.cancel()
|
||||
|
||||
self._pending_futures.clear()
|
||||
|
||||
def _do_submit_job(self, job, run_times):
|
||||
def callback(f):
|
||||
self._pending_futures.discard(f)
|
||||
try:
|
||||
events = f.result()
|
||||
except:
|
||||
except BaseException:
|
||||
self._run_job_error(job.id, *sys.exc_info()[1:])
|
||||
else:
|
||||
self._run_job_success(job.id, events)
|
||||
|
||||
f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times, self._logger.name)
|
||||
if iscoroutinefunction(job.func):
|
||||
if run_coroutine_job is not None:
|
||||
coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
|
||||
f = self._eventloop.create_task(coro)
|
||||
else:
|
||||
raise Exception('Executing coroutine based jobs is not supported with Trollius')
|
||||
else:
|
||||
f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times,
|
||||
self._logger.name)
|
||||
|
||||
f.add_done_callback(callback)
|
||||
self._pending_futures.add(f)
|
||||
|
@@ -8,13 +8,15 @@ import sys
|
||||
from pytz import utc
|
||||
import six
|
||||
|
||||
from apscheduler.events import JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
|
||||
from apscheduler.events import (
|
||||
JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)
|
||||
|
||||
|
||||
class MaxInstancesReachedError(Exception):
|
||||
def __init__(self, job):
|
||||
super(MaxInstancesReachedError, self).__init__(
|
||||
'Job "%s" has already reached its maximum number of instances (%d)' % (job.id, job.max_instances))
|
||||
'Job "%s" has already reached its maximum number of instances (%d)' %
|
||||
(job.id, job.max_instances))
|
||||
|
||||
|
||||
class BaseExecutor(six.with_metaclass(ABCMeta, object)):
|
||||
@@ -30,13 +32,14 @@ class BaseExecutor(six.with_metaclass(ABCMeta, object)):
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
"""
|
||||
Called by the scheduler when the scheduler is being started or when the executor is being added to an already
|
||||
running scheduler.
|
||||
Called by the scheduler when the scheduler is being started or when the executor is being
|
||||
added to an already running scheduler.
|
||||
|
||||
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting this executor
|
||||
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting
|
||||
this executor
|
||||
:param str|unicode alias: alias of this executor as it was assigned to the scheduler
|
||||
"""
|
||||
|
||||
"""
|
||||
self._scheduler = scheduler
|
||||
self._lock = scheduler._create_lock()
|
||||
self._logger = logging.getLogger('apscheduler.executors.%s' % alias)
|
||||
@@ -45,7 +48,8 @@ class BaseExecutor(six.with_metaclass(ABCMeta, object)):
|
||||
"""
|
||||
Shuts down this executor.
|
||||
|
||||
:param bool wait: ``True`` to wait until all submitted jobs have been executed
|
||||
:param bool wait: ``True`` to wait until all submitted jobs
|
||||
have been executed
|
||||
"""
|
||||
|
||||
def submit_job(self, job, run_times):
|
||||
@@ -53,10 +57,12 @@ class BaseExecutor(six.with_metaclass(ABCMeta, object)):
|
||||
Submits job for execution.
|
||||
|
||||
:param Job job: job to execute
|
||||
:param list[datetime] run_times: list of datetimes specifying when the job should have been run
|
||||
:raises MaxInstancesReachedError: if the maximum number of allowed instances for this job has been reached
|
||||
"""
|
||||
:param list[datetime] run_times: list of datetimes specifying
|
||||
when the job should have been run
|
||||
:raises MaxInstancesReachedError: if the maximum number of
|
||||
allowed instances for this job has been reached
|
||||
|
||||
"""
|
||||
assert self._lock is not None, 'This executor has not been started yet'
|
||||
with self._lock:
|
||||
if self._instances[job.id] >= job.max_instances:
|
||||
@@ -70,50 +76,71 @@ class BaseExecutor(six.with_metaclass(ABCMeta, object)):
|
||||
"""Performs the actual task of scheduling `run_job` to be called."""
|
||||
|
||||
def _run_job_success(self, job_id, events):
|
||||
"""Called by the executor with the list of generated events when `run_job` has been successfully called."""
|
||||
"""
|
||||
Called by the executor with the list of generated events when :func:`run_job` has been
|
||||
successfully called.
|
||||
|
||||
"""
|
||||
with self._lock:
|
||||
self._instances[job_id] -= 1
|
||||
if self._instances[job_id] == 0:
|
||||
del self._instances[job_id]
|
||||
|
||||
for event in events:
|
||||
self._scheduler._dispatch_event(event)
|
||||
|
||||
def _run_job_error(self, job_id, exc, traceback=None):
|
||||
"""Called by the executor with the exception if there is an error calling `run_job`."""
|
||||
|
||||
"""Called by the executor with the exception if there is an error calling `run_job`."""
|
||||
with self._lock:
|
||||
self._instances[job_id] -= 1
|
||||
if self._instances[job_id] == 0:
|
||||
del self._instances[job_id]
|
||||
|
||||
exc_info = (exc.__class__, exc, traceback)
|
||||
self._logger.error('Error running job %s', job_id, exc_info=exc_info)
|
||||
|
||||
|
||||
def run_job(job, jobstore_alias, run_times, logger_name):
|
||||
"""Called by executors to run the job. Returns a list of scheduler events to be dispatched by the scheduler."""
|
||||
"""
|
||||
Called by executors to run the job. Returns a list of scheduler events to be dispatched by the
|
||||
scheduler.
|
||||
|
||||
"""
|
||||
events = []
|
||||
logger = logging.getLogger(logger_name)
|
||||
for run_time in run_times:
|
||||
# See if the job missed its run time window, and handle possible misfires accordingly
|
||||
# See if the job missed its run time window, and handle
|
||||
# possible misfires accordingly
|
||||
if job.misfire_grace_time is not None:
|
||||
difference = datetime.now(utc) - run_time
|
||||
grace_time = timedelta(seconds=job.misfire_grace_time)
|
||||
if difference > grace_time:
|
||||
events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias, run_time))
|
||||
events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
|
||||
run_time))
|
||||
logger.warning('Run time of job "%s" was missed by %s', job, difference)
|
||||
continue
|
||||
|
||||
logger.info('Running job "%s" (scheduled at %s)', job, run_time)
|
||||
try:
|
||||
retval = job.func(*job.args, **job.kwargs)
|
||||
except:
|
||||
except BaseException:
|
||||
exc, tb = sys.exc_info()[1:]
|
||||
formatted_tb = ''.join(format_tb(tb))
|
||||
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time, exception=exc,
|
||||
traceback=formatted_tb))
|
||||
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
|
||||
exception=exc, traceback=formatted_tb))
|
||||
logger.exception('Job "%s" raised an exception', job)
|
||||
|
||||
# This is to prevent cyclic references that would lead to memory leaks
|
||||
if six.PY2:
|
||||
sys.exc_clear()
|
||||
del tb
|
||||
else:
|
||||
import traceback
|
||||
traceback.clear_frames(tb)
|
||||
del tb
|
||||
else:
|
||||
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, retval=retval))
|
||||
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
|
||||
retval=retval))
|
||||
logger.info('Job "%s" executed successfully', job)
|
||||
|
||||
return events
|
||||
|
41
lib/apscheduler/executors/base_py3.py
Normal file
@@ -0,0 +1,41 @@
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
from traceback import format_tb
|
||||
|
||||
from pytz import utc
|
||||
|
||||
from apscheduler.events import (
|
||||
JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)
|
||||
|
||||
|
||||
async def run_coroutine_job(job, jobstore_alias, run_times, logger_name):
|
||||
"""Coroutine version of run_job()."""
|
||||
events = []
|
||||
logger = logging.getLogger(logger_name)
|
||||
for run_time in run_times:
|
||||
# See if the job missed its run time window, and handle possible misfires accordingly
|
||||
if job.misfire_grace_time is not None:
|
||||
difference = datetime.now(utc) - run_time
|
||||
grace_time = timedelta(seconds=job.misfire_grace_time)
|
||||
if difference > grace_time:
|
||||
events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
|
||||
run_time))
|
||||
logger.warning('Run time of job "%s" was missed by %s', job, difference)
|
||||
continue
|
||||
|
||||
logger.info('Running job "%s" (scheduled at %s)', job, run_time)
|
||||
try:
|
||||
retval = await job.func(*job.args, **job.kwargs)
|
||||
except BaseException:
|
||||
exc, tb = sys.exc_info()[1:]
|
||||
formatted_tb = ''.join(format_tb(tb))
|
||||
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
|
||||
exception=exc, traceback=formatted_tb))
|
||||
logger.exception('Job "%s" raised an exception', job)
|
||||
else:
|
||||
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
|
||||
retval=retval))
|
||||
logger.info('Job "%s" executed successfully', job)
|
||||
|
||||
return events
|
@@ -5,7 +5,8 @@ from apscheduler.executors.base import BaseExecutor, run_job
|
||||
|
||||
class DebugExecutor(BaseExecutor):
|
||||
"""
|
||||
A special executor that executes the target callable directly instead of deferring it to a thread or process.
|
||||
A special executor that executes the target callable directly instead of deferring it to a
|
||||
thread or process.
|
||||
|
||||
Plugin alias: ``debug``
|
||||
"""
|
||||
@@ -13,7 +14,7 @@ class DebugExecutor(BaseExecutor):
|
||||
def _do_submit_job(self, job, run_times):
|
||||
try:
|
||||
events = run_job(job, job._jobstore_alias, run_times, self._logger.name)
|
||||
except:
|
||||
except BaseException:
|
||||
self._run_job_error(job.id, *sys.exc_info()[1:])
|
||||
else:
|
||||
self._run_job_success(job.id, events)
|
||||
|
@@ -21,9 +21,10 @@ class GeventExecutor(BaseExecutor):
|
||||
def callback(greenlet):
|
||||
try:
|
||||
events = greenlet.get()
|
||||
except:
|
||||
except BaseException:
|
||||
self._run_job_error(job.id, *sys.exc_info()[1:])
|
||||
else:
|
||||
self._run_job_success(job.id, events)
|
||||
|
||||
gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).link(callback)
|
||||
gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\
|
||||
link(callback)
|
||||
|
54
lib/apscheduler/executors/tornado.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from tornado.gen import convert_yielded
|
||||
|
||||
from apscheduler.executors.base import BaseExecutor, run_job
|
||||
|
||||
try:
|
||||
from inspect import iscoroutinefunction
|
||||
from apscheduler.executors.base_py3 import run_coroutine_job
|
||||
except ImportError:
|
||||
def iscoroutinefunction(func):
|
||||
return False
|
||||
|
||||
|
||||
class TornadoExecutor(BaseExecutor):
|
||||
"""
|
||||
Runs jobs either in a thread pool or directly on the I/O loop.
|
||||
|
||||
If the job function is a native coroutine function, it is scheduled to be run directly in the
|
||||
I/O loop as soon as possible. All other functions are run in a thread pool.
|
||||
|
||||
Plugin alias: ``tornado``
|
||||
|
||||
:param int max_workers: maximum number of worker threads in the thread pool
|
||||
"""
|
||||
|
||||
def __init__(self, max_workers=10):
|
||||
super(TornadoExecutor, self).__init__()
|
||||
self.executor = ThreadPoolExecutor(max_workers)
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
super(TornadoExecutor, self).start(scheduler, alias)
|
||||
self._ioloop = scheduler._ioloop
|
||||
|
||||
def _do_submit_job(self, job, run_times):
|
||||
def callback(f):
|
||||
try:
|
||||
events = f.result()
|
||||
except BaseException:
|
||||
self._run_job_error(job.id, *sys.exc_info()[1:])
|
||||
else:
|
||||
self._run_job_success(job.id, events)
|
||||
|
||||
if iscoroutinefunction(job.func):
|
||||
f = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
|
||||
else:
|
||||
f = self.executor.submit(run_job, job, job._jobstore_alias, run_times,
|
||||
self._logger.name)
|
||||
|
||||
f = convert_yielded(f)
|
||||
f.add_done_callback(callback)
|
@@ -21,5 +21,5 @@ class TwistedExecutor(BaseExecutor):
|
||||
else:
|
||||
self._run_job_error(job.id, result.value, result.tb)
|
||||
|
||||
self._reactor.getThreadPool().callInThreadWithCallback(callback, run_job, job, job._jobstore_alias, run_times,
|
||||
self._logger.name)
|
||||
self._reactor.getThreadPool().callInThreadWithCallback(
|
||||
callback, run_job, job, job._jobstore_alias, run_times, self._logger.name)
|
||||
|
@@ -4,8 +4,9 @@ from uuid import uuid4
|
||||
import six
|
||||
|
||||
from apscheduler.triggers.base import BaseTrigger
|
||||
from apscheduler.util import ref_to_obj, obj_to_ref, datetime_repr, repr_escape, get_callable_name, check_callable_args, \
|
||||
convert_to_datetime
|
||||
from apscheduler.util import (
|
||||
ref_to_obj, obj_to_ref, datetime_repr, repr_escape, get_callable_name, check_callable_args,
|
||||
convert_to_datetime)
|
||||
|
||||
|
||||
class Job(object):
|
||||
@@ -21,13 +22,20 @@ class Job(object):
|
||||
:var bool coalesce: whether to only run the job once when several run times are due
|
||||
:var trigger: the trigger object that controls the schedule of this job
|
||||
:var str executor: the name of the executor that will run this job
|
||||
:var int misfire_grace_time: the time (in seconds) how much this job's execution is allowed to be late
|
||||
:var int max_instances: the maximum number of concurrently executing instances allowed for this job
|
||||
:var int misfire_grace_time: the time (in seconds) how much this job's execution is allowed to
|
||||
be late
|
||||
:var int max_instances: the maximum number of concurrently executing instances allowed for this
|
||||
job
|
||||
:var datetime.datetime next_run_time: the next scheduled run time of this job
|
||||
|
||||
.. note::
|
||||
The ``misfire_grace_time`` has some non-obvious effects on job execution. See the
|
||||
:ref:`missed-job-executions` section in the documentation for an in-depth explanation.
|
||||
"""
|
||||
|
||||
__slots__ = ('_scheduler', '_jobstore_alias', 'id', 'trigger', 'executor', 'func', 'func_ref', 'args', 'kwargs',
|
||||
'name', 'misfire_grace_time', 'coalesce', 'max_instances', 'next_run_time')
|
||||
__slots__ = ('_scheduler', '_jobstore_alias', 'id', 'trigger', 'executor', 'func', 'func_ref',
|
||||
'args', 'kwargs', 'name', 'misfire_grace_time', 'coalesce', 'max_instances',
|
||||
'next_run_time')
|
||||
|
||||
def __init__(self, scheduler, id=None, **kwargs):
|
||||
super(Job, self).__init__()
|
||||
@@ -38,53 +46,69 @@ class Job(object):
|
||||
def modify(self, **changes):
|
||||
"""
|
||||
Makes the given changes to this job and saves it in the associated job store.
|
||||
|
||||
Accepted keyword arguments are the same as the variables on this class.
|
||||
|
||||
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.modify_job`
|
||||
"""
|
||||
|
||||
:return Job: this job instance
|
||||
|
||||
"""
|
||||
self._scheduler.modify_job(self.id, self._jobstore_alias, **changes)
|
||||
return self
|
||||
|
||||
def reschedule(self, trigger, **trigger_args):
|
||||
"""
|
||||
Shortcut for switching the trigger on this job.
|
||||
|
||||
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.reschedule_job`
|
||||
"""
|
||||
|
||||
:return Job: this job instance
|
||||
|
||||
"""
|
||||
self._scheduler.reschedule_job(self.id, self._jobstore_alias, trigger, **trigger_args)
|
||||
return self
|
||||
|
||||
def pause(self):
|
||||
"""
|
||||
Temporarily suspend the execution of this job.
|
||||
|
||||
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.pause_job`
|
||||
"""
|
||||
|
||||
:return Job: this job instance
|
||||
|
||||
"""
|
||||
self._scheduler.pause_job(self.id, self._jobstore_alias)
|
||||
return self
|
||||
|
||||
def resume(self):
|
||||
"""
|
||||
Resume the schedule of this job if previously paused.
|
||||
|
||||
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.resume_job`
|
||||
"""
|
||||
|
||||
:return Job: this job instance
|
||||
|
||||
"""
|
||||
self._scheduler.resume_job(self.id, self._jobstore_alias)
|
||||
return self
|
||||
|
||||
def remove(self):
|
||||
"""
|
||||
Unschedules this job and removes it from its associated job store.
|
||||
|
||||
.. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.remove_job`
|
||||
"""
|
||||
|
||||
"""
|
||||
self._scheduler.remove_job(self.id, self._jobstore_alias)
|
||||
|
||||
@property
|
||||
def pending(self):
|
||||
"""Returns ``True`` if the referenced job is still waiting to be added to its designated job store."""
|
||||
"""
|
||||
Returns ``True`` if the referenced job is still waiting to be added to its designated job
|
||||
store.
|
||||
|
||||
"""
|
||||
return self._jobstore_alias is None
|
||||
|
||||
#
|
||||
@@ -97,8 +121,8 @@ class Job(object):
|
||||
|
||||
:type now: datetime.datetime
|
||||
:rtype: list[datetime.datetime]
|
||||
"""
|
||||
|
||||
"""
|
||||
run_times = []
|
||||
next_run_time = self.next_run_time
|
||||
while next_run_time and next_run_time <= now:
|
||||
@@ -108,8 +132,11 @@ class Job(object):
|
||||
return run_times
|
||||
|
||||
def _modify(self, **changes):
|
||||
"""Validates the changes to the Job and makes the modifications if and only if all of them validate."""
|
||||
"""
|
||||
Validates the changes to the Job and makes the modifications if and only if all of them
|
||||
validate.
|
||||
|
||||
"""
|
||||
approved = {}
|
||||
|
||||
if 'id' in changes:
|
||||
@@ -125,7 +152,7 @@ class Job(object):
|
||||
args = changes.pop('args') if 'args' in changes else self.args
|
||||
kwargs = changes.pop('kwargs') if 'kwargs' in changes else self.kwargs
|
||||
|
||||
if isinstance(func, str):
|
||||
if isinstance(func, six.string_types):
|
||||
func_ref = func
|
||||
func = ref_to_obj(func)
|
||||
elif callable(func):
|
||||
@@ -177,7 +204,8 @@ class Job(object):
|
||||
if 'trigger' in changes:
|
||||
trigger = changes.pop('trigger')
|
||||
if not isinstance(trigger, BaseTrigger):
|
||||
raise TypeError('Expected a trigger instance, got %s instead' % trigger.__class__.__name__)
|
||||
raise TypeError('Expected a trigger instance, got %s instead' %
|
||||
trigger.__class__.__name__)
|
||||
|
||||
approved['trigger'] = trigger
|
||||
|
||||
@@ -189,10 +217,12 @@ class Job(object):
|
||||
|
||||
if 'next_run_time' in changes:
|
||||
value = changes.pop('next_run_time')
|
||||
approved['next_run_time'] = convert_to_datetime(value, self._scheduler.timezone, 'next_run_time')
|
||||
approved['next_run_time'] = convert_to_datetime(value, self._scheduler.timezone,
|
||||
'next_run_time')
|
||||
|
||||
if changes:
|
||||
raise AttributeError('The following are not modifiable attributes of Job: %s' % ', '.join(changes))
|
||||
raise AttributeError('The following are not modifiable attributes of Job: %s' %
|
||||
', '.join(changes))
|
||||
|
||||
for key, value in six.iteritems(approved):
|
||||
setattr(self, key, value)
|
||||
@@ -200,9 +230,10 @@ class Job(object):
|
||||
def __getstate__(self):
|
||||
# Don't allow this Job to be serialized if the function reference could not be determined
|
||||
if not self.func_ref:
|
||||
raise ValueError('This Job cannot be serialized since the reference to its callable (%r) could not be '
|
||||
'determined. Consider giving a textual reference (module:function name) instead.' %
|
||||
(self.func,))
|
||||
raise ValueError(
|
||||
'This Job cannot be serialized since the reference to its callable (%r) could not '
|
||||
'be determined. Consider giving a textual reference (module:function name) '
|
||||
'instead.' % (self.func,))
|
||||
|
||||
return {
|
||||
'version': 1,
|
||||
@@ -221,7 +252,8 @@ class Job(object):
|
||||
|
||||
def __setstate__(self, state):
|
||||
if state.get('version', 1) > 1:
|
||||
raise ValueError('Job has version %s, but only version 1 can be handled' % state['version'])
|
||||
raise ValueError('Job has version %s, but only version 1 can be handled' %
|
||||
state['version'])
|
||||
|
||||
self.id = state['id']
|
||||
self.func_ref = state['func']
|
||||
@@ -245,8 +277,13 @@ class Job(object):
|
||||
return '<Job (id=%s name=%s)>' % (repr_escape(self.id), repr_escape(self.name))
|
||||
|
||||
def __str__(self):
|
||||
return '%s (trigger: %s, next run at: %s)' % (repr_escape(self.name), repr_escape(str(self.trigger)),
|
||||
datetime_repr(self.next_run_time))
|
||||
return repr_escape(self.__unicode__())
|
||||
|
||||
def __unicode__(self):
|
||||
return six.u('%s (trigger: %s, next run at: %s)') % (self.name, self.trigger, datetime_repr(self.next_run_time))
|
||||
if hasattr(self, 'next_run_time'):
|
||||
status = ('next run at: ' + datetime_repr(self.next_run_time) if
|
||||
self.next_run_time else 'paused')
|
||||
else:
|
||||
status = 'pending'
|
||||
|
||||
return u'%s (trigger: %s, %s)' % (self.name, self.trigger, status)
|
||||
|
@@ -8,23 +8,27 @@ class JobLookupError(KeyError):
|
||||
"""Raised when the job store cannot find a job for update or removal."""
|
||||
|
||||
def __init__(self, job_id):
|
||||
super(JobLookupError, self).__init__(six.u('No job by the id of %s was found') % job_id)
|
||||
super(JobLookupError, self).__init__(u'No job by the id of %s was found' % job_id)
|
||||
|
||||
|
||||
class ConflictingIdError(KeyError):
|
||||
"""Raised when the uniqueness of job IDs is being violated."""
|
||||
|
||||
def __init__(self, job_id):
|
||||
super(ConflictingIdError, self).__init__(six.u('Job identifier (%s) conflicts with an existing job') % job_id)
|
||||
super(ConflictingIdError, self).__init__(
|
||||
u'Job identifier (%s) conflicts with an existing job' % job_id)
|
||||
|
||||
|
||||
class TransientJobError(ValueError):
|
||||
"""Raised when an attempt to add transient (with no func_ref) job to a persistent job store is detected."""
|
||||
"""
|
||||
Raised when an attempt to add transient (with no func_ref) job to a persistent job store is
|
||||
detected.
|
||||
"""
|
||||
|
||||
def __init__(self, job_id):
|
||||
super(TransientJobError, self).__init__(
|
||||
six.u('Job (%s) cannot be added to this job store because a reference to the callable could not be '
|
||||
'determined.') % job_id)
|
||||
u'Job (%s) cannot be added to this job store because a reference to the callable '
|
||||
u'could not be determined.' % job_id)
|
||||
|
||||
|
||||
class BaseJobStore(six.with_metaclass(ABCMeta)):
|
||||
@@ -36,10 +40,11 @@ class BaseJobStore(six.with_metaclass(ABCMeta)):
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
"""
|
||||
Called by the scheduler when the scheduler is being started or when the job store is being added to an already
|
||||
running scheduler.
|
||||
Called by the scheduler when the scheduler is being started or when the job store is being
|
||||
added to an already running scheduler.
|
||||
|
||||
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting this job store
|
||||
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting
|
||||
this job store
|
||||
:param str|unicode alias: alias of this job store as it was assigned to the scheduler
|
||||
"""
|
||||
|
||||
@@ -50,13 +55,22 @@ class BaseJobStore(six.with_metaclass(ABCMeta)):
|
||||
def shutdown(self):
|
||||
"""Frees any resources still bound to this job store."""
|
||||
|
||||
def _fix_paused_jobs_sorting(self, jobs):
|
||||
for i, job in enumerate(jobs):
|
||||
if job.next_run_time is not None:
|
||||
if i > 0:
|
||||
paused_jobs = jobs[:i]
|
||||
del jobs[:i]
|
||||
jobs.extend(paused_jobs)
|
||||
break
|
||||
|
||||
@abstractmethod
|
||||
def lookup_job(self, job_id):
|
||||
"""
|
||||
Returns a specific job, or ``None`` if it isn't found..
|
||||
|
||||
The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of the returned job to
|
||||
point to the scheduler and itself, respectively.
|
||||
The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of
|
||||
the returned job to point to the scheduler and itself, respectively.
|
||||
|
||||
:param str|unicode job_id: identifier of the job
|
||||
:rtype: Job
|
||||
@@ -75,7 +89,8 @@ class BaseJobStore(six.with_metaclass(ABCMeta)):
|
||||
@abstractmethod
|
||||
def get_next_run_time(self):
|
||||
"""
|
||||
Returns the earliest run time of all the jobs stored in this job store, or ``None`` if there are no active jobs.
|
||||
Returns the earliest run time of all the jobs stored in this job store, or ``None`` if
|
||||
there are no active jobs.
|
||||
|
||||
:rtype: datetime.datetime
|
||||
"""
|
||||
@@ -83,11 +98,12 @@ class BaseJobStore(six.with_metaclass(ABCMeta)):
|
||||
@abstractmethod
|
||||
def get_all_jobs(self):
|
||||
"""
|
||||
Returns a list of all jobs in this job store. The returned jobs should be sorted by next run time (ascending).
|
||||
Paused jobs (next_run_time is None) should be sorted last.
|
||||
Returns a list of all jobs in this job store.
|
||||
The returned jobs should be sorted by next run time (ascending).
|
||||
Paused jobs (next_run_time == None) should be sorted last.
|
||||
|
||||
The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of the returned jobs to
|
||||
point to the scheduler and itself, respectively.
|
||||
The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of
|
||||
the returned jobs to point to the scheduler and itself, respectively.
|
||||
|
||||
:rtype: list[Job]
|
||||
"""
|
||||
|
@@ -13,7 +13,8 @@ class MemoryJobStore(BaseJobStore):
|
||||
|
||||
def __init__(self):
|
||||
super(MemoryJobStore, self).__init__()
|
||||
self._jobs = [] # list of (job, timestamp), sorted by next_run_time and job id (ascending)
|
||||
# list of (job, timestamp), sorted by next_run_time and job id (ascending)
|
||||
self._jobs = []
|
||||
self._jobs_index = {} # id -> (job, timestamp) lookup table
|
||||
|
||||
def lookup_job(self, job_id):
|
||||
@@ -80,13 +81,13 @@ class MemoryJobStore(BaseJobStore):
|
||||
|
||||
def _get_job_index(self, timestamp, job_id):
|
||||
"""
|
||||
Returns the index of the given job, or if it's not found, the index where the job should be inserted based on
|
||||
the given timestamp.
|
||||
Returns the index of the given job, or if it's not found, the index where the job should be
|
||||
inserted based on the given timestamp.
|
||||
|
||||
:type timestamp: int
|
||||
:type job_id: str
|
||||
"""
|
||||
|
||||
"""
|
||||
lo, hi = 0, len(self._jobs)
|
||||
timestamp = float('inf') if timestamp is None else timestamp
|
||||
while lo < hi:
|
||||
|
@@ -1,4 +1,5 @@
|
||||
from __future__ import absolute_import
|
||||
import warnings
|
||||
|
||||
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
|
||||
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
|
||||
@@ -19,16 +20,18 @@ except ImportError: # pragma: nocover
|
||||
|
||||
class MongoDBJobStore(BaseJobStore):
|
||||
"""
|
||||
Stores jobs in a MongoDB database. Any leftover keyword arguments are directly passed to pymongo's `MongoClient
|
||||
Stores jobs in a MongoDB database. Any leftover keyword arguments are directly passed to
|
||||
pymongo's `MongoClient
|
||||
<http://api.mongodb.org/python/current/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_.
|
||||
|
||||
Plugin alias: ``mongodb``
|
||||
|
||||
:param str database: database to store jobs in
|
||||
:param str collection: collection to store jobs in
|
||||
:param client: a :class:`~pymongo.mongo_client.MongoClient` instance to use instead of providing connection
|
||||
arguments
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the highest available
|
||||
:param client: a :class:`~pymongo.mongo_client.MongoClient` instance to use instead of
|
||||
providing connection arguments
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
|
||||
highest available
|
||||
"""
|
||||
|
||||
def __init__(self, database='apscheduler', collection='jobs', client=None,
|
||||
@@ -42,14 +45,23 @@ class MongoDBJobStore(BaseJobStore):
|
||||
raise ValueError('The "collection" parameter must not be empty')
|
||||
|
||||
if client:
|
||||
self.connection = maybe_ref(client)
|
||||
self.client = maybe_ref(client)
|
||||
else:
|
||||
connect_args.setdefault('w', 1)
|
||||
self.connection = MongoClient(**connect_args)
|
||||
self.client = MongoClient(**connect_args)
|
||||
|
||||
self.collection = self.connection[database][collection]
|
||||
self.collection = self.client[database][collection]
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
super(MongoDBJobStore, self).start(scheduler, alias)
|
||||
self.collection.ensure_index('next_run_time', sparse=True)
|
||||
|
||||
@property
|
||||
def connection(self):
|
||||
warnings.warn('The "connection" member is deprecated -- use "client" instead',
|
||||
DeprecationWarning)
|
||||
return self.client
|
||||
|
||||
def lookup_job(self, job_id):
|
||||
document = self.collection.find_one(job_id, ['job_state'])
|
||||
return self._reconstitute_job(document['job_state']) if document else None
|
||||
@@ -59,12 +71,15 @@ class MongoDBJobStore(BaseJobStore):
|
||||
return self._get_jobs({'next_run_time': {'$lte': timestamp}})
|
||||
|
||||
def get_next_run_time(self):
|
||||
document = self.collection.find_one({'next_run_time': {'$ne': None}}, fields=['next_run_time'],
|
||||
document = self.collection.find_one({'next_run_time': {'$ne': None}},
|
||||
projection=['next_run_time'],
|
||||
sort=[('next_run_time', ASCENDING)])
|
||||
return utc_timestamp_to_datetime(document['next_run_time']) if document else None
|
||||
|
||||
def get_all_jobs(self):
|
||||
return self._get_jobs({})
|
||||
jobs = self._get_jobs({})
|
||||
self._fix_paused_jobs_sorting(jobs)
|
||||
return jobs
|
||||
|
||||
def add_job(self, job):
|
||||
try:
|
||||
@@ -83,7 +98,7 @@ class MongoDBJobStore(BaseJobStore):
|
||||
}
|
||||
result = self.collection.update({'_id': job.id}, {'$set': changes})
|
||||
if result and result['n'] == 0:
|
||||
raise JobLookupError(id)
|
||||
raise JobLookupError(job.id)
|
||||
|
||||
def remove_job(self, job_id):
|
||||
result = self.collection.remove(job_id)
|
||||
@@ -94,7 +109,7 @@ class MongoDBJobStore(BaseJobStore):
|
||||
self.collection.remove()
|
||||
|
||||
def shutdown(self):
|
||||
self.connection.disconnect()
|
||||
self.client.close()
|
||||
|
||||
def _reconstitute_job(self, job_state):
|
||||
job_state = pickle.loads(job_state)
|
||||
@@ -107,11 +122,13 @@ class MongoDBJobStore(BaseJobStore):
|
||||
def _get_jobs(self, conditions):
|
||||
jobs = []
|
||||
failed_job_ids = []
|
||||
for document in self.collection.find(conditions, ['_id', 'job_state'], sort=[('next_run_time', ASCENDING)]):
|
||||
for document in self.collection.find(conditions, ['_id', 'job_state'],
|
||||
sort=[('next_run_time', ASCENDING)]):
|
||||
try:
|
||||
jobs.append(self._reconstitute_job(document['job_state']))
|
||||
except:
|
||||
self._logger.exception('Unable to restore job "%s" -- removing it', document['_id'])
|
||||
except BaseException:
|
||||
self._logger.exception('Unable to restore job "%s" -- removing it',
|
||||
document['_id'])
|
||||
failed_job_ids.append(document['_id'])
|
||||
|
||||
# Remove all the jobs we failed to restore
|
||||
@@ -121,4 +138,4 @@ class MongoDBJobStore(BaseJobStore):
|
||||
return jobs
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s (client=%s)>' % (self.__class__.__name__, self.connection)
|
||||
return '<%s (client=%s)>' % (self.__class__.__name__, self.client)
|
||||
|
@@ -1,5 +1,7 @@
|
||||
from __future__ import absolute_import
|
||||
from datetime import datetime
|
||||
|
||||
from pytz import utc
|
||||
import six
|
||||
|
||||
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
|
||||
@@ -19,14 +21,16 @@ except ImportError: # pragma: nocover
|
||||
|
||||
class RedisJobStore(BaseJobStore):
|
||||
"""
|
||||
Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to redis's StrictRedis.
|
||||
Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to redis's
|
||||
:class:`~redis.StrictRedis`.
|
||||
|
||||
Plugin alias: ``redis``
|
||||
|
||||
:param int db: the database number to store jobs in
|
||||
:param str jobs_key: key to store jobs in
|
||||
:param str run_times_key: key to store the jobs' run times in
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the highest available
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
|
||||
highest available
|
||||
"""
|
||||
|
||||
def __init__(self, db=0, jobs_key='apscheduler.jobs', run_times_key='apscheduler.run_times',
|
||||
@@ -65,7 +69,8 @@ class RedisJobStore(BaseJobStore):
|
||||
def get_all_jobs(self):
|
||||
job_states = self.redis.hgetall(self.jobs_key)
|
||||
jobs = self._reconstitute_jobs(six.iteritems(job_states))
|
||||
return sorted(jobs, key=lambda job: job.next_run_time)
|
||||
paused_sort_key = datetime(9999, 12, 31, tzinfo=utc)
|
||||
return sorted(jobs, key=lambda job: job.next_run_time or paused_sort_key)
|
||||
|
||||
def add_job(self, job):
|
||||
if self.redis.hexists(self.jobs_key, job.id):
|
||||
@@ -73,8 +78,10 @@ class RedisJobStore(BaseJobStore):
|
||||
|
||||
with self.redis.pipeline() as pipe:
|
||||
pipe.multi()
|
||||
pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(), self.pickle_protocol))
|
||||
pipe.zadd(self.run_times_key, datetime_to_utc_timestamp(job.next_run_time), job.id)
|
||||
pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(),
|
||||
self.pickle_protocol))
|
||||
if job.next_run_time:
|
||||
pipe.zadd(self.run_times_key, datetime_to_utc_timestamp(job.next_run_time), job.id)
|
||||
pipe.execute()
|
||||
|
||||
def update_job(self, job):
|
||||
@@ -82,7 +89,8 @@ class RedisJobStore(BaseJobStore):
|
||||
raise JobLookupError(job.id)
|
||||
|
||||
with self.redis.pipeline() as pipe:
|
||||
pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(), self.pickle_protocol))
|
||||
pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(),
|
||||
self.pickle_protocol))
|
||||
if job.next_run_time:
|
||||
pipe.zadd(self.run_times_key, datetime_to_utc_timestamp(job.next_run_time), job.id)
|
||||
else:
|
||||
@@ -121,7 +129,7 @@ class RedisJobStore(BaseJobStore):
|
||||
for job_id, job_state in job_states:
|
||||
try:
|
||||
jobs.append(self._reconstitute_job(job_state))
|
||||
except:
|
||||
except BaseException:
|
||||
self._logger.exception('Unable to restore job "%s" -- removing it', job_id)
|
||||
failed_job_ids.append(job_id)
|
||||
|
||||
|
153
lib/apscheduler/jobstores/rethinkdb.py
Normal file
@@ -0,0 +1,153 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
|
||||
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
|
||||
from apscheduler.job import Job
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError: # pragma: nocover
|
||||
import pickle
|
||||
|
||||
try:
|
||||
import rethinkdb as r
|
||||
except ImportError: # pragma: nocover
|
||||
raise ImportError('RethinkDBJobStore requires rethinkdb installed')
|
||||
|
||||
|
||||
class RethinkDBJobStore(BaseJobStore):
|
||||
"""
|
||||
Stores jobs in a RethinkDB database. Any leftover keyword arguments are directly passed to
|
||||
rethinkdb's `RethinkdbClient <http://www.rethinkdb.com/api/#connect>`_.
|
||||
|
||||
Plugin alias: ``rethinkdb``
|
||||
|
||||
:param str database: database to store jobs in
|
||||
:param str collection: collection to store jobs in
|
||||
:param client: a :class:`rethinkdb.net.Connection` instance to use instead of providing
|
||||
connection arguments
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
|
||||
highest available
|
||||
"""
|
||||
|
||||
def __init__(self, database='apscheduler', table='jobs', client=None,
|
||||
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
|
||||
super(RethinkDBJobStore, self).__init__()
|
||||
|
||||
if not database:
|
||||
raise ValueError('The "database" parameter must not be empty')
|
||||
if not table:
|
||||
raise ValueError('The "table" parameter must not be empty')
|
||||
|
||||
self.database = database
|
||||
self.table = table
|
||||
self.client = client
|
||||
self.pickle_protocol = pickle_protocol
|
||||
self.connect_args = connect_args
|
||||
self.conn = None
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
super(RethinkDBJobStore, self).start(scheduler, alias)
|
||||
|
||||
if self.client:
|
||||
self.conn = maybe_ref(self.client)
|
||||
else:
|
||||
self.conn = r.connect(db=self.database, **self.connect_args)
|
||||
|
||||
if self.database not in r.db_list().run(self.conn):
|
||||
r.db_create(self.database).run(self.conn)
|
||||
|
||||
if self.table not in r.table_list().run(self.conn):
|
||||
r.table_create(self.table).run(self.conn)
|
||||
|
||||
if 'next_run_time' not in r.table(self.table).index_list().run(self.conn):
|
||||
r.table(self.table).index_create('next_run_time').run(self.conn)
|
||||
|
||||
self.table = r.db(self.database).table(self.table)
|
||||
|
||||
def lookup_job(self, job_id):
|
||||
results = list(self.table.get_all(job_id).pluck('job_state').run(self.conn))
|
||||
return self._reconstitute_job(results[0]['job_state']) if results else None
|
||||
|
||||
def get_due_jobs(self, now):
|
||||
return self._get_jobs(r.row['next_run_time'] <= datetime_to_utc_timestamp(now))
|
||||
|
||||
def get_next_run_time(self):
|
||||
results = list(
|
||||
self.table
|
||||
.filter(r.row['next_run_time'] != None) # flake8: noqa
|
||||
.order_by(r.asc('next_run_time'))
|
||||
.map(lambda x: x['next_run_time'])
|
||||
.limit(1)
|
||||
.run(self.conn)
|
||||
)
|
||||
return utc_timestamp_to_datetime(results[0]) if results else None
|
||||
|
||||
def get_all_jobs(self):
|
||||
jobs = self._get_jobs()
|
||||
self._fix_paused_jobs_sorting(jobs)
|
||||
return jobs
|
||||
|
||||
def add_job(self, job):
|
||||
job_dict = {
|
||||
'id': job.id,
|
||||
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
|
||||
'job_state': r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
|
||||
}
|
||||
results = self.table.insert(job_dict).run(self.conn)
|
||||
if results['errors'] > 0:
|
||||
raise ConflictingIdError(job.id)
|
||||
|
||||
def update_job(self, job):
|
||||
changes = {
|
||||
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
|
||||
'job_state': r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
|
||||
}
|
||||
results = self.table.get_all(job.id).update(changes).run(self.conn)
|
||||
skipped = False in map(lambda x: results[x] == 0, results.keys())
|
||||
if results['skipped'] > 0 or results['errors'] > 0 or not skipped:
|
||||
raise JobLookupError(job.id)
|
||||
|
||||
def remove_job(self, job_id):
|
||||
results = self.table.get_all(job_id).delete().run(self.conn)
|
||||
if results['deleted'] + results['skipped'] != 1:
|
||||
raise JobLookupError(job_id)
|
||||
|
||||
def remove_all_jobs(self):
|
||||
self.table.delete().run(self.conn)
|
||||
|
||||
def shutdown(self):
|
||||
self.conn.close()
|
||||
|
||||
def _reconstitute_job(self, job_state):
|
||||
job_state = pickle.loads(job_state)
|
||||
job = Job.__new__(Job)
|
||||
job.__setstate__(job_state)
|
||||
job._scheduler = self._scheduler
|
||||
job._jobstore_alias = self._alias
|
||||
return job
|
||||
|
||||
def _get_jobs(self, predicate=None):
|
||||
jobs = []
|
||||
failed_job_ids = []
|
||||
query = (self.table.filter(r.row['next_run_time'] != None).filter(predicate) if
|
||||
predicate else self.table)
|
||||
query = query.order_by('next_run_time', 'id').pluck('id', 'job_state')
|
||||
|
||||
for document in query.run(self.conn):
|
||||
try:
|
||||
jobs.append(self._reconstitute_job(document['job_state']))
|
||||
except:
|
||||
self._logger.exception('Unable to restore job "%s" -- removing it', document['id'])
|
||||
failed_job_ids.append(document['id'])
|
||||
|
||||
# Remove all the jobs we failed to restore
|
||||
if failed_job_ids:
|
||||
r.expr(failed_job_ids).for_each(
|
||||
lambda job_id: self.table.get_all(job_id).delete()).run(self.conn)
|
||||
|
||||
return jobs
|
||||
|
||||
def __repr__(self):
|
||||
connection = self.conn
|
||||
return '<%s (connection=%s)>' % (self.__class__.__name__, connection)
|
@@ -10,29 +10,38 @@ except ImportError: # pragma: nocover
|
||||
import pickle
|
||||
|
||||
try:
|
||||
from sqlalchemy import create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select
|
||||
from sqlalchemy import (
|
||||
create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select)
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.sql.expression import null
|
||||
except ImportError: # pragma: nocover
|
||||
raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed')
|
||||
|
||||
|
||||
class SQLAlchemyJobStore(BaseJobStore):
|
||||
"""
|
||||
Stores jobs in a database table using SQLAlchemy. The table will be created if it doesn't exist in the database.
|
||||
Stores jobs in a database table using SQLAlchemy.
|
||||
The table will be created if it doesn't exist in the database.
|
||||
|
||||
Plugin alias: ``sqlalchemy``
|
||||
|
||||
:param str url: connection string (see `SQLAlchemy documentation
|
||||
<http://docs.sqlalchemy.org/en/latest/core/engines.html?highlight=create_engine#database-urls>`_
|
||||
on this)
|
||||
:param engine: an SQLAlchemy Engine to use instead of creating a new one based on ``url``
|
||||
:param str url: connection string (see
|
||||
:ref:`SQLAlchemy documentation <sqlalchemy:database_urls>` on this)
|
||||
:param engine: an SQLAlchemy :class:`~sqlalchemy.engine.Engine` to use instead of creating a
|
||||
new one based on ``url``
|
||||
:param str tablename: name of the table to store jobs in
|
||||
:param metadata: a :class:`~sqlalchemy.MetaData` instance to use instead of creating a new one
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the highest available
|
||||
:param metadata: a :class:`~sqlalchemy.schema.MetaData` instance to use instead of creating a
|
||||
new one
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
|
||||
highest available
|
||||
:param str tableschema: name of the (existing) schema in the target database where the table
|
||||
should be
|
||||
:param dict engine_options: keyword arguments to :func:`~sqlalchemy.create_engine`
|
||||
(ignored if ``engine`` is given)
|
||||
"""
|
||||
|
||||
def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', metadata=None,
|
||||
pickle_protocol=pickle.HIGHEST_PROTOCOL):
|
||||
pickle_protocol=pickle.HIGHEST_PROTOCOL, tableschema=None, engine_options=None):
|
||||
super(SQLAlchemyJobStore, self).__init__()
|
||||
self.pickle_protocol = pickle_protocol
|
||||
metadata = maybe_ref(metadata) or MetaData()
|
||||
@@ -40,18 +49,22 @@ class SQLAlchemyJobStore(BaseJobStore):
|
||||
if engine:
|
||||
self.engine = maybe_ref(engine)
|
||||
elif url:
|
||||
self.engine = create_engine(url)
|
||||
self.engine = create_engine(url, **(engine_options or {}))
|
||||
else:
|
||||
raise ValueError('Need either "engine" or "url" defined')
|
||||
|
||||
# 191 = max key length in MySQL for InnoDB/utf8mb4 tables, 25 = precision that translates to an 8-byte float
|
||||
# 191 = max key length in MySQL for InnoDB/utf8mb4 tables,
|
||||
# 25 = precision that translates to an 8-byte float
|
||||
self.jobs_t = Table(
|
||||
tablename, metadata,
|
||||
Column('id', Unicode(191, _warn_on_bytestring=False), primary_key=True),
|
||||
Column('next_run_time', Float(25), index=True),
|
||||
Column('job_state', LargeBinary, nullable=False)
|
||||
Column('job_state', LargeBinary, nullable=False),
|
||||
schema=tableschema
|
||||
)
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
super(SQLAlchemyJobStore, self).start(scheduler, alias)
|
||||
self.jobs_t.create(self.engine, True)
|
||||
|
||||
def lookup_job(self, job_id):
|
||||
@@ -64,13 +77,16 @@ class SQLAlchemyJobStore(BaseJobStore):
|
||||
return self._get_jobs(self.jobs_t.c.next_run_time <= timestamp)
|
||||
|
||||
def get_next_run_time(self):
|
||||
selectable = select([self.jobs_t.c.next_run_time]).where(self.jobs_t.c.next_run_time != None).\
|
||||
selectable = select([self.jobs_t.c.next_run_time]).\
|
||||
where(self.jobs_t.c.next_run_time != null()).\
|
||||
order_by(self.jobs_t.c.next_run_time).limit(1)
|
||||
next_run_time = self.engine.execute(selectable).scalar()
|
||||
return utc_timestamp_to_datetime(next_run_time)
|
||||
|
||||
def get_all_jobs(self):
|
||||
return self._get_jobs()
|
||||
jobs = self._get_jobs()
|
||||
self._fix_paused_jobs_sorting(jobs)
|
||||
return jobs
|
||||
|
||||
def add_job(self, job):
|
||||
insert = self.jobs_t.insert().values(**{
|
||||
@@ -116,13 +132,14 @@ class SQLAlchemyJobStore(BaseJobStore):
|
||||
|
||||
def _get_jobs(self, *conditions):
|
||||
jobs = []
|
||||
selectable = select([self.jobs_t.c.id, self.jobs_t.c.job_state]).order_by(self.jobs_t.c.next_run_time)
|
||||
selectable = select([self.jobs_t.c.id, self.jobs_t.c.job_state]).\
|
||||
order_by(self.jobs_t.c.next_run_time)
|
||||
selectable = selectable.where(*conditions) if conditions else selectable
|
||||
failed_job_ids = set()
|
||||
for row in self.engine.execute(selectable):
|
||||
try:
|
||||
jobs.append(self._reconstitute_job(row.job_state))
|
||||
except:
|
||||
except BaseException:
|
||||
self._logger.exception('Unable to restore job "%s" -- removing it', row.id)
|
||||
failed_job_ids.add(row.id)
|
||||
|
||||
|
179
lib/apscheduler/jobstores/zookeeper.py
Normal file
@@ -0,0 +1,179 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from pytz import utc
|
||||
from kazoo.exceptions import NoNodeError, NodeExistsError
|
||||
|
||||
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
|
||||
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
|
||||
from apscheduler.job import Job
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError: # pragma: nocover
|
||||
import pickle
|
||||
|
||||
try:
|
||||
from kazoo.client import KazooClient
|
||||
except ImportError: # pragma: nocover
|
||||
raise ImportError('ZooKeeperJobStore requires Kazoo installed')
|
||||
|
||||
|
||||
class ZooKeeperJobStore(BaseJobStore):
|
||||
"""
|
||||
Stores jobs in a ZooKeeper tree. Any leftover keyword arguments are directly passed to
|
||||
kazoo's `KazooClient
|
||||
<http://kazoo.readthedocs.io/en/latest/api/client.html>`_.
|
||||
|
||||
Plugin alias: ``zookeeper``
|
||||
|
||||
:param str path: path to store jobs in
|
||||
:param client: a :class:`~kazoo.client.KazooClient` instance to use instead of
|
||||
providing connection arguments
|
||||
:param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
|
||||
highest available
|
||||
"""
|
||||
|
||||
def __init__(self, path='/apscheduler', client=None, close_connection_on_exit=False,
|
||||
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
|
||||
super(ZooKeeperJobStore, self).__init__()
|
||||
self.pickle_protocol = pickle_protocol
|
||||
self.close_connection_on_exit = close_connection_on_exit
|
||||
|
||||
if not path:
|
||||
raise ValueError('The "path" parameter must not be empty')
|
||||
|
||||
self.path = path
|
||||
|
||||
if client:
|
||||
self.client = maybe_ref(client)
|
||||
else:
|
||||
self.client = KazooClient(**connect_args)
|
||||
self._ensured_path = False
|
||||
|
||||
def _ensure_paths(self):
|
||||
if not self._ensured_path:
|
||||
self.client.ensure_path(self.path)
|
||||
self._ensured_path = True
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
super(ZooKeeperJobStore, self).start(scheduler, alias)
|
||||
if not self.client.connected:
|
||||
self.client.start()
|
||||
|
||||
def lookup_job(self, job_id):
|
||||
self._ensure_paths()
|
||||
node_path = os.path.join(self.path, job_id)
|
||||
try:
|
||||
content, _ = self.client.get(node_path)
|
||||
doc = pickle.loads(content)
|
||||
job = self._reconstitute_job(doc['job_state'])
|
||||
return job
|
||||
except BaseException:
|
||||
return None
|
||||
|
||||
def get_due_jobs(self, now):
|
||||
timestamp = datetime_to_utc_timestamp(now)
|
||||
jobs = [job_def['job'] for job_def in self._get_jobs()
|
||||
if job_def['next_run_time'] is not None and job_def['next_run_time'] <= timestamp]
|
||||
return jobs
|
||||
|
||||
def get_next_run_time(self):
|
||||
next_runs = [job_def['next_run_time'] for job_def in self._get_jobs()
|
||||
if job_def['next_run_time'] is not None]
|
||||
return utc_timestamp_to_datetime(min(next_runs)) if len(next_runs) > 0 else None
|
||||
|
||||
def get_all_jobs(self):
|
||||
jobs = [job_def['job'] for job_def in self._get_jobs()]
|
||||
self._fix_paused_jobs_sorting(jobs)
|
||||
return jobs
|
||||
|
||||
def add_job(self, job):
|
||||
self._ensure_paths()
|
||||
node_path = os.path.join(self.path, str(job.id))
|
||||
value = {
|
||||
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
|
||||
'job_state': job.__getstate__()
|
||||
}
|
||||
data = pickle.dumps(value, self.pickle_protocol)
|
||||
try:
|
||||
self.client.create(node_path, value=data)
|
||||
except NodeExistsError:
|
||||
raise ConflictingIdError(job.id)
|
||||
|
||||
def update_job(self, job):
|
||||
self._ensure_paths()
|
||||
node_path = os.path.join(self.path, str(job.id))
|
||||
changes = {
|
||||
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
|
||||
'job_state': job.__getstate__()
|
||||
}
|
||||
data = pickle.dumps(changes, self.pickle_protocol)
|
||||
try:
|
||||
self.client.set(node_path, value=data)
|
||||
except NoNodeError:
|
||||
raise JobLookupError(job.id)
|
||||
|
||||
def remove_job(self, job_id):
|
||||
self._ensure_paths()
|
||||
node_path = os.path.join(self.path, str(job_id))
|
||||
try:
|
||||
self.client.delete(node_path)
|
||||
except NoNodeError:
|
||||
raise JobLookupError(job_id)
|
||||
|
||||
def remove_all_jobs(self):
|
||||
try:
|
||||
self.client.delete(self.path, recursive=True)
|
||||
except NoNodeError:
|
||||
pass
|
||||
self._ensured_path = False
|
||||
|
||||
def shutdown(self):
|
||||
if self.close_connection_on_exit:
|
||||
self.client.stop()
|
||||
self.client.close()
|
||||
|
||||
def _reconstitute_job(self, job_state):
|
||||
job_state = job_state
|
||||
job = Job.__new__(Job)
|
||||
job.__setstate__(job_state)
|
||||
job._scheduler = self._scheduler
|
||||
job._jobstore_alias = self._alias
|
||||
return job
|
||||
|
||||
def _get_jobs(self):
|
||||
self._ensure_paths()
|
||||
jobs = []
|
||||
failed_job_ids = []
|
||||
all_ids = self.client.get_children(self.path)
|
||||
for node_name in all_ids:
|
||||
try:
|
||||
node_path = os.path.join(self.path, node_name)
|
||||
content, _ = self.client.get(node_path)
|
||||
doc = pickle.loads(content)
|
||||
job_def = {
|
||||
'job_id': node_name,
|
||||
'next_run_time': doc['next_run_time'] if doc['next_run_time'] else None,
|
||||
'job_state': doc['job_state'],
|
||||
'job': self._reconstitute_job(doc['job_state']),
|
||||
'creation_time': _.ctime
|
||||
}
|
||||
jobs.append(job_def)
|
||||
except BaseException:
|
||||
self._logger.exception('Unable to restore job "%s" -- removing it' % node_name)
|
||||
failed_job_ids.append(node_name)
|
||||
|
||||
# Remove all the jobs we failed to restore
|
||||
if failed_job_ids:
|
||||
for failed_id in failed_job_ids:
|
||||
self.remove_job(failed_id)
|
||||
paused_sort_key = datetime(9999, 12, 31, tzinfo=utc)
|
||||
return sorted(jobs, key=lambda job_def: (job_def['job'].next_run_time or paused_sort_key,
|
||||
job_def['creation_time']))
|
||||
|
||||
def __repr__(self):
|
||||
self._logger.exception('<%s (client=%s)>' % (self.__class__.__name__, self.client))
|
||||
return '<%s (client=%s)>' % (self.__class__.__name__, self.client)
|
@@ -1,5 +1,5 @@
|
||||
from __future__ import absolute_import
|
||||
from functools import wraps
|
||||
from functools import wraps, partial
|
||||
|
||||
from apscheduler.schedulers.base import BaseScheduler
|
||||
from apscheduler.util import maybe_ref
|
||||
@@ -10,13 +10,15 @@ except ImportError: # pragma: nocover
|
||||
try:
|
||||
import trollius as asyncio
|
||||
except ImportError:
|
||||
raise ImportError('AsyncIOScheduler requires either Python 3.4 or the asyncio package installed')
|
||||
raise ImportError(
|
||||
'AsyncIOScheduler requires either Python 3.4 or the asyncio package installed')
|
||||
|
||||
|
||||
def run_in_event_loop(func):
|
||||
@wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
self._eventloop.call_soon_threadsafe(func, self, *args, **kwargs)
|
||||
wrapped = partial(func, self, *args, **kwargs)
|
||||
self._eventloop.call_soon_threadsafe(wrapped)
|
||||
return wrapper
|
||||
|
||||
|
||||
@@ -24,6 +26,8 @@ class AsyncIOScheduler(BaseScheduler):
|
||||
"""
|
||||
A scheduler that runs on an asyncio (:pep:`3156`) event loop.
|
||||
|
||||
The default executor can run jobs based on native coroutines (``async def``).
|
||||
|
||||
Extra options:
|
||||
|
||||
============== =============================================================
|
||||
@@ -34,10 +38,6 @@ class AsyncIOScheduler(BaseScheduler):
|
||||
_eventloop = None
|
||||
_timeout = None
|
||||
|
||||
def start(self):
|
||||
super(AsyncIOScheduler, self).start()
|
||||
self.wakeup()
|
||||
|
||||
@run_in_event_loop
|
||||
def shutdown(self, wait=True):
|
||||
super(AsyncIOScheduler, self).shutdown(wait)
|
||||
|
@@ -1,4 +1,5 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from threading import Thread, Event
|
||||
|
||||
from apscheduler.schedulers.base import BaseScheduler
|
||||
@@ -13,11 +14,12 @@ class BackgroundScheduler(BlockingScheduler):
|
||||
|
||||
Extra options:
|
||||
|
||||
========== ============================================================================================
|
||||
``daemon`` Set the ``daemon`` option in the background thread (defaults to ``True``,
|
||||
see `the documentation <https://docs.python.org/3.4/library/threading.html#thread-objects>`_
|
||||
========== =============================================================================
|
||||
``daemon`` Set the ``daemon`` option in the background thread (defaults to ``True``, see
|
||||
`the documentation
|
||||
<https://docs.python.org/3.4/library/threading.html#thread-objects>`_
|
||||
for further details)
|
||||
========== ============================================================================================
|
||||
========== =============================================================================
|
||||
"""
|
||||
|
||||
_thread = None
|
||||
@@ -26,14 +28,14 @@ class BackgroundScheduler(BlockingScheduler):
|
||||
self._daemon = asbool(config.pop('daemon', True))
|
||||
super(BackgroundScheduler, self)._configure(config)
|
||||
|
||||
def start(self):
|
||||
BaseScheduler.start(self)
|
||||
def start(self, *args, **kwargs):
|
||||
self._event = Event()
|
||||
BaseScheduler.start(self, *args, **kwargs)
|
||||
self._thread = Thread(target=self._main_loop, name='APScheduler')
|
||||
self._thread.daemon = self._daemon
|
||||
self._thread.start()
|
||||
|
||||
def shutdown(self, wait=True):
|
||||
super(BackgroundScheduler, self).shutdown(wait)
|
||||
def shutdown(self, *args, **kwargs):
|
||||
super(BackgroundScheduler, self).shutdown(*args, **kwargs)
|
||||
self._thread.join()
|
||||
del self._thread
|
||||
|
@@ -1,21 +1,21 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from threading import Event
|
||||
|
||||
from apscheduler.schedulers.base import BaseScheduler
|
||||
from apscheduler.schedulers.base import BaseScheduler, STATE_STOPPED
|
||||
from apscheduler.util import TIMEOUT_MAX
|
||||
|
||||
|
||||
class BlockingScheduler(BaseScheduler):
|
||||
"""
|
||||
A scheduler that runs in the foreground (:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will block).
|
||||
A scheduler that runs in the foreground
|
||||
(:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will block).
|
||||
"""
|
||||
|
||||
MAX_WAIT_TIME = 4294967 # Maximum value accepted by Event.wait() on Windows
|
||||
|
||||
_event = None
|
||||
|
||||
def start(self):
|
||||
super(BlockingScheduler, self).start()
|
||||
def start(self, *args, **kwargs):
|
||||
self._event = Event()
|
||||
super(BlockingScheduler, self).start(*args, **kwargs)
|
||||
self._main_loop()
|
||||
|
||||
def shutdown(self, wait=True):
|
||||
@@ -23,10 +23,11 @@ class BlockingScheduler(BaseScheduler):
|
||||
self._event.set()
|
||||
|
||||
def _main_loop(self):
|
||||
while self.running:
|
||||
wait_seconds = self._process_jobs()
|
||||
self._event.wait(wait_seconds if wait_seconds is not None else self.MAX_WAIT_TIME)
|
||||
wait_seconds = TIMEOUT_MAX
|
||||
while self.state != STATE_STOPPED:
|
||||
self._event.wait(wait_seconds)
|
||||
self._event.clear()
|
||||
wait_seconds = self._process_jobs()
|
||||
|
||||
def wakeup(self):
|
||||
self._event.set()
|
||||
|
@@ -16,14 +16,14 @@ class GeventScheduler(BlockingScheduler):
|
||||
|
||||
_greenlet = None
|
||||
|
||||
def start(self):
|
||||
BaseScheduler.start(self)
|
||||
def start(self, *args, **kwargs):
|
||||
self._event = Event()
|
||||
BaseScheduler.start(self, *args, **kwargs)
|
||||
self._greenlet = gevent.spawn(self._main_loop)
|
||||
return self._greenlet
|
||||
|
||||
def shutdown(self, wait=True):
|
||||
super(GeventScheduler, self).shutdown(wait)
|
||||
def shutdown(self, *args, **kwargs):
|
||||
super(GeventScheduler, self).shutdown(*args, **kwargs)
|
||||
self._greenlet.join()
|
||||
del self._greenlet
|
||||
|
||||
|
@@ -4,7 +4,7 @@ from apscheduler.schedulers.base import BaseScheduler
|
||||
|
||||
try:
|
||||
from PyQt5.QtCore import QObject, QTimer
|
||||
except ImportError: # pragma: nocover
|
||||
except (ImportError, RuntimeError): # pragma: nocover
|
||||
try:
|
||||
from PyQt4.QtCore import QObject, QTimer
|
||||
except ImportError:
|
||||
@@ -19,12 +19,8 @@ class QtScheduler(BaseScheduler):
|
||||
|
||||
_timer = None
|
||||
|
||||
def start(self):
|
||||
super(QtScheduler, self).start()
|
||||
self.wakeup()
|
||||
|
||||
def shutdown(self, wait=True):
|
||||
super(QtScheduler, self).shutdown(wait)
|
||||
def shutdown(self, *args, **kwargs):
|
||||
super(QtScheduler, self).shutdown(*args, **kwargs)
|
||||
self._stop_timer()
|
||||
|
||||
def _start_timer(self, wait_seconds):
|
||||
|
@@ -1,4 +1,5 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from datetime import timedelta
|
||||
from functools import wraps
|
||||
|
||||
@@ -22,6 +23,8 @@ class TornadoScheduler(BaseScheduler):
|
||||
"""
|
||||
A scheduler that runs on a Tornado IOLoop.
|
||||
|
||||
The default executor can run jobs based on native coroutines (``async def``).
|
||||
|
||||
=========== ===============================================================
|
||||
``io_loop`` Tornado IOLoop instance to use (defaults to the global IO loop)
|
||||
=========== ===============================================================
|
||||
@@ -30,10 +33,6 @@ class TornadoScheduler(BaseScheduler):
|
||||
_ioloop = None
|
||||
_timeout = None
|
||||
|
||||
def start(self):
|
||||
super(TornadoScheduler, self).start()
|
||||
self.wakeup()
|
||||
|
||||
@run_in_ioloop
|
||||
def shutdown(self, wait=True):
|
||||
super(TornadoScheduler, self).shutdown(wait)
|
||||
@@ -53,6 +52,10 @@ class TornadoScheduler(BaseScheduler):
|
||||
self._ioloop.remove_timeout(self._timeout)
|
||||
del self._timeout
|
||||
|
||||
def _create_default_executor(self):
|
||||
from apscheduler.executors.tornado import TornadoExecutor
|
||||
return TornadoExecutor()
|
||||
|
||||
@run_in_ioloop
|
||||
def wakeup(self):
|
||||
self._stop_timer()
|
||||
|
@@ -1,4 +1,5 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from apscheduler.schedulers.base import BaseScheduler
|
||||
@@ -35,10 +36,6 @@ class TwistedScheduler(BaseScheduler):
|
||||
self._reactor = maybe_ref(config.pop('reactor', default_reactor))
|
||||
super(TwistedScheduler, self)._configure(config)
|
||||
|
||||
def start(self):
|
||||
super(TwistedScheduler, self).start()
|
||||
self.wakeup()
|
||||
|
||||
@run_in_reactor
|
||||
def shutdown(self, wait=True):
|
||||
super(TwistedScheduler, self).shutdown(wait)
|
||||
|
@@ -1,4 +1,6 @@
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from datetime import timedelta
|
||||
import random
|
||||
|
||||
import six
|
||||
|
||||
@@ -6,11 +8,41 @@ import six
|
||||
class BaseTrigger(six.with_metaclass(ABCMeta)):
|
||||
"""Abstract base class that defines the interface that every trigger must implement."""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
@abstractmethod
|
||||
def get_next_fire_time(self, previous_fire_time, now):
|
||||
"""
|
||||
Returns the next datetime to fire on, If no such datetime can be calculated, returns ``None``.
|
||||
Returns the next datetime to fire on, If no such datetime can be calculated, returns
|
||||
``None``.
|
||||
|
||||
:param datetime.datetime previous_fire_time: the previous time the trigger was fired
|
||||
:param datetime.datetime now: current datetime
|
||||
"""
|
||||
|
||||
def _apply_jitter(self, next_fire_time, jitter, now):
|
||||
"""
|
||||
Randomize ``next_fire_time`` by adding or subtracting a random value (the jitter). If the
|
||||
resulting datetime is in the past, returns the initial ``next_fire_time`` without jitter.
|
||||
|
||||
``next_fire_time - jitter <= result <= next_fire_time + jitter``
|
||||
|
||||
:param datetime.datetime|None next_fire_time: next fire time without jitter applied. If
|
||||
``None``, returns ``None``.
|
||||
:param int|None jitter: maximum number of seconds to add or subtract to
|
||||
``next_fire_time``. If ``None`` or ``0``, returns ``next_fire_time``
|
||||
:param datetime.datetime now: current datetime
|
||||
:return datetime.datetime|None: next fire time with a jitter.
|
||||
"""
|
||||
if next_fire_time is None or not jitter:
|
||||
return next_fire_time
|
||||
|
||||
next_fire_time_with_jitter = next_fire_time + timedelta(
|
||||
seconds=random.uniform(-jitter, jitter))
|
||||
|
||||
if next_fire_time_with_jitter < now:
|
||||
# Next fire time with jitter is in the past.
|
||||
# Ignore jitter to avoid false misfire.
|
||||
return next_fire_time
|
||||
|
||||
return next_fire_time_with_jitter
|
||||
|
95
lib/apscheduler/triggers/combining.py
Normal file
@@ -0,0 +1,95 @@
|
||||
from apscheduler.triggers.base import BaseTrigger
|
||||
from apscheduler.util import obj_to_ref, ref_to_obj
|
||||
|
||||
|
||||
class BaseCombiningTrigger(BaseTrigger):
|
||||
__slots__ = ('triggers', 'jitter')
|
||||
|
||||
def __init__(self, triggers, jitter=None):
|
||||
self.triggers = triggers
|
||||
self.jitter = jitter
|
||||
|
||||
def __getstate__(self):
|
||||
return {
|
||||
'version': 1,
|
||||
'triggers': [(obj_to_ref(trigger.__class__), trigger.__getstate__())
|
||||
for trigger in self.triggers],
|
||||
'jitter': self.jitter
|
||||
}
|
||||
|
||||
def __setstate__(self, state):
|
||||
if state.get('version', 1) > 1:
|
||||
raise ValueError(
|
||||
'Got serialized data for version %s of %s, but only versions up to 1 can be '
|
||||
'handled' % (state['version'], self.__class__.__name__))
|
||||
|
||||
self.jitter = state['jitter']
|
||||
self.triggers = []
|
||||
for clsref, state in state['triggers']:
|
||||
cls = ref_to_obj(clsref)
|
||||
trigger = cls.__new__(cls)
|
||||
trigger.__setstate__(state)
|
||||
self.triggers.append(trigger)
|
||||
|
||||
def __repr__(self):
|
||||
return '<{}({}{})>'.format(self.__class__.__name__, self.triggers,
|
||||
', jitter={}'.format(self.jitter) if self.jitter else '')
|
||||
|
||||
|
||||
class AndTrigger(BaseCombiningTrigger):
|
||||
"""
|
||||
Always returns the earliest next fire time that all the given triggers can agree on.
|
||||
The trigger is considered to be finished when any of the given triggers has finished its
|
||||
schedule.
|
||||
|
||||
Trigger alias: ``and``
|
||||
|
||||
:param list triggers: triggers to combine
|
||||
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
def get_next_fire_time(self, previous_fire_time, now):
|
||||
while True:
|
||||
fire_times = [trigger.get_next_fire_time(previous_fire_time, now)
|
||||
for trigger in self.triggers]
|
||||
if None in fire_times:
|
||||
return None
|
||||
elif min(fire_times) == max(fire_times):
|
||||
return self._apply_jitter(fire_times[0], self.jitter, now)
|
||||
else:
|
||||
now = max(fire_times)
|
||||
|
||||
def __str__(self):
|
||||
return 'and[{}]'.format(', '.join(str(trigger) for trigger in self.triggers))
|
||||
|
||||
|
||||
class OrTrigger(BaseCombiningTrigger):
|
||||
"""
|
||||
Always returns the earliest next fire time produced by any of the given triggers.
|
||||
The trigger is considered finished when all the given triggers have finished their schedules.
|
||||
|
||||
Trigger alias: ``or``
|
||||
|
||||
:param list triggers: triggers to combine
|
||||
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
|
||||
|
||||
.. note:: Triggers that depends on the previous fire time, such as the interval trigger, may
|
||||
seem to behave strangely since they are always passed the previous fire time produced by
|
||||
any of the given triggers.
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
def get_next_fire_time(self, previous_fire_time, now):
|
||||
fire_times = [trigger.get_next_fire_time(previous_fire_time, now)
|
||||
for trigger in self.triggers]
|
||||
fire_times = [fire_time for fire_time in fire_times if fire_time is not None]
|
||||
if fire_times:
|
||||
return self._apply_jitter(min(fire_times), self.jitter, now)
|
||||
else:
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
return 'or[{}]'.format(', '.join(str(trigger) for trigger in self.triggers))
|
@@ -4,13 +4,15 @@ from tzlocal import get_localzone
|
||||
import six
|
||||
|
||||
from apscheduler.triggers.base import BaseTrigger
|
||||
from apscheduler.triggers.cron.fields import BaseField, WeekField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES
|
||||
from apscheduler.triggers.cron.fields import (
|
||||
BaseField, MonthField, WeekField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES)
|
||||
from apscheduler.util import datetime_ceil, convert_to_datetime, datetime_repr, astimezone
|
||||
|
||||
|
||||
class CronTrigger(BaseTrigger):
|
||||
"""
|
||||
Triggers when current time matches all specified time constraints, similarly to how the UNIX cron scheduler works.
|
||||
Triggers when current time matches all specified time constraints,
|
||||
similarly to how the UNIX cron scheduler works.
|
||||
|
||||
:param int|str year: 4-digit year
|
||||
:param int|str month: month (1-12)
|
||||
@@ -22,8 +24,9 @@ class CronTrigger(BaseTrigger):
|
||||
:param int|str second: second (0-59)
|
||||
:param datetime|str start_date: earliest possible date/time to trigger on (inclusive)
|
||||
:param datetime|str end_date: latest possible date/time to trigger on (inclusive)
|
||||
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations
|
||||
(defaults to scheduler timezone)
|
||||
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (defaults
|
||||
to scheduler timezone)
|
||||
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
|
||||
|
||||
.. note:: The first weekday is always **monday**.
|
||||
"""
|
||||
@@ -31,7 +34,7 @@ class CronTrigger(BaseTrigger):
|
||||
FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second')
|
||||
FIELDS_MAP = {
|
||||
'year': BaseField,
|
||||
'month': BaseField,
|
||||
'month': MonthField,
|
||||
'week': WeekField,
|
||||
'day': DayOfMonthField,
|
||||
'day_of_week': DayOfWeekField,
|
||||
@@ -40,15 +43,16 @@ class CronTrigger(BaseTrigger):
|
||||
'second': BaseField
|
||||
}
|
||||
|
||||
__slots__ = 'timezone', 'start_date', 'end_date', 'fields'
|
||||
__slots__ = 'timezone', 'start_date', 'end_date', 'fields', 'jitter'
|
||||
|
||||
def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None, minute=None,
|
||||
second=None, start_date=None, end_date=None, timezone=None):
|
||||
def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None,
|
||||
minute=None, second=None, start_date=None, end_date=None, timezone=None,
|
||||
jitter=None):
|
||||
if timezone:
|
||||
self.timezone = astimezone(timezone)
|
||||
elif start_date and start_date.tzinfo:
|
||||
elif isinstance(start_date, datetime) and start_date.tzinfo:
|
||||
self.timezone = start_date.tzinfo
|
||||
elif end_date and end_date.tzinfo:
|
||||
elif isinstance(end_date, datetime) and end_date.tzinfo:
|
||||
self.timezone = end_date.tzinfo
|
||||
else:
|
||||
self.timezone = get_localzone()
|
||||
@@ -56,6 +60,8 @@ class CronTrigger(BaseTrigger):
|
||||
self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
|
||||
self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')
|
||||
|
||||
self.jitter = jitter
|
||||
|
||||
values = dict((key, value) for (key, value) in six.iteritems(locals())
|
||||
if key in self.FIELD_NAMES and value is not None)
|
||||
self.fields = []
|
||||
@@ -76,13 +82,35 @@ class CronTrigger(BaseTrigger):
|
||||
field = field_class(field_name, exprs, is_default)
|
||||
self.fields.append(field)
|
||||
|
||||
@classmethod
|
||||
def from_crontab(cls, expr, timezone=None):
|
||||
"""
|
||||
Create a :class:`~CronTrigger` from a standard crontab expression.
|
||||
|
||||
See https://en.wikipedia.org/wiki/Cron for more information on the format accepted here.
|
||||
|
||||
:param expr: minute, hour, day of month, month, day of week
|
||||
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (
|
||||
defaults to scheduler timezone)
|
||||
:return: a :class:`~CronTrigger` instance
|
||||
|
||||
"""
|
||||
values = expr.split()
|
||||
if len(values) != 5:
|
||||
raise ValueError('Wrong number of fields; got {}, expected 5'.format(len(values)))
|
||||
|
||||
return cls(minute=values[0], hour=values[1], day=values[2], month=values[3],
|
||||
day_of_week=values[4], timezone=timezone)
|
||||
|
||||
def _increment_field_value(self, dateval, fieldnum):
|
||||
"""
|
||||
Increments the designated field and resets all less significant fields to their minimum values.
|
||||
Increments the designated field and resets all less significant fields to their minimum
|
||||
values.
|
||||
|
||||
:type dateval: datetime
|
||||
:type fieldnum: int
|
||||
:return: a tuple containing the new date, and the number of the field that was actually incremented
|
||||
:return: a tuple containing the new date, and the number of the field that was actually
|
||||
incremented
|
||||
:rtype: tuple
|
||||
"""
|
||||
|
||||
@@ -128,12 +156,13 @@ class CronTrigger(BaseTrigger):
|
||||
else:
|
||||
values[field.name] = new_value
|
||||
|
||||
difference = datetime(**values) - dateval.replace(tzinfo=None)
|
||||
return self.timezone.normalize(dateval + difference)
|
||||
return self.timezone.localize(datetime(**values))
|
||||
|
||||
def get_next_fire_time(self, previous_fire_time, now):
|
||||
if previous_fire_time:
|
||||
start_date = max(now, previous_fire_time + timedelta(microseconds=1))
|
||||
start_date = min(now, previous_fire_time + timedelta(microseconds=1))
|
||||
if start_date == previous_fire_time:
|
||||
start_date += timedelta(microseconds=1)
|
||||
else:
|
||||
start_date = max(now, self.start_date) if self.start_date else now
|
||||
|
||||
@@ -163,8 +192,36 @@ class CronTrigger(BaseTrigger):
|
||||
return None
|
||||
|
||||
if fieldnum >= 0:
|
||||
if self.jitter is not None:
|
||||
next_date = self._apply_jitter(next_date, self.jitter, now)
|
||||
return next_date
|
||||
|
||||
def __getstate__(self):
|
||||
return {
|
||||
'version': 2,
|
||||
'timezone': self.timezone,
|
||||
'start_date': self.start_date,
|
||||
'end_date': self.end_date,
|
||||
'fields': self.fields,
|
||||
'jitter': self.jitter,
|
||||
}
|
||||
|
||||
def __setstate__(self, state):
|
||||
# This is for compatibility with APScheduler 3.0.x
|
||||
if isinstance(state, tuple):
|
||||
state = state[1]
|
||||
|
||||
if state.get('version', 1) > 2:
|
||||
raise ValueError(
|
||||
'Got serialized data for version %s of %s, but only versions up to 2 can be '
|
||||
'handled' % (state['version'], self.__class__.__name__))
|
||||
|
||||
self.timezone = state['timezone']
|
||||
self.start_date = state['start_date']
|
||||
self.end_date = state['end_date']
|
||||
self.fields = state['fields']
|
||||
self.jitter = state.get('jitter')
|
||||
|
||||
def __str__(self):
|
||||
options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
|
||||
return 'cron[%s]' % (', '.join(options))
|
||||
@@ -172,5 +229,11 @@ class CronTrigger(BaseTrigger):
|
||||
def __repr__(self):
|
||||
options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
|
||||
if self.start_date:
|
||||
options.append("start_date='%s'" % datetime_repr(self.start_date))
|
||||
return '<%s (%s)>' % (self.__class__.__name__, ', '.join(options))
|
||||
options.append("start_date=%r" % datetime_repr(self.start_date))
|
||||
if self.end_date:
|
||||
options.append("end_date=%r" % datetime_repr(self.end_date))
|
||||
if self.jitter:
|
||||
options.append('jitter=%s' % self.jitter)
|
||||
|
||||
return "<%s (%s, timezone='%s')>" % (
|
||||
self.__class__.__name__, ', '.join(options), self.timezone)
|
||||
|
@@ -1,17 +1,16 @@
|
||||
"""
|
||||
This module contains the expressions applicable for CronTrigger's fields.
|
||||
"""
|
||||
"""This module contains the expressions applicable for CronTrigger's fields."""
|
||||
|
||||
from calendar import monthrange
|
||||
import re
|
||||
|
||||
from apscheduler.util import asint
|
||||
|
||||
__all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression', 'WeekdayPositionExpression',
|
||||
'LastDayOfMonthExpression')
|
||||
__all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression',
|
||||
'WeekdayPositionExpression', 'LastDayOfMonthExpression')
|
||||
|
||||
|
||||
WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
|
||||
WEEKDAYS = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']
|
||||
MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
|
||||
|
||||
|
||||
class AllExpression(object):
|
||||
@@ -22,6 +21,14 @@ class AllExpression(object):
|
||||
if self.step == 0:
|
||||
raise ValueError('Increment must be higher than 0')
|
||||
|
||||
def validate_range(self, field_name):
|
||||
from apscheduler.triggers.cron.fields import MIN_VALUES, MAX_VALUES
|
||||
|
||||
value_range = MAX_VALUES[field_name] - MIN_VALUES[field_name]
|
||||
if self.step and self.step > value_range:
|
||||
raise ValueError('the step value ({}) is higher than the total range of the '
|
||||
'expression ({})'.format(self.step, value_range))
|
||||
|
||||
def get_next_value(self, date, field):
|
||||
start = field.get_value(date)
|
||||
minval = field.get_min(date)
|
||||
@@ -37,6 +44,9 @@ class AllExpression(object):
|
||||
if next <= maxval:
|
||||
return next
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, self.__class__) and self.step == other.step
|
||||
|
||||
def __str__(self):
|
||||
if self.step:
|
||||
return '*/%d' % self.step
|
||||
@@ -51,7 +61,7 @@ class RangeExpression(AllExpression):
|
||||
r'(?P<first>\d+)(?:-(?P<last>\d+))?(?:/(?P<step>\d+))?$')
|
||||
|
||||
def __init__(self, first, last=None, step=None):
|
||||
AllExpression.__init__(self, step)
|
||||
super(RangeExpression, self).__init__(step)
|
||||
first = asint(first)
|
||||
last = asint(last)
|
||||
if last is None and step is None:
|
||||
@@ -61,25 +71,41 @@ class RangeExpression(AllExpression):
|
||||
self.first = first
|
||||
self.last = last
|
||||
|
||||
def validate_range(self, field_name):
|
||||
from apscheduler.triggers.cron.fields import MIN_VALUES, MAX_VALUES
|
||||
|
||||
super(RangeExpression, self).validate_range(field_name)
|
||||
if self.first < MIN_VALUES[field_name]:
|
||||
raise ValueError('the first value ({}) is lower than the minimum value ({})'
|
||||
.format(self.first, MIN_VALUES[field_name]))
|
||||
if self.last is not None and self.last > MAX_VALUES[field_name]:
|
||||
raise ValueError('the last value ({}) is higher than the maximum value ({})'
|
||||
.format(self.last, MAX_VALUES[field_name]))
|
||||
value_range = (self.last or MAX_VALUES[field_name]) - self.first
|
||||
if self.step and self.step > value_range:
|
||||
raise ValueError('the step value ({}) is higher than the total range of the '
|
||||
'expression ({})'.format(self.step, value_range))
|
||||
|
||||
def get_next_value(self, date, field):
|
||||
start = field.get_value(date)
|
||||
startval = field.get_value(date)
|
||||
minval = field.get_min(date)
|
||||
maxval = field.get_max(date)
|
||||
|
||||
# Apply range limits
|
||||
minval = max(minval, self.first)
|
||||
if self.last is not None:
|
||||
maxval = min(maxval, self.last)
|
||||
start = max(start, minval)
|
||||
maxval = min(maxval, self.last) if self.last is not None else maxval
|
||||
nextval = max(minval, startval)
|
||||
|
||||
if not self.step:
|
||||
next = start
|
||||
else:
|
||||
distance_to_next = (self.step - (start - minval)) % self.step
|
||||
next = start + distance_to_next
|
||||
# Apply the step if defined
|
||||
if self.step:
|
||||
distance_to_next = (self.step - (nextval - minval)) % self.step
|
||||
nextval += distance_to_next
|
||||
|
||||
if next <= maxval:
|
||||
return next
|
||||
return nextval if nextval <= maxval else None
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, self.__class__) and self.first == other.first and
|
||||
self.last == other.last)
|
||||
|
||||
def __str__(self):
|
||||
if self.last != self.first and self.last is not None:
|
||||
@@ -100,6 +126,37 @@ class RangeExpression(AllExpression):
|
||||
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
|
||||
|
||||
|
||||
class MonthRangeExpression(RangeExpression):
|
||||
value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?', re.IGNORECASE)
|
||||
|
||||
def __init__(self, first, last=None):
|
||||
try:
|
||||
first_num = MONTHS.index(first.lower()) + 1
|
||||
except ValueError:
|
||||
raise ValueError('Invalid month name "%s"' % first)
|
||||
|
||||
if last:
|
||||
try:
|
||||
last_num = MONTHS.index(last.lower()) + 1
|
||||
except ValueError:
|
||||
raise ValueError('Invalid month name "%s"' % last)
|
||||
else:
|
||||
last_num = None
|
||||
|
||||
super(MonthRangeExpression, self).__init__(first_num, last_num)
|
||||
|
||||
def __str__(self):
|
||||
if self.last != self.first and self.last is not None:
|
||||
return '%s-%s' % (MONTHS[self.first - 1], MONTHS[self.last - 1])
|
||||
return MONTHS[self.first - 1]
|
||||
|
||||
def __repr__(self):
|
||||
args = ["'%s'" % MONTHS[self.first]]
|
||||
if self.last != self.first and self.last is not None:
|
||||
args.append("'%s'" % MONTHS[self.last - 1])
|
||||
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
|
||||
|
||||
|
||||
class WeekdayRangeExpression(RangeExpression):
|
||||
value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?', re.IGNORECASE)
|
||||
|
||||
@@ -117,7 +174,7 @@ class WeekdayRangeExpression(RangeExpression):
|
||||
else:
|
||||
last_num = None
|
||||
|
||||
RangeExpression.__init__(self, first_num, last_num)
|
||||
super(WeekdayRangeExpression, self).__init__(first_num, last_num)
|
||||
|
||||
def __str__(self):
|
||||
if self.last != self.first and self.last is not None:
|
||||
@@ -133,9 +190,11 @@ class WeekdayRangeExpression(RangeExpression):
|
||||
|
||||
class WeekdayPositionExpression(AllExpression):
|
||||
options = ['1st', '2nd', '3rd', '4th', '5th', 'last']
|
||||
value_re = re.compile(r'(?P<option_name>%s) +(?P<weekday_name>(?:\d+|\w+))' % '|'.join(options), re.IGNORECASE)
|
||||
value_re = re.compile(r'(?P<option_name>%s) +(?P<weekday_name>(?:\d+|\w+))' %
|
||||
'|'.join(options), re.IGNORECASE)
|
||||
|
||||
def __init__(self, option_name, weekday_name):
|
||||
super(WeekdayPositionExpression, self).__init__(None)
|
||||
try:
|
||||
self.option_num = self.options.index(option_name.lower())
|
||||
except ValueError:
|
||||
@@ -147,8 +206,7 @@ class WeekdayPositionExpression(AllExpression):
|
||||
raise ValueError('Invalid weekday name "%s"' % weekday_name)
|
||||
|
||||
def get_next_value(self, date, field):
|
||||
# Figure out the weekday of the month's first day and the number
|
||||
# of days in that month
|
||||
# Figure out the weekday of the month's first day and the number of days in that month
|
||||
first_day_wday, last_day = monthrange(date.year, date.month)
|
||||
|
||||
# Calculate which day of the month is the first of the target weekdays
|
||||
@@ -160,23 +218,28 @@ class WeekdayPositionExpression(AllExpression):
|
||||
if self.option_num < 5:
|
||||
target_day = first_hit_day + self.option_num * 7
|
||||
else:
|
||||
target_day = first_hit_day + ((last_day - first_hit_day) / 7) * 7
|
||||
target_day = first_hit_day + ((last_day - first_hit_day) // 7) * 7
|
||||
|
||||
if target_day <= last_day and target_day >= date.day:
|
||||
return target_day
|
||||
|
||||
def __eq__(self, other):
|
||||
return (super(WeekdayPositionExpression, self).__eq__(other) and
|
||||
self.option_num == other.option_num and self.weekday == other.weekday)
|
||||
|
||||
def __str__(self):
|
||||
return '%s %s' % (self.options[self.option_num], WEEKDAYS[self.weekday])
|
||||
|
||||
def __repr__(self):
|
||||
return "%s('%s', '%s')" % (self.__class__.__name__, self.options[self.option_num], WEEKDAYS[self.weekday])
|
||||
return "%s('%s', '%s')" % (self.__class__.__name__, self.options[self.option_num],
|
||||
WEEKDAYS[self.weekday])
|
||||
|
||||
|
||||
class LastDayOfMonthExpression(AllExpression):
|
||||
value_re = re.compile(r'last', re.IGNORECASE)
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
super(LastDayOfMonthExpression, self).__init__(None)
|
||||
|
||||
def get_next_value(self, date, field):
|
||||
return monthrange(date.year, date.month)[1]
|
||||
|
@@ -1,22 +1,26 @@
|
||||
"""
|
||||
Fields represent CronTrigger options which map to :class:`~datetime.datetime`
|
||||
fields.
|
||||
"""
|
||||
"""Fields represent CronTrigger options which map to :class:`~datetime.datetime` fields."""
|
||||
|
||||
from calendar import monthrange
|
||||
import re
|
||||
|
||||
import six
|
||||
|
||||
from apscheduler.triggers.cron.expressions import (
|
||||
AllExpression, RangeExpression, WeekdayPositionExpression, LastDayOfMonthExpression, WeekdayRangeExpression)
|
||||
AllExpression, RangeExpression, WeekdayPositionExpression, LastDayOfMonthExpression,
|
||||
WeekdayRangeExpression, MonthRangeExpression)
|
||||
|
||||
|
||||
__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField', 'WeekField', 'DayOfMonthField', 'DayOfWeekField')
|
||||
__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField', 'WeekField',
|
||||
'DayOfMonthField', 'DayOfWeekField')
|
||||
|
||||
|
||||
MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1, 'day_of_week': 0, 'hour': 0, 'minute': 0, 'second': 0}
|
||||
MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day:': 31, 'week': 53, 'day_of_week': 6, 'hour': 23, 'minute': 59,
|
||||
'second': 59}
|
||||
DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', 'day_of_week': '*', 'hour': 0, 'minute': 0,
|
||||
'second': 0}
|
||||
MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1, 'day_of_week': 0, 'hour': 0,
|
||||
'minute': 0, 'second': 0}
|
||||
MAX_VALUES = {'year': 9999, 'month': 12, 'day': 31, 'week': 53, 'day_of_week': 6, 'hour': 23,
|
||||
'minute': 59, 'second': 59}
|
||||
DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', 'day_of_week': '*', 'hour': 0,
|
||||
'minute': 0, 'second': 0}
|
||||
SEPARATOR = re.compile(' *, *')
|
||||
|
||||
|
||||
class BaseField(object):
|
||||
@@ -50,23 +54,29 @@ class BaseField(object):
|
||||
self.expressions = []
|
||||
|
||||
# Split a comma-separated expression list, if any
|
||||
exprs = str(exprs).strip()
|
||||
if ',' in exprs:
|
||||
for expr in exprs.split(','):
|
||||
self.compile_expression(expr)
|
||||
else:
|
||||
self.compile_expression(exprs)
|
||||
for expr in SEPARATOR.split(str(exprs).strip()):
|
||||
self.compile_expression(expr)
|
||||
|
||||
def compile_expression(self, expr):
|
||||
for compiler in self.COMPILERS:
|
||||
match = compiler.value_re.match(expr)
|
||||
if match:
|
||||
compiled_expr = compiler(**match.groupdict())
|
||||
|
||||
try:
|
||||
compiled_expr.validate_range(self.name)
|
||||
except ValueError as e:
|
||||
exc = ValueError('Error validating expression {!r}: {}'.format(expr, e))
|
||||
six.raise_from(exc, None)
|
||||
|
||||
self.expressions.append(compiled_expr)
|
||||
return
|
||||
|
||||
raise ValueError('Unrecognized expression "%s" for field "%s"' % (expr, self.name))
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(self, self.__class__) and self.expressions == other.expressions
|
||||
|
||||
def __str__(self):
|
||||
expr_strings = (str(e) for e in self.expressions)
|
||||
return ','.join(expr_strings)
|
||||
@@ -94,4 +104,8 @@ class DayOfWeekField(BaseField):
|
||||
COMPILERS = BaseField.COMPILERS + [WeekdayRangeExpression]
|
||||
|
||||
def get_value(self, dateval):
|
||||
return dateval.weekday()
|
||||
return dateval.isoweekday() % 7
|
||||
|
||||
|
||||
class MonthField(BaseField):
|
||||
COMPILERS = BaseField.COMPILERS + [MonthRangeExpression]
|
||||
|
@@ -14,15 +14,36 @@ class DateTrigger(BaseTrigger):
|
||||
:param datetime.tzinfo|str timezone: time zone for ``run_date`` if it doesn't have one already
|
||||
"""
|
||||
|
||||
__slots__ = 'timezone', 'run_date'
|
||||
__slots__ = 'run_date'
|
||||
|
||||
def __init__(self, run_date=None, timezone=None):
|
||||
timezone = astimezone(timezone) or get_localzone()
|
||||
self.run_date = convert_to_datetime(run_date or datetime.now(), timezone, 'run_date')
|
||||
if run_date is not None:
|
||||
self.run_date = convert_to_datetime(run_date, timezone, 'run_date')
|
||||
else:
|
||||
self.run_date = datetime.now(timezone)
|
||||
|
||||
def get_next_fire_time(self, previous_fire_time, now):
|
||||
return self.run_date if previous_fire_time is None else None
|
||||
|
||||
def __getstate__(self):
|
||||
return {
|
||||
'version': 1,
|
||||
'run_date': self.run_date
|
||||
}
|
||||
|
||||
def __setstate__(self, state):
|
||||
# This is for compatibility with APScheduler 3.0.x
|
||||
if isinstance(state, tuple):
|
||||
state = state[1]
|
||||
|
||||
if state.get('version', 1) > 1:
|
||||
raise ValueError(
|
||||
'Got serialized data for version %s of %s, but only version 1 can be handled' %
|
||||
(state['version'], self.__class__.__name__))
|
||||
|
||||
self.run_date = state['run_date']
|
||||
|
||||
def __str__(self):
|
||||
return 'date[%s]' % datetime_repr(self.run_date)
|
||||
|
||||
|
@@ -9,8 +9,8 @@ from apscheduler.util import convert_to_datetime, timedelta_seconds, datetime_re
|
||||
|
||||
class IntervalTrigger(BaseTrigger):
|
||||
"""
|
||||
Triggers on specified intervals, starting on ``start_date`` if specified, ``datetime.now()`` + interval
|
||||
otherwise.
|
||||
Triggers on specified intervals, starting on ``start_date`` if specified, ``datetime.now()`` +
|
||||
interval otherwise.
|
||||
|
||||
:param int weeks: number of weeks to wait
|
||||
:param int days: number of days to wait
|
||||
@@ -20,12 +20,15 @@ class IntervalTrigger(BaseTrigger):
|
||||
:param datetime|str start_date: starting point for the interval calculation
|
||||
:param datetime|str end_date: latest possible date/time to trigger on
|
||||
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations
|
||||
:param int|None jitter: advance or delay the job execution by ``jitter`` seconds at most.
|
||||
"""
|
||||
|
||||
__slots__ = 'timezone', 'start_date', 'end_date', 'interval'
|
||||
__slots__ = 'timezone', 'start_date', 'end_date', 'interval', 'interval_length', 'jitter'
|
||||
|
||||
def __init__(self, weeks=0, days=0, hours=0, minutes=0, seconds=0, start_date=None, end_date=None, timezone=None):
|
||||
self.interval = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds)
|
||||
def __init__(self, weeks=0, days=0, hours=0, minutes=0, seconds=0, start_date=None,
|
||||
end_date=None, timezone=None, jitter=None):
|
||||
self.interval = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes,
|
||||
seconds=seconds)
|
||||
self.interval_length = timedelta_seconds(self.interval)
|
||||
if self.interval_length == 0:
|
||||
self.interval = timedelta(seconds=1)
|
||||
@@ -33,9 +36,9 @@ class IntervalTrigger(BaseTrigger):
|
||||
|
||||
if timezone:
|
||||
self.timezone = astimezone(timezone)
|
||||
elif start_date and start_date.tzinfo:
|
||||
elif isinstance(start_date, datetime) and start_date.tzinfo:
|
||||
self.timezone = start_date.tzinfo
|
||||
elif end_date and end_date.tzinfo:
|
||||
elif isinstance(end_date, datetime) and end_date.tzinfo:
|
||||
self.timezone = end_date.tzinfo
|
||||
else:
|
||||
self.timezone = get_localzone()
|
||||
@@ -44,6 +47,8 @@ class IntervalTrigger(BaseTrigger):
|
||||
self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
|
||||
self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')
|
||||
|
||||
self.jitter = jitter
|
||||
|
||||
def get_next_fire_time(self, previous_fire_time, now):
|
||||
if previous_fire_time:
|
||||
next_fire_time = previous_fire_time + self.interval
|
||||
@@ -54,12 +59,48 @@ class IntervalTrigger(BaseTrigger):
|
||||
next_interval_num = int(ceil(timediff_seconds / self.interval_length))
|
||||
next_fire_time = self.start_date + self.interval * next_interval_num
|
||||
|
||||
if self.jitter is not None:
|
||||
next_fire_time = self._apply_jitter(next_fire_time, self.jitter, now)
|
||||
|
||||
if not self.end_date or next_fire_time <= self.end_date:
|
||||
return self.timezone.normalize(next_fire_time)
|
||||
|
||||
def __getstate__(self):
|
||||
return {
|
||||
'version': 2,
|
||||
'timezone': self.timezone,
|
||||
'start_date': self.start_date,
|
||||
'end_date': self.end_date,
|
||||
'interval': self.interval,
|
||||
'jitter': self.jitter,
|
||||
}
|
||||
|
||||
def __setstate__(self, state):
|
||||
# This is for compatibility with APScheduler 3.0.x
|
||||
if isinstance(state, tuple):
|
||||
state = state[1]
|
||||
|
||||
if state.get('version', 1) > 2:
|
||||
raise ValueError(
|
||||
'Got serialized data for version %s of %s, but only versions up to 2 can be '
|
||||
'handled' % (state['version'], self.__class__.__name__))
|
||||
|
||||
self.timezone = state['timezone']
|
||||
self.start_date = state['start_date']
|
||||
self.end_date = state['end_date']
|
||||
self.interval = state['interval']
|
||||
self.interval_length = timedelta_seconds(self.interval)
|
||||
self.jitter = state.get('jitter')
|
||||
|
||||
def __str__(self):
|
||||
return 'interval[%s]' % str(self.interval)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s (interval=%r, start_date='%s')>" % (self.__class__.__name__, self.interval,
|
||||
datetime_repr(self.start_date))
|
||||
options = ['interval=%r' % self.interval, 'start_date=%r' % datetime_repr(self.start_date)]
|
||||
if self.end_date:
|
||||
options.append("end_date=%r" % datetime_repr(self.end_date))
|
||||
if self.jitter:
|
||||
options.append('jitter=%s' % self.jitter)
|
||||
|
||||
return "<%s (%s, timezone='%s')>" % (
|
||||
self.__class__.__name__, ', '.join(options), self.timezone)
|
||||
|
@@ -2,9 +2,9 @@
|
||||
|
||||
from __future__ import division
|
||||
from datetime import date, datetime, time, timedelta, tzinfo
|
||||
from inspect import isfunction, ismethod, getargspec
|
||||
from calendar import timegm
|
||||
import re
|
||||
from functools import partial
|
||||
|
||||
from pytz import timezone, utc
|
||||
import six
|
||||
@@ -12,14 +12,16 @@ import six
|
||||
try:
|
||||
from inspect import signature
|
||||
except ImportError: # pragma: nocover
|
||||
try:
|
||||
from funcsigs import signature
|
||||
except ImportError:
|
||||
signature = None
|
||||
from funcsigs import signature
|
||||
|
||||
try:
|
||||
from threading import TIMEOUT_MAX
|
||||
except ImportError:
|
||||
TIMEOUT_MAX = 4294967 # Maximum value accepted by Event.wait() on Windows
|
||||
|
||||
__all__ = ('asint', 'asbool', 'astimezone', 'convert_to_datetime', 'datetime_to_utc_timestamp',
|
||||
'utc_timestamp_to_datetime', 'timedelta_seconds', 'datetime_ceil', 'get_callable_name', 'obj_to_ref',
|
||||
'ref_to_obj', 'maybe_ref', 'repr_escape', 'check_callable_args')
|
||||
'utc_timestamp_to_datetime', 'timedelta_seconds', 'datetime_ceil', 'get_callable_name',
|
||||
'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'repr_escape', 'check_callable_args')
|
||||
|
||||
|
||||
class _Undefined(object):
|
||||
@@ -32,17 +34,18 @@ class _Undefined(object):
|
||||
def __repr__(self):
|
||||
return '<undefined>'
|
||||
|
||||
|
||||
undefined = _Undefined() #: a unique object that only signifies that no value is defined
|
||||
|
||||
|
||||
def asint(text):
|
||||
"""
|
||||
Safely converts a string to an integer, returning None if the string is None.
|
||||
Safely converts a string to an integer, returning ``None`` if the string is ``None``.
|
||||
|
||||
:type text: str
|
||||
:rtype: int
|
||||
"""
|
||||
|
||||
"""
|
||||
if text is not None:
|
||||
return int(text)
|
||||
|
||||
@@ -52,8 +55,8 @@ def asbool(obj):
|
||||
Interprets an object as a boolean value.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
|
||||
"""
|
||||
if isinstance(obj, str):
|
||||
obj = obj.strip().lower()
|
||||
if obj in ('true', 'yes', 'on', 'y', 't', '1'):
|
||||
@@ -69,15 +72,19 @@ def astimezone(obj):
|
||||
Interprets an object as a timezone.
|
||||
|
||||
:rtype: tzinfo
|
||||
"""
|
||||
|
||||
"""
|
||||
if isinstance(obj, six.string_types):
|
||||
return timezone(obj)
|
||||
if isinstance(obj, tzinfo):
|
||||
if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'):
|
||||
raise TypeError('Only timezones from the pytz library are supported')
|
||||
if obj.zone == 'local':
|
||||
raise ValueError('Unable to determine the name of the local timezone -- use an explicit timezone instead')
|
||||
raise ValueError(
|
||||
'Unable to determine the name of the local timezone -- you must explicitly '
|
||||
'specify the name of the local timezone. Please refrain from using timezones like '
|
||||
'EST to prevent problems with daylight saving time. Instead, use a locale based '
|
||||
'timezone name (such as Europe/Helsinki).')
|
||||
return obj
|
||||
if obj is not None:
|
||||
raise TypeError('Expected tzinfo, got %s instead' % obj.__class__.__name__)
|
||||
@@ -92,20 +99,20 @@ _DATE_REGEX = re.compile(
|
||||
def convert_to_datetime(input, tz, arg_name):
|
||||
"""
|
||||
Converts the given object to a timezone aware datetime object.
|
||||
|
||||
If a timezone aware datetime object is passed, it is returned unmodified.
|
||||
If a native datetime object is passed, it is given the specified timezone.
|
||||
If the input is a string, it is parsed as a datetime with the given timezone.
|
||||
|
||||
Date strings are accepted in three different forms: date only (Y-m-d),
|
||||
date with time (Y-m-d H:M:S) or with date+time with microseconds
|
||||
(Y-m-d H:M:S.micro).
|
||||
Date strings are accepted in three different forms: date only (Y-m-d), date with time
|
||||
(Y-m-d H:M:S) or with date+time with microseconds (Y-m-d H:M:S.micro).
|
||||
|
||||
:param str|datetime input: the datetime or string to convert to a timezone aware datetime
|
||||
:param datetime.tzinfo tz: timezone to interpret ``input`` in
|
||||
:param str arg_name: the name of the argument (used in an error message)
|
||||
:rtype: datetime
|
||||
"""
|
||||
|
||||
"""
|
||||
if input is None:
|
||||
return
|
||||
elif isinstance(input, datetime):
|
||||
@@ -125,14 +132,16 @@ def convert_to_datetime(input, tz, arg_name):
|
||||
if datetime_.tzinfo is not None:
|
||||
return datetime_
|
||||
if tz is None:
|
||||
raise ValueError('The "tz" argument must be specified if %s has no timezone information' % arg_name)
|
||||
raise ValueError(
|
||||
'The "tz" argument must be specified if %s has no timezone information' % arg_name)
|
||||
if isinstance(tz, six.string_types):
|
||||
tz = timezone(tz)
|
||||
|
||||
try:
|
||||
return tz.localize(datetime_, is_dst=None)
|
||||
except AttributeError:
|
||||
raise TypeError('Only pytz timezones are supported (need the localize() and normalize() methods)')
|
||||
raise TypeError(
|
||||
'Only pytz timezones are supported (need the localize() and normalize() methods)')
|
||||
|
||||
|
||||
def datetime_to_utc_timestamp(timeval):
|
||||
@@ -141,8 +150,8 @@ def datetime_to_utc_timestamp(timeval):
|
||||
|
||||
:type timeval: datetime
|
||||
:rtype: float
|
||||
"""
|
||||
|
||||
"""
|
||||
if timeval is not None:
|
||||
return timegm(timeval.utctimetuple()) + timeval.microsecond / 1000000
|
||||
|
||||
@@ -153,8 +162,8 @@ def utc_timestamp_to_datetime(timestamp):
|
||||
|
||||
:type timestamp: float
|
||||
:rtype: datetime
|
||||
"""
|
||||
|
||||
"""
|
||||
if timestamp is not None:
|
||||
return datetime.fromtimestamp(timestamp, utc)
|
||||
|
||||
@@ -165,8 +174,8 @@ def timedelta_seconds(delta):
|
||||
|
||||
:type delta: timedelta
|
||||
:rtype: float
|
||||
"""
|
||||
|
||||
"""
|
||||
return delta.days * 24 * 60 * 60 + delta.seconds + \
|
||||
delta.microseconds / 1000000.0
|
||||
|
||||
@@ -176,8 +185,8 @@ def datetime_ceil(dateval):
|
||||
Rounds the given datetime object upwards.
|
||||
|
||||
:type dateval: datetime
|
||||
"""
|
||||
|
||||
"""
|
||||
if dateval.microsecond > 0:
|
||||
return dateval + timedelta(seconds=1, microseconds=-dateval.microsecond)
|
||||
return dateval
|
||||
@@ -192,8 +201,8 @@ def get_callable_name(func):
|
||||
Returns the best available display name for the given function/callable.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
"""
|
||||
# the easy case (on Python 3.3+)
|
||||
if hasattr(func, '__qualname__'):
|
||||
return func.__qualname__
|
||||
@@ -222,20 +231,24 @@ def get_callable_name(func):
|
||||
|
||||
def obj_to_ref(obj):
|
||||
"""
|
||||
Returns the path to the given object.
|
||||
Returns the path to the given callable.
|
||||
|
||||
:rtype: str
|
||||
:raises TypeError: if the given object is not callable
|
||||
:raises ValueError: if the given object is a :class:`~functools.partial`, lambda or a nested
|
||||
function
|
||||
|
||||
"""
|
||||
if isinstance(obj, partial):
|
||||
raise ValueError('Cannot create a reference to a partial()')
|
||||
|
||||
try:
|
||||
ref = '%s:%s' % (obj.__module__, get_callable_name(obj))
|
||||
obj2 = ref_to_obj(ref)
|
||||
if obj != obj2:
|
||||
raise ValueError
|
||||
except Exception:
|
||||
raise ValueError('Cannot determine the reference to %r' % obj)
|
||||
name = get_callable_name(obj)
|
||||
if '<lambda>' in name:
|
||||
raise ValueError('Cannot create a reference to a lambda')
|
||||
if '<locals>' in name:
|
||||
raise ValueError('Cannot create a reference to a nested function')
|
||||
|
||||
return ref
|
||||
return '%s:%s' % (obj.__module__, name)
|
||||
|
||||
|
||||
def ref_to_obj(ref):
|
||||
@@ -243,8 +256,8 @@ def ref_to_obj(ref):
|
||||
Returns the object pointed to by ``ref``.
|
||||
|
||||
:type ref: str
|
||||
"""
|
||||
|
||||
"""
|
||||
if not isinstance(ref, six.string_types):
|
||||
raise TypeError('References must be strings')
|
||||
if ':' not in ref:
|
||||
@@ -252,12 +265,12 @@ def ref_to_obj(ref):
|
||||
|
||||
modulename, rest = ref.split(':', 1)
|
||||
try:
|
||||
obj = __import__(modulename)
|
||||
obj = __import__(modulename, fromlist=[rest])
|
||||
except ImportError:
|
||||
raise LookupError('Error resolving reference %s: could not import module' % ref)
|
||||
|
||||
try:
|
||||
for name in modulename.split('.')[1:] + rest.split('.'):
|
||||
for name in rest.split('.'):
|
||||
obj = getattr(obj, name)
|
||||
return obj
|
||||
except Exception:
|
||||
@@ -268,8 +281,8 @@ def maybe_ref(ref):
|
||||
"""
|
||||
Returns the object that the given reference points to, if it is indeed a reference.
|
||||
If it is not a reference, the object is returned as-is.
|
||||
"""
|
||||
|
||||
"""
|
||||
if not isinstance(ref, str):
|
||||
return ref
|
||||
return ref_to_obj(ref)
|
||||
@@ -281,7 +294,8 @@ if six.PY2:
|
||||
return string.encode('ascii', 'backslashreplace')
|
||||
return string
|
||||
else:
|
||||
repr_escape = lambda string: string
|
||||
def repr_escape(string):
|
||||
return string
|
||||
|
||||
|
||||
def check_callable_args(func, args, kwargs):
|
||||
@@ -290,70 +304,51 @@ def check_callable_args(func, args, kwargs):
|
||||
|
||||
:type args: tuple
|
||||
:type kwargs: dict
|
||||
"""
|
||||
|
||||
"""
|
||||
pos_kwargs_conflicts = [] # parameters that have a match in both args and kwargs
|
||||
positional_only_kwargs = [] # positional-only parameters that have a match in kwargs
|
||||
unsatisfied_args = [] # parameters in signature that don't have a match in args or kwargs
|
||||
unsatisfied_kwargs = [] # keyword-only arguments that don't have a match in kwargs
|
||||
unmatched_args = list(args) # args that didn't match any of the parameters in the signature
|
||||
unmatched_kwargs = list(kwargs) # kwargs that didn't match any of the parameters in the signature
|
||||
has_varargs = has_var_kwargs = False # indicates if the signature defines *args and **kwargs respectively
|
||||
# kwargs that didn't match any of the parameters in the signature
|
||||
unmatched_kwargs = list(kwargs)
|
||||
# indicates if the signature defines *args and **kwargs respectively
|
||||
has_varargs = has_var_kwargs = False
|
||||
|
||||
if signature:
|
||||
try:
|
||||
sig = signature(func)
|
||||
except ValueError:
|
||||
return # signature() doesn't work against every kind of callable
|
||||
try:
|
||||
sig = signature(func)
|
||||
except ValueError:
|
||||
# signature() doesn't work against every kind of callable
|
||||
return
|
||||
|
||||
for param in six.itervalues(sig.parameters):
|
||||
if param.kind == param.POSITIONAL_OR_KEYWORD:
|
||||
if param.name in unmatched_kwargs and unmatched_args:
|
||||
pos_kwargs_conflicts.append(param.name)
|
||||
elif unmatched_args:
|
||||
del unmatched_args[0]
|
||||
elif param.name in unmatched_kwargs:
|
||||
unmatched_kwargs.remove(param.name)
|
||||
elif param.default is param.empty:
|
||||
unsatisfied_args.append(param.name)
|
||||
elif param.kind == param.POSITIONAL_ONLY:
|
||||
if unmatched_args:
|
||||
del unmatched_args[0]
|
||||
elif param.name in unmatched_kwargs:
|
||||
unmatched_kwargs.remove(param.name)
|
||||
positional_only_kwargs.append(param.name)
|
||||
elif param.default is param.empty:
|
||||
unsatisfied_args.append(param.name)
|
||||
elif param.kind == param.KEYWORD_ONLY:
|
||||
if param.name in unmatched_kwargs:
|
||||
unmatched_kwargs.remove(param.name)
|
||||
elif param.default is param.empty:
|
||||
unsatisfied_kwargs.append(param.name)
|
||||
elif param.kind == param.VAR_POSITIONAL:
|
||||
has_varargs = True
|
||||
elif param.kind == param.VAR_KEYWORD:
|
||||
has_var_kwargs = True
|
||||
else:
|
||||
if not isfunction(func) and not ismethod(func) and hasattr(func, '__call__'):
|
||||
func = func.__call__
|
||||
|
||||
try:
|
||||
argspec = getargspec(func)
|
||||
except TypeError:
|
||||
return # getargspec() doesn't work certain callables
|
||||
|
||||
argspec_args = argspec.args if not ismethod(func) else argspec.args[1:]
|
||||
has_varargs = bool(argspec.varargs)
|
||||
has_var_kwargs = bool(argspec.keywords)
|
||||
for arg, default in six.moves.zip_longest(argspec_args, argspec.defaults or (), fillvalue=undefined):
|
||||
if arg in unmatched_kwargs and unmatched_args:
|
||||
pos_kwargs_conflicts.append(arg)
|
||||
for param in six.itervalues(sig.parameters):
|
||||
if param.kind == param.POSITIONAL_OR_KEYWORD:
|
||||
if param.name in unmatched_kwargs and unmatched_args:
|
||||
pos_kwargs_conflicts.append(param.name)
|
||||
elif unmatched_args:
|
||||
del unmatched_args[0]
|
||||
elif arg in unmatched_kwargs:
|
||||
unmatched_kwargs.remove(arg)
|
||||
elif default is undefined:
|
||||
unsatisfied_args.append(arg)
|
||||
elif param.name in unmatched_kwargs:
|
||||
unmatched_kwargs.remove(param.name)
|
||||
elif param.default is param.empty:
|
||||
unsatisfied_args.append(param.name)
|
||||
elif param.kind == param.POSITIONAL_ONLY:
|
||||
if unmatched_args:
|
||||
del unmatched_args[0]
|
||||
elif param.name in unmatched_kwargs:
|
||||
unmatched_kwargs.remove(param.name)
|
||||
positional_only_kwargs.append(param.name)
|
||||
elif param.default is param.empty:
|
||||
unsatisfied_args.append(param.name)
|
||||
elif param.kind == param.KEYWORD_ONLY:
|
||||
if param.name in unmatched_kwargs:
|
||||
unmatched_kwargs.remove(param.name)
|
||||
elif param.default is param.empty:
|
||||
unsatisfied_kwargs.append(param.name)
|
||||
elif param.kind == param.VAR_POSITIONAL:
|
||||
has_varargs = True
|
||||
elif param.kind == param.VAR_KEYWORD:
|
||||
has_var_kwargs = True
|
||||
|
||||
# Make sure there are no conflicts between args and kwargs
|
||||
if pos_kwargs_conflicts:
|
||||
@@ -365,21 +360,26 @@ def check_callable_args(func, args, kwargs):
|
||||
raise ValueError('The following arguments cannot be given as keyword arguments: %s' %
|
||||
', '.join(positional_only_kwargs))
|
||||
|
||||
# Check that the number of positional arguments minus the number of matched kwargs matches the argspec
|
||||
# Check that the number of positional arguments minus the number of matched kwargs matches the
|
||||
# argspec
|
||||
if unsatisfied_args:
|
||||
raise ValueError('The following arguments have not been supplied: %s' % ', '.join(unsatisfied_args))
|
||||
raise ValueError('The following arguments have not been supplied: %s' %
|
||||
', '.join(unsatisfied_args))
|
||||
|
||||
# Check that all keyword-only arguments have been supplied
|
||||
if unsatisfied_kwargs:
|
||||
raise ValueError('The following keyword-only arguments have not been supplied in kwargs: %s' %
|
||||
', '.join(unsatisfied_kwargs))
|
||||
raise ValueError(
|
||||
'The following keyword-only arguments have not been supplied in kwargs: %s' %
|
||||
', '.join(unsatisfied_kwargs))
|
||||
|
||||
# Check that the callable can accept the given number of positional arguments
|
||||
if not has_varargs and unmatched_args:
|
||||
raise ValueError('The list of positional arguments is longer than the target callable can handle '
|
||||
'(allowed: %d, given in args: %d)' % (len(args) - len(unmatched_args), len(args)))
|
||||
raise ValueError(
|
||||
'The list of positional arguments is longer than the target callable can handle '
|
||||
'(allowed: %d, given in args: %d)' % (len(args) - len(unmatched_args), len(args)))
|
||||
|
||||
# Check that the callable can accept the given keyword arguments
|
||||
if not has_var_kwargs and unmatched_kwargs:
|
||||
raise ValueError('The target callable does not accept the following keyword arguments: %s' %
|
||||
', '.join(unmatched_kwargs))
|
||||
raise ValueError(
|
||||
'The target callable does not accept the following keyword arguments: %s' %
|
||||
', '.join(unmatched_kwargs))
|
||||
|
@@ -4,5 +4,5 @@ from .arrow import Arrow
|
||||
from .factory import ArrowFactory
|
||||
from .api import get, now, utcnow
|
||||
|
||||
__version__ = '0.7.0'
|
||||
__version__ = '0.10.0'
|
||||
VERSION = __version__
|
||||
|
@@ -51,5 +51,5 @@ def factory(type):
|
||||
return ArrowFactory(type)
|
||||
|
||||
|
||||
__all__ = ['get', 'utcnow', 'now', 'factory', 'iso']
|
||||
__all__ = ['get', 'utcnow', 'now', 'factory']
|
||||
|
||||
|
@@ -12,6 +12,8 @@ from dateutil import tz as dateutil_tz
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import calendar
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
from arrow import util, locales, parser, formatter
|
||||
|
||||
@@ -45,6 +47,7 @@ class Arrow(object):
|
||||
|
||||
_ATTRS = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']
|
||||
_ATTRS_PLURAL = ['{0}s'.format(a) for a in _ATTRS]
|
||||
_MONTHS_PER_QUARTER = 3
|
||||
|
||||
def __init__(self, year, month, day, hour=0, minute=0, second=0, microsecond=0,
|
||||
tzinfo=None):
|
||||
@@ -306,6 +309,9 @@ class Arrow(object):
|
||||
if name == 'week':
|
||||
return self.isocalendar()[1]
|
||||
|
||||
if name == 'quarter':
|
||||
return int((self.month-1)/self._MONTHS_PER_QUARTER) + 1
|
||||
|
||||
if not name.startswith('_'):
|
||||
value = getattr(self._datetime, name, None)
|
||||
|
||||
@@ -378,16 +384,16 @@ class Arrow(object):
|
||||
>>> arw.replace(year=2014, month=6)
|
||||
<Arrow [2014-06-11T22:27:34.787885+00:00]>
|
||||
|
||||
Use plural property names to shift their current value relatively:
|
||||
|
||||
>>> arw.replace(years=1, months=-1)
|
||||
<Arrow [2014-04-11T22:27:34.787885+00:00]>
|
||||
|
||||
You can also provide a timezone expression can also be replaced:
|
||||
|
||||
>>> arw.replace(tzinfo=tz.tzlocal())
|
||||
<Arrow [2013-05-11T22:27:34.787885-07:00]>
|
||||
|
||||
Use plural property names to shift their current value relatively (**deprecated**):
|
||||
|
||||
>>> arw.replace(years=1, months=-1)
|
||||
<Arrow [2014-04-11T22:27:34.787885+00:00]>
|
||||
|
||||
Recognized timezone expressions:
|
||||
|
||||
- A ``tzinfo`` object.
|
||||
@@ -398,21 +404,29 @@ class Arrow(object):
|
||||
'''
|
||||
|
||||
absolute_kwargs = {}
|
||||
relative_kwargs = {}
|
||||
relative_kwargs = {} # TODO: DEPRECATED; remove in next release
|
||||
|
||||
for key, value in kwargs.items():
|
||||
|
||||
if key in self._ATTRS:
|
||||
absolute_kwargs[key] = value
|
||||
elif key in self._ATTRS_PLURAL or key == 'weeks':
|
||||
elif key in self._ATTRS_PLURAL or key in ['weeks', 'quarters']:
|
||||
# TODO: DEPRECATED
|
||||
warnings.warn("replace() with plural property to shift value"
|
||||
"is deprecated, use shift() instead",
|
||||
DeprecationWarning)
|
||||
relative_kwargs[key] = value
|
||||
elif key == 'week':
|
||||
raise AttributeError('setting absolute week is not supported')
|
||||
elif key in ['week', 'quarter']:
|
||||
raise AttributeError('setting absolute {0} is not supported'.format(key))
|
||||
elif key !='tzinfo':
|
||||
raise AttributeError()
|
||||
raise AttributeError('unknown attribute: "{0}"'.format(key))
|
||||
|
||||
# core datetime does not support quarters, translate to months.
|
||||
relative_kwargs.setdefault('months', 0)
|
||||
relative_kwargs['months'] += relative_kwargs.pop('quarters', 0) * self._MONTHS_PER_QUARTER
|
||||
|
||||
current = self._datetime.replace(**absolute_kwargs)
|
||||
current += relativedelta(**relative_kwargs)
|
||||
current += relativedelta(**relative_kwargs) # TODO: DEPRECATED
|
||||
|
||||
tzinfo = kwargs.get('tzinfo')
|
||||
|
||||
@@ -422,9 +436,41 @@ class Arrow(object):
|
||||
|
||||
return self.fromdatetime(current)
|
||||
|
||||
def shift(self, **kwargs):
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object with attributes updated
|
||||
according to inputs.
|
||||
|
||||
Use plural property names to shift their current value relatively:
|
||||
|
||||
>>> import arrow
|
||||
>>> arw = arrow.utcnow()
|
||||
>>> arw
|
||||
<Arrow [2013-05-11T22:27:34.787885+00:00]>
|
||||
>>> arw.shift(years=1, months=-1)
|
||||
<Arrow [2014-04-11T22:27:34.787885+00:00]>
|
||||
|
||||
'''
|
||||
|
||||
relative_kwargs = {}
|
||||
|
||||
for key, value in kwargs.items():
|
||||
|
||||
if key in self._ATTRS_PLURAL or key in ['weeks', 'quarters']:
|
||||
relative_kwargs[key] = value
|
||||
else:
|
||||
raise AttributeError()
|
||||
|
||||
# core datetime does not support quarters, translate to months.
|
||||
relative_kwargs.setdefault('months', 0)
|
||||
relative_kwargs['months'] += relative_kwargs.pop('quarters', 0) * self._MONTHS_PER_QUARTER
|
||||
|
||||
current = self._datetime + relativedelta(**relative_kwargs)
|
||||
|
||||
return self.fromdatetime(current)
|
||||
|
||||
def to(self, tz):
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, converted to the target
|
||||
timezone.
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, converted
|
||||
to the target timezone.
|
||||
|
||||
:param tz: an expression representing a timezone.
|
||||
|
||||
@@ -587,6 +633,7 @@ class Arrow(object):
|
||||
Defaults to now in the current :class:`Arrow <arrow.arrow.Arrow>` object's timezone.
|
||||
:param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'.
|
||||
:param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> earlier = arrow.utcnow().replace(hours=-2)
|
||||
@@ -651,7 +698,8 @@ class Arrow(object):
|
||||
elif diff < 29808000:
|
||||
self_months = self._datetime.year * 12 + self._datetime.month
|
||||
other_months = dt.year * 12 + dt.month
|
||||
months = sign * abs(other_months - self_months)
|
||||
|
||||
months = sign * int(max(abs(other_months - self_months), 2))
|
||||
|
||||
return locale.describe('months', months, only_distance=only_distance)
|
||||
|
||||
@@ -676,7 +724,7 @@ class Arrow(object):
|
||||
|
||||
def __sub__(self, other):
|
||||
|
||||
if isinstance(other, timedelta):
|
||||
if isinstance(other, (timedelta, relativedelta)):
|
||||
return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)
|
||||
|
||||
elif isinstance(other, datetime):
|
||||
@@ -688,7 +736,11 @@ class Arrow(object):
|
||||
raise TypeError()
|
||||
|
||||
def __rsub__(self, other):
|
||||
return self.__sub__(other)
|
||||
|
||||
if isinstance(other, datetime):
|
||||
return other - self._datetime
|
||||
|
||||
raise TypeError()
|
||||
|
||||
|
||||
# comparisons
|
||||
@@ -702,8 +754,6 @@ class Arrow(object):
|
||||
if not isinstance(other, (Arrow, datetime)):
|
||||
return False
|
||||
|
||||
other = self._get_datetime(other)
|
||||
|
||||
return self._datetime == self._get_datetime(other)
|
||||
|
||||
def __ne__(self, other):
|
||||
@@ -882,7 +932,9 @@ class Arrow(object):
|
||||
return cls.max, limit
|
||||
|
||||
else:
|
||||
return end, sys.maxsize
|
||||
if limit is None:
|
||||
return end, sys.maxsize
|
||||
return end, limit
|
||||
|
||||
@staticmethod
|
||||
def _get_timestamp_from_input(timestamp):
|
||||
|
@@ -94,7 +94,7 @@ class DateTimeFormatter(object):
|
||||
tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo
|
||||
total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60)
|
||||
|
||||
sign = '+' if total_minutes > 0 else '-'
|
||||
sign = '+' if total_minutes >= 0 else '-'
|
||||
total_minutes = abs(total_minutes)
|
||||
hour, minute = divmod(total_minutes, 60)
|
||||
|
||||
|
@@ -7,8 +7,8 @@ import sys
|
||||
|
||||
|
||||
def get_locale(name):
|
||||
'''Returns an appropriate :class:`Locale <locale.Locale>` corresponding
|
||||
to an inpute locale name.
|
||||
'''Returns an appropriate :class:`Locale <arrow.locales.Locale>`
|
||||
corresponding to an inpute locale name.
|
||||
|
||||
:param name: the name of the locale.
|
||||
|
||||
@@ -186,7 +186,7 @@ class Locale(object):
|
||||
|
||||
class EnglishLocale(Locale):
|
||||
|
||||
names = ['en', 'en_us', 'en_gb', 'en_au', 'en_be', 'en_jp', 'en_za']
|
||||
names = ['en', 'en_us', 'en_gb', 'en_au', 'en_be', 'en_jp', 'en_za', 'en_ca']
|
||||
|
||||
past = '{0} ago'
|
||||
future = 'in {0}'
|
||||
@@ -263,10 +263,10 @@ class ItalianLocale(Locale):
|
||||
day_names = ['', 'lunedì', 'martedì', 'mercoledì', 'giovedì', 'venerdì', 'sabato', 'domenica']
|
||||
day_abbreviations = ['', 'lun', 'mar', 'mer', 'gio', 'ven', 'sab', 'dom']
|
||||
|
||||
ordinal_day_re = r'((?P<value>[1-3]?[0-9](?=°))°)'
|
||||
ordinal_day_re = r'((?P<value>[1-3]?[0-9](?=[ºª]))[ºª])'
|
||||
|
||||
def _ordinal_number(self, n):
|
||||
return '{0}°'.format(n)
|
||||
return '{0}º'.format(n)
|
||||
|
||||
|
||||
class SpanishLocale(Locale):
|
||||
@@ -297,10 +297,10 @@ class SpanishLocale(Locale):
|
||||
day_names = ['', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado', 'domingo']
|
||||
day_abbreviations = ['', 'lun', 'mar', 'mie', 'jue', 'vie', 'sab', 'dom']
|
||||
|
||||
ordinal_day_re = r'((?P<value>[1-3]?[0-9](?=°))°)'
|
||||
ordinal_day_re = r'((?P<value>[1-3]?[0-9](?=[ºª]))[ºª])'
|
||||
|
||||
def _ordinal_number(self, n):
|
||||
return '{0}°'.format(n)
|
||||
return '{0}º'.format(n)
|
||||
|
||||
|
||||
class FrenchLocale(Locale):
|
||||
@@ -379,7 +379,7 @@ class JapaneseLocale(Locale):
|
||||
|
||||
timeframes = {
|
||||
'now': '現在',
|
||||
'seconds': '秒',
|
||||
'seconds': '数秒',
|
||||
'minute': '1分',
|
||||
'minutes': '{0}分',
|
||||
'hour': '1時間',
|
||||
@@ -559,8 +559,8 @@ class KoreanLocale(Locale):
|
||||
|
||||
timeframes = {
|
||||
'now': '지금',
|
||||
'seconds': '몇초',
|
||||
'minute': '일 분',
|
||||
'seconds': '몇 초',
|
||||
'minute': '1분',
|
||||
'minutes': '{0}분',
|
||||
'hour': '1시간',
|
||||
'hours': '{0}시간',
|
||||
@@ -919,7 +919,7 @@ class NewNorwegianLocale(Locale):
|
||||
|
||||
class PortugueseLocale(Locale):
|
||||
names = ['pt', 'pt_pt']
|
||||
|
||||
|
||||
past = 'há {0}'
|
||||
future = 'em {0}'
|
||||
|
||||
@@ -946,11 +946,11 @@ class PortugueseLocale(Locale):
|
||||
day_names = ['', 'segunda-feira', 'terça-feira', 'quarta-feira', 'quinta-feira', 'sexta-feira',
|
||||
'sábado', 'domingo']
|
||||
day_abbreviations = ['', 'seg', 'ter', 'qua', 'qui', 'sex', 'sab', 'dom']
|
||||
|
||||
|
||||
|
||||
|
||||
class BrazilianPortugueseLocale(PortugueseLocale):
|
||||
names = ['pt_br']
|
||||
|
||||
|
||||
past = 'fazem {0}'
|
||||
|
||||
|
||||
@@ -1034,7 +1034,7 @@ class TurkishLocale(Locale):
|
||||
'days': '{0} gün',
|
||||
'month': 'bir ay',
|
||||
'months': '{0} ay',
|
||||
'year': 'a yıl',
|
||||
'year': 'yıl',
|
||||
'years': '{0} yıl',
|
||||
}
|
||||
|
||||
@@ -1047,6 +1047,37 @@ class TurkishLocale(Locale):
|
||||
day_abbreviations = ['', 'Pzt', 'Sal', 'Çar', 'Per', 'Cum', 'Cmt', 'Paz']
|
||||
|
||||
|
||||
class AzerbaijaniLocale(Locale):
|
||||
|
||||
names = ['az', 'az_az']
|
||||
|
||||
past = '{0} əvvəl'
|
||||
future = '{0} sonra'
|
||||
|
||||
timeframes = {
|
||||
'now': 'indi',
|
||||
'seconds': 'saniyə',
|
||||
'minute': 'bir dəqiqə',
|
||||
'minutes': '{0} dəqiqə',
|
||||
'hour': 'bir saat',
|
||||
'hours': '{0} saat',
|
||||
'day': 'bir gün',
|
||||
'days': '{0} gün',
|
||||
'month': 'bir ay',
|
||||
'months': '{0} ay',
|
||||
'year': 'il',
|
||||
'years': '{0} il',
|
||||
}
|
||||
|
||||
month_names = ['', 'Yanvar', 'Fevral', 'Mart', 'Aprel', 'May', 'İyun', 'İyul',
|
||||
'Avqust', 'Sentyabr', 'Oktyabr', 'Noyabr', 'Dekabr']
|
||||
month_abbreviations = ['', 'Yan', 'Fev', 'Mar', 'Apr', 'May', 'İyn', 'İyl', 'Avq',
|
||||
'Sen', 'Okt', 'Noy', 'Dek']
|
||||
|
||||
day_names = ['', 'Bazar ertəsi', 'Çərşənbə axşamı', 'Çərşənbə', 'Cümə axşamı', 'Cümə', 'Şənbə', 'Bazar']
|
||||
day_abbreviations = ['', 'Ber', 'Çax', 'Çər', 'Cax', 'Cüm', 'Şnb', 'Bzr']
|
||||
|
||||
|
||||
class ArabicLocale(Locale):
|
||||
|
||||
names = ['ar', 'ar_eg']
|
||||
@@ -1205,11 +1236,11 @@ class HindiLocale(Locale):
|
||||
future = '{0} बाद'
|
||||
|
||||
timeframes = {
|
||||
'now': 'अभि',
|
||||
'now': 'अभी',
|
||||
'seconds': 'सेकंड्',
|
||||
'minute': 'एक मिनट ',
|
||||
'minutes': '{0} मिनट ',
|
||||
'hour': 'एक घंट',
|
||||
'hour': 'एक घंटा',
|
||||
'hours': '{0} घंटे',
|
||||
'day': 'एक दिन',
|
||||
'days': '{0} दिन',
|
||||
@@ -1226,8 +1257,8 @@ class HindiLocale(Locale):
|
||||
'PM': 'शाम',
|
||||
}
|
||||
|
||||
month_names = ['', 'जनवरी', 'फ़रवरी', 'मार्च', 'अप्रैल ', 'मई', 'जून', 'जुलाई',
|
||||
'आगस्त', 'सितम्बर', 'अकतूबर', 'नवेम्बर', 'दिसम्बर']
|
||||
month_names = ['', 'जनवरी', 'फरवरी', 'मार्च', 'अप्रैल ', 'मई', 'जून', 'जुलाई',
|
||||
'अगस्त', 'सितंबर', 'अक्टूबर', 'नवंबर', 'दिसंबर']
|
||||
month_abbreviations = ['', 'जन', 'फ़र', 'मार्च', 'अप्रै', 'मई', 'जून', 'जुलाई', 'आग',
|
||||
'सित', 'अकत', 'नवे', 'दिस']
|
||||
|
||||
@@ -1284,7 +1315,8 @@ class CzechLocale(Locale):
|
||||
|
||||
|
||||
def _format_timeframe(self, timeframe, delta):
|
||||
'''Czech aware time frame format function, takes into account the differences between past and future forms.'''
|
||||
'''Czech aware time frame format function, takes into account
|
||||
the differences between past and future forms.'''
|
||||
form = self.timeframes[timeframe]
|
||||
if isinstance(form, dict):
|
||||
if delta == 0:
|
||||
@@ -1293,7 +1325,7 @@ class CzechLocale(Locale):
|
||||
form = form['future']
|
||||
else:
|
||||
form = form['past']
|
||||
delta = abs(delta)
|
||||
delta = abs(delta)
|
||||
|
||||
if isinstance(form, list):
|
||||
if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20):
|
||||
@@ -1303,6 +1335,78 @@ class CzechLocale(Locale):
|
||||
|
||||
return form.format(delta)
|
||||
|
||||
|
||||
class SlovakLocale(Locale):
|
||||
names = ['sk', 'sk_sk']
|
||||
|
||||
timeframes = {
|
||||
'now': 'Teraz',
|
||||
'seconds': {
|
||||
'past': 'pár sekundami',
|
||||
'future': ['{0} sekundy', '{0} sekúnd']
|
||||
},
|
||||
'minute': {'past': 'minútou', 'future': 'minútu', 'zero': '{0} minút'},
|
||||
'minutes': {
|
||||
'past': '{0} minútami',
|
||||
'future': ['{0} minúty', '{0} minút']
|
||||
},
|
||||
'hour': {'past': 'hodinou', 'future': 'hodinu', 'zero': '{0} hodín'},
|
||||
'hours': {
|
||||
'past': '{0} hodinami',
|
||||
'future': ['{0} hodiny', '{0} hodín']
|
||||
},
|
||||
'day': {'past': 'dňom', 'future': 'deň', 'zero': '{0} dní'},
|
||||
'days': {
|
||||
'past': '{0} dňami',
|
||||
'future': ['{0} dni', '{0} dní']
|
||||
},
|
||||
'month': {'past': 'mesiacom', 'future': 'mesiac', 'zero': '{0} mesiacov'},
|
||||
'months': {
|
||||
'past': '{0} mesiacmi',
|
||||
'future': ['{0} mesiace', '{0} mesiacov']
|
||||
},
|
||||
'year': {'past': 'rokom', 'future': 'rok', 'zero': '{0} rokov'},
|
||||
'years': {
|
||||
'past': '{0} rokmi',
|
||||
'future': ['{0} roky', '{0} rokov']
|
||||
}
|
||||
}
|
||||
|
||||
past = 'Pred {0}'
|
||||
future = 'O {0}'
|
||||
|
||||
month_names = ['', 'január', 'február', 'marec', 'apríl', 'máj', 'jún',
|
||||
'júl', 'august', 'september', 'október', 'november', 'december']
|
||||
month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'máj', 'jún', 'júl',
|
||||
'aug', 'sep', 'okt', 'nov', 'dec']
|
||||
|
||||
day_names = ['', 'pondelok', 'utorok', 'streda', 'štvrtok', 'piatok',
|
||||
'sobota', 'nedeľa']
|
||||
day_abbreviations = ['', 'po', 'ut', 'st', 'št', 'pi', 'so', 'ne']
|
||||
|
||||
|
||||
def _format_timeframe(self, timeframe, delta):
|
||||
'''Slovak aware time frame format function, takes into account
|
||||
the differences between past and future forms.'''
|
||||
form = self.timeframes[timeframe]
|
||||
if isinstance(form, dict):
|
||||
if delta == 0:
|
||||
form = form['zero'] # And *never* use 0 in the singular!
|
||||
elif delta > 0:
|
||||
form = form['future']
|
||||
else:
|
||||
form = form['past']
|
||||
delta = abs(delta)
|
||||
|
||||
if isinstance(form, list):
|
||||
if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20):
|
||||
form = form[0]
|
||||
else:
|
||||
form = form[1]
|
||||
|
||||
return form.format(delta)
|
||||
|
||||
|
||||
class FarsiLocale(Locale):
|
||||
|
||||
names = ['fa', 'fa_ir']
|
||||
@@ -1463,7 +1567,7 @@ class MarathiLocale(Locale):
|
||||
|
||||
day_names = ['', 'सोमवार', 'मंगळवार', 'बुधवार', 'गुरुवार', 'शुक्रवार', 'शनिवार', 'रविवार']
|
||||
day_abbreviations = ['', 'सोम', 'मंगळ', 'बुध', 'गुरु', 'शुक्र', 'शनि', 'रवि']
|
||||
|
||||
|
||||
def _map_locales():
|
||||
|
||||
locales = {}
|
||||
@@ -1471,14 +1575,14 @@ def _map_locales():
|
||||
for cls_name, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
|
||||
if issubclass(cls, Locale):
|
||||
for name in cls.names:
|
||||
locales[name.lower()] = cls
|
||||
locales[name.lower()] = cls
|
||||
|
||||
return locales
|
||||
|
||||
class CatalaLocale(Locale):
|
||||
names = ['ca', 'ca_ca']
|
||||
class CatalanLocale(Locale):
|
||||
names = ['ca', 'ca_es', 'ca_ad', 'ca_fr', 'ca_it']
|
||||
past = 'Fa {0}'
|
||||
future = '{0}' # I don't know what's the right phrase in catala for the future.
|
||||
future = 'En {0}'
|
||||
|
||||
timeframes = {
|
||||
'now': 'Ara mateix',
|
||||
@@ -1490,15 +1594,15 @@ class CatalaLocale(Locale):
|
||||
'day': 'un dia',
|
||||
'days': '{0} dies',
|
||||
'month': 'un mes',
|
||||
'months': '{0} messos',
|
||||
'months': '{0} mesos',
|
||||
'year': 'un any',
|
||||
'years': '{0} anys',
|
||||
}
|
||||
|
||||
month_names = ['', 'Jener', 'Febrer', 'Març', 'Abril', 'Maig', 'Juny', 'Juliol', 'Agost', 'Setembre', 'Octubre', 'Novembre', 'Decembre']
|
||||
month_abbreviations = ['', 'Jener', 'Febrer', 'Març', 'Abril', 'Maig', 'Juny', 'Juliol', 'Agost', 'Setembre', 'Octubre', 'Novembre', 'Decembre']
|
||||
day_names = ['', 'Dilluns', 'Dimars', 'Dimecres', 'Dijous', 'Divendres', 'Disabte', 'Diumenge']
|
||||
day_abbreviations = ['', 'Dilluns', 'Dimars', 'Dimecres', 'Dijous', 'Divendres', 'Disabte', 'Diumenge']
|
||||
month_names = ['', 'Gener', 'Febrer', 'Març', 'Abril', 'Maig', 'Juny', 'Juliol', 'Agost', 'Setembre', 'Octubre', 'Novembre', 'Desembre']
|
||||
month_abbreviations = ['', 'Gener', 'Febrer', 'Març', 'Abril', 'Maig', 'Juny', 'Juliol', 'Agost', 'Setembre', 'Octubre', 'Novembre', 'Desembre']
|
||||
day_names = ['', 'Dilluns', 'Dimarts', 'Dimecres', 'Dijous', 'Divendres', 'Dissabte', 'Diumenge']
|
||||
day_abbreviations = ['', 'Dilluns', 'Dimarts', 'Dimecres', 'Dijous', 'Divendres', 'Dissabte', 'Diumenge']
|
||||
|
||||
class BasqueLocale(Locale):
|
||||
names = ['eu', 'eu_eu']
|
||||
@@ -1587,6 +1691,50 @@ class HungarianLocale(Locale):
|
||||
return form.format(abs(delta))
|
||||
|
||||
|
||||
class EsperantoLocale(Locale):
|
||||
names = ['eo', 'eo_xx']
|
||||
past = 'antaŭ {0}'
|
||||
future = 'post {0}'
|
||||
|
||||
timeframes = {
|
||||
'now': 'nun',
|
||||
'seconds': 'kelkaj sekundoj',
|
||||
'minute': 'unu minuto',
|
||||
'minutes': '{0} minutoj',
|
||||
'hour': 'un horo',
|
||||
'hours': '{0} horoj',
|
||||
'day': 'unu tago',
|
||||
'days': '{0} tagoj',
|
||||
'month': 'unu monato',
|
||||
'months': '{0} monatoj',
|
||||
'year': 'unu jaro',
|
||||
'years': '{0} jaroj',
|
||||
}
|
||||
|
||||
month_names = ['', 'januaro', 'februaro', 'marto', 'aprilo', 'majo',
|
||||
'junio', 'julio', 'aŭgusto', 'septembro', 'oktobro',
|
||||
'novembro', 'decembro']
|
||||
month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'maj', 'jun',
|
||||
'jul', 'aŭg', 'sep', 'okt', 'nov', 'dec']
|
||||
|
||||
day_names = ['', 'lundo', 'mardo', 'merkredo', 'ĵaŭdo', 'vendredo',
|
||||
'sabato', 'dimanĉo']
|
||||
day_abbreviations = ['', 'lun', 'mar', 'mer', 'ĵaŭ', 'ven',
|
||||
'sab', 'dim']
|
||||
|
||||
meridians = {
|
||||
'am': 'atm',
|
||||
'pm': 'ptm',
|
||||
'AM': 'ATM',
|
||||
'PM': 'PTM',
|
||||
}
|
||||
|
||||
ordinal_day_re = r'((?P<value>[1-3]?[0-9](?=a))a)'
|
||||
|
||||
def _ordinal_number(self, n):
|
||||
return '{0}a'.format(n)
|
||||
|
||||
|
||||
class ThaiLocale(Locale):
|
||||
|
||||
names = ['th', 'th_th']
|
||||
@@ -1700,4 +1848,164 @@ class BengaliLocale(Locale):
|
||||
return '{0}ষ্ঠ'.format(n)
|
||||
|
||||
|
||||
class RomanshLocale(Locale):
|
||||
|
||||
names = ['rm', 'rm_ch']
|
||||
|
||||
past = 'avant {0}'
|
||||
future = 'en {0}'
|
||||
|
||||
timeframes = {
|
||||
'now': 'en quest mument',
|
||||
'seconds': 'secundas',
|
||||
'minute': 'ina minuta',
|
||||
'minutes': '{0} minutas',
|
||||
'hour': 'in\'ura',
|
||||
'hours': '{0} ura',
|
||||
'day': 'in di',
|
||||
'days': '{0} dis',
|
||||
'month': 'in mais',
|
||||
'months': '{0} mais',
|
||||
'year': 'in onn',
|
||||
'years': '{0} onns',
|
||||
}
|
||||
|
||||
month_names = [
|
||||
'', 'schaner', 'favrer', 'mars', 'avrigl', 'matg', 'zercladur',
|
||||
'fanadur', 'avust', 'settember', 'october', 'november', 'december'
|
||||
]
|
||||
|
||||
month_abbreviations = [
|
||||
'', 'schan', 'fav', 'mars', 'avr', 'matg', 'zer', 'fan', 'avu',
|
||||
'set', 'oct', 'nov', 'dec'
|
||||
]
|
||||
|
||||
day_names = [
|
||||
'', 'glindesdi', 'mardi', 'mesemna', 'gievgia', 'venderdi',
|
||||
'sonda', 'dumengia'
|
||||
]
|
||||
|
||||
day_abbreviations = [
|
||||
'', 'gli', 'ma', 'me', 'gie', 've', 'so', 'du'
|
||||
]
|
||||
|
||||
|
||||
class SwissLocale(Locale):
|
||||
|
||||
names = ['de', 'de_ch']
|
||||
|
||||
past = 'vor {0}'
|
||||
future = 'in {0}'
|
||||
|
||||
timeframes = {
|
||||
'now': 'gerade eben',
|
||||
'seconds': 'Sekunden',
|
||||
'minute': 'einer Minute',
|
||||
'minutes': '{0} Minuten',
|
||||
'hour': 'einer Stunde',
|
||||
'hours': '{0} Stunden',
|
||||
'day': 'einem Tag',
|
||||
'days': '{0} Tage',
|
||||
'month': 'einem Monat',
|
||||
'months': '{0} Monaten',
|
||||
'year': 'einem Jahr',
|
||||
'years': '{0} Jahren',
|
||||
}
|
||||
|
||||
month_names = [
|
||||
'', 'Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli',
|
||||
'August', 'September', 'Oktober', 'November', 'Dezember'
|
||||
]
|
||||
|
||||
month_abbreviations = [
|
||||
'', 'Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep',
|
||||
'Okt', 'Nov', 'Dez'
|
||||
]
|
||||
|
||||
day_names = [
|
||||
'', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag',
|
||||
'Samstag', 'Sonntag'
|
||||
]
|
||||
|
||||
day_abbreviations = [
|
||||
'', 'Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So'
|
||||
]
|
||||
|
||||
|
||||
class RomanianLocale(Locale):
|
||||
names = ['ro', 'ro_ro']
|
||||
|
||||
past = '{0} în urmă'
|
||||
future = 'peste {0}'
|
||||
|
||||
timeframes = {
|
||||
'now': 'acum',
|
||||
'seconds': 'câteva secunde',
|
||||
'minute': 'un minut',
|
||||
'minutes': '{0} minute',
|
||||
'hour': 'o oră',
|
||||
'hours': '{0} ore',
|
||||
'day': 'o zi',
|
||||
'days': '{0} zile',
|
||||
'month': 'o lună',
|
||||
'months': '{0} luni',
|
||||
'year': 'un an',
|
||||
'years': '{0} ani',
|
||||
}
|
||||
|
||||
month_names = ['', 'ianuarie', 'februarie', 'martie', 'aprilie', 'mai', 'iunie', 'iulie',
|
||||
'august', 'septembrie', 'octombrie', 'noiembrie', 'decembrie']
|
||||
month_abbreviations = ['', 'ian', 'febr', 'mart', 'apr', 'mai', 'iun', 'iul', 'aug', 'sept', 'oct', 'nov', 'dec']
|
||||
|
||||
day_names = ['', 'luni', 'marți', 'miercuri', 'joi', 'vineri', 'sâmbătă', 'duminică']
|
||||
day_abbreviations = ['', 'Lun', 'Mar', 'Mie', 'Joi', 'Vin', 'Sâm', 'Dum']
|
||||
|
||||
|
||||
class SlovenianLocale(Locale):
|
||||
names = ['sl', 'sl_si']
|
||||
|
||||
past = 'pred {0}'
|
||||
future = 'čez {0}'
|
||||
|
||||
timeframes = {
|
||||
'now': 'zdaj',
|
||||
'seconds': 'sekund',
|
||||
'minute': 'minuta',
|
||||
'minutes': '{0} minutami',
|
||||
'hour': 'uro',
|
||||
'hours': '{0} ur',
|
||||
'day': 'dan',
|
||||
'days': '{0} dni',
|
||||
'month': 'mesec',
|
||||
'months': '{0} mesecev',
|
||||
'year': 'leto',
|
||||
'years': '{0} let',
|
||||
}
|
||||
|
||||
meridians = {
|
||||
'am': '',
|
||||
'pm': '',
|
||||
'AM': '',
|
||||
'PM': '',
|
||||
}
|
||||
|
||||
month_names = [
|
||||
'', 'Januar', 'Februar', 'Marec', 'April', 'Maj', 'Junij', 'Julij',
|
||||
'Avgust', 'September', 'Oktober', 'November', 'December'
|
||||
]
|
||||
|
||||
month_abbreviations = [
|
||||
'', 'Jan', 'Feb', 'Mar', 'Apr', 'Maj', 'Jun', 'Jul', 'Avg',
|
||||
'Sep', 'Okt', 'Nov', 'Dec'
|
||||
]
|
||||
|
||||
day_names = [
|
||||
'', 'Ponedeljek', 'Torek', 'Sreda', 'Četrtek', 'Petek', 'Sobota', 'Nedelja'
|
||||
]
|
||||
|
||||
day_abbreviations = [
|
||||
'', 'Pon', 'Tor', 'Sre', 'Čet', 'Pet', 'Sob', 'Ned'
|
||||
]
|
||||
|
||||
|
||||
_locales = _map_locales()
|
||||
|
@@ -5,7 +5,6 @@ from __future__ import unicode_literals
|
||||
from datetime import datetime
|
||||
from dateutil import tz
|
||||
import re
|
||||
|
||||
from arrow import locales
|
||||
|
||||
|
||||
@@ -15,16 +14,14 @@ class ParserError(RuntimeError):
|
||||
|
||||
class DateTimeParser(object):
|
||||
|
||||
_FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X)')
|
||||
_FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|X)')
|
||||
_ESCAPE_RE = re.compile('\[[^\[\]]*\]')
|
||||
|
||||
_ONE_THROUGH_SIX_DIGIT_RE = re.compile('\d{1,6}')
|
||||
_ONE_THROUGH_FIVE_DIGIT_RE = re.compile('\d{1,5}')
|
||||
_ONE_THROUGH_FOUR_DIGIT_RE = re.compile('\d{1,4}')
|
||||
_ONE_TWO_OR_THREE_DIGIT_RE = re.compile('\d{1,3}')
|
||||
_ONE_OR_MORE_DIGIT_RE = re.compile('\d+')
|
||||
_ONE_OR_TWO_DIGIT_RE = re.compile('\d{1,2}')
|
||||
_FOUR_DIGIT_RE = re.compile('\d{4}')
|
||||
_TWO_DIGIT_RE = re.compile('\d{2}')
|
||||
_TZ_RE = re.compile('[+\-]?\d{2}:?\d{2}')
|
||||
_TZ_RE = re.compile('[+\-]?\d{2}:?(\d{2})?')
|
||||
_TZ_NAME_RE = re.compile('\w[\w+\-/]+')
|
||||
|
||||
|
||||
@@ -47,12 +44,7 @@ class DateTimeParser(object):
|
||||
'ZZZ': _TZ_NAME_RE,
|
||||
'ZZ': _TZ_RE,
|
||||
'Z': _TZ_RE,
|
||||
'SSSSSS': _ONE_THROUGH_SIX_DIGIT_RE,
|
||||
'SSSSS': _ONE_THROUGH_FIVE_DIGIT_RE,
|
||||
'SSSS': _ONE_THROUGH_FOUR_DIGIT_RE,
|
||||
'SSS': _ONE_TWO_OR_THREE_DIGIT_RE,
|
||||
'SS': _ONE_OR_TWO_DIGIT_RE,
|
||||
'S': re.compile('\d'),
|
||||
'S': _ONE_OR_MORE_DIGIT_RE,
|
||||
}
|
||||
|
||||
MARKERS = ['YYYY', 'MM', 'DD']
|
||||
@@ -67,6 +59,10 @@ class DateTimeParser(object):
|
||||
'MMM': self._choice_re(self.locale.month_abbreviations[1:],
|
||||
re.IGNORECASE),
|
||||
'Do': re.compile(self.locale.ordinal_day_re),
|
||||
'dddd': self._choice_re(self.locale.day_names[1:], re.IGNORECASE),
|
||||
'ddd': self._choice_re(self.locale.day_abbreviations[1:],
|
||||
re.IGNORECASE),
|
||||
'd' : re.compile("[1-7]"),
|
||||
'a': self._choice_re(
|
||||
(self.locale.meridians['am'], self.locale.meridians['pm'])
|
||||
),
|
||||
@@ -88,11 +84,10 @@ class DateTimeParser(object):
|
||||
time_parts = re.split('[+-]', time_string, 1)
|
||||
has_tz = len(time_parts) > 1
|
||||
has_seconds = time_parts[0].count(':') > 1
|
||||
has_subseconds = '.' in time_parts[0]
|
||||
has_subseconds = re.search('[.,]', time_parts[0])
|
||||
|
||||
if has_subseconds:
|
||||
subseconds_token = 'S' * min(len(re.split('\D+', time_parts[0].split('.')[1], 1)[0]), 6)
|
||||
formats = ['YYYY-MM-DDTHH:mm:ss.%s' % subseconds_token]
|
||||
formats = ['YYYY-MM-DDTHH:mm:ss%sS' % has_subseconds.group()]
|
||||
elif has_seconds:
|
||||
formats = ['YYYY-MM-DDTHH:mm:ss']
|
||||
else:
|
||||
@@ -123,10 +118,18 @@ class DateTimeParser(object):
|
||||
# we construct a new string by replacing each
|
||||
# token by its pattern:
|
||||
# 'YYYY-MM-DD' -> '(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>\d{2})'
|
||||
fmt_pattern = fmt
|
||||
tokens = []
|
||||
offset = 0
|
||||
for m in self._FORMAT_RE.finditer(fmt):
|
||||
|
||||
# Extract the bracketed expressions to be reinserted later.
|
||||
escaped_fmt = re.sub(self._ESCAPE_RE, "#" , fmt)
|
||||
# Any number of S is the same as one.
|
||||
escaped_fmt = re.sub('S+', 'S', escaped_fmt)
|
||||
escaped_data = re.findall(self._ESCAPE_RE, fmt)
|
||||
|
||||
fmt_pattern = escaped_fmt
|
||||
|
||||
for m in self._FORMAT_RE.finditer(escaped_fmt):
|
||||
token = m.group(0)
|
||||
try:
|
||||
input_re = self._input_re_map[token]
|
||||
@@ -140,9 +143,20 @@ class DateTimeParser(object):
|
||||
# are returned in the order found by finditer.
|
||||
fmt_pattern = fmt_pattern[:m.start() + offset] + input_pattern + fmt_pattern[m.end() + offset:]
|
||||
offset += len(input_pattern) - (m.end() - m.start())
|
||||
match = re.search(fmt_pattern, string, flags=re.IGNORECASE)
|
||||
|
||||
final_fmt_pattern = ""
|
||||
a = fmt_pattern.split("#")
|
||||
b = escaped_data
|
||||
|
||||
# Due to the way Python splits, 'a' will always be longer
|
||||
for i in range(len(a)):
|
||||
final_fmt_pattern += a[i]
|
||||
if i < len(b):
|
||||
final_fmt_pattern += b[i][1:-1]
|
||||
|
||||
match = re.search(final_fmt_pattern, string, flags=re.IGNORECASE)
|
||||
if match is None:
|
||||
raise ParserError('Failed to match \'{0}\' when parsing \'{1}\''.format(fmt_pattern, string))
|
||||
raise ParserError('Failed to match \'{0}\' when parsing \'{1}\''.format(final_fmt_pattern, string))
|
||||
parts = {}
|
||||
for token in tokens:
|
||||
if token == 'Do':
|
||||
@@ -181,18 +195,22 @@ class DateTimeParser(object):
|
||||
elif token in ['ss', 's']:
|
||||
parts['second'] = int(value)
|
||||
|
||||
elif token == 'SSSSSS':
|
||||
parts['microsecond'] = int(value)
|
||||
elif token == 'SSSSS':
|
||||
parts['microsecond'] = int(value) * 10
|
||||
elif token == 'SSSS':
|
||||
parts['microsecond'] = int(value) * 100
|
||||
elif token == 'SSS':
|
||||
parts['microsecond'] = int(value) * 1000
|
||||
elif token == 'SS':
|
||||
parts['microsecond'] = int(value) * 10000
|
||||
elif token == 'S':
|
||||
parts['microsecond'] = int(value) * 100000
|
||||
# We have the *most significant* digits of an arbitrary-precision integer.
|
||||
# We want the six most significant digits as an integer, rounded.
|
||||
# FIXME: add nanosecond support somehow?
|
||||
value = value.ljust(7, str('0'))
|
||||
|
||||
# floating-point (IEEE-754) defaults to half-to-even rounding
|
||||
seventh_digit = int(value[6])
|
||||
if seventh_digit == 5:
|
||||
rounding = int(value[5]) % 2
|
||||
elif seventh_digit > 5:
|
||||
rounding = 1
|
||||
else:
|
||||
rounding = 0
|
||||
|
||||
parts['microsecond'] = int(value[:6]) + rounding
|
||||
|
||||
elif token == 'X':
|
||||
parts['timestamp'] = int(value)
|
||||
@@ -242,7 +260,7 @@ class DateTimeParser(object):
|
||||
try:
|
||||
_datetime = self.parse(string, fmt)
|
||||
break
|
||||
except:
|
||||
except ParserError:
|
||||
pass
|
||||
|
||||
if _datetime is None:
|
||||
@@ -273,7 +291,7 @@ class DateTimeParser(object):
|
||||
|
||||
class TzinfoParser(object):
|
||||
|
||||
_TZINFO_RE = re.compile('([+\-])?(\d\d):?(\d\d)')
|
||||
_TZINFO_RE = re.compile('([+\-])?(\d\d):?(\d\d)?')
|
||||
|
||||
@classmethod
|
||||
def parse(cls, string):
|
||||
@@ -292,6 +310,8 @@ class TzinfoParser(object):
|
||||
|
||||
if iso_match:
|
||||
sign, hours, minutes = iso_match.groups()
|
||||
if minutes is None:
|
||||
minutes = 0
|
||||
seconds = int(hours) * 3600 + int(minutes) * 60
|
||||
|
||||
if sign == '-':
|
||||
@@ -303,6 +323,6 @@ class TzinfoParser(object):
|
||||
tzinfo = tz.gettz(string)
|
||||
|
||||
if tzinfo is None:
|
||||
raise ParserError('Could not parse timezone expression "{0}"', string)
|
||||
raise ParserError('Could not parse timezone expression "{0}"'.format(string))
|
||||
|
||||
return tzinfo
|
||||
|
@@ -22,6 +22,8 @@ else: # pragma: no cover
|
||||
total_seconds = _total_seconds_27
|
||||
|
||||
def is_timestamp(value):
|
||||
if type(value) == bool:
|
||||
return False
|
||||
try:
|
||||
float(value)
|
||||
return True
|
||||
|
302
lib/cloudinary/__init__.py
Normal file
@@ -0,0 +1,302 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("Cloudinary")
|
||||
ch = logging.StreamHandler()
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from six import python_2_unicode_compatible
|
||||
|
||||
from cloudinary import utils
|
||||
from cloudinary.compat import urlparse, parse_qs
|
||||
from cloudinary.search import Search
|
||||
|
||||
CF_SHARED_CDN = "d3jpl91pxevbkh.cloudfront.net"
|
||||
OLD_AKAMAI_SHARED_CDN = "cloudinary-a.akamaihd.net"
|
||||
AKAMAI_SHARED_CDN = "res.cloudinary.com"
|
||||
SHARED_CDN = AKAMAI_SHARED_CDN
|
||||
CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"
|
||||
|
||||
VERSION = "1.11.0"
|
||||
USER_AGENT = "CloudinaryPython/" + VERSION
|
||||
""" :const: USER_AGENT """
|
||||
|
||||
USER_PLATFORM = ""
|
||||
"""
|
||||
Additional information to be passed with the USER_AGENT, e.g. "CloudinaryMagento/1.0.1".
|
||||
This value is set in platform-specific implementations that use cloudinary_php.
|
||||
|
||||
The format of the value should be <ProductName>/Version[ (comment)].
|
||||
@see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.43
|
||||
|
||||
**Do not set this value in application code!**
|
||||
"""
|
||||
|
||||
|
||||
def get_user_agent():
|
||||
"""Provides the `USER_AGENT` string that is passed to the Cloudinary servers.
|
||||
Prepends `USER_PLATFORM` if it is defined.
|
||||
|
||||
:returns: the user agent
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
if USER_PLATFORM == "":
|
||||
return USER_AGENT
|
||||
else:
|
||||
return USER_PLATFORM + " " + USER_AGENT
|
||||
|
||||
|
||||
def import_django_settings():
|
||||
try:
|
||||
import django.conf
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
try:
|
||||
if 'CLOUDINARY' in dir(django.conf.settings):
|
||||
return django.conf.settings.CLOUDINARY
|
||||
else:
|
||||
return None
|
||||
except ImproperlyConfigured:
|
||||
return None
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
|
||||
class Config(object):
|
||||
def __init__(self):
|
||||
django_settings = import_django_settings()
|
||||
if django_settings:
|
||||
self.update(**django_settings)
|
||||
elif os.environ.get("CLOUDINARY_CLOUD_NAME"):
|
||||
self.update(
|
||||
cloud_name=os.environ.get("CLOUDINARY_CLOUD_NAME"),
|
||||
api_key=os.environ.get("CLOUDINARY_API_KEY"),
|
||||
api_secret=os.environ.get("CLOUDINARY_API_SECRET"),
|
||||
secure_distribution=os.environ.get("CLOUDINARY_SECURE_DISTRIBUTION"),
|
||||
private_cdn=os.environ.get("CLOUDINARY_PRIVATE_CDN") == 'true'
|
||||
)
|
||||
elif os.environ.get("CLOUDINARY_URL"):
|
||||
cloudinary_url = os.environ.get("CLOUDINARY_URL")
|
||||
self._parse_cloudinary_url(cloudinary_url)
|
||||
|
||||
def _parse_cloudinary_url(self, cloudinary_url):
|
||||
uri = urlparse(cloudinary_url.replace("cloudinary://", "http://"))
|
||||
for k, v in parse_qs(uri.query).items():
|
||||
if self._is_nested_key(k):
|
||||
self._put_nested_key(k, v)
|
||||
else:
|
||||
self.__dict__[k] = v[0]
|
||||
self.update(
|
||||
cloud_name=uri.hostname,
|
||||
api_key=uri.username,
|
||||
api_secret=uri.password,
|
||||
private_cdn=uri.path != ''
|
||||
)
|
||||
if uri.path != '':
|
||||
self.update(secure_distribution=uri.path[1:])
|
||||
|
||||
def __getattr__(self, i):
|
||||
if i in self.__dict__:
|
||||
return self.__dict__[i]
|
||||
else:
|
||||
return None
|
||||
|
||||
def update(self, **keywords):
|
||||
for k, v in keywords.items():
|
||||
self.__dict__[k] = v
|
||||
|
||||
def _is_nested_key(self, key):
|
||||
return re.match(r'\w+\[\w+\]', key)
|
||||
|
||||
def _put_nested_key(self, key, value):
|
||||
chain = re.split(r'[\[\]]+', key)
|
||||
chain = [key for key in chain if key]
|
||||
outer = self.__dict__
|
||||
last_key = chain.pop()
|
||||
for inner_key in chain:
|
||||
if inner_key in outer:
|
||||
inner = outer[inner_key]
|
||||
else:
|
||||
inner = dict()
|
||||
outer[inner_key] = inner
|
||||
outer = inner
|
||||
if isinstance(value, list):
|
||||
value = value[0]
|
||||
outer[last_key] = value
|
||||
|
||||
_config = Config()
|
||||
|
||||
|
||||
def config(**keywords):
|
||||
global _config
|
||||
_config.update(**keywords)
|
||||
return _config
|
||||
|
||||
|
||||
def reset_config():
|
||||
global _config
|
||||
_config = Config()
|
||||
|
||||
|
||||
@python_2_unicode_compatible
|
||||
class CloudinaryResource(object):
|
||||
def __init__(self, public_id=None, format=None, version=None,
|
||||
signature=None, url_options=None, metadata=None, type=None, resource_type=None,
|
||||
default_resource_type=None):
|
||||
self.metadata = metadata
|
||||
metadata = metadata or {}
|
||||
self.public_id = public_id or metadata.get('public_id')
|
||||
self.format = format or metadata.get('format')
|
||||
self.version = version or metadata.get('version')
|
||||
self.signature = signature or metadata.get('signature')
|
||||
self.type = type or metadata.get('type') or "upload"
|
||||
self.resource_type = resource_type or metadata.get('resource_type') or default_resource_type
|
||||
self.url_options = url_options or {}
|
||||
|
||||
def __str__(self):
|
||||
return self.public_id
|
||||
|
||||
def __len__(self):
|
||||
return len(self.public_id) if self.public_id is not None else 0
|
||||
|
||||
def validate(self):
|
||||
return self.signature == self.get_expected_signature()
|
||||
|
||||
def get_prep_value(self):
|
||||
if None in [self.public_id,
|
||||
self.type,
|
||||
self.resource_type]:
|
||||
return None
|
||||
prep = ''
|
||||
prep = prep + self.resource_type + '/' + self.type + '/'
|
||||
if self.version: prep = prep + 'v' + str(self.version) + '/'
|
||||
prep = prep + self.public_id
|
||||
if self.format: prep = prep + '.' + self.format
|
||||
return prep
|
||||
|
||||
def get_presigned(self):
|
||||
return self.get_prep_value() + '#' + self.get_expected_signature()
|
||||
|
||||
def get_expected_signature(self):
|
||||
return utils.api_sign_request({"public_id": self.public_id, "version": self.version}, config().api_secret)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return self.build_url(**self.url_options)
|
||||
|
||||
def __build_url(self, **options):
|
||||
combined_options = dict(format=self.format, version=self.version, type=self.type,
|
||||
resource_type=self.resource_type or "image")
|
||||
combined_options.update(options)
|
||||
public_id = combined_options.get('public_id') or self.public_id
|
||||
return utils.cloudinary_url(public_id, **combined_options)
|
||||
|
||||
def build_url(self, **options):
    # Public wrapper around the private builder: return only the URL,
    # dropping the attrs dict.
    return self.__build_url(**options)[0]
|
||||
|
||||
def default_poster_options(self, options):
    """Ensure video poster images default to jpg (mutates `options` in place;
    an explicit caller-supplied format wins)."""
    options.setdefault("format", "jpg")
|
||||
|
||||
def default_source_types(self):
    # Source formats emitted for a <video> tag when the caller does not
    # specify source_types.
    return ['webm', 'mp4', 'ogv']
|
||||
|
||||
def image(self, **options):
    """Render an <img> tag for this resource.

    For video resources the poster defaults (jpg format) are applied first.
    With responsive/hidpi requested and client hints disabled, the real URL
    moves to data-src, a cld-* class is added for the JS helper, and src is
    replaced by the responsive placeholder (CL_BLANK for "blank").
    """
    if options.get("resource_type", self.resource_type) == "video":
        self.default_poster_options(options)
    src, attrs = self.__build_url(**options)
    # Per-call values override the global config defaults.
    client_hints = attrs.pop("client_hints", config().client_hints)
    responsive = attrs.pop("responsive", False)
    hidpi = attrs.pop("hidpi", False)
    if (responsive or hidpi) and not client_hints:
        attrs["data-src"] = src
        classes = "cld-responsive" if responsive else "cld-hidpi"
        if "class" in attrs: classes += " " + attrs["class"]
        attrs["class"] = classes
        src = attrs.pop("responsive_placeholder", config().responsive_placeholder)
        if src == "blank": src = CL_BLANK

    # src may be empty/None here (no placeholder configured); then the tag
    # is emitted without a src attribute.
    if src: attrs["src"] = src

    return u"<img {0}/>".format(utils.html_attrs(attrs))
|
||||
|
||||
def video_thumbnail(self, **options):
    # URL of the poster/thumbnail image for a video resource (jpg by default).
    self.default_poster_options(options)
    return self.build_url(**options)
|
||||
|
||||
def video(self, **options):
    """Create an HTML video tag for this resource.

    Options:
      * source_types - source type(s) the tag should include; defaults to
        webm, mp4 and ogv.
      * source_transformation - per-source-type transformation overrides.
      * poster - override the default thumbnail: either an ad hoc URL, or a
        dict of poster transformations (optionally with its own public_id).
      * fallback_content - raw HTML placed inside the tag for old browsers.

    Examples:
      CloudinaryVideo("mymovie.mp4").video()
      CloudinaryVideo("mymovie.mp4").video(source_types='webm')
      CloudinaryVideo("mymovie.ogv").video(poster="myspecialplaceholder.jpg")
      CloudinaryVideo("mymovie.webm").video(source_types=['webm', 'mp4'], poster={'effect': 'sepia'})
    """
    public_id = options.get('public_id', self.public_id)
    # Strip a known video extension from the id; per-format URLs are built
    # below. Fixed: the pattern is now a raw string — "\." in a plain string
    # is an invalid escape sequence on modern Python.
    source = re.sub(r"\.({0})$".format("|".join(self.default_source_types())), '', public_id)

    source_types = options.pop('source_types', [])
    source_transformation = options.pop('source_transformation', {})
    fallback = options.pop('fallback_content', '')
    options['resource_type'] = options.pop('resource_type', self.resource_type or 'video')

    if not source_types:
        source_types = self.default_source_types()
    video_options = options.copy()

    # Resolve the poster attribute: explicit URL, dict of transformations
    # (with or without its own public_id), or the default video thumbnail.
    if 'poster' in video_options:
        poster_options = video_options['poster']
        if isinstance(poster_options, dict):
            if 'public_id' in poster_options:
                video_options['poster'] = utils.cloudinary_url(poster_options['public_id'], **poster_options)[0]
            else:
                video_options['poster'] = self.video_thumbnail(public_id=source, **poster_options)
    else:
        video_options['poster'] = self.video_thumbnail(public_id=source, **options)

    if not video_options['poster']:
        del video_options['poster']

    # Multiple source types -> nested <source> tags; a single type -> the
    # URL goes straight on the <video src=...> attribute.
    nested_source_types = isinstance(source_types, list) and len(source_types) > 1
    if not nested_source_types:
        source = source + '.' + utils.build_array(source_types)[0]

    video_url = utils.cloudinary_url(source, **video_options)
    video_options = video_url[1]
    if not nested_source_types:
        video_options['src'] = video_url[0]
    # html_width/html_height become the tag's width/height attributes.
    if 'html_width' in video_options:
        video_options['width'] = video_options.pop('html_width')
    if 'html_height' in video_options:
        video_options['height'] = video_options.pop('html_height')

    sources = ""
    if nested_source_types:
        for source_type in source_types:
            transformation = options.copy()
            transformation.update(source_transformation.get(source_type, {}))
            src = utils.cloudinary_url(source, format=source_type, **transformation)[0]
            # Browsers expect "video/ogg", not "video/ogv".
            video_type = "ogg" if source_type == 'ogv' else source_type
            mime_type = "video/" + video_type
            sources += "<source {attributes}>".format(attributes=utils.html_attrs({'src': src, 'type': mime_type}))

    html = "<video {attributes}>{sources}{fallback}</video>".format(
        attributes=utils.html_attrs(video_options), sources=sources, fallback=fallback)
    return html
|
||||
|
||||
|
||||
class CloudinaryImage(CloudinaryResource):
    """CloudinaryResource preconfigured with resource_type "image"."""
    def __init__(self, public_id=None, **kwargs):
        super(CloudinaryImage, self).__init__(public_id=public_id, default_resource_type="image", **kwargs)
|
||||
|
||||
|
||||
class CloudinaryVideo(CloudinaryResource):
    """CloudinaryResource preconfigured with resource_type "video"."""
    def __init__(self, public_id=None, **kwargs):
        super(CloudinaryVideo, self).__init__(public_id=public_id, default_resource_type="video", **kwargs)
|
448
lib/cloudinary/api.py
Normal file
@@ -0,0 +1,448 @@
|
||||
# Copyright Cloudinary
|
||||
|
||||
import email.utils
|
||||
import json
|
||||
import socket
|
||||
|
||||
import cloudinary
|
||||
from six import string_types
|
||||
|
||||
import urllib3
|
||||
import certifi
|
||||
|
||||
from cloudinary import utils
|
||||
from urllib3.exceptions import HTTPError
|
||||
|
||||
logger = cloudinary.logger
|
||||
|
||||
class Error(Exception):
    """Base class for all Cloudinary Admin API errors."""

class NotFound(Error):
    """HTTP 404 - the requested entity does not exist."""

class NotAllowed(Error):
    """HTTP 403 - the operation is not permitted."""

class AlreadyExists(Error):
    """HTTP 409 - an entity with the same identifier already exists."""

class RateLimited(Error):
    """HTTP 420 - the account's API rate limit was exceeded."""

class BadRequest(Error):
    """HTTP 400 - the request was malformed."""

class GeneralError(Error):
    """HTTP 500 or a transport-level failure."""

class AuthorizationRequired(Error):
    """HTTP 401 - missing or invalid credentials."""


# HTTP status code -> exception class raised by _call_api on an error body.
EXCEPTION_CODES = {
    400: BadRequest,
    401: AuthorizationRequired,
    403: NotAllowed,
    404: NotFound,
    409: AlreadyExists,
    420: RateLimited,
    500: GeneralError
}
|
||||
|
||||
|
||||
class Response(dict):
    """Dict subclass wrapping an Admin API JSON result, with rate-limit
    metadata parsed from the HTTP response headers.

    Attributes:
        rate_limit_allowed: int or None - hourly call allowance.
        rate_limit_reset_at: time struct (email.utils.parsedate) or None.
        rate_limit_remaining: int or None - calls left in the window.
    """
    def __init__(self, result, response, **kwargs):
        super(Response, self).__init__(**kwargs)
        self.update(result)
        # The x-featureratelimit-* headers are not guaranteed on every
        # response; previously a missing header raised KeyError. Fall back
        # to None instead.
        headers = response.headers
        limit = headers.get("x-featureratelimit-limit")
        self.rate_limit_allowed = int(limit) if limit is not None else None
        reset_at = headers.get("x-featureratelimit-reset")
        self.rate_limit_reset_at = email.utils.parsedate(reset_at) if reset_at is not None else None
        remaining = headers.get("x-featureratelimit-remaining")
        self.rate_limit_remaining = int(remaining) if remaining is not None else None
|
||||
|
||||
# Module-level connection pool shared by all Admin API calls; TLS
# certificates are verified against the certifi CA bundle.
_http = urllib3.PoolManager(
    cert_reqs='CERT_REQUIRED',
    ca_certs=certifi.where()
)
|
||||
|
||||
|
||||
def ping(**options):
    # Liveness/credentials check against the Admin API.
    return call_api("get", ["ping"], {}, **options)


def usage(**options):
    # Account usage report (quota consumption per the Admin API).
    return call_api("get", ["usage"], {}, **options)


def resource_types(**options):
    # List the resource types present in the account.
    return call_api("get", ["resources"], {}, **options)


def resources(**options):
    # List resources, optionally scoped to resource_type (default "image")
    # and upload type; only whitelisted paging/filter options are forwarded
    # as query parameters.
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", None)
    uri = ["resources", resource_type]
    if upload_type: uri.append(upload_type)
    params = only(options,
                  "next_cursor", "max_results", "prefix", "tags", "context", "moderations", "direction", "start_at")
    return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_tag(tag, **options):
    # List resources carrying a given tag.
    resource_type = options.pop("resource_type", "image")
    uri = ["resources", resource_type, "tags", tag]
    params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction")
    return call_api("get", uri, params, **options)


def resources_by_moderation(kind, status, **options):
    # List resources by moderation kind (e.g. "manual") and status
    # (e.g. "pending", "approved").
    resource_type = options.pop("resource_type", "image")
    uri = ["resources", resource_type, "moderations", kind, status]
    params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction")
    return call_api("get", uri, params, **options)


def resources_by_ids(public_ids, **options):
    # Fetch details for an explicit list of public ids.
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    uri = ["resources", resource_type, upload_type]
    params = dict(only(options, "tags", "moderations", "context"), public_ids=public_ids)
    return call_api("get", uri, params, **options)


def resource(public_id, **options):
    # Fetch details of a single resource, with optional extra info flags
    # (exif, faces, colors, ...).
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    uri = ["resources", resource_type, upload_type, public_id]
    params = only(options, "exif", "faces", "colors", "image_metadata", "pages", "phash", "coordinates", "max_results")
    return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def update(public_id, **options):
    """Update attributes of an existing resource.

    Simple string options are whitelisted via only(); structured options
    (tags, coordinates, context, access_control) are encoded into the wire
    format expected by the Admin API before being sent.
    """
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    uri = ["resources", resource_type, upload_type, public_id]
    params = only(options, "moderation_status", "raw_convert",
                  "quality_override", "ocr",
                  "categorization", "detection", "similarity_search",
                  "background_removal", "notification_url")
    if "tags" in options:
        # Tags are sent as a single comma-separated string.
        params["tags"] = ",".join(utils.build_array(options["tags"]))
    if "face_coordinates" in options:
        params["face_coordinates"] = utils.encode_double_array(options.get("face_coordinates"))
    if "custom_coordinates" in options:
        params["custom_coordinates"] = utils.encode_double_array(options.get("custom_coordinates"))
    if "context" in options:
        params["context"] = utils.encode_context(options.get("context"))
    if "auto_tagging" in options:
        params["auto_tagging"] = str(options.get("auto_tagging"))
    if "access_control" in options:
        params["access_control"] = utils.json_encode(utils.build_list_of_dicts(options.get("access_control")))

    return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources(public_ids, **options):
    # Delete a list of resources by public id.
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    uri = ["resources", resource_type, upload_type]
    params = __delete_resource_params(options, public_ids=public_ids)
    return call_api("delete", uri, params, **options)


def delete_resources_by_prefix(prefix, **options):
    # Delete all resources whose public id starts with `prefix`.
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    uri = ["resources", resource_type, upload_type]
    params = __delete_resource_params(options, prefix=prefix)
    return call_api("delete", uri, params, **options)


def delete_all_resources(**options):
    # Delete every resource of the given resource/upload type. Destructive.
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    uri = ["resources", resource_type, upload_type]
    params = __delete_resource_params(options, all=True)
    return call_api("delete", uri, params, **options)


def delete_resources_by_tag(tag, **options):
    # Delete all resources carrying the given tag.
    resource_type = options.pop("resource_type", "image")
    uri = ["resources", resource_type, "tags", tag]
    params = __delete_resource_params(options)
    return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_derived_resources(derived_resource_ids, **options):
    # Delete derived (transformed) resources by their derived-resource ids.
    uri = ["derived_resources"]
    params = {"derived_resource_ids": derived_resource_ids}
    return call_api("delete", uri, params, **options)


def delete_derived_by_transformation(public_ids, transformations,
                                     resource_type='image', type='upload', invalidate=None,
                                     **options):
    """
    Delete derived resources of public ids, identified by transformations

    :param public_ids: the base resources
    :type public_ids: list of str
    :param transformations: the transformation of derived resources, optionally including the format
    :type transformations: list of (dict or str)
    :param type: The upload type
    :type type: str
    :param resource_type: The type of the resource: defaults to "image"
    :type resource_type: str
    :param invalidate: (optional) True to invalidate the resources after deletion
    :type invalidate: bool
    :return: a list of the public ids for which derived resources were deleted
    :rtype: dict
    """
    uri = ["resources", resource_type, type]
    if not isinstance(public_ids, list):
        public_ids = [public_ids]
    # keep_original=True ensures only the derived versions are removed,
    # never the base resources themselves.
    params = {"public_ids": public_ids,
              "transformations": utils.build_eager(transformations),
              "keep_original": True}
    if invalidate is not None:
        params['invalidate'] = invalidate
    return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def tags(**options):
    # List tags defined for the given resource type.
    resource_type = options.pop("resource_type", "image")
    uri = ["tags", resource_type]
    return call_api("get", uri, only(options, "next_cursor", "max_results", "prefix"), **options)


def transformations(**options):
    # List named/used transformations in the account.
    uri = ["transformations"]
    return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)


def transformation(transformation, **options):
    # Fetch details of a single transformation (by name or definition).
    uri = ["transformations", transformation_string(transformation)]
    return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)


def delete_transformation(transformation, **options):
    # Delete a transformation (by name or definition).
    uri = ["transformations", transformation_string(transformation)]
    return call_api("delete", uri, {}, **options)


# updates - currently only supported update is the "allowed_for_strict" boolean flag and unsafe_update
def update_transformation(transformation, **options):
    uri = ["transformations", transformation_string(transformation)]
    updates = only(options, "allowed_for_strict")
    if "unsafe_update" in options:
        updates["unsafe_update"] = transformation_string(options.get("unsafe_update"))
    if not updates: raise Exception("No updates given")

    return call_api("put", uri, updates, **options)


def create_transformation(name, definition, **options):
    # Create a named transformation from a definition (dict or string).
    uri = ["transformations", name]
    return call_api("post", uri, {"transformation": transformation_string(definition)}, **options)
|
||||
|
||||
|
||||
def publish_by_ids(public_ids, **options):
    # Publish (make deliverable) resources selected by explicit public ids.
    resource_type = options.pop("resource_type", "image")
    uri = ["resources", resource_type, "publish_resources"]
    params = dict(only(options, "type", "overwrite", "invalidate"), public_ids=public_ids)
    return call_api("post", uri, params, **options)


def publish_by_prefix(prefix, **options):
    # Publish resources whose public id starts with `prefix`.
    resource_type = options.pop("resource_type", "image")
    uri = ["resources", resource_type, "publish_resources"]
    params = dict(only(options, "type", "overwrite", "invalidate"), prefix=prefix)
    return call_api("post", uri, params, **options)


def publish_by_tag(tag, **options):
    # Publish resources carrying the given tag.
    resource_type = options.pop("resource_type", "image")
    uri = ["resources", resource_type, "publish_resources"]
    params = dict(only(options, "type", "overwrite", "invalidate"), tag=tag)
    return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def upload_presets(**options):
    # List upload presets defined in the account.
    uri = ["upload_presets"]
    return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)


def upload_preset(name, **options):
    # Fetch a single upload preset by name.
    uri = ["upload_presets", name]
    return call_api("get", uri, only(options, "max_results"), **options)


def delete_upload_preset(name, **options):
    # Delete an upload preset by name.
    uri = ["upload_presets", name]
    return call_api("delete", uri, {}, **options)


def update_upload_preset(name, **options):
    # Replace a preset's upload parameters; unsigned/disallow_public_id are
    # preset-level flags appended after the generic upload params.
    uri = ["upload_presets", name]
    params = utils.build_upload_params(**options)
    params = utils.cleanup_params(params)
    params.update(only(options, "unsigned", "disallow_public_id"))
    return call_api("put", uri, params, **options)


def create_upload_preset(**options):
    # Create a new upload preset; "name" is optional (server may generate one).
    uri = ["upload_presets"]
    params = utils.build_upload_params(**options)
    params = utils.cleanup_params(params)
    params.update(only(options, "unsigned", "disallow_public_id", "name"))
    return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def root_folders(**options):
    # List top-level folders.
    return call_api("get", ["folders"], {}, **options)


def subfolders(of_folder_path, **options):
    # List direct subfolders of the given folder path.
    return call_api("get", ["folders", of_folder_path], {}, **options)


def restore(public_ids, **options):
    # Restore previously deleted resources (requires backup to be enabled).
    resource_type = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    uri = ["resources", resource_type, upload_type, "restore"]
    params = dict(public_ids=public_ids)
    return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def upload_mappings(**options):
    # List auto-upload mappings (folder -> remote URL template).
    uri = ["upload_mappings"]
    return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)


def upload_mapping(name, **options):
    # Fetch a single upload mapping; the folder name is passed as a query
    # parameter, not a path segment.
    uri = ["upload_mappings"]
    params = dict(folder=name)
    return call_api("get", uri, params, **options)


def delete_upload_mapping(name, **options):
    # Delete the upload mapping for the given folder.
    uri = ["upload_mappings"]
    params = dict(folder=name)
    return call_api("delete", uri, params, **options)


def update_upload_mapping(name, **options):
    # Change the URL template of an existing mapping.
    uri = ["upload_mappings"]
    params = dict(folder=name)
    params.update(only(options, "template"))
    return call_api("put", uri, params, **options)


def create_upload_mapping(name, **options):
    # Create a new folder -> template mapping.
    uri = ["upload_mappings"]
    params = dict(folder=name)
    params.update(only(options, "template"))
    return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def list_streaming_profiles(**options):
    # List adaptive-streaming profiles (built-in and custom).
    uri = ["streaming_profiles"]
    return call_api('GET', uri, {}, **options)


def get_streaming_profile(name, **options):
    # Fetch a single streaming profile by name.
    uri = ["streaming_profiles", name]
    return call_api('GET', uri, {}, **options)


def delete_streaming_profile(name, **options):
    # Delete (or revert a built-in) streaming profile.
    uri = ["streaming_profiles", name]
    return call_api('DELETE', uri, {}, **options)


def create_streaming_profile(name, **options):
    # Create a custom streaming profile from display_name/representations.
    uri = ["streaming_profiles"]
    params = __prepare_streaming_profile_params(**options)
    params["name"] = name
    return call_api('POST', uri, params, **options)


def update_streaming_profile(name, **options):
    # Replace the representations/display name of an existing profile.
    uri = ["streaming_profiles", name]
    params = __prepare_streaming_profile_params(**options)
    return call_api('PUT', uri, params, **options)
|
||||
|
||||
|
||||
def call_json_api(method, uri, jsonBody, **options):
    # Send a JSON-encoded request body (instead of form parameters).
    logger.debug(jsonBody)
    data = json.dumps(jsonBody).encode('utf-8')
    return _call_api(method, uri, body=data, headers={'Content-Type': 'application/json'}, **options)


def call_api(method, uri, params, **options):
    # Send a request whose payload is a flat parameter dict.
    return _call_api(method, uri, params=params, **options)
|
||||
|
||||
|
||||
def _call_api(method, uri, params=None, body=None, headers=None, **options):
    """Perform a single Admin API HTTP request and parse the JSON result.

    :param method: HTTP verb (case-insensitive)
    :param uri: list of path segments appended after /v1_1/<cloud_name>
    :param params: optional dict of request parameters; list values are
        expanded to "key[i]" entries
    :param body: optional raw request body (used by call_json_api)
    :param headers: optional extra request headers
    :raises Exception: when cloud_name/api_key/api_secret are not configured
    :raises GeneralError: on transport errors or unparsable responses
    :raises Error subclass: mapped from the HTTP status via EXCEPTION_CODES
        when the response body contains an "error" entry
    :return: Response wrapping the parsed JSON result
    """
    prefix = options.pop("upload_prefix",
                         cloudinary.config().upload_prefix) or "https://api.cloudinary.com"
    cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name)
    if not cloud_name: raise Exception("Must supply cloud_name")
    api_key = options.pop("api_key", cloudinary.config().api_key)
    if not api_key: raise Exception("Must supply api_key")
    api_secret = options.pop("api_secret", cloudinary.config().api_secret)
    # Fixed: this previously re-tested cloud_name, so a missing api_secret
    # slipped through and failed later with a confusing auth error.
    if not api_secret: raise Exception("Must supply api_secret")
    api_url = "/".join([prefix, "v1_1", cloud_name] + uri)

    processed_params = None
    if isinstance(params, dict):
        processed_params = {}
        for key, value in params.items():
            if isinstance(value, list):
                # Expand lists to indexed keys: tags -> tags[0], tags[1], ...
                value_list = {"{}[{}]".format(key, i): i_value for i, i_value in enumerate(value)}
                processed_params.update(value_list)
            elif value:
                # NOTE(review): falsy scalars (0, False, "") are dropped here
                # — confirm this is intended for boolean API flags.
                processed_params[key] = value

    # Add authentication
    req_headers = urllib3.make_headers(
        basic_auth="{0}:{1}".format(api_key, api_secret),
        user_agent=cloudinary.get_user_agent()
    )
    if headers is not None:
        req_headers.update(headers)
    kw = {}
    if 'timeout' in options:
        kw['timeout'] = options['timeout']
    if body is not None:
        kw['body'] = body
    try:
        response = _http.request(method.upper(), api_url, processed_params, req_headers, **kw)
        body = response.data
    except HTTPError as e:
        # Fixed: "{0}" was previously passed as a second positional argument
        # and never formatted, and e.message does not exist on Python 3.
        raise GeneralError("Unexpected error {0}".format(e))
    except socket.error as e:
        raise GeneralError("Socket Error: %s" % (str(e)))

    try:
        result = json.loads(body.decode('utf-8'))
    except Exception as e:
        # The server replied with something that is not JSON.
        raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e))

    if "error" in result:
        # Map the HTTP status to a typed exception; unknown statuses fall
        # back to the generic Exception.
        exception_class = EXCEPTION_CODES.get(response.status) or Exception
        raise exception_class("Error {0} - {1}".format(response.status, result["error"]["message"]))

    return Response(result, response)
|
||||
|
||||
|
||||
def only(source, *keys):
    """Return a new dict holding just the entries of `source` whose key is
    listed in `keys`; keys absent from `source` are silently skipped."""
    picked = {}
    for key in keys:
        if key in source:
            picked[key] = source[key]
    return picked
|
||||
|
||||
|
||||
def transformation_string(transformation):
    # Accept either a transformation name (string, returned unchanged) or a
    # definition dict, which is serialized to its URL string form.
    if isinstance(transformation, string_types):
        return transformation
    else:
        return cloudinary.utils.generate_transformation_string(**transformation)[0]
|
||||
|
||||
|
||||
def __prepare_streaming_profile_params(**options):
    # Build the parameter dict shared by create/update_streaming_profile:
    # each representation's transformation is serialized, then the whole
    # list is JSON-encoded as the API expects.
    params = only(options, "display_name")
    if "representations" in options:
        representations = [{"transformation": transformation_string(trans)} for trans in options["representations"]]
        params["representations"] = json.dumps(representations)
    return params


def __delete_resource_params(options, **params):
    # Common parameter set for the delete_resources* family; explicit
    # keyword arguments (public_ids/prefix/all) override nothing and are
    # merged on top.
    p = dict(transformations=utils.build_eager(options.get('transformations')),
             **only(options, "keep_original", "next_cursor", "invalidate"))
    p.update(params)
    return p
|
47
lib/cloudinary/auth_token.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import hashlib
|
||||
import hmac
|
||||
import re
|
||||
import time
|
||||
from binascii import a2b_hex
|
||||
from cloudinary.compat import quote_plus
|
||||
|
||||
AUTH_TOKEN_NAME = "__cld_token__"
|
||||
|
||||
|
||||
|
||||
def generate(url=None, acl=None, start_time=None, duration=None, expiration=None, ip=None, key=None,
             token_name=AUTH_TOKEN_NAME):
    """Generate a Cloudinary authentication token ("__cld_token__=...").

    Either `expiration` (absolute unix time) or `duration` (seconds counted
    from `start_time`, or from now) must be provided. `key` is the account's
    hex-encoded auth key used for the HMAC.

    :raises Exception: when neither expiration nor duration is given
    :return: the token string "<token_name>=<parts joined by ~>"
    """
    if expiration is None:
        if duration is not None:
            # Fixed: int(time.mktime(time.gmtime())) feeds a UTC struct to
            # mktime, which interprets it as LOCAL time and skews the start
            # by the UTC offset. time.time() already returns the correct
            # unix timestamp.
            start = start_time if start_time is not None else int(time.time())
            expiration = start + duration
        else:
            raise Exception("Must provide either expiration or duration")

    token_parts = []
    if ip is not None: token_parts.append("ip=" + ip)
    if start_time is not None: token_parts.append("st=%d" % start_time)
    token_parts.append("exp=%d" % expiration)
    if acl is not None: token_parts.append("acl=%s" % _escape_to_lower(acl))
    # The URL is signed but intentionally NOT included in the emitted token.
    to_sign = list(token_parts)
    if url is not None:
        to_sign.append("url=%s" % _escape_to_lower(url))
    auth = _digest("~".join(to_sign), key)
    token_parts.append("hmac=%s" % auth)
    return "%(token_name)s=%(token)s" % {"token_name": token_name, "token": "~".join(token_parts)}
|
||||
|
||||
|
||||
def _digest(message, key):
|
||||
bin_key = a2b_hex(key)
|
||||
return hmac.new(bin_key, message.encode('utf-8'), hashlib.sha256).hexdigest()
|
||||
|
||||
|
||||
def _escape_to_lower(url):
|
||||
escaped_url = quote_plus(url)
|
||||
|
||||
def toLowercase(match):
|
||||
return match.group(0).lower()
|
||||
|
||||
escaped_url = re.sub(r'%..', toLowercase, escaped_url)
|
||||
return escaped_url
|
34
lib/cloudinary/compat.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# Copyright Cloudinary
# Python 2/3 compatibility shims, re-exported for the rest of the package.
import six.moves.urllib.parse
urlencode = six.moves.urllib.parse.urlencode
unquote = six.moves.urllib.parse.unquote
urlparse = six.moves.urllib.parse.urlparse
parse_qs = six.moves.urllib.parse.parse_qs
parse_qsl = six.moves.urllib.parse.parse_qsl
quote_plus = six.moves.urllib.parse.quote_plus
httplib = six.moves.http_client
from six import PY3, string_types, StringIO, BytesIO
urllib2 = six.moves.urllib.request
NotConnected = six.moves.http_client.NotConnected

# str <-> bytes conversion: explicit utf-8 codecs on Python 3, where str is
# already bytes on Python 2 the identity-ish str() suffices.
if PY3:
    to_bytes = lambda s: s.encode('utf8')
    to_bytearray = lambda s: bytearray(s, 'utf8')
    to_string = lambda b: b.decode('utf8')

else:
    to_bytes = str
    to_bytearray = str
    to_string = str

# Lazy range: Python 2 xrange when available, otherwise an iterator over
# Python 3's (already lazy) range.
try:
    cldrange = xrange
except NameError:
    def cldrange(*args, **kwargs):
        return iter(range(*args, **kwargs))

# next() builtin exists on 2.6+; fall back to the .next() method otherwise.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
|
134
lib/cloudinary/forms.py
Normal file
@@ -0,0 +1,134 @@
|
||||
from django import forms
|
||||
from cloudinary import CloudinaryResource
|
||||
import cloudinary.uploader
|
||||
import cloudinary.utils
|
||||
import re
|
||||
import json
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
def cl_init_js_callbacks(form, request):
    # Enable the CORS callback on every CloudinaryJsFileField of the form so
    # the browser-side uploader can post results back cross-origin.
    for field in form.fields.values():
        if isinstance(field, CloudinaryJsFileField):
            field.enable_callback(request)
|
||||
|
||||
|
||||
class CloudinaryInput(forms.TextInput):
    """Django widget rendering a direct-to-Cloudinary file input.

    Emits an <input type="file"> annotated with data-* attributes (upload
    URL, signed form data, field name) consumed by Cloudinary's jQuery
    uploader, plus a hidden input carrying the current value.
    """
    input_type = 'file'

    def render(self, name, value, attrs=None):
        attrs = self.build_attrs(attrs)
        options = attrs.get('options', {})
        # The options dict must not leak into the rendered HTML attributes.
        attrs["options"] = ''

        params = cloudinary.utils.build_upload_params(**options)
        # Unsigned presets skip signing; otherwise the params are signed
        # with the account secret.
        if options.get("unsigned"):
            params = cloudinary.utils.cleanup_params(params)
        else:
            params = cloudinary.utils.sign_request(params, options)

        if 'resource_type' not in options: options['resource_type'] = 'auto'
        cloudinary_upload_url = cloudinary.utils.cloudinary_api_url("upload", **options)

        attrs["data-url"] = cloudinary_upload_url
        attrs["data-form-data"] = json.dumps(params)
        attrs["data-cloudinary-field"] = name
        chunk_size = options.get("chunk_size", None)
        if chunk_size: attrs["data-max-chunk-size"] = chunk_size
        attrs["class"] = " ".join(["cloudinary-fileupload", attrs.get("class", "")])

        # The visible widget is always named "file"; the real field value
        # travels in the hidden input below.
        widget = super(CloudinaryInput, self).render("file", None, attrs=attrs)
        if value:
            if isinstance(value, CloudinaryResource):
                value_string = value.get_presigned()
            else:
                value_string = value
            widget += forms.HiddenInput().render(name, value_string)
        return widget
|
||||
|
||||
|
||||
class CloudinaryJsFileField(forms.Field):
    """Django form field for browser-side (javascript) uploads.

    The posted value is the presigned string produced by
    CloudinaryResource.get_presigned():
    "<resource_type>/<type>/v<version>/<public_id>.<format>#<signature>".
    """
    default_error_messages = {
        'required': _(u"No file selected!")
    }

    def __init__(self, attrs=None, options=None, autosave=True, *args, **kwargs):
        if attrs is None: attrs = {}
        if options is None: options = {}
        self.autosave = autosave
        # Copy so per-field mutation never leaks into the caller's dicts.
        attrs = attrs.copy()
        attrs["options"] = options.copy()

        field_options = {'widget': CloudinaryInput(attrs=attrs)}
        field_options.update(kwargs)
        super(CloudinaryJsFileField, self).__init__(*args, **field_options)

    def enable_callback(self, request):
        # Point the uploader at the static CORS helper page served by this app.
        from django.contrib.staticfiles.storage import staticfiles_storage
        self.widget.attrs["options"]["callback"] = request.build_absolute_uri(
            staticfiles_storage.url("html/cloudinary_cors.html"))

    def to_python(self, value):
        """Convert to CloudinaryResource"""
        if not value: return None
        m = re.search(r'^([^/]+)/([^/]+)/v(\d+)/([^#]+)#([^/]+)$', value)
        if not m:
            raise forms.ValidationError("Invalid format")
        resource_type = m.group(1)
        upload_type = m.group(2)
        version = m.group(3)
        filename = m.group(4)
        signature = m.group(5)
        # Split "<public_id>.<format>"; the last dot wins because (.*) is greedy.
        m = re.search(r'(.*)\.(.*)', filename)
        if not m:
            raise forms.ValidationError("Invalid file name")
        public_id = m.group(1)
        image_format = m.group(2)
        return CloudinaryResource(public_id,
                                  format=image_format,
                                  version=version,
                                  signature=signature,
                                  type=upload_type,
                                  resource_type=resource_type)

    def validate(self, value):
        """Validate the signature"""
        # Use the parent's handling of required fields, etc.
        super(CloudinaryJsFileField, self).validate(value)
        if not value: return
        if not value.validate():
            raise forms.ValidationError("Signature mismatch")
|
||||
|
||||
|
||||
class CloudinaryUnsignedJsFileField(CloudinaryJsFileField):
    """JS upload field preconfigured for unsigned uploads through a named
    upload preset (no per-request signature)."""
    def __init__(self, upload_preset, attrs=None, options=None, autosave=True, *args, **kwargs):
        if attrs is None:
            attrs = {}
        if options is None:
            options = {}
        # Copy before mutating so the caller's options dict stays untouched.
        options = options.copy()
        options.update({"unsigned": True, "upload_preset": upload_preset})
        super(CloudinaryUnsignedJsFileField, self).__init__(attrs, options, autosave, *args, **kwargs)
|
||||
|
||||
|
||||
class CloudinaryFileField(forms.FileField):
    """Standard Django FileField that, with autosave enabled, uploads the
    posted file to Cloudinary during to_python and returns the resulting
    CloudinaryResource instead of the raw file."""
    my_default_error_messages = {
        'required': _(u"No file selected!")
    }
    default_error_messages = forms.FileField.default_error_messages.copy()
    default_error_messages.update(my_default_error_messages)

    def __init__(self, options=None, autosave=True, *args, **kwargs):
        # options are forwarded verbatim to the uploader on save.
        self.autosave = autosave
        self.options = options or {}
        super(CloudinaryFileField, self).__init__(*args, **kwargs)

    def to_python(self, value):
        """Upload and convert to CloudinaryResource"""
        value = super(CloudinaryFileField, self).to_python(value)
        if not value:
            return None
        if self.autosave:
            return cloudinary.uploader.upload_image(value, **self.options)
        else:
            return value
|
121
lib/cloudinary/models.py
Normal file
@@ -0,0 +1,121 @@
|
||||
import re
|
||||
|
||||
|
||||
from cloudinary import CloudinaryResource, forms, uploader
|
||||
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.db import models
|
||||
|
||||
# Add introspection rules for South, if it's installed.
|
||||
try:
|
||||
from south.modelsinspector import add_introspection_rules
|
||||
add_introspection_rules([], ["^cloudinary.models.CloudinaryField"])
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
CLOUDINARY_FIELD_DB_RE = r'(?:(?P<resource_type>image|raw|video)/(?P<type>upload|private|authenticated)/)?(?:v(?P<version>\d+)/)?(?P<public_id>.*?)(\.(?P<format>[^.]+))?$'
|
||||
|
||||
|
||||
# Taken from six - https://pythonhosted.org/six/
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass (Python 2/3 compatible)."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
class CloudinaryField(models.Field):
    """Django model field that stores a Cloudinary resource as a CharField
    column and uploads new files to Cloudinary during ``pre_save``."""

    description = "A resource stored in Cloudinary"

    def __init__(self, *args, **kwargs):
        # Backed by a CharField column; 255 is the default max length.
        options = {'max_length': 255}
        self.default_form_class = kwargs.pop("default_form_class", forms.CloudinaryFileField)
        options.update(kwargs)
        self.type = options.pop("type", "upload")
        self.resource_type = options.pop("resource_type", "image")
        # Optional names of sibling model fields to mirror image dimensions into.
        self.width_field = options.pop("width_field", None)
        self.height_field = options.pop("height_field", None)
        super(CloudinaryField, self).__init__(*args, **options)

    def get_internal_type(self):
        # Stored in the database as a plain character column.
        return 'CharField'

    def value_to_string(self, obj):
        # We need to support both legacy `_get_val_from_obj` and new `value_from_object` models.Field methods.
        # It would be better to wrap it with try -> except AttributeError -> fallback to legacy.
        # Unfortunately, we might catch an AttributeError raised from `value_from_object` itself.
        # Parsing exception string is an overkill here, that's why we check for attribute existence

        if hasattr(self, 'value_from_object'):
            value = self.value_from_object(obj)
        else:  # fallback for legacy django versions
            value = self._get_val_from_obj(obj)

        return self.get_prep_value(value)

    def parse_cloudinary_resource(self, value):
        """Convert a stored DB string into a CloudinaryResource, filling in
        the field's configured defaults for missing resource_type/type."""
        m = re.match(CLOUDINARY_FIELD_DB_RE, value)
        resource_type = m.group('resource_type') or self.resource_type
        upload_type = m.group('type') or self.type
        return CloudinaryResource(
            type=upload_type,
            resource_type=resource_type,
            version=m.group('version'),
            public_id=m.group('public_id'),
            format=m.group('format')
        )

    def from_db_value(self, value, expression, connection, context):
        # DB -> Python conversion hook; NULL columns stay None.
        if value is None:
            return value
        return self.parse_cloudinary_resource(value)

    def to_python(self, value):
        """Deserialize form/DB values; uploaded files pass through untouched
        so that ``pre_save`` can perform the actual upload."""
        if isinstance(value, CloudinaryResource):
            return value
        elif isinstance(value, UploadedFile):
            return value
        elif value is None:
            return value
        else:
            return self.parse_cloudinary_resource(value)

    def upload_options_with_filename(self, model_instance, filename):
        # Hook point for subclasses; by default ignores the filename.
        return self.upload_options(model_instance)

    def upload_options(self, model_instance):
        # Hook point for subclasses to supply per-instance upload options.
        return {}

    def pre_save(self, model_instance, add):
        """Upload a pending UploadedFile to Cloudinary before saving, writing
        the resulting resource back onto the instance (and the optional
        width/height mirror fields)."""
        value = super(CloudinaryField, self).pre_save(model_instance, add)
        if isinstance(value, UploadedFile):
            options = {"type": self.type, "resource_type": self.resource_type}
            options.update(self.upload_options_with_filename(model_instance, value.name))
            instance_value = uploader.upload_resource(value, **options)
            setattr(model_instance, self.attname, instance_value)
            if self.width_field:
                setattr(model_instance, self.width_field, instance_value.metadata['width'])
            if self.height_field:
                setattr(model_instance, self.height_field, instance_value.metadata['height'])
            return self.get_prep_value(instance_value)
        else:
            return value

    def get_prep_value(self, value):
        """Serialize a CloudinaryResource (or raw string) to its DB string."""
        if not value:
            return self.get_default()
        if isinstance(value, CloudinaryResource):
            return value.get_prep_value()
        else:
            return value

    def formfield(self, **kwargs):
        # Default to the configured Cloudinary form field with autosave off,
        # since pre_save performs the upload on model save.
        options = {"type": self.type, "resource_type": self.resource_type}
        options.update(kwargs.pop('options', {}))
        defaults = {'form_class': self.default_form_class, 'options': options, 'autosave': False}
        defaults.update(kwargs)
        return super(CloudinaryField, self).formfield(**defaults)
|
34
lib/cloudinary/poster/__init__.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
#
|
||||
# Copyright (c) 2011 Chris AtLee
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
"""poster module
|
||||
|
||||
Support for streaming HTTP uploads, and multipart/form-data encoding
|
||||
|
||||
```poster.version``` is a 3-tuple of integers representing the version number.
|
||||
New releases of poster will always have a version number that compares greater
|
||||
than an older version of poster.
|
||||
New in version 0.6."""
|
||||
|
||||
import cloudinary.poster.streaminghttp
|
||||
import cloudinary.poster.encode
|
||||
|
||||
# 3-tuple version number; newer poster releases always compare greater.
version = (0, 8, 2) # Thanks JP!
|
447
lib/cloudinary/poster/encode.py
Normal file
@@ -0,0 +1,447 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""multipart/form-data encoding module
|
||||
|
||||
This module provides functions that facilitate encoding name/value pairs
|
||||
as multipart/form-data suitable for a HTTP POST or PUT request.
|
||||
|
||||
multipart/form-data is the standard way to upload files over HTTP"""
|
||||
|
||||
__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam',
|
||||
'encode_string', 'encode_file_header', 'get_body_size', 'get_headers',
|
||||
'multipart_encode']
|
||||
|
||||
# io.UnsupportedOperation is raised by file-like objects without a real
# fileno(); it does not exist on very old Pythons, hence the guard.
try:
    from io import UnsupportedOperation
except ImportError:
    UnsupportedOperation = None

try:
    import uuid
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        return uuid.uuid4().hex
except ImportError:
    # Pre-2.5 fallback: hash 160 random bits with the legacy sha module.
    import random, sha
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        bits = random.getrandbits(160)
        return sha.new(str(bits)).hexdigest()
|
||||
|
||||
import re, os, mimetypes
|
||||
from cloudinary.compat import (PY3, string_types, to_bytes, to_string,
|
||||
to_bytearray, quote_plus, advance_iterator)
|
||||
try:
|
||||
from email.header import Header
|
||||
except ImportError:
|
||||
# Python 2.4
|
||||
from email.Header import Header
|
||||
|
||||
# Define a version-appropriate encode_and_quote at import time.
if PY3:
    def encode_and_quote(data):
        """URL-quote ``data`` after encoding it to bytes; None passes through."""
        if data is None:
            return None
        return quote_plus(to_bytes(data))

else:
    def encode_and_quote(data):
        """If ``data`` is unicode, return quote_plus(data.encode("utf-8")) otherwise return quote_plus(data)"""
        if data is None:
            return None

        if isinstance(data, unicode):
            data = data.encode("utf-8")
        return quote_plus(data)
|
||||
|
||||
# Define a version-appropriate byte-coercion helper at import time.
if PY3:
    def _strify(s):
        """Coerce ``s`` to bytes (UTF-8 for text, str() otherwise); None passes through."""
        if s is None:
            return None
        elif isinstance(s, bytes):
            return s
        else:
            try:
                return to_bytes(s)
            except AttributeError:
                # Non-string objects have no .encode; stringify first.
                return to_bytes(str(s))
else:
    def _strify(s):
        """If s is a unicode string, encode it to UTF-8 and return the results, otherwise return str(s), or None if s is None"""
        if s is None:
            return None
        if isinstance(s, unicode):
            return s.encode("utf-8")
        return str(s)
|
||||
|
||||
class MultipartParam(object):
    """Represents a single parameter in a multipart/form-data request

    ``name`` is the name of this parameter.

    If ``value`` is set, it must be a string or unicode object to use as the
    data for this parameter.

    If ``filename`` is set, it is what to say that this parameter's filename
    is. Note that this does not have to be the actual filename of any local
    file.

    If ``filetype`` is set, it is used as the Content-Type for this parameter.
    If unset it defaults to "text/plain; charset=utf8"

    If ``filesize`` is set, it specifies the length of the file ``fileobj``

    If ``fileobj`` is set, it must be a file-like object that supports
    .read().

    ``value`` and ``fileobj`` must not both be set; doing so raises a
    ValueError.

    If ``fileobj`` is set, and ``filesize`` is not specified, then
    the file's size will be determined first by stat'ing ``fileobj``'s
    file descriptor, and if that fails, by seeking to the end of the file,
    recording the current position as the size, and then by seeking back to the
    beginning of the file.

    ``cb`` is a callable which will be called from iter_encode with (self,
    current, total), representing the current parameter, current amount
    transferred, and the total size.
    """
    def __init__(self, name, value=None, filename=None, filetype=None,
                 filesize=None, fileobj=None, cb=None):
        # MIME-encode the name so non-ASCII header values survive.
        self.name = Header(name).encode()
        self.value = _strify(value)
        if filename is None:
            self.filename = None
        else:
            if PY3:
                # Replace non-ASCII characters with XML character references.
                byte_filename = filename.encode("ascii", "xmlcharrefreplace")
                self.filename = to_string(byte_filename)
                encoding = 'unicode_escape'
            else:
                if isinstance(filename, unicode):
                    # Encode with XML entities
                    self.filename = filename.encode("ascii", "xmlcharrefreplace")
                else:
                    self.filename = str(filename)
                encoding = 'string_escape'
            # Backslash-escape, then escape embedded quotes for the header.
            self.filename = self.filename.encode(encoding).replace(to_bytes('"'), to_bytes('\\"'))
        self.filetype = _strify(filetype)

        self.filesize = filesize
        self.fileobj = fileobj
        self.cb = cb

        if self.value is not None and self.fileobj is not None:
            raise ValueError("Only one of value or fileobj may be specified")

        if fileobj is not None and filesize is None:
            # Try and determine the file size
            try:
                self.filesize = os.fstat(fileobj.fileno()).st_size
            except (OSError, AttributeError, UnsupportedOperation):
                # No usable fileno(); fall back to seek/tell, then rewind.
                try:
                    fileobj.seek(0, 2)
                    self.filesize = fileobj.tell()
                    fileobj.seek(0)
                except:
                    raise ValueError("Could not determine filesize")

    def __cmp__(self, other):
        # Python-2-style ordering over the visible attributes.
        attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj']
        myattrs = [getattr(self, a) for a in attrs]
        oattrs = [getattr(other, a) for a in attrs]
        return cmp(myattrs, oattrs)

    def reset(self):
        # Rewind the underlying file so the parameter can be re-encoded.
        if self.fileobj is not None:
            self.fileobj.seek(0)
        elif self.value is None:
            raise ValueError("Don't know how to reset this parameter")

    @classmethod
    def from_file(cls, paramname, filename):
        """Returns a new MultipartParam object constructed from the local
        file at ``filename``.

        ``filesize`` is determined by os.path.getsize(``filename``)

        ``filetype`` is determined by mimetypes.guess_type(``filename``)[0]

        ``filename`` is set to os.path.basename(``filename``)
        """

        return cls(paramname, filename=os.path.basename(filename),
                   filetype=mimetypes.guess_type(filename)[0],
                   filesize=os.path.getsize(filename),
                   fileobj=open(filename, "rb"))

    @classmethod
    def from_params(cls, params):
        """Returns a list of MultipartParam objects from a sequence of
        name, value pairs, MultipartParam instances,
        or from a mapping of names to values

        The values may be strings or file objects, or MultipartParam objects.
        MultipartParam object names must match the given names in the
        name,value pairs or mapping, if applicable."""
        if hasattr(params, 'items'):
            params = params.items()

        retval = []
        for item in params:
            if isinstance(item, cls):
                retval.append(item)
                continue
            name, value = item
            if isinstance(value, cls):
                assert value.name == name
                retval.append(value)
                continue
            if hasattr(value, 'read'):
                # Looks like a file object
                filename = getattr(value, 'name', None)
                if filename is not None:
                    filetype = mimetypes.guess_type(filename)[0]
                else:
                    filetype = None

                retval.append(cls(name=name, filename=filename,
                                  filetype=filetype, fileobj=value))
            else:
                retval.append(cls(name, value))
        return retval

    def encode_hdr(self, boundary):
        """Returns the header of the encoding of this parameter"""
        boundary = encode_and_quote(boundary)

        headers = ["--%s" % boundary]

        if self.filename:
            disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
                to_string(self.filename))
        else:
            disposition = 'form-data; name="%s"' % self.name

        headers.append("Content-Disposition: %s" % disposition)

        if self.filetype:
            filetype = to_string(self.filetype)
        else:
            filetype = "text/plain; charset=utf-8"

        headers.append("Content-Type: %s" % filetype)

        # Two empty entries produce the blank line separating header and body.
        headers.append("")
        headers.append("")

        return "\r\n".join(headers)

    def encode(self, boundary):
        """Returns the string encoding of this parameter"""
        if self.value is None:
            value = self.fileobj.read()
        else:
            value = self.value

        # The payload must not contain a line equal to the boundary marker.
        if re.search(to_bytes("^--%s$" % re.escape(boundary)), value, re.M):
            raise ValueError("boundary found in encoded string")

        return to_bytes(self.encode_hdr(boundary)) + value + b"\r\n"

    def iter_encode(self, boundary, blocksize=4096):
        """Yields the encoding of this parameter
        If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
        yielded."""
        total = self.get_size(boundary)
        current = 0
        if self.value is not None:
            # In-memory value: emit the whole encoding in one block.
            block = self.encode(boundary)
            current += len(block)
            yield block
            if self.cb:
                self.cb(self, current, total)
        else:
            # Streaming file: emit the header, then read/yield in chunks.
            block = to_bytes(self.encode_hdr(boundary))
            current += len(block)
            yield block
            if self.cb:
                self.cb(self, current, total)
            last_block = to_bytearray("")
            encoded_boundary = "--%s" % encode_and_quote(boundary)
            boundary_exp = re.compile(to_bytes("^%s$" % re.escape(encoded_boundary)),
                                      re.M)
            while True:
                block = self.fileobj.read(blocksize)
                if not block:
                    # End of file: emit the trailing CRLF (the +2 in get_size).
                    current += 2
                    yield to_bytes("\r\n")
                    if self.cb:
                        self.cb(self, current, total)
                    break
                last_block += block
                if boundary_exp.search(last_block):
                    raise ValueError("boundary found in file data")
                # Keep only a boundary-sized tail so a marker split across two
                # reads is still detected on the next iteration.
                last_block = last_block[-len(to_bytes(encoded_boundary))-2:]
                current += len(block)
                yield block
                if self.cb:
                    self.cb(self, current, total)

    def get_size(self, boundary):
        """Returns the size in bytes that this param will be when encoded
        with the given boundary."""
        if self.filesize is not None:
            valuesize = self.filesize
        else:
            valuesize = len(self.value)

        # +2 accounts for the trailing "\r\n" after the payload.
        return len(self.encode_hdr(boundary)) + 2 + valuesize
|
||||
|
||||
def encode_string(boundary, name, value):
    """Encode a single ``name``/``value`` pair as a multipart/form-data
    variable.  ``boundary`` is the separator string used throughout a
    single request."""
    param = MultipartParam(name, value)
    return param.encode(boundary)
|
||||
|
||||
def encode_file_header(boundary, paramname, filesize, filename=None,
                       filetype=None):
    """Build the leading data for a multipart/form-data field carrying file
    data; the raw file bytes should be sent immediately after this header.

    ``boundary`` is the boundary string used throughout a single request to
    separate variables.

    ``paramname`` is the name of the variable in this request.

    ``filesize`` is the size of the file data.

    ``filename``, if given, is reported to the server as the original
    filename (informational only).

    ``filetype``, if given, is the MIME type of this file.
    """
    param = MultipartParam(paramname, filesize=filesize, filename=filename,
                           filetype=filetype)
    return param.encode_hdr(boundary)
|
||||
|
||||
def get_body_size(params, boundary):
    """Return the total byte length of the multipart/form-data encoding of
    ``params``, including the closing ``--boundary--\\r\\n`` epilogue."""
    total = 0
    for param in MultipartParam.from_params(params):
        total += param.get_size(boundary)
    # "--" + boundary + "--" + "\r\n" contributes the extra 6 bytes.
    return total + len(boundary) + 6
|
||||
|
||||
def get_headers(params, boundary):
    """Return a dict holding the Content-Type and Content-Length headers
    for the multipart/form-data encoding of ``params``."""
    quoted = quote_plus(boundary)
    return {
        'Content-Type': "multipart/form-data; boundary=%s" % quoted,
        'Content-Length': str(get_body_size(params, quoted)),
    }
|
||||
|
||||
class multipart_yielder:
    """Iterator that yields the full multipart/form-data body for a list of
    MultipartParam objects, invoking ``cb`` with progress after each block."""

    def __init__(self, params, boundary, cb):
        self.params = params
        self.boundary = boundary
        self.cb = cb

        # i: index of the next param; None once the epilogue has been emitted.
        self.i = 0
        self.p = None          # param currently being encoded
        self.param_iter = None # active iter_encode generator for self.p
        self.current = 0       # bytes yielded so far
        self.total = get_body_size(params, boundary)

    def __iter__(self):
        return self

    def __next__(self):
        # Python 3 protocol delegates to the Python 2 style next().
        return self.next()

    def next(self):
        """generator function to yield multipart/form-data representation
        of parameters"""
        # Drain the current parameter's encoder first, if one is active.
        if self.param_iter is not None:
            try:
                block = advance_iterator(self.param_iter)
                self.current += len(block)
                if self.cb:
                    self.cb(self.p, self.current, self.total)
                return block
            except StopIteration:
                self.p = None
                self.param_iter = None

        if self.i is None:
            # Epilogue already emitted; iteration is complete.
            raise StopIteration
        elif self.i >= len(self.params):
            # All params done: emit the closing boundary exactly once.
            self.param_iter = None
            self.p = None
            self.i = None
            block = to_bytes("--%s--\r\n" % self.boundary)
            self.current += len(block)
            if self.cb:
                self.cb(self.p, self.current, self.total)
            return block

        # Start encoding the next parameter and recurse for its first block.
        self.p = self.params[self.i]
        self.param_iter = self.p.iter_encode(self.boundary)
        self.i += 1
        return advance_iterator(self)

    def reset(self):
        # Rewind so the whole body can be produced again (e.g. on redirect).
        self.i = 0
        self.current = 0
        for param in self.params:
            param.reset()
|
||||
|
||||
def multipart_encode(params, boundary=None, cb=None):
    """Encode ``params`` as multipart/form-data.

    ``params`` should be a sequence of (name, value) pairs or MultipartParam
    objects, or a mapping of names to values.
    Values are either string parameter values, or file-like objects to use as
    the parameter value. The file-like objects must support .read() and either
    .fileno() or both .seek() and .tell().

    If ``boundary`` is set, then it is used as the MIME boundary. Otherwise
    a randomly generated boundary will be used. In either case, if the
    boundary string appears in the parameter values a ValueError will be
    raised.

    If ``cb`` is set, it should be a callback which will get called as blocks
    of data are encoded. It will be called with (param, current, total),
    indicating the current parameter being encoded, the current amount encoded,
    and the total amount to encode.

    Returns a tuple of `datagen`, `headers`, where `datagen` is a
    generator that will yield blocks of data that make up the encoded
    parameters, and `headers` is a dictionary with the associated
    Content-Type and Content-Length headers.

    Examples:

    >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s

    >>> p = MultipartParam("key", "value2")
    >>> datagen, headers = multipart_encode( [("key", "value1"), p] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s

    >>> datagen, headers = multipart_encode( {"key": "value1"} )
    >>> s = "".join(datagen)
    >>> assert "value2" not in s and "value1" in s

    """
    if boundary is None:
        boundary = gen_boundary()
    else:
        # User-supplied boundary must be URL-quoted to be header-safe.
        boundary = quote_plus(boundary)

    # Headers are computed before from_params so cb progress totals match.
    headers = get_headers(params, boundary)
    params = MultipartParam.from_params(params)

    return multipart_yielder(params, boundary, cb), headers
|
201
lib/cloudinary/poster/streaminghttp.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""Streaming HTTP uploads module.
|
||||
|
||||
This module extends the standard httplib and urllib2 objects so that
|
||||
iterable objects can be used in the body of HTTP requests.
|
||||
|
||||
In most cases all one should have to do is call :func:`register_openers()`
|
||||
to register the new streaming http handlers which will take priority over
|
||||
the default handlers, and then you can use iterable objects in the body
|
||||
of HTTP requests.
|
||||
|
||||
**N.B.** You must specify a Content-Length header if using an iterable object
|
||||
since there is no way to determine in advance the total size that will be
|
||||
yielded, and there is no way to reset an iterator.
|
||||
|
||||
Example usage:
|
||||
|
||||
>>> from StringIO import StringIO
|
||||
>>> import urllib2, poster.streaminghttp
|
||||
|
||||
>>> opener = poster.streaminghttp.register_openers()
|
||||
|
||||
>>> s = "Test file data"
|
||||
>>> f = StringIO(s)
|
||||
|
||||
>>> req = urllib2.Request("http://localhost:5000", f,
|
||||
... {'Content-Length': str(len(s))})
|
||||
"""
|
||||
|
||||
import sys, socket
|
||||
from cloudinary.compat import httplib, urllib2, NotConnected
|
||||
|
||||
__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
           'StreamingHTTPHandler', 'register_openers']

# Only export the HTTPS variants when this httplib build supports SSL.
if hasattr(httplib, 'HTTPS'):
    __all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
|
||||
|
||||
class _StreamingHTTPMixin:
    """Mixin class for HTTP and HTTPS connections that implements a streaming
    send method."""
    def send(self, value):
        """Send ``value`` to the server.

        ``value`` can be a string object, a file-like object that supports
        a .read() method, or an iterable object that supports a .next()
        method.
        """
        # Based on python 2.6's httplib.HTTPConnection.send()
        if self.sock is None:
            if self.auto_open:
                self.connect()
            else:
                raise NotConnected()

        # send the data to the server. if we get a broken pipe, then close
        # the socket. we want to reconnect when somebody tries to send again.
        #
        # NOTE: we DO propagate the error, though, because we cannot simply
        # ignore the error... the caller will know if they can retry.
        if self.debuglevel > 0:
            print("send:", repr(value))
        try:
            blocksize = 8192
            if hasattr(value, 'read'):
                # File-like: stream it out in blocks, rewinding first when possible.
                if hasattr(value, 'seek'):
                    value.seek(0)
                if self.debuglevel > 0:
                    print("sendIng a read()able")
                data = value.read(blocksize)
                while data:
                    self.sock.sendall(data)
                    data = value.read(blocksize)
            elif hasattr(value, 'next'):
                # Iterable: reset if supported, then send each yielded block.
                if hasattr(value, 'reset'):
                    value.reset()
                if self.debuglevel > 0:
                    print("sendIng an iterable")
                for data in value:
                    self.sock.sendall(data)
            else:
                # Plain string/bytes payload.
                self.sock.sendall(value)
        except socket.error:
            e = sys.exc_info()[1]
            # NOTE(review): indexing the exception (e[0]) only works on
            # Python 2 socket.error; Python 3 would need e.errno — confirm
            # the intended interpreter targets.
            if e[0] == 32:  # Broken pipe
                self.close()
            raise
|
||||
|
||||
class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
    """Subclass of `httplib.HTTPConnection` that overrides the `send()` method
    to support iterable body objects"""
|
||||
|
||||
class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
    """Subclass of `urllib2.HTTPRedirectHandler` that overrides the
    `redirect_request` method to properly handle redirected POST requests

    This class is required because python 2.5's HTTPRedirectHandler does
    not remove the Content-Type or Content-Length headers when requesting
    the new resource, but the body of the original request is not preserved.
    """

    # Run before the stock redirect handler so this one wins.
    handler_order = urllib2.HTTPRedirectHandler.handler_order - 1

    # From python2.6 urllib2's HTTPRedirectHandler
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received. If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect. Otherwise, raise HTTPError if no-one
        else should try to handle this url. Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case). In practice,
            # essentially all clients do redirect in this case, so we
            # do the same.
            # be conciliant with URIs containing a space
            newurl = newurl.replace(' ', '%20')
            # Drop the body-describing headers: the redirected request is
            # re-issued without the original body.
            newheaders = dict((k, v) for k, v in req.headers.items()
                              if k.lower() not in (
                                  "content-length", "content-type")
                              )
            return urllib2.Request(newurl,
                                   headers=newheaders,
                                   origin_req_host=req.get_origin_req_host(),
                                   unverifiable=True)
        else:
            raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
|
||||
|
||||
class StreamingHTTPHandler(urllib2.HTTPHandler):
    """HTTP handler that opens connections through StreamingHTTPConnection,
    enabling iterable request bodies."""

    # Run before the stock HTTP handler so this one takes priority.
    handler_order = urllib2.HTTPHandler.handler_order - 1

    def http_open(self, req):
        """Open a StreamingHTTPConnection for the given request"""
        return self.do_open(StreamingHTTPConnection, req)

    def http_request(self, req):
        """Validate the request: a streaming (file-like or iterable) body
        must come with an explicit Content-Length header, since its total
        size cannot be discovered up front."""
        if req.has_data():
            body = req.get_data()
            streamable = hasattr(body, 'read') or hasattr(body, 'next')
            if streamable and not req.has_header('Content-length'):
                raise ValueError(
                    "No Content-Length specified for iterable body")
        return urllib2.HTTPHandler.do_request_(self, req)
|
||||
|
||||
# HTTPS variants are only defined when the httplib build supports SSL.
if hasattr(httplib, 'HTTPS'):
    class StreamingHTTPSConnection(_StreamingHTTPMixin,
            httplib.HTTPSConnection):
        """Subclass of `httplib.HTTPSConnection` that overrides the `send()`
        method to support iterable body objects"""

    class StreamingHTTPSHandler(urllib2.HTTPSHandler):
        """Subclass of `urllib2.HTTPSHandler` that uses
        StreamingHTTPSConnection as its http connection class."""

        # Run before the stock HTTPS handler so this one takes priority.
        handler_order = urllib2.HTTPSHandler.handler_order - 1

        def https_open(self, req):
            return self.do_open(StreamingHTTPSConnection, req)

        def https_request(self, req):
            # Make sure that if we're using an iterable object as the request
            # body, that we've also specified Content-Length
            if req.has_data():
                data = req.get_data()
                if hasattr(data, 'read') or hasattr(data, 'next'):
                    if not req.has_header('Content-length'):
                        raise ValueError(
                                "No Content-Length specified for iterable body")
            return urllib2.HTTPSHandler.do_request_(self, req)
|
||||
|
||||
|
||||
def get_handlers():
    """Return the streaming handler classes supported by this build
    (HTTPS included only when httplib was compiled with SSL)."""
    https_extra = [StreamingHTTPSHandler] if hasattr(httplib, "HTTPS") else []
    return [StreamingHTTPHandler, StreamingHTTPRedirectHandler] + https_extra
|
||||
|
||||
def register_openers():
    """Register the streaming http handlers in the global urllib2 default
    opener object.

    Returns the created OpenerDirector object."""
    director = urllib2.build_opener(*get_handlers())
    urllib2.install_opener(director)
    return director
|
59
lib/cloudinary/search.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import json
|
||||
from copy import deepcopy
|
||||
from . import api
|
||||
|
||||
|
||||
class Search:
    """Fluent builder for Cloudinary search queries.

    Each setter returns ``self`` so calls can be chained; ``execute``
    posts the accumulated query to the Admin API search endpoint."""

    def __init__(self):
        # Accumulates the JSON body of the search request.
        self.query = {}

    def expression(self, value):
        """Specify the search query expression."""
        self.query["expression"] = value
        return self

    def max_results(self, value):
        """Set the max results to return"""
        self.query["max_results"] = value
        return self

    def next_cursor(self, value):
        """Get next page in the query using the ``next_cursor`` value from a previous invocation."""
        self.query["next_cursor"] = value
        return self

    def sort_by(self, field_name, direction=None):
        """Add a field to sort results by. If not provided, direction is ``desc``."""
        self._add("sort_by", {field_name: 'desc' if direction is None else direction})
        return self

    def aggregate(self, value):
        """Aggregate field."""
        self._add("aggregate", value)
        return self

    def with_field(self, value):
        """Request an additional field in the result set."""
        self._add("with_field", value)
        return self

    def to_json(self):
        """Serialize the accumulated query as a JSON string."""
        return json.dumps(self.query)

    def execute(self, **options):
        """Execute the search and return results."""
        options["content_type"] = 'application/json'
        uri = ['resources', 'search']
        return api.call_json_api('post', uri, self.as_dict(), **options)

    def _add(self, name, value):
        # List-valued keys accumulate; create the list on first use.
        self.query.setdefault(name, []).append(value)
        return self

    def as_dict(self):
        """Return an independent deep copy of the accumulated query."""
        return deepcopy(self.query)
|
43
lib/cloudinary/static/html/cloudinary_cors.html
Normal file
@@ -0,0 +1,43 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
/*
|
||||
json2.js
|
||||
2011-10-19
|
||||
|
||||
Public Domain.
|
||||
|
||||
NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
||||
|
||||
See http://www.JSON.org/js.html
|
||||
|
||||
This code should be minified before deployment.
|
||||
See http://javascript.crockford.com/jsmin.html
|
||||
|
||||
USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
|
||||
NOT CONTROL.
|
||||
|
||||
*/
|
||||
// Minified json2.js (2011-10-19, public domain, Douglas Crockford): installs
// JSON.stringify and JSON.parse polyfills only when the browser lacks native
// implementations. Vendored verbatim — do not edit; see the license comment above.
var JSON;if(!JSON){JSON={}}(function(){function str(a,b){var c,d,e,f,g=gap,h,i=b[a];if(i&&typeof i==="object"&&typeof i.toJSON==="function"){i=i.toJSON(a)}if(typeof rep==="function"){i=rep.call(b,a,i)}switch(typeof i){case"string":return quote(i);case"number":return isFinite(i)?String(i):"null";case"boolean":case"null":return String(i);case"object":if(!i){return"null"}gap+=indent;h=[];if(Object.prototype.toString.apply(i)==="[object Array]"){f=i.length;for(c=0;c<f;c+=1){h[c]=str(c,i)||"null"}e=h.length===0?"[]":gap?"[\n"+gap+h.join(",\n"+gap)+"\n"+g+"]":"["+h.join(",")+"]";gap=g;return e}if(rep&&typeof rep==="object"){f=rep.length;for(c=0;c<f;c+=1){if(typeof rep[c]==="string"){d=rep[c];e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}else{for(d in i){if(Object.prototype.hasOwnProperty.call(i,d)){e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}e=h.length===0?"{}":gap?"{\n"+gap+h.join(",\n"+gap)+"\n"+g+"}":"{"+h.join(",")+"}";gap=g;return e}}function quote(a){escapable.lastIndex=0;return escapable.test(a)?'"'+a.replace(escapable,function(a){var b=meta[a];return typeof b==="string"?b:"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})+'"':'"'+a+'"'}function f(a){return a<10?"0"+a:a}"use strict";if(typeof Date.prototype.toJSON!=="function"){Date.prototype.toJSON=function(a){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null};String.prototype.toJSON=Number.prototype.toJSON=Boolean.prototype.toJSON=function(a){return this.valueOf()}}var cx=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,escapable=/[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,gap,indent,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},rep;if(typeof 
JSON.stringify!=="function"){JSON.stringify=function(a,b,c){var d;gap="";indent="";if(typeof c==="number"){for(d=0;d<c;d+=1){indent+=" "}}else if(typeof c==="string"){indent=c}rep=b;if(b&&typeof b!=="function"&&(typeof b!=="object"||typeof b.length!=="number")){throw new Error("JSON.stringify")}return str("",{"":a})}}if(typeof JSON.parse!=="function"){JSON.parse=function(text,reviver){function walk(a,b){var c,d,e=a[b];if(e&&typeof e==="object"){for(c in e){if(Object.prototype.hasOwnProperty.call(e,c)){d=walk(e,c);if(d!==undefined){e[c]=d}else{delete e[c]}}}}return reviver.call(a,b,e)}var j;text=String(text);cx.lastIndex=0;if(cx.test(text)){text=text.replace(cx,function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})}if(/^[\],:{}\s]*$/.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,""))){j=eval("("+text+")");return typeof reviver==="function"?walk({"":j},""):j}throw new SyntaxError("JSON.parse")}}})()
|
||||
/* end of json2.js */
|
||||
;
|
||||
/**
 * Parse a URL query string (without the leading "?") into a JSON string
 * mapping parameter names to their decoded values.
 *
 * Fixes over the naive split("="): a parameter without "=" maps to ""
 * instead of the string "undefined", values containing "=" are no longer
 * truncated, "+" decodes to a space (x-www-form-urlencoded), names are
 * percent-decoded too, and empty segments ("a=1&&b=2") are skipped.
 *
 * @param {string} query - raw query string, e.g. "a=1&b=two%20words"
 * @returns {string} JSON object text, e.g. '{"a":"1","b":"two words"}'
 */
function parse(query) {
  var result = {};
  var params = query.split("&");
  for (var i = 0; i < params.length; i++) {
    var param = params[i];
    if (!param) continue; // skip empty segments, including an empty query
    var eq = param.indexOf("=");
    // Split only on the FIRST "=" so values may themselves contain "=".
    var name = eq < 0 ? param : param.slice(0, eq);
    var value = eq < 0 ? "" : param.slice(eq + 1);
    // application/x-www-form-urlencoded encodes spaces as "+".
    result[decodeURIComponent(name.replace(/\+/g, " "))] =
      decodeURIComponent(value.replace(/\+/g, " "));
  }
  return JSON.stringify(result);
}
|
||||
|
||||
// Render the parsed query string as the page's only text so the embedding
// frame can read it back; both textContent (standard) and innerText (legacy
// IE) are assigned to cover old browsers this CORS shim targets.
document.body.textContent = document.body.innerText = parse(window.location.search.slice(1));
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
2
lib/cloudinary/static/js/canvas-to-blob.min.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
// Minified canvas-to-blob polyfill (blueimp): adds HTMLCanvasElement.prototype.toBlob
// where missing and exposes a dataURLtoBlob(dataURI) converter (AMD/CommonJS/global).
// Vendored verbatim — do not edit by hand.
!function(t){"use strict";var e=t.HTMLCanvasElement&&t.HTMLCanvasElement.prototype,o=t.Blob&&function(){try{return Boolean(new Blob)}catch(t){return!1}}(),n=o&&t.Uint8Array&&function(){try{return 100===new Blob([new Uint8Array(100)]).size}catch(t){return!1}}(),r=t.BlobBuilder||t.WebKitBlobBuilder||t.MozBlobBuilder||t.MSBlobBuilder,a=/^data:((.*?)(;charset=.*?)?)(;base64)?,/,i=(o||r)&&t.atob&&t.ArrayBuffer&&t.Uint8Array&&function(t){var e,i,l,u,c,f,b,d,B;if(!(e=t.match(a)))throw new Error("invalid data URI");for(i=e[2]?e[1]:"text/plain"+(e[3]||";charset=US-ASCII"),l=!!e[4],u=t.slice(e[0].length),c=l?atob(u):decodeURIComponent(u),f=new ArrayBuffer(c.length),b=new Uint8Array(f),d=0;d<c.length;d+=1)b[d]=c.charCodeAt(d);return o?new Blob([n?b:f],{type:i}):((B=new r).append(f),B.getBlob(i))};t.HTMLCanvasElement&&!e.toBlob&&(e.mozGetAsFile?e.toBlob=function(t,o,n){var r=this;setTimeout(function(){t(n&&e.toDataURL&&i?i(r.toDataURL(o,n)):r.mozGetAsFile("blob",o))})}:e.toDataURL&&i&&(e.toBlob=function(t,e,o){var n=this;setTimeout(function(){t(i(n.toDataURL(e,o)))})})),"function"==typeof define&&define.amd?define(function(){return i}):"object"==typeof module&&module.exports?module.exports=i:t.dataURLtoBlob=i}(window);
|
||||
//# sourceMappingURL=canvas-to-blob.min.js.map
|