Compare commits
49 Commits

be058eaff7, f409dda2ef, f409cdda8f, 9cd6396c35, ee754ea533, 36de20dd75, a957e8eb4f,
14a90d84ec, fae9bc618a, 3248e6500e, c17bf79d79, 1ff1270bfa, b1a2cf33d8, b2292e98c1,
4d156a8911, 7193b6518b, cff6b44109, fb7ad9438e, afc265a188, 01fe7bf612, 1cb75bd053,
0eaea4d011, 67377a2561, a8aae9f1f5, a9ce92decb, c19162295a, 58796c45ed, d94b348780,
95f92bd292, bc52ac3559, 8bbc6a6611, 8902b93a26, ae36af807d, fd256625c6, bee543a25a,
55eb79cb52, 35965a8320, 8a902ae3e6, 52bed5bf98, 9e83f6d779, 0ba5012464, 73ff28465d,
7484d65dbb, 4a120e7a54, 8d63d85821, 5cec84a802, 48da41690d, 1c82241f30, b1ea3bcd4e
CHANGELOG.md (51 lines changed)
@@ -1,5 +1,56 @@
# Changelog

## v1.3.5 (2016-02-02)

* Fix: Removing unique constraints from database.
* Fix: Unable to expand media info table when missing "Added At" date.
* Fix: Server verification for unpublished servers.
* Fix: Updating PMS identifier for server change.
* Add: {stream_time}, {remaining_time}, and {progress_time} to notification options.
* Add: Powershell script support. (Thanks @Hellowlol)
* Add: Method to delete duplicate libraries.
* Change: Daemonize before running start up tasks.


## v1.3.4 (2016-01-29)

* Fix: Activity checker not starting with library update (history not logging).
* Fix: Libraries duplicated in database.
* Fix: Buffer notifications even when disabled when using websockets.
* Fix: Libraries and Users lists not refreshing.
* Fix: Server verification in settings.
* Fix: Empty libraries not added to database.
* Add: Unique identifiers to notification options.
* Remove: Requirement of media type toggles for recently added notifications.
* Remove: Built in Twitter key and secret.
* Change: Unnecessary quoting of script arguments.
* Change: Facebook notification instructions.


## v1.3.3 (2016-01-26)

* Fix: Plays by Month graph not loading.
* Change: Disable caching for datatables.
* Change: Improved updating library data in the database again.


## v1.3.2 (2016-01-24)

* Fix: 'datestamp' and 'timestamp' for server notifications.
* Change: New method for updating library data in database.


## v1.3.1 (2016-01-23)

* Fix: Notifiers authorization popups for reverse proxies.
* Fix: Empty brackets in titles on tables.
* Fix: Star rating overlapping text.
* Fix: Unable to startup when library refresh fails.
* Fix: Unable to parse 'datestamp' and 'timestamp' format.
* Change: Rename "Last Watched" to "Last Played".
* Change: More descriptive libraries updating message.


## v1.3.0 (2016-01-23)

* Add: Brand new Libraries section.
@@ -153,12 +153,12 @@ def main():
     # Put the database in the DATA_DIR
     plexpy.DB_FILE = os.path.join(plexpy.DATA_DIR, 'plexpy.db')
 
-    # Read config and start logging
-    plexpy.initialize(config_file)
-
     if plexpy.DAEMON:
         plexpy.daemonize()
 
+    # Read config and start logging
+    plexpy.initialize(config_file)
+
     # Force the http port if neccessary
     if args.port:
         http_port = args.port
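The reordering above matches the changelog entry "Change: Daemonize before running start up tasks": the config read and logging setup now happen only after the process has detached, so handlers and background threads are created in the surviving daemon child rather than in a parent that is about to exit. For orientation only, a classic POSIX double-fork daemonize looks roughly like this (a sketch, not PlexPy's actual `plexpy.daemonize()`):

```python
import os
import sys

def daemonize():
    """Minimal double-fork daemonization sketch (POSIX only)."""
    if os.fork() > 0:
        sys.exit(0)          # first parent exits; child keeps running
    os.setsid()              # become session leader, detach from the terminal
    if os.fork() > 0:
        sys.exit(0)          # second parent exits; grandchild can never regain a tty
    os.chdir('/')
    os.umask(0)

daemonize()
# Start-up tasks (reading config, opening the database, wiring up logging)
# run here, inside the daemonized process, matching the new ordering above.
```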
@@ -1326,8 +1326,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
     white-space: nowrap;
     margin-top: 3px;
     height: 21px;
-    position: absolute;
-    right: 0;
+    float: right;
 }
 .star-rating .star-icon {
     width: auto;
@@ -692,7 +692,7 @@ DOCUMENTATION :: END
     <li>
         <div class="home-platforms-instance-info">
             <div class="home-platforms-instance-name">
-                <h4>Last Watched</h4>
+                <h4>Last Played</h4>
             </div>
             <div class="home-platforms-instance-last-user">
                 <h4>
@@ -22,7 +22,7 @@ history_table_options = {
         "emptyTable": "No data in table"
     },
     "pagingType": "bootstrap",
-    "stateSave": true,
+    "stateSave": false,
    "processing": false,
    "serverSide": true,
    "pageLength": 25,
@@ -133,20 +133,23 @@ history_table_options = {
|
||||
"data":"full_title",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' (' + rowData['year'] + ')</span>'
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'episode') {
|
||||
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' \
|
||||
(S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')</span>'
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'track') {
|
||||
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + ' (' + rowData['parent_title'] + ')</span>'
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
|
@@ -97,21 +97,24 @@ history_table_modal_options = {
|
||||
"data":"full_title",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' (' + rowData['year'] + ')</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'episode') {
|
||||
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' \
|
||||
(S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'track') {
|
||||
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + ' (' + rowData['parent_title'] + ')</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
}
|
||||
|
@@ -16,7 +16,7 @@ libraries_list_table_options = {
    "pageLength": 10,
    "order": [ 2, 'asc'],
    "autoWidth": true,
-    "stateSave": true,
+    "stateSave": false,
    "pagingType": "bootstrap",
    "columnDefs": [
        {
@@ -133,23 +133,27 @@ libraries_list_table_options = {
|
||||
},
|
||||
{
|
||||
"targets": [8],
|
||||
"data":"last_watched",
|
||||
"data":"last_played",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null && cellData !== '') {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = ''
|
||||
var thumb_popover = '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'episode') {
|
||||
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'track') {
|
||||
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=80&fallback=poster" data-height="80" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type']) {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
}
|
||||
|
@@ -5,7 +5,7 @@ var log_table_options = {
    "pagingType": "bootstrap",
    "order": [ 0, 'desc'],
    "pageLength": 50,
-    "stateSave": true,
+    "stateSave": false,
    "language": {
        "search":"Search: ",
        "lengthMenu":"Show _MENU_ lines per page",
@@ -34,9 +34,12 @@ media_info_table_options = {
|
||||
"targets": [0],
|
||||
"data": "added_at",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null && cellData !== '') {
|
||||
if (rowData) {
|
||||
var expand_details = '';
|
||||
var date = moment(cellData, "X").format(date_format);
|
||||
var date = '';
|
||||
if (cellData !== null && cellData !== '') {
|
||||
date = moment(cellData, "X").format(date_format);
|
||||
}
|
||||
if (rowData['media_type'] === 'show') {
|
||||
expand_details = '<span class="expand-media-info-tooltip" data-toggle="tooltip" title="Show Seasons"><i class="fa fa-plus-circle fa-fw"></i></span>';
|
||||
$(td).html('<div><a href="#"><div style="float: left;">' + expand_details + ' ' + date + '</div></a></div>');
|
||||
@@ -66,11 +69,13 @@ media_info_table_options = {
|
||||
"data": "title",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null && cellData !== '') {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + ' (' + rowData['year'] + ')</span>'
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'show') {
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="TV Show"><i class="fa fa-television fa-fw"></i></span>';
|
||||
@@ -199,7 +204,7 @@ media_info_table_options = {
|
||||
},
|
||||
{
|
||||
"targets": [10],
|
||||
"data": "last_watched",
|
||||
"data": "last_played",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null && cellData !== '') {
|
||||
date = moment(cellData, "X").format(date_format);
|
||||
@@ -427,7 +432,7 @@ function childTableFormatMedia(rowData) {
        '<th align="left" id="audio_codec">Audio Codec</th>' +
        '<th align="left" id="audio_channels">Audio Channels</th>' +
        '<th align="left" id="file_size">File Size</th>' +
-        '<th align="left" id="last_watched">Last Watched</th>' +
+        '<th align="left" id="last_played">Last Played</th>' +
        '<th align="left" id="total_plays">Total Plays</th>' +
        '</tr>' +
        '</thead>' +
@@ -5,7 +5,7 @@ var plex_log_table_options = {
    "pagingType": "bootstrap",
    "order": [ 0, 'desc'],
    "pageLength": 50,
-    "stateSave": true,
+    "stateSave": false,
    "language": {
        "search":"Search: ",
        "lengthMenu":"Show _MENU_ lines per page",
@@ -4,7 +4,7 @@ sync_table_options = {
    "pagingType": "bootstrap",
    "order": [ [ 0, 'desc'], [ 1, 'asc'], [2, 'asc'] ],
    "pageLength": 25,
-    "stateSave": true,
+    "stateSave": false,
    "language": {
        "search":"Search: ",
        "lengthMenu":"Show _MENU_ lines per page",
@@ -8,7 +8,7 @@ user_ip_table_options = {
        "infoFiltered":"(filtered from _MAX_ total entries)",
        "emptyTable": "No data in table",
    },
-    "stateSave": true,
+    "stateSave": false,
    "pagingType": "bootstrap",
    "processing": false,
    "serverSide": true,
@@ -64,12 +64,12 @@ user_ip_table_options = {
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData) {
|
||||
var transcode_dec = '';
|
||||
if (rowData['video_decision'] === 'transcode') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span> ';
|
||||
} else if (rowData['video_decision'] === 'copy') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span> ';
|
||||
} else if (rowData['video_decision'] === 'direct play' || rowData['video_decision'] === '') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span> ';
|
||||
if (rowData['video_decision'] === 'transcode' || rowData['audio_decision'] === 'transcode') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
|
||||
} else if (rowData['video_decision'] === 'copy' || rowData['audio_decision'] === 'copy') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
|
||||
} else if (rowData['video_decision'] === 'direct play' || rowData['audio_decision'] === 'direct play') {
|
||||
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
|
||||
}
|
||||
$(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + ' ' + cellData + '</div></a></div>');
|
||||
} else {
|
||||
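The rewritten condition above checks both the video and the audio decision instead of the video decision alone, with transcoding taking precedence over copying (direct stream), and copying over direct play. The JavaScript in the diff is the authoritative version; the same precedence in a small Python sketch:

```python
def stream_decision(video_decision, audio_decision):
    """Classify a session as 'transcode', 'copy' (direct stream), or 'direct play'."""
    decisions = (video_decision, audio_decision)
    if 'transcode' in decisions:
        return 'transcode'      # either stream being transcoded marks the whole session
    if 'copy' in decisions:
        return 'copy'           # container remuxed, streams copied through
    return 'direct play'

assert stream_decision('copy', 'transcode') == 'transcode'
assert stream_decision('direct play', 'copy') == 'copy'
assert stream_decision('direct play', 'direct play') == 'direct play'
```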
@@ -81,23 +81,27 @@ user_ip_table_options = {
|
||||
},
|
||||
{
|
||||
"targets": [4],
|
||||
"data":"last_watched",
|
||||
"data":"last_played",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== '') {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = ''
|
||||
var thumb_popover = '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'episode') {
|
||||
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'track') {
|
||||
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=80&fallback=poster" data-height="80" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type']) {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
} else {
|
||||
|
@@ -16,7 +16,7 @@ users_list_table_options = {
    "pageLength": 10,
    "order": [ 2, 'asc'],
    "autoWidth": true,
-    "stateSave": true,
+    "stateSave": false,
    "pagingType": "bootstrap",
    "columnDefs": [
        {
@@ -137,23 +137,27 @@ users_list_table_options = {
|
||||
},
|
||||
{
|
||||
"targets": [7],
|
||||
"data":"last_watched",
|
||||
"data":"last_played",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null && cellData !== '') {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = ''
|
||||
var thumb_popover = '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'episode') {
|
||||
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=120&fallback=poster" data-height="120" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'track') {
|
||||
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=80&height=80&fallback=poster" data-height="80" data-width="80">' + cellData + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type']) {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
}
|
||||
|
@@ -13,7 +13,7 @@
    </div>
    % elif config['update_section_ids'] == -1:
    <div id="update_section_ids_message" style="text-align: center; margin-top: 20px;">
-        <i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes depending on the size of your database.
+        <i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes to hours depending on the size of your database.
        <br />
        You may leave this page and come back later.
    </div>
@@ -46,7 +46,7 @@
    <th align="left" id="parent_count">Total Seasons / Albums</th>
    <th align="left" id="child_count">Total Episodes / Tracks</th>
    <th align="left" id="last_accessed">Last Accessed</th>
-    <th align="left" id="last_watched">Last Watched</th>
+    <th align="left" id="last_played">Last Played</th>
    <th align="left" id="total_plays">Total Plays</th>
    </tr>
    </thead>
@@ -177,7 +177,7 @@
    $("#refresh-libraries-list").click(function () {
        if ("${config['update_section_ids']}" == "1") {
            $('#update_section_ids_message').html(
-                '<i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes depending on the size of your database.' +
+                '<i class="fa fa-refresh fa-spin"></i> PlexPy is updating library IDs in the database. This could take a few minutes to hours depending on the size of your database.' +
                '<br />' +
                'You may leave this page and come back later.');
            $(this).prop('disabled', true);
@@ -240,7 +240,7 @@ DOCUMENTATION :: END
    <th align="left" id="audio_codec">Audio Codec</th>
    <th align="left" id="audio_channels">Audio Channels</th>
    <th align="left" id="file_size">File Size</th>
-    <th align="left" id="last_watched">Last Watched</th>
+    <th align="left" id="last_played">Last Played</th>
    <th align="left" id="total_plays">Total Plays</th>
    </tr>
    </thead>
@@ -143,7 +143,7 @@ from plexpy import helpers
 
        $('#osxnotifyregister').click(function () {
            var osx_notify_app = $('#osx_notify_app').val();
-            $.get('/osxnotifyregister', { 'app': osx_notify_app }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
+            $.get('osxnotifyregister', { 'app': osx_notify_app }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
        })
 
        $('#save-notification-item').click(function () {
@@ -163,12 +163,12 @@ from plexpy import helpers
        });
 
        $('#twitterStep1').click(function () {
-            $.get('/twitterStep1', function (data) {window.open(data); })
+            $.get('twitterStep1', function (data) {window.open(data); })
            .done(function () { showMsg('<i class="fa fa-check"></i> Confirm Authorization. Check pop-up blocker if no response.', false, true, 3000); });
        });
        $('#twitterStep2').click(function () {
            var twitter_key = $('#twitter_key').val();
-            $.get('/twitterStep2', { 'key': twitter_key }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
+            $.get('twitterStep2', { 'key': twitter_key }, function (data) { showMsg('<i class="fa fa-check"></i> ' + data, false, true, 3000); });
        });
 
        function disableFacebookRequest() {
@@ -181,8 +181,12 @@ from plexpy import helpers
        });
 
        $('#facebookStep1').click(function () {
+            // Remove trailing '/' from Facebook redirect URI
+            if ($('#facebook_redirect_uri') && $('#facebook_redirect_uri').val().endsWith('/')) {
+                $('#facebook_redirect_uri').val($('#facebook_redirect_uri').val().slice(0, -1));
+            }
            doAjaxCall('set_notification_config', $(this), 'tabs', true);
-            $.get('/facebookStep1', function (data) { window.open(data); })
+            $.get('facebookStep1', function (data) { window.open(data); })
            .done(function () { showMsg('<i class="fa fa-check"></i> Confirm Authorization. Check pop-up blocker if no response.', false, true, 3000); });
        });
 
@@ -145,7 +145,7 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
    <li class="card card-sortable">
        <div class="card-handle"><i class="fa fa-bars"></i></div>
        <label>
-            <input type="checkbox" id="hscard-last_watched" name="hscard-last_watched" value="last_watched"> Last Watched
+            <input type="checkbox" id="hscard-last_watched" name="hscard-last_watched" value="last_watched"> Last Played
        </label>
    </li>
    <li class="card card-sortable">
@@ -359,6 +359,10 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
</label>
|
||||
<p class="help-block">Force PlexPy to connect to your Plex Server via SSL. Your server needs to have remote access enabled.</p>
|
||||
</div>
|
||||
|
||||
<input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
<input type="checkbox" name="server_changed" id="server_changed" value="1" style="display:none">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Plex Logs</h3>
|
||||
</div>
|
||||
@@ -374,8 +378,6 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
<a href="https://support.plex.tv/hc/en-us/articles/200250417-Plex-Media-Server-Log-Files" target="_blank">Click here</a> for help. This is required if you enable IP logging (for PMS 0.9.12 and below). </p>
|
||||
</div>
|
||||
|
||||
<input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
|
||||
<input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully">
|
||||
|
||||
</div>
|
||||
@@ -896,111 +898,111 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
<div class="modal-body">
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Day</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td width="100"><strong>DD</strong></td>
|
||||
<td width="300">Numeric, with leading zeros</td>
|
||||
<td>01 to 31</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>D</strong></td>
|
||||
<td>Numeric, without leading zeros</td>
|
||||
<td>1 to 31</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Do</strong></td>
|
||||
<td>The English suffix for the day of the month</td>
|
||||
<td>st, nd or th in the 1st, 2nd or 15th.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Year</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>YYYY</strong></td>
|
||||
<td>Numeric, 4 digits</td>
|
||||
<td>Eg., 1999, 2003</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>YY</strong></td>
|
||||
<td>Numeric, 2 digits</td>
|
||||
<td>Eg., 99, 03</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Month</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>MM</strong></td>
|
||||
<td>Numeric, with leading zeros</td>
|
||||
<td>01 to 31</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>M</strong></td>
|
||||
<td>Numeric, without leading zeros</td>
|
||||
<td>1 to 31</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>MMMM</strong></td>
|
||||
<td>Textual full</td>
|
||||
<td>January to December</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>MMM</strong></td>
|
||||
<td>Textual three letters</td>
|
||||
<td>Jan to Dec</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Month</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>MM</strong></td>
|
||||
<td>Numeric, with leading zeros</td>
|
||||
<td>01-12</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>M</strong></td>
|
||||
<td>Numeric, without leading zeros</td>
|
||||
<td>1-12</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>MMMM</strong></td>
|
||||
<td>Textual full</td>
|
||||
<td>January-December</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>MMM</strong></td>
|
||||
<td>Textual three letters</td>
|
||||
<td>Jan-Dec</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Year</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>YYYY</strong></td>
|
||||
<td>Numeric, 4 digits</td>
|
||||
<td>Eg., 1999, 2003</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>YY</strong></td>
|
||||
<td>Numeric, 2 digits</td>
|
||||
<td>Eg., 99, 03</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Day</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td width="100"><strong>DD</strong></td>
|
||||
<td width="300">Numeric, with leading zeros</td>
|
||||
<td>01-31</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>D</strong></td>
|
||||
<td>Numeric, without leading zeros</td>
|
||||
<td>1-31</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Do</strong></td>
|
||||
<td>The English suffix for the day of the month</td>
|
||||
<td>st, nd or th in the 1st, 2nd or 15th.</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Time</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>a</strong></td>
|
||||
<td width="300">am/pm Lowercase</td>
|
||||
<td>am, pm</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>A</strong></td>
|
||||
<td>AM/PM Uppercase</td>
|
||||
<td>AM, PM</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>h</strong></td>
|
||||
<td>Hour, 12-hour, without leading zeros</td>
|
||||
<td>1-12</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>hh</strong></td>
|
||||
<td>Hour, 12-hour, with leading zeros</td>
|
||||
<td>01-12</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>H</strong></td>
|
||||
<td>Hour, 24-hour, without leading zeros</td>
|
||||
<td>0-23</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>HH</strong></td>
|
||||
<td>Hour, 24-hour, with leading zeros</td>
|
||||
<td>00-23</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>mm</strong></td>
|
||||
<td>Minutes, with leading zeros</td>
|
||||
<td>00-59</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>ss</strong></td>
|
||||
<td>Seconds, with leading zeros</td>
|
||||
<td>00-59</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>zz</strong></td>
|
||||
<td>Timezone abbreviation</td>
|
||||
<td>Eg., EST, MDT ...</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" colspan="3"><h5>Time</h5></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>a</strong></td>
|
||||
<td width="300">am/pm Lowercase</td>
|
||||
<td>am, pm</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>A</strong></td>
|
||||
<td>AM/PM Uppercase</td>
|
||||
<td>AM, PM</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>h</strong></td>
|
||||
<td>Hour, 12-hour, without leading zeros</td>
|
||||
<td>1-12</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>hh</strong></td>
|
||||
<td>Hour, 12-hour, with leading zeros</td>
|
||||
<td>01-12</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>H</strong></td>
|
||||
<td>Hour, 24-hour, without leading zeros</td>
|
||||
<td>0-23</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>HH</strong></td>
|
||||
<td>Hour, 24-hour, with leading zeros</td>
|
||||
<td>00-23</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>mm</strong></td>
|
||||
<td>Minutes, with leading zeros</td>
|
||||
<td>00-59</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>ss</strong></td>
|
||||
<td>Seconds, with leading zeros</td>
|
||||
<td>00-59</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>zz</strong></td>
|
||||
<td>Timezone abbreviation</td>
|
||||
<td>Eg., EST, MDT ...</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
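The two tables above (rendered once in the old ordering and once in the new Month / Year / Day ordering) document the moment.js-style tokens accepted by the date and time format settings. The `arrow` library vendored at the bottom of this compare understands largely the same tokens on the Python side; a quick illustration, assuming only that `arrow` is importable:

```python
import arrow

now = arrow.now()
print(now.format('DD MMM YYYY'))          # e.g. '02 Feb 2016'
print(now.format('hh:mm a'))              # e.g. '09:30 pm'
print(now.format('YYYY-MM-DD HH:mm:ss'))  # ISO-like date and time
```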
@@ -1072,7 +1074,7 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
    <thead>
        <tr>
            <th>
-                Server Details
+                Global
            </th>
        </tr>
    </thead>
@@ -1085,6 +1087,18 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
<td><strong>{server_uptime}</strong></td>
|
||||
<td>The uptime (in days, hours, mins, secs) of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{action}</strong></td>
|
||||
<td>The action that triggered the notification.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{datestamp}</strong></td>
|
||||
<td>The date (in date format) the notification was triggered.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{timestamp}</strong></td>
|
||||
<td>The time (in time format) the notification was triggered.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<table class="notification-params">
|
||||
@@ -1100,18 +1114,6 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
<td><strong>{streams}</strong></td>
|
||||
<td>The number of concurrent streams.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{action}</strong></td>
|
||||
<td>The action that triggered the notification.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{datestamp}</strong></td>
|
||||
<td>The date the notification was triggered.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{timestamp}</strong></td>
|
||||
<td>The time the notification was triggered.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user}</strong></td>
|
||||
<td>The username of the person streaming.</td>
|
||||
@@ -1128,22 +1130,30 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
<td><strong>{ip_address}</strong></td>
|
||||
<td>The IP address of the device being used for playback. (PMS 0.9.14 and above)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{media_type}</strong></td>
|
||||
<td>The type of media being played (movie, episode, track).</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{stream_duration}</strong></td>
|
||||
<td>The stream duration (in minutes) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{stream_time}</strong></td>
|
||||
<td>The stream duration (in time format) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{remaining_duration}</strong></td>
|
||||
<td>The remaining duration (in minutes) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{progress}</strong></td>
|
||||
<td><strong>{remaining_time}</strong></td>
|
||||
<td>The remaining duration (in time format) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{progress_duration}</strong></td>
|
||||
<td>The last reported offset (in minutes) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{progress_time}</strong></td>
|
||||
<td>The last reported offset (in time format) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{progress_percent}</strong></td>
|
||||
<td>The last reported progress percent for the item.</td>
|
||||
@@ -1224,6 +1234,14 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
<td><strong>{transcode_audio_channels}</strong></td>
|
||||
<td>The audio channels of the transcoded media.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{session_key}</strong></td>
|
||||
<td>The unique identifier for the session.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user_id}</strong></td>
|
||||
<td>The unique identifier for the user.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<table class="notification-params">
|
||||
@@ -1235,37 +1253,41 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><strong>{media_type}</strong></td>
|
||||
<td>The type of media (movie, episode, track).</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{title}</strong></td>
|
||||
<td>The full title of the item being played.</td>
|
||||
<td>The full title of the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{library_name}</strong></td>
|
||||
<td>The library title of the item being played.</td>
|
||||
<td>The library title of the media item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{show_name}</strong></td>
|
||||
<td>The title of the TV series being played.</td>
|
||||
<td>The title of the TV series.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{episode_name}</strong></td>
|
||||
<td>The title of the episode being played.</td>
|
||||
<td>The title of the episode.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{artist_name}</strong></td>
|
||||
<td>The name of the artist being played.</td>
|
||||
<td>The name of the artist.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{album_name}</strong></td>
|
||||
<td>The title of the album being played.</td>
|
||||
<td>The title of the album.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{track_name}</strong></td>
|
||||
<td>The title of the track being played.</td>
|
||||
<td>The title of the track.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{season_num}</strong></td>
|
||||
<td>The season number for the media item if item is episode.</td>
|
||||
<td>The season number for the item if item is episode.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{season_num00}</strong></td>
|
||||
@@ -1273,43 +1295,51 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{episode_num}</strong></td>
|
||||
<td>The episode number for the media item if item is episode.</td>
|
||||
<td>The episode number for the item if item is episode.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{episode_num00}</strong></td>
|
||||
<td>The two digit episode number.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{track_num}</strong></td>
|
||||
<td>The track number for the item if item is track.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{track_num00}</strong></td>
|
||||
<td>The two digit track number.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{year}</strong></td>
|
||||
<td>The release year for the media item.</td>
|
||||
<td>The release year for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{studio}</strong></td>
|
||||
<td>The studio for the media item.</td>
|
||||
<td>The studio for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{content_rating}</strong></td>
|
||||
<td>The content rating for the media item. (e.g. TV-MA, TV-PG, etc.)</td>
|
||||
<td>The content rating for the item. (e.g. TV-MA, TV-PG, etc.)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{directors}</strong></td>
|
||||
<td>A list of directors for the media item.</td>
|
||||
<td>A list of directors for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{writers}</strong></td>
|
||||
<td>A list of writers for the media item.</td>
|
||||
<td>A list of writers for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{actors}</strong></td>
|
||||
<td>A list of actors for the media item.</td>
|
||||
<td>A list of actors for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{genres}</strong></td>
|
||||
<td>A list of genres for the media item.</td>
|
||||
<td>A list of genres for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{summary}</strong></td>
|
||||
<td>A short plot summary for the media item.</td>
|
||||
<td>A short plot summary for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{tagline}</strong></td>
|
||||
@@ -1323,6 +1353,22 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
|
||||
<td><strong>{duration}</strong></td>
|
||||
<td>The duration (in minutes) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{section_id}</strong></td>
|
||||
<td>The unique identifier for the library.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{rating_key}</strong></td>
|
||||
<td>The unique identifier for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{parent_rating_key}</strong></td>
|
||||
<td>The unique identifier for the item's parent (season or album).</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{grandparent_rating_key}</strong></td>
|
||||
<td>The unique identifier for the item's grandparent (TV show or artist).</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
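The parameter tables above list the {token} placeholders that can be used in notification subjects and bodies. Conceptually the substitution is plain keyword string formatting; a minimal sketch with made-up values (not PlexPy's actual notification handler, and only a handful of the documented parameters):

```python
# Hypothetical session values; in PlexPy these come from the current stream and its metadata.
parameters = {
    'user': 'alice',
    'title': 'Example Movie (2016)',
    'media_type': 'movie',
    'stream_time': '1:47',
    'remaining_time': '1:05',
    'progress_time': '0:42',
}

body_template = '{user} is playing {title}: {progress_time} in, {remaining_time} remaining of {stream_time}.'
print(body_template.format(**parameters))
```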
@@ -1534,15 +1580,16 @@ $(document).ready(function() {
|
||||
serverChanged = true;
|
||||
$("#pms_identifier").val("");
|
||||
$("#pms-verify-status").html("");
|
||||
$("#server_changed").prop('checked', true);
|
||||
verifyServer();
|
||||
});
|
||||
|
||||
function verifyServer(_callback) {
|
||||
var pms_ip = $("#pms_ip").val()
|
||||
var pms_port = $("#pms_port").val()
|
||||
var pms_identifier = $("#pms_identifier").val()
|
||||
var pms_ssl = $("#pms_ssl").val()
|
||||
var pms_is_remote = $("#pms_is_remote").val()
|
||||
var pms_ip = $("#pms_ip").val();
|
||||
var pms_port = $("#pms_port").val();
|
||||
var pms_identifier = $("#pms_identifier").val();
|
||||
var pms_ssl = $("#pms_ssl").is(':checked') ? 1 : 0;
|
||||
var pms_is_remote = $("#pms_is_remote").is(':checked') ? 1 : 0;
|
||||
if (($("#pms_ip").val() !== '') || ($("#pms_port").val() !== '')) {
|
||||
$("#pms-verify").html('<i class="fa fa-refresh fa-spin"></i>');
|
||||
$('#pms-verify').fadeIn('fast');
|
||||
@@ -1551,15 +1598,16 @@ $(document).ready(function() {
|
||||
data : { hostname: pms_ip, port: pms_port, identifier: pms_identifier, ssl: pms_ssl, remote: pms_is_remote },
|
||||
cache: true,
|
||||
async: true,
|
||||
timeout: 5000,
|
||||
timeout: 10000,
|
||||
error: function(jqXHR, textStatus, errorThrown) {
|
||||
$("#pms-verify").html('<i class="fa fa-close"></i>');
|
||||
$('#pms-verify').fadeIn('fast');
|
||||
$("#pms-ip-group").addClass("has-error");
|
||||
},
|
||||
success: function (xml) {
|
||||
if ($(xml).find('MediaContainer').attr('machineIdentifier')) {
|
||||
$("#pms_identifier").val($(xml).find('MediaContainer').attr('machineIdentifier'));
|
||||
success: function (json) {
|
||||
var machine_identifier = json;
|
||||
if (machine_identifier) {
|
||||
$("#pms_identifier").val(machine_identifier);
|
||||
$("#pms-verify").html('<i class="fa fa-check"></i>');
|
||||
$('#pms-verify').fadeIn('fast');
|
||||
$("#pms-ip-group").removeClass("has-error");
|
||||
|
@@ -147,7 +147,7 @@ from plexpy import helpers
    <th align="left">IP Address</th>
    <th align="left">Last Platform</th>
    <th align="left">Last Player</th>
-    <th align="left">Last Watched</th>
+    <th align="left">Last Played</th>
    <th align="left">Play Count</th>
    </tr>
    </thead>
@@ -30,7 +30,7 @@
    <th align="left" id="last_known_ip">Last Known IP</th>
    <th align="left" id="last_platform">Last Platform</th>
    <th align="left" id="last_player">Last Player</th>
-    <th align="left" id="last_watched">Last Watched</th>
+    <th align="left" id="last_played">Last Played</th>
    <th align="left" id="total_plays">Total Plays</th>
    </tr>
    </thead>
@@ -169,6 +169,7 @@ from plexpy import common
|
||||
<input type="checkbox" name="launch_browser" id="launch_browser" value="1" ${config['launch_browser']}>
|
||||
<input type="checkbox" name="refresh_users_on_startup" id="refresh_users_on_startup" value="1" ${config['refresh_users_on_startup']}>
|
||||
<input type="checkbox" name="refresh_libraries_on_startup" id="refresh_libraries_on_startup" value="1" ${config['refresh_libraries_on_startup']}>
|
||||
<input type="checkbox" name="server_changed" id="server_changed" value="1" checked>
|
||||
<input type="checkbox" name="first_run_complete" id="first_run_complete" value="1" checked>
|
||||
<input type="checkbox" name="check_github" id="check_github" value="1" checked>
|
||||
<input type="text" name="home_stats_cards" id="home_stats_cards" value="first_run_wizard">
|
||||
@@ -392,9 +393,10 @@ from plexpy import common
|
||||
$("#pms-verify-status").html('<i class="fa fa-exclamation-circle"></i> This is not a Plex Server!');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
},
|
||||
success: function (xml) {
|
||||
if ($(xml).find('MediaContainer').attr('machineIdentifier')) {
|
||||
$("#pms_identifier").val($(xml).find('MediaContainer').attr('machineIdentifier'));
|
||||
success: function (json) {
|
||||
var machine_identifier = json;
|
||||
if (machine_identifier) {
|
||||
$("#pms_identifier").val(machine_identifier);
|
||||
$("#pms-verify-status").html('<i class="fa fa-check"></i> Server found!');
|
||||
$('#pms-verify-status').fadeIn('fast');
|
||||
pms_verified = true;
|
||||
|
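Both verification handlers in this compare (settings and the welcome wizard) switch their AJAX success callbacks from parsing the raw Plex `MediaContainer` XML in the browser to expecting the machine identifier back as JSON, which pairs with the changelog entries about server verification and updating the PMS identifier. Server-side, that amounts to reading the identifier out of the PMS identity response and returning just that value; a hedged sketch (the helper name is illustrative, not PlexPy's exact code):

```python
import xml.etree.ElementTree as ET
import urllib2  # PlexPy targets Python 2; use urllib.request on Python 3

def get_server_identifier(hostname, port, ssl=0):
    """Fetch /identity from a Plex server and return its machineIdentifier, or ''."""
    scheme = 'https' if ssl else 'http'
    url = '{0}://{1}:{2}/identity'.format(scheme, hostname, port)
    try:
        identity_xml = urllib2.urlopen(url, timeout=10).read()
        root = ET.fromstring(identity_xml)  # <MediaContainer machineIdentifier="..." ...>
        return root.attrib.get('machineIdentifier', '')
    except Exception:
        return ''
```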
lib/arrow/__init__.py (new file, 8 lines)
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+
+from .arrow import Arrow
+from .factory import ArrowFactory
+from .api import get, now, utcnow
+
+__version__ = '0.7.0'
+VERSION = __version__
lib/arrow/api.py (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Provides the default implementation of :class:`ArrowFactory <arrow.factory.ArrowFactory>`
|
||||
methods for use as a module API.
|
||||
|
||||
'''
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from arrow.factory import ArrowFactory
|
||||
|
||||
|
||||
# internal default factory.
|
||||
_factory = ArrowFactory()
|
||||
|
||||
|
||||
def get(*args, **kwargs):
|
||||
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
|
||||
``get`` method.
|
||||
|
||||
'''
|
||||
|
||||
return _factory.get(*args, **kwargs)
|
||||
|
||||
def utcnow():
|
||||
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
|
||||
``utcnow`` method.
|
||||
|
||||
'''
|
||||
|
||||
return _factory.utcnow()
|
||||
|
||||
|
||||
def now(tz=None):
|
||||
''' Implements the default :class:`ArrowFactory <arrow.factory.ArrowFactory>`
|
||||
``now`` method.
|
||||
|
||||
'''
|
||||
|
||||
return _factory.now(tz)
|
||||
|
||||
|
||||
def factory(type):
|
||||
''' Returns an :class:`.ArrowFactory` for the specified :class:`Arrow <arrow.arrow.Arrow>`
|
||||
or derived type.
|
||||
|
||||
:param type: the type, :class:`Arrow <arrow.arrow.Arrow>` or derived.
|
||||
|
||||
'''
|
||||
|
||||
return ArrowFactory(type)
|
||||
|
||||
|
||||
__all__ = ['get', 'utcnow', 'now', 'factory']
|
||||
|
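Taken together, the helpers above are thin wrappers around a single module-level ``ArrowFactory``. A minimal usage sketch, assuming the bundled ``lib`` directory is on ``sys.path`` so the package imports as ``arrow`` (with ``python-dateutil`` available):

```python
import arrow  # the package added under lib/arrow

utc_now = arrow.get()                 # no arguments -> "now" in UTC
also_utc = arrow.utcnow()             # same thing, explicit
local = arrow.now()                   # local time by default
berlin = arrow.now('Europe/Berlin')   # any timezone expression TzinfoParser accepts

print(utc_now, also_utc, local, berlin)
```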
896
lib/arrow/arrow.py
Normal file
@@ -0,0 +1,896 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Provides the :class:`Arrow <arrow.arrow.Arrow>` class, an enhanced ``datetime``
|
||||
replacement.
|
||||
|
||||
'''
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from datetime import datetime, timedelta, tzinfo
|
||||
from dateutil import tz as dateutil_tz
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import calendar
|
||||
import sys
|
||||
|
||||
from arrow import util, locales, parser, formatter
|
||||
|
||||
|
||||
class Arrow(object):
|
||||
'''An :class:`Arrow <arrow.arrow.Arrow>` object.
|
||||
|
||||
Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing
|
||||
additional functionality.
|
||||
|
||||
:param year: the calendar year.
|
||||
:param month: the calendar month.
|
||||
:param day: the calendar day.
|
||||
:param hour: (optional) the hour. Defaults to 0.
|
||||
:param minute: (optional) the minute. Defaults to 0.
|
||||
:param second: (optional) the second. Defaults to 0.
|
||||
:param microsecond: (optional) the microsecond. Defaults to 0.
|
||||
:param tzinfo: (optional) the ``tzinfo`` object. Defaults to ``None``.
|
||||
|
||||
If tzinfo is None, it is assumed to be UTC on creation.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import arrow
|
||||
>>> arrow.Arrow(2013, 5, 5, 12, 30, 45)
|
||||
<Arrow [2013-05-05T12:30:45+00:00]>
|
||||
|
||||
'''
|
||||
|
||||
resolution = datetime.resolution
|
||||
|
||||
_ATTRS = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']
|
||||
_ATTRS_PLURAL = ['{0}s'.format(a) for a in _ATTRS]
|
||||
|
||||
def __init__(self, year, month, day, hour=0, minute=0, second=0, microsecond=0,
|
||||
tzinfo=None):
|
||||
|
||||
if util.isstr(tzinfo):
|
||||
tzinfo = parser.TzinfoParser.parse(tzinfo)
|
||||
tzinfo = tzinfo or dateutil_tz.tzutc()
|
||||
|
||||
self._datetime = datetime(year, month, day, hour, minute, second,
|
||||
microsecond, tzinfo)
|
||||
|
||||
|
||||
# factories: single object, both original and from datetime.
|
||||
|
||||
@classmethod
|
||||
def now(cls, tzinfo=None):
|
||||
'''Constructs an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now".
|
||||
|
||||
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time.
|
||||
|
||||
'''
|
||||
|
||||
utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc())
|
||||
dt = utc.astimezone(dateutil_tz.tzlocal() if tzinfo is None else tzinfo)
|
||||
|
||||
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
|
||||
dt.microsecond, dt.tzinfo)
|
||||
|
||||
@classmethod
|
||||
def utcnow(cls):
|
||||
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC
|
||||
time.
|
||||
|
||||
'''
|
||||
|
||||
dt = datetime.utcnow()
|
||||
|
||||
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
|
||||
dt.microsecond, dateutil_tz.tzutc())
|
||||
|
||||
@classmethod
|
||||
def fromtimestamp(cls, timestamp, tzinfo=None):
|
||||
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a timestamp.
|
||||
|
||||
:param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either.
|
||||
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time.
|
||||
|
||||
'''
|
||||
|
||||
tzinfo = tzinfo or dateutil_tz.tzlocal()
|
||||
timestamp = cls._get_timestamp_from_input(timestamp)
|
||||
dt = datetime.fromtimestamp(timestamp, tzinfo)
|
||||
|
||||
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
|
||||
dt.microsecond, tzinfo)
|
||||
|
||||
@classmethod
|
||||
def utcfromtimestamp(cls, timestamp):
|
||||
'''Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a timestamp, in UTC time.
|
||||
|
||||
:param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either.
|
||||
|
||||
'''
|
||||
|
||||
timestamp = cls._get_timestamp_from_input(timestamp)
|
||||
dt = datetime.utcfromtimestamp(timestamp)
|
||||
|
||||
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
|
||||
dt.microsecond, dateutil_tz.tzutc())
|
||||
|
||||
@classmethod
|
||||
def fromdatetime(cls, dt, tzinfo=None):
|
||||
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a ``datetime`` and optional
|
||||
``tzinfo`` object.
|
||||
|
||||
:param dt: the ``datetime``
|
||||
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to UTC.
|
||||
|
||||
'''
|
||||
|
||||
tzinfo = tzinfo or dt.tzinfo or dateutil_tz.tzutc()
|
||||
|
||||
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
|
||||
dt.microsecond, tzinfo)
|
||||
|
||||
@classmethod
|
||||
def fromdate(cls, date, tzinfo=None):
|
||||
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a ``date`` and optional
|
||||
``tzinfo`` object. Time values are set to 0.
|
||||
|
||||
:param date: the ``date``
|
||||
:param tzinfo: (optional) a ``tzinfo`` object. Defaults to UTC.
|
||||
'''
|
||||
|
||||
tzinfo = tzinfo or dateutil_tz.tzutc()
|
||||
|
||||
return cls(date.year, date.month, date.day, tzinfo=tzinfo)
|
||||
|
||||
@classmethod
|
||||
def strptime(cls, date_str, fmt, tzinfo=None):
|
||||
''' Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a date string and format,
|
||||
in the style of ``datetime.strptime``.
|
||||
|
||||
:param date_str: the date string.
|
||||
:param fmt: the format string.
|
||||
:param tzinfo: (optional) an optional ``tzinfo``
|
||||
'''
|
||||
|
||||
dt = datetime.strptime(date_str, fmt)
|
||||
tzinfo = tzinfo or dt.tzinfo
|
||||
|
||||
return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
|
||||
dt.microsecond, tzinfo)
|
||||
|
||||
|
||||
# factories: ranges and spans
|
||||
|
||||
@classmethod
|
||||
def range(cls, frame, start, end=None, tz=None, limit=None):
|
||||
''' Returns an array of :class:`Arrow <arrow.arrow.Arrow>` objects, representing
|
||||
an iteration of time between two inputs.
|
||||
|
||||
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
|
||||
:param start: A datetime expression, the start of the range.
|
||||
:param end: (optional) A datetime expression, the end of the range.
|
||||
:param tz: (optional) A timezone expression. Defaults to UTC.
|
||||
:param limit: (optional) A maximum number of tuples to return.
|
||||
|
||||
**NOTE**: either **end** or **limit** must be provided. Call with **end** alone to
|
||||
return the entire range, with **limit** alone to return a maximum # of results from the
|
||||
start, and with both to cap a range at a maximum # of results.
|
||||
|
||||
Supported frame values: year, quarter, month, week, day, hour, minute, second
|
||||
|
||||
Recognized datetime expressions:
|
||||
|
||||
- An :class:`Arrow <arrow.arrow.Arrow>` object.
|
||||
- A ``datetime`` object.
|
||||
|
||||
Recognized timezone expressions:
|
||||
|
||||
- A ``tzinfo`` object.
|
||||
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
|
||||
- A ``str`` in ISO-8601 style, as in '+07:00'.
|
||||
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
|
||||
|
||||
Usage:
|
||||
|
||||
>>> start = datetime(2013, 5, 5, 12, 30)
|
||||
>>> end = datetime(2013, 5, 5, 17, 15)
|
||||
>>> for r in arrow.Arrow.range('hour', start, end):
|
||||
... print repr(r)
|
||||
...
|
||||
<Arrow [2013-05-05T12:30:00+00:00]>
|
||||
<Arrow [2013-05-05T13:30:00+00:00]>
|
||||
<Arrow [2013-05-05T14:30:00+00:00]>
|
||||
<Arrow [2013-05-05T15:30:00+00:00]>
|
||||
<Arrow [2013-05-05T16:30:00+00:00]>
|
||||
|
||||
'''
|
||||
|
||||
_, frame_relative, relative_steps = cls._get_frames(frame)
|
||||
|
||||
tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz)
|
||||
|
||||
start = cls._get_datetime(start).replace(tzinfo=tzinfo)
|
||||
end, limit = cls._get_iteration_params(end, limit)
|
||||
end = cls._get_datetime(end).replace(tzinfo=tzinfo)
|
||||
|
||||
current = cls.fromdatetime(start)
|
||||
results = []
|
||||
|
||||
while current <= end and len(results) < limit:
|
||||
results.append(current)
|
||||
|
||||
values = [getattr(current, f) for f in cls._ATTRS]
|
||||
current = cls(*values, tzinfo=tzinfo) + relativedelta(**{frame_relative: relative_steps})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
@classmethod
|
||||
def span_range(cls, frame, start, end, tz=None, limit=None):
|
||||
''' Returns an array of tuples, each :class:`Arrow <arrow.arrow.Arrow>` objects,
|
||||
representing a series of timespans between two inputs.
|
||||
|
||||
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
|
||||
:param start: A datetime expression, the start of the range.
|
||||
:param end: (optional) A datetime expression, the end of the range.
|
||||
:param tz: (optional) A timezone expression. Defaults to UTC.
|
||||
:param limit: (optional) A maximum number of tuples to return.
|
||||
|
||||
**NOTE**: either **end** or **limit** must be provided. Call with **end** alone to
|
||||
return the entire range, with **limit** alone to return a maximum # of results from the
|
||||
start, and with both to cap a range at a maximum # of results.
|
||||
|
||||
Supported frame values: year, quarter, month, week, day, hour, minute, second
|
||||
|
||||
Recognized datetime expressions:
|
||||
|
||||
- An :class:`Arrow <arrow.arrow.Arrow>` object.
|
||||
- A ``datetime`` object.
|
||||
|
||||
Recognized timezone expressions:
|
||||
|
||||
- A ``tzinfo`` object.
|
||||
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
|
||||
- A ``str`` in ISO-8601 style, as in '+07:00'.
|
||||
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
|
||||
|
||||
Usage:
|
||||
|
||||
>>> start = datetime(2013, 5, 5, 12, 30)
|
||||
>>> end = datetime(2013, 5, 5, 17, 15)
|
||||
>>> for r in arrow.Arrow.span_range('hour', start, end):
|
||||
... print r
|
||||
...
|
||||
(<Arrow [2013-05-05T12:00:00+00:00]>, <Arrow [2013-05-05T12:59:59.999999+00:00]>)
|
||||
(<Arrow [2013-05-05T13:00:00+00:00]>, <Arrow [2013-05-05T13:59:59.999999+00:00]>)
|
||||
(<Arrow [2013-05-05T14:00:00+00:00]>, <Arrow [2013-05-05T14:59:59.999999+00:00]>)
|
||||
(<Arrow [2013-05-05T15:00:00+00:00]>, <Arrow [2013-05-05T15:59:59.999999+00:00]>)
|
||||
(<Arrow [2013-05-05T16:00:00+00:00]>, <Arrow [2013-05-05T16:59:59.999999+00:00]>)
|
||||
|
||||
'''
|
||||
tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz)
|
||||
start = cls.fromdatetime(start, tzinfo).span(frame)[0]
|
||||
_range = cls.range(frame, start, end, tz, limit)
|
||||
return [r.span(frame) for r in _range]
|
||||
|
||||
|
||||
# representations
|
||||
|
||||
def __repr__(self):
|
||||
|
||||
dt = self._datetime
|
||||
attrs = ', '.join([str(i) for i in [dt.year, dt.month, dt.day, dt.hour, dt.minute,
|
||||
dt.second, dt.microsecond]])
|
||||
|
||||
return '<{0} [{1}]>'.format(self.__class__.__name__, self.__str__())
|
||||
|
||||
def __str__(self):
|
||||
return self._datetime.isoformat()
|
||||
|
||||
def __format__(self, formatstr):
|
||||
|
||||
if len(formatstr) > 0:
|
||||
return self.format(formatstr)
|
||||
|
||||
return str(self)
|
||||
|
||||
def __hash__(self):
|
||||
return self._datetime.__hash__()
|
||||
|
||||
|
||||
# attributes & properties
|
||||
|
||||
def __getattr__(self, name):
|
||||
|
||||
if name == 'week':
|
||||
return self.isocalendar()[1]
|
||||
|
||||
if not name.startswith('_'):
|
||||
value = getattr(self._datetime, name, None)
|
||||
|
||||
if value is not None:
|
||||
return value
|
||||
|
||||
return object.__getattribute__(self, name)
|
||||
|
||||
@property
|
||||
def tzinfo(self):
|
||||
''' Gets the ``tzinfo`` of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
|
||||
|
||||
return self._datetime.tzinfo
|
||||
|
||||
@tzinfo.setter
|
||||
def tzinfo(self, tzinfo):
|
||||
''' Sets the ``tzinfo`` of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
|
||||
|
||||
self._datetime = self._datetime.replace(tzinfo=tzinfo)
|
||||
|
||||
@property
|
||||
def datetime(self):
|
||||
''' Returns a datetime representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
|
||||
|
||||
return self._datetime
|
||||
|
||||
@property
|
||||
def naive(self):
|
||||
''' Returns a naive datetime representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
|
||||
|
||||
return self._datetime.replace(tzinfo=None)
|
||||
|
||||
@property
|
||||
def timestamp(self):
|
||||
''' Returns a timestamp representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
|
||||
|
||||
return calendar.timegm(self._datetime.utctimetuple())
|
||||
|
||||
@property
|
||||
def float_timestamp(self):
|
||||
''' Returns a floating-point representation of the :class:`Arrow <arrow.arrow.Arrow>` object. '''
|
||||
|
||||
return self.timestamp + float(self.microsecond) / 1000000
|
||||
|
||||
|
||||
# mutation and duplication.
|
||||
|
||||
def clone(self):
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, cloned from the current one.
|
||||
|
||||
Usage:
|
||||
|
||||
>>> arw = arrow.utcnow()
|
||||
>>> cloned = arw.clone()
|
||||
|
||||
'''
|
||||
|
||||
return self.fromdatetime(self._datetime)
|
||||
|
||||
def replace(self, **kwargs):
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object with attributes updated
|
||||
according to inputs.
|
||||
|
||||
Use single property names to set their value absolutely:
|
||||
|
||||
>>> import arrow
|
||||
>>> arw = arrow.utcnow()
|
||||
>>> arw
|
||||
<Arrow [2013-05-11T22:27:34.787885+00:00]>
|
||||
>>> arw.replace(year=2014, month=6)
|
||||
<Arrow [2014-06-11T22:27:34.787885+00:00]>
|
||||
|
||||
Use plural property names to shift their current value relatively:
|
||||
|
||||
>>> arw.replace(years=1, months=-1)
|
||||
<Arrow [2014-04-11T22:27:34.787885+00:00]>
|
||||
|
||||
You can also provide a timezone expression to replace the current timezone:
|
||||
|
||||
>>> arw.replace(tzinfo=tz.tzlocal())
|
||||
<Arrow [2013-05-11T22:27:34.787885-07:00]>
|
||||
|
||||
Recognized timezone expressions:
|
||||
|
||||
- A ``tzinfo`` object.
|
||||
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
|
||||
- A ``str`` in ISO-8601 style, as in '+07:00'.
|
||||
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
|
||||
|
||||
'''
|
||||
|
||||
absolute_kwargs = {}
|
||||
relative_kwargs = {}
|
||||
|
||||
for key, value in kwargs.items():
|
||||
|
||||
if key in self._ATTRS:
|
||||
absolute_kwargs[key] = value
|
||||
elif key in self._ATTRS_PLURAL or key == 'weeks':
|
||||
relative_kwargs[key] = value
|
||||
elif key == 'week':
|
||||
raise AttributeError('setting absolute week is not supported')
|
||||
elif key != 'tzinfo':
|
||||
raise AttributeError()
|
||||
|
||||
current = self._datetime.replace(**absolute_kwargs)
|
||||
current += relativedelta(**relative_kwargs)
|
||||
|
||||
tzinfo = kwargs.get('tzinfo')
|
||||
|
||||
if tzinfo is not None:
|
||||
tzinfo = self._get_tzinfo(tzinfo)
|
||||
current = current.replace(tzinfo=tzinfo)
|
||||
|
||||
return self.fromdatetime(current)
|
||||
|
||||
def to(self, tz):
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, converted to the target
|
||||
timezone.
|
||||
|
||||
:param tz: an expression representing a timezone.
|
||||
|
||||
Recognized timezone expressions:
|
||||
|
||||
- A ``tzinfo`` object.
|
||||
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
|
||||
- A ``str`` in ISO-8601 style, as in '+07:00'.
|
||||
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> utc = arrow.utcnow()
|
||||
>>> utc
|
||||
<Arrow [2013-05-09T03:49:12.311072+00:00]>
|
||||
|
||||
>>> utc.to('US/Pacific')
|
||||
<Arrow [2013-05-08T20:49:12.311072-07:00]>
|
||||
|
||||
>>> utc.to(tz.tzlocal())
|
||||
<Arrow [2013-05-08T20:49:12.311072-07:00]>
|
||||
|
||||
>>> utc.to('-07:00')
|
||||
<Arrow [2013-05-08T20:49:12.311072-07:00]>
|
||||
|
||||
>>> utc.to('local')
|
||||
<Arrow [2013-05-08T20:49:12.311072-07:00]>
|
||||
|
||||
>>> utc.to('local').to('utc')
|
||||
<Arrow [2013-05-09T03:49:12.311072+00:00]>
|
||||
|
||||
'''
|
||||
|
||||
if not isinstance(tz, tzinfo):
|
||||
tz = parser.TzinfoParser.parse(tz)
|
||||
|
||||
dt = self._datetime.astimezone(tz)
|
||||
|
||||
return self.__class__(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
|
||||
dt.microsecond, dt.tzinfo)
|
||||
|
||||
def span(self, frame, count=1):
|
||||
''' Returns two new :class:`Arrow <arrow.arrow.Arrow>` objects, representing the timespan
|
||||
of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
|
||||
|
||||
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
|
||||
:param count: (optional) the number of frames to span.
|
||||
|
||||
Supported frame values: year, quarter, month, week, day, hour, minute, second
|
||||
|
||||
Usage::
|
||||
|
||||
>>> arrow.utcnow()
|
||||
<Arrow [2013-05-09T03:32:36.186203+00:00]>
|
||||
|
||||
>>> arrow.utcnow().span('hour')
|
||||
(<Arrow [2013-05-09T03:00:00+00:00]>, <Arrow [2013-05-09T03:59:59.999999+00:00]>)
|
||||
|
||||
>>> arrow.utcnow().span('day')
|
||||
(<Arrow [2013-05-09T00:00:00+00:00]>, <Arrow [2013-05-09T23:59:59.999999+00:00]>)
|
||||
|
||||
>>> arrow.utcnow().span('day', count=2)
|
||||
(<Arrow [2013-05-09T00:00:00+00:00]>, <Arrow [2013-05-10T23:59:59.999999+00:00]>)
|
||||
|
||||
'''
|
||||
|
||||
frame_absolute, frame_relative, relative_steps = self._get_frames(frame)
|
||||
|
||||
if frame_absolute == 'week':
|
||||
attr = 'day'
|
||||
elif frame_absolute == 'quarter':
|
||||
attr = 'month'
|
||||
else:
|
||||
attr = frame_absolute
|
||||
|
||||
index = self._ATTRS.index(attr)
|
||||
frames = self._ATTRS[:index + 1]
|
||||
|
||||
values = [getattr(self, f) for f in frames]
|
||||
|
||||
for i in range(3 - len(values)):
|
||||
values.append(1)
|
||||
|
||||
floor = self.__class__(*values, tzinfo=self.tzinfo)
|
||||
|
||||
if frame_absolute == 'week':
|
||||
floor = floor + relativedelta(days=-(self.isoweekday() - 1))
|
||||
elif frame_absolute == 'quarter':
|
||||
floor = floor + relativedelta(months=-((self.month - 1) % 3))
|
||||
|
||||
ceil = floor + relativedelta(
|
||||
**{frame_relative: count * relative_steps}) + relativedelta(microseconds=-1)
|
||||
|
||||
return floor, ceil
|
||||
|
||||
def floor(self, frame):
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, representing the "floor"
|
||||
of the timespan of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
|
||||
Equivalent to the first element in the 2-tuple returned by
|
||||
:func:`span <arrow.arrow.Arrow.span>`.
|
||||
|
||||
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
|
||||
|
||||
Usage::
|
||||
|
||||
>>> arrow.utcnow().floor('hour')
|
||||
<Arrow [2013-05-09T03:00:00+00:00]>
|
||||
'''
|
||||
|
||||
return self.span(frame)[0]
|
||||
|
||||
def ceil(self, frame):
|
||||
''' Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, representing the "ceiling"
|
||||
of the timespan of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
|
||||
Equivalent to the second element in the 2-tuple returned by
|
||||
:func:`span <arrow.arrow.Arrow.span>`.
|
||||
|
||||
:param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
|
||||
|
||||
Usage::
|
||||
|
||||
>>> arrow.utcnow().ceil('hour')
|
||||
<Arrow [2013-05-09T03:59:59.999999+00:00]>
|
||||
'''
|
||||
|
||||
return self.span(frame)[1]
|
||||
|
||||
|
||||
# string output and formatting.
|
||||
|
||||
def format(self, fmt='YYYY-MM-DD HH:mm:ssZZ', locale='en_us'):
|
||||
''' Returns a string representation of the :class:`Arrow <arrow.arrow.Arrow>` object,
|
||||
formatted according to a format string.
|
||||
|
||||
:param fmt: the format string.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ')
|
||||
'2013-05-09 03:56:47 -00:00'
|
||||
|
||||
>>> arrow.utcnow().format('X')
|
||||
'1368071882'
|
||||
|
||||
>>> arrow.utcnow().format('MMMM DD, YYYY')
|
||||
'May 09, 2013'
|
||||
|
||||
>>> arrow.utcnow().format()
|
||||
'2013-05-09 03:56:47 -00:00'
|
||||
|
||||
'''
|
||||
|
||||
return formatter.DateTimeFormatter(locale).format(self._datetime, fmt)
|
||||
|
||||
|
||||
def humanize(self, other=None, locale='en_us', only_distance=False):
|
||||
''' Returns a localized, humanized representation of a relative difference in time.
|
||||
|
||||
:param other: (optional) an :class:`Arrow <arrow.arrow.Arrow>` or ``datetime`` object.
|
||||
Defaults to now in the current :class:`Arrow <arrow.arrow.Arrow>` object's timezone.
|
||||
:param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'.
|
||||
:param only_distance: (optional) returns only the time difference, e.g. "11 seconds", without the "in" or "ago" part.
|
||||
Usage::
|
||||
|
||||
>>> earlier = arrow.utcnow().replace(hours=-2)
|
||||
>>> earlier.humanize()
|
||||
'2 hours ago'
|
||||
|
||||
>>> later = earlier.replace(hours=4)
|
||||
>>> later.humanize(earlier)
|
||||
'in 4 hours'
|
||||
|
||||
'''
|
||||
|
||||
locale = locales.get_locale(locale)
|
||||
|
||||
if other is None:
|
||||
utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc())
|
||||
dt = utc.astimezone(self._datetime.tzinfo)
|
||||
|
||||
elif isinstance(other, Arrow):
|
||||
dt = other._datetime
|
||||
|
||||
elif isinstance(other, datetime):
|
||||
if other.tzinfo is None:
|
||||
dt = other.replace(tzinfo=self._datetime.tzinfo)
|
||||
else:
|
||||
dt = other.astimezone(self._datetime.tzinfo)
|
||||
|
||||
else:
|
||||
raise TypeError()
|
||||
|
||||
delta = int(util.total_seconds(self._datetime - dt))
|
||||
sign = -1 if delta < 0 else 1
|
||||
diff = abs(delta)
|
||||
delta = diff
|
||||
|
||||
if diff < 10:
|
||||
return locale.describe('now', only_distance=only_distance)
|
||||
|
||||
if diff < 45:
|
||||
return locale.describe('seconds', sign, only_distance=only_distance)
|
||||
|
||||
elif diff < 90:
|
||||
return locale.describe('minute', sign, only_distance=only_distance)
|
||||
elif diff < 2700:
|
||||
minutes = sign * int(max(delta / 60, 2))
|
||||
return locale.describe('minutes', minutes, only_distance=only_distance)
|
||||
|
||||
elif diff < 5400:
|
||||
return locale.describe('hour', sign, only_distance=only_distance)
|
||||
elif diff < 79200:
|
||||
hours = sign * int(max(delta / 3600, 2))
|
||||
return locale.describe('hours', hours, only_distance=only_distance)
|
||||
|
||||
elif diff < 129600:
|
||||
return locale.describe('day', sign, only_distance=only_distance)
|
||||
elif diff < 2160000:
|
||||
days = sign * int(max(delta / 86400, 2))
|
||||
return locale.describe('days', days, only_distance=only_distance)
|
||||
|
||||
elif diff < 3888000:
|
||||
return locale.describe('month', sign, only_distance=only_distance)
|
||||
elif diff < 29808000:
|
||||
self_months = self._datetime.year * 12 + self._datetime.month
|
||||
other_months = dt.year * 12 + dt.month
|
||||
months = sign * abs(other_months - self_months)
|
||||
|
||||
return locale.describe('months', months, only_distance=only_distance)
|
||||
|
||||
elif diff < 47260800:
|
||||
return locale.describe('year', sign, only_distance=only_distance)
|
||||
else:
|
||||
years = sign * int(max(delta / 31536000, 2))
|
||||
return locale.describe('years', years, only_distance=only_distance)
|
||||
|
||||
|
||||
# math
|
||||
|
||||
def __add__(self, other):
|
||||
|
||||
if isinstance(other, (timedelta, relativedelta)):
|
||||
return self.fromdatetime(self._datetime + other, self._datetime.tzinfo)
|
||||
|
||||
raise TypeError()
|
||||
|
||||
def __radd__(self, other):
|
||||
return self.__add__(other)
|
||||
|
||||
def __sub__(self, other):
|
||||
|
||||
if isinstance(other, timedelta):
|
||||
return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)
|
||||
|
||||
elif isinstance(other, datetime):
|
||||
return self._datetime - other
|
||||
|
||||
elif isinstance(other, Arrow):
|
||||
return self._datetime - other._datetime
|
||||
|
||||
raise TypeError()
|
||||
|
||||
def __rsub__(self, other):
|
||||
return self.__sub__(other)
|
||||
|
||||
|
||||
# comparisons
|
||||
|
||||
def _cmperror(self, other):
|
||||
raise TypeError('can\'t compare \'{0}\' to \'{1}\''.format(
|
||||
type(self), type(other)))
|
||||
|
||||
def __eq__(self, other):
|
||||
|
||||
if not isinstance(other, (Arrow, datetime)):
|
||||
return False
|
||||
|
||||
other = self._get_datetime(other)
|
||||
|
||||
return self._datetime == self._get_datetime(other)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __gt__(self, other):
|
||||
|
||||
if not isinstance(other, (Arrow, datetime)):
|
||||
self._cmperror(other)
|
||||
|
||||
return self._datetime > self._get_datetime(other)
|
||||
|
||||
def __ge__(self, other):
|
||||
|
||||
if not isinstance(other, (Arrow, datetime)):
|
||||
self._cmperror(other)
|
||||
|
||||
return self._datetime >= self._get_datetime(other)
|
||||
|
||||
def __lt__(self, other):
|
||||
|
||||
if not isinstance(other, (Arrow, datetime)):
|
||||
self._cmperror(other)
|
||||
|
||||
return self._datetime < self._get_datetime(other)
|
||||
|
||||
def __le__(self, other):
|
||||
|
||||
if not isinstance(other, (Arrow, datetime)):
|
||||
self._cmperror(other)
|
||||
|
||||
return self._datetime <= self._get_datetime(other)
|
||||
|
||||
|
||||
# datetime methods
|
||||
|
||||
def date(self):
|
||||
''' Returns a ``date`` object with the same year, month and day. '''
|
||||
|
||||
return self._datetime.date()
|
||||
|
||||
def time(self):
|
||||
''' Returns a ``time`` object with the same hour, minute, second, microsecond. '''
|
||||
|
||||
return self._datetime.time()
|
||||
|
||||
def timetz(self):
|
||||
''' Returns a ``time`` object with the same hour, minute, second, microsecond and tzinfo. '''
|
||||
|
||||
return self._datetime.timetz()
|
||||
|
||||
def astimezone(self, tz):
|
||||
''' Returns a ``datetime`` object, adjusted to the specified tzinfo.
|
||||
|
||||
:param tz: a ``tzinfo`` object.
|
||||
|
||||
'''
|
||||
|
||||
return self._datetime.astimezone(tz)
|
||||
|
||||
def utcoffset(self):
|
||||
''' Returns a ``timedelta`` object representing the whole number of minutes difference from UTC time. '''
|
||||
|
||||
return self._datetime.utcoffset()
|
||||
|
||||
def dst(self):
|
||||
''' Returns the daylight savings time adjustment. '''
|
||||
return self._datetime.dst()
|
||||
|
||||
def timetuple(self):
|
||||
''' Returns a ``time.struct_time``, in the current timezone. '''
|
||||
|
||||
return self._datetime.timetuple()
|
||||
|
||||
def utctimetuple(self):
|
||||
''' Returns a ``time.struct_time``, in UTC time. '''
|
||||
|
||||
return self._datetime.utctimetuple()
|
||||
|
||||
def toordinal(self):
|
||||
''' Returns the proleptic Gregorian ordinal of the date. '''
|
||||
|
||||
return self._datetime.toordinal()
|
||||
|
||||
def weekday(self):
|
||||
''' Returns the day of the week as an integer (0-6). '''
|
||||
|
||||
return self._datetime.weekday()
|
||||
|
||||
def isoweekday(self):
|
||||
''' Returns the ISO day of the week as an integer (1-7). '''
|
||||
|
||||
return self._datetime.isoweekday()
|
||||
|
||||
def isocalendar(self):
|
||||
''' Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). '''
|
||||
|
||||
return self._datetime.isocalendar()
|
||||
|
||||
def isoformat(self, sep='T'):
|
||||
'''Returns an ISO 8601 formatted representation of the date and time. '''
|
||||
|
||||
return self._datetime.isoformat(sep)
|
||||
|
||||
def ctime(self):
|
||||
''' Returns a ctime formatted representation of the date and time. '''
|
||||
|
||||
return self._datetime.ctime()
|
||||
|
||||
def strftime(self, format):
|
||||
''' Formats in the style of ``datetime.strftime``.
|
||||
|
||||
:param format: the format string.
|
||||
|
||||
'''
|
||||
|
||||
return self._datetime.strftime(format)
|
||||
|
||||
def for_json(self):
|
||||
'''Serializes for the ``for_json`` protocol of simplejson.'''
|
||||
return self.isoformat()
|
||||
|
||||
# internal tools.
|
||||
|
||||
@staticmethod
|
||||
def _get_tzinfo(tz_expr):
|
||||
|
||||
if tz_expr is None:
|
||||
return dateutil_tz.tzutc()
|
||||
if isinstance(tz_expr, tzinfo):
|
||||
return tz_expr
|
||||
else:
|
||||
try:
|
||||
return parser.TzinfoParser.parse(tz_expr)
|
||||
except parser.ParserError:
|
||||
raise ValueError('\'{0}\' not recognized as a timezone'.format(
|
||||
tz_expr))
|
||||
|
||||
@classmethod
|
||||
def _get_datetime(cls, expr):
|
||||
|
||||
if isinstance(expr, Arrow):
|
||||
return expr.datetime
|
||||
|
||||
if isinstance(expr, datetime):
|
||||
return expr
|
||||
|
||||
try:
|
||||
expr = float(expr)
|
||||
return cls.utcfromtimestamp(expr).datetime
|
||||
except:
|
||||
raise ValueError(
|
||||
'\'{0}\' not recognized as a timestamp or datetime'.format(expr))
|
||||
|
||||
@classmethod
|
||||
def _get_frames(cls, name):
|
||||
|
||||
if name in cls._ATTRS:
|
||||
return name, '{0}s'.format(name), 1
|
||||
|
||||
elif name in ['week', 'weeks']:
|
||||
return 'week', 'weeks', 1
|
||||
elif name in ['quarter', 'quarters']:
|
||||
return 'quarter', 'months', 3
|
||||
|
||||
raise AttributeError()
|
||||
|
||||
@classmethod
|
||||
def _get_iteration_params(cls, end, limit):
|
||||
|
||||
if end is None:
|
||||
|
||||
if limit is None:
|
||||
raise Exception('one of \'end\' or \'limit\' is required')
|
||||
|
||||
return cls.max, limit
|
||||
|
||||
else:
|
||||
return end, sys.maxsize
|
||||
|
||||
@staticmethod
|
||||
def _get_timestamp_from_input(timestamp):
|
||||
|
||||
try:
|
||||
return float(timestamp)
|
||||
except:
|
||||
raise ValueError('cannot parse \'{0}\' as a timestamp'.format(timestamp))
|
||||
|
||||
Arrow.min = Arrow.fromdatetime(datetime.min)
|
||||
Arrow.max = Arrow.fromdatetime(datetime.max)
|
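A small sketch of the ``end``/``limit`` contract in ``range`` and of ``span`` on the class above; it assumes the package imports as ``arrow`` and the commented values are only illustrative:

```python
from datetime import datetime
from arrow.arrow import Arrow

start = datetime(2013, 5, 5, 12, 30)

# With `limit` alone, iteration simply stops after that many results.
hours = Arrow.range('hour', start, limit=3)
# -> 12:30, 13:30 and 14:30 on 2013-05-05, all in UTC

# span() floors and ceils the instant to a whole frame.
floor, ceil = Arrow.utcnow().span('day')
assert floor <= Arrow.utcnow() <= ceil
print(hours, floor, ceil)
```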
254
lib/arrow/factory.py
Normal file
@@ -0,0 +1,254 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Implements the :class:`ArrowFactory <arrow.factory.ArrowFactory>` class,
|
||||
providing factory methods for common :class:`Arrow <arrow.arrow.Arrow>`
|
||||
construction scenarios.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from arrow.arrow import Arrow
|
||||
from arrow import parser
|
||||
from arrow.util import is_timestamp, isstr
|
||||
|
||||
from datetime import datetime, tzinfo, date
|
||||
from dateutil import tz as dateutil_tz
|
||||
from time import struct_time
|
||||
import calendar
|
||||
|
||||
|
||||
class ArrowFactory(object):
|
||||
''' A factory for generating :class:`Arrow <arrow.arrow.Arrow>` objects.
|
||||
|
||||
:param type: (optional) the :class:`Arrow <arrow.arrow.Arrow>`-based class to construct from.
|
||||
Defaults to :class:`Arrow <arrow.arrow.Arrow>`.
|
||||
|
||||
'''
|
||||
|
||||
def __init__(self, type=Arrow):
|
||||
self.type = type
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
''' Returns an :class:`Arrow <arrow.arrow.Arrow>` object based on flexible inputs.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import arrow
|
||||
|
||||
**No inputs** to get current UTC time::
|
||||
|
||||
>>> arrow.get()
|
||||
<Arrow [2013-05-08T05:51:43.316458+00:00]>
|
||||
|
||||
**None** to also get current UTC time::
|
||||
|
||||
>>> arrow.get(None)
|
||||
<Arrow [2013-05-08T05:51:43.316458+00:00]>
|
||||
|
||||
**One** :class:`Arrow <arrow.arrow.Arrow>` object, to get a copy.
|
||||
|
||||
>>> arw = arrow.utcnow()
|
||||
>>> arrow.get(arw)
|
||||
<Arrow [2013-10-23T15:21:54.354846+00:00]>
|
||||
|
||||
**One** ``str``, ``float``, or ``int``, convertible to a floating-point timestamp, to get that timestamp in UTC::
|
||||
|
||||
>>> arrow.get(1367992474.293378)
|
||||
<Arrow [2013-05-08T05:54:34.293378+00:00]>
|
||||
|
||||
>>> arrow.get(1367992474)
|
||||
<Arrow [2013-05-08T05:54:34+00:00]>
|
||||
|
||||
>>> arrow.get('1367992474.293378')
|
||||
<Arrow [2013-05-08T05:54:34.293378+00:00]>
|
||||
|
||||
>>> arrow.get('1367992474')
|
||||
<Arrow [2013-05-08T05:54:34+00:00]>
|
||||
|
||||
**One** ISO-8601-formatted ``str``, to parse it::
|
||||
|
||||
>>> arrow.get('2013-09-29T01:26:43.830580')
|
||||
<Arrow [2013-09-29T01:26:43.830580+00:00]>
|
||||
|
||||
**One** ``tzinfo``, to get the current time in that timezone::
|
||||
|
||||
>>> arrow.get(tz.tzlocal())
|
||||
<Arrow [2013-05-07T22:57:28.484717-07:00]>
|
||||
|
||||
**One** naive ``datetime``, to get that datetime in UTC::
|
||||
|
||||
>>> arrow.get(datetime(2013, 5, 5))
|
||||
<Arrow [2013-05-05T00:00:00+00:00]>
|
||||
|
||||
**One** aware ``datetime``, to get that datetime::
|
||||
|
||||
>>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal()))
|
||||
<Arrow [2013-05-05T00:00:00-07:00]>
|
||||
|
||||
**One** naive ``date``, to get that date in UTC::
|
||||
|
||||
>>> arrow.get(date(2013, 5, 5))
|
||||
<Arrow [2013-05-05T00:00:00+00:00]>
|
||||
|
||||
**Two** arguments, a naive or aware ``datetime``, and a timezone expression (as above)::
|
||||
|
||||
>>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')
|
||||
<Arrow [2013-05-05T00:00:00-07:00]>
|
||||
|
||||
**Two** arguments, a naive ``date``, and a timezone expression (as above)::
|
||||
|
||||
>>> arrow.get(date(2013, 5, 5), 'US/Pacific')
|
||||
<Arrow [2013-05-05T00:00:00-07:00]>
|
||||
|
||||
**Two** arguments, both ``str``, to parse the first according to the format of the second::
|
||||
|
||||
>>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss')
|
||||
<Arrow [2013-05-05T12:30:45+00:00]>
|
||||
|
||||
**Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try::
|
||||
|
||||
>>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss'])
|
||||
<Arrow [2013-05-05T12:30:45+00:00]>
|
||||
|
||||
**Three or more** arguments, as for the constructor of a ``datetime``::
|
||||
|
||||
>>> arrow.get(2013, 5, 5, 12, 30, 45)
|
||||
<Arrow [2013-05-05T12:30:45+00:00]>
|
||||
|
||||
**One** ``time.struct_time``::
|
||||
>>> arrow.get(gmtime(0))
|
||||
<Arrow [1970-01-01T00:00:00+00:00]>
|
||||
|
||||
'''
|
||||
|
||||
arg_count = len(args)
|
||||
locale = kwargs.get('locale', 'en_us')
|
||||
tz = kwargs.get('tzinfo', None)
|
||||
|
||||
# () -> now, @ utc.
|
||||
if arg_count == 0:
|
||||
if isinstance(tz, tzinfo):
|
||||
return self.type.now(tz)
|
||||
return self.type.utcnow()
|
||||
|
||||
if arg_count == 1:
|
||||
arg = args[0]
|
||||
|
||||
# (None) -> now, @ utc.
|
||||
if arg is None:
|
||||
return self.type.utcnow()
|
||||
|
||||
# try (int, float, str(int), str(float)) -> utc, from timestamp.
|
||||
if is_timestamp(arg):
|
||||
return self.type.utcfromtimestamp(arg)
|
||||
|
||||
# (Arrow) -> from the object's datetime.
|
||||
if isinstance(arg, Arrow):
|
||||
return self.type.fromdatetime(arg.datetime)
|
||||
|
||||
# (datetime) -> from datetime.
|
||||
if isinstance(arg, datetime):
|
||||
return self.type.fromdatetime(arg)
|
||||
|
||||
# (date) -> from date.
|
||||
if isinstance(arg, date):
|
||||
return self.type.fromdate(arg)
|
||||
|
||||
# (tzinfo) -> now, @ tzinfo.
|
||||
elif isinstance(arg, tzinfo):
|
||||
return self.type.now(arg)
|
||||
|
||||
# (str) -> now, @ tzinfo.
|
||||
elif isstr(arg):
|
||||
dt = parser.DateTimeParser(locale).parse_iso(arg)
|
||||
return self.type.fromdatetime(dt)
|
||||
|
||||
# (struct_time) -> from struct_time
|
||||
elif isinstance(arg, struct_time):
|
||||
return self.type.utcfromtimestamp(calendar.timegm(arg))
|
||||
|
||||
else:
|
||||
raise TypeError('Can\'t parse single argument type of \'{0}\''.format(type(arg)))
|
||||
|
||||
elif arg_count == 2:
|
||||
|
||||
arg_1, arg_2 = args[0], args[1]
|
||||
|
||||
if isinstance(arg_1, datetime):
|
||||
|
||||
# (datetime, tzinfo) -> fromdatetime @ tzinfo/string.
|
||||
if isinstance(arg_2, tzinfo) or isstr(arg_2):
|
||||
return self.type.fromdatetime(arg_1, arg_2)
|
||||
else:
|
||||
raise TypeError('Can\'t parse two arguments of types \'datetime\', \'{0}\''.format(
|
||||
type(arg_2)))
|
||||
|
||||
# (date, tzinfo/str) -> fromdate @ tzinfo/string.
|
||||
elif isinstance(arg_1, date):
|
||||
|
||||
if isinstance(arg_2, tzinfo) or isstr(arg_2):
|
||||
return self.type.fromdate(arg_1, tzinfo=arg_2)
|
||||
else:
|
||||
raise TypeError('Can\'t parse two arguments of types \'date\', \'{0}\''.format(
|
||||
type(arg_2)))
|
||||
|
||||
# (str, format) -> parse.
|
||||
elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)):
|
||||
dt = parser.DateTimeParser(locale).parse(args[0], args[1])
|
||||
return self.type.fromdatetime(dt, tzinfo=tz)
|
||||
|
||||
else:
|
||||
raise TypeError('Can\'t parse two arguments of types \'{0}\', \'{1}\''.format(
|
||||
type(arg_1), type(arg_2)))
|
||||
|
||||
# 3+ args -> datetime-like via constructor.
|
||||
else:
|
||||
return self.type(*args, **kwargs)
|
||||
|
||||
def utcnow(self):
|
||||
'''Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC time.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import arrow
|
||||
>>> arrow.utcnow()
|
||||
<Arrow [2013-05-08T05:19:07.018993+00:00]>
|
||||
'''
|
||||
|
||||
return self.type.utcnow()
|
||||
|
||||
def now(self, tz=None):
|
||||
'''Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now".
|
||||
|
||||
:param tz: (optional) An expression representing a timezone. Defaults to local time.
|
||||
|
||||
Recognized timezone expressions:
|
||||
|
||||
- A ``tzinfo`` object.
|
||||
- A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
|
||||
- A ``str`` in ISO-8601 style, as in '+07:00'.
|
||||
- A ``str``, one of the following: 'local', 'utc', 'UTC'.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import arrow
|
||||
>>> arrow.now()
|
||||
<Arrow [2013-05-07T22:19:11.363410-07:00]>
|
||||
|
||||
>>> arrow.now('US/Pacific')
|
||||
<Arrow [2013-05-07T22:19:15.251821-07:00]>
|
||||
|
||||
>>> arrow.now('+02:00')
|
||||
<Arrow [2013-05-08T07:19:25.618646+02:00]>
|
||||
|
||||
>>> arrow.now('local')
|
||||
<Arrow [2013-05-07T22:19:39.130059-07:00]>
|
||||
'''
|
||||
|
||||
if tz is None:
|
||||
tz = dateutil_tz.tzlocal()
|
||||
elif not isinstance(tz, tzinfo):
|
||||
tz = parser.TzinfoParser.parse(tz)
|
||||
|
||||
return self.type.now(tz)
|
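The ``type`` argument is what ``arrow.api.factory()`` forwards; a sketch of routing construction through a derived class (the ``LoggingArrow`` subclass is purely illustrative):

```python
from arrow.arrow import Arrow
from arrow.factory import ArrowFactory

class LoggingArrow(Arrow):
    """Illustrative subclass: same behaviour, custom repr."""
    def __repr__(self):
        return '<LoggingArrow {0}>'.format(self.isoformat())

factory = ArrowFactory(LoggingArrow)

stamp = factory.get(1367992474)      # timestamp -> LoggingArrow in UTC
copy = factory.get(stamp.datetime)   # aware datetime -> LoggingArrow
print(repr(stamp), repr(copy))
```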
105
lib/arrow/formatter.py
Normal file
@@ -0,0 +1,105 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import
|
||||
|
||||
import calendar
|
||||
import re
|
||||
from dateutil import tz as dateutil_tz
|
||||
from arrow import util, locales
|
||||
|
||||
|
||||
class DateTimeFormatter(object):
|
||||
|
||||
_FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?|a|A|X)')
|
||||
|
||||
def __init__(self, locale='en_us'):
|
||||
|
||||
self.locale = locales.get_locale(locale)
|
||||
|
||||
def format(cls, dt, fmt):
|
||||
|
||||
return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt)
|
||||
|
||||
def _format_token(self, dt, token):
|
||||
|
||||
if token == 'YYYY':
|
||||
return self.locale.year_full(dt.year)
|
||||
if token == 'YY':
|
||||
return self.locale.year_abbreviation(dt.year)
|
||||
|
||||
if token == 'MMMM':
|
||||
return self.locale.month_name(dt.month)
|
||||
if token == 'MMM':
|
||||
return self.locale.month_abbreviation(dt.month)
|
||||
if token == 'MM':
|
||||
return '{0:02d}'.format(dt.month)
|
||||
if token == 'M':
|
||||
return str(dt.month)
|
||||
|
||||
if token == 'DDDD':
|
||||
return '{0:03d}'.format(dt.timetuple().tm_yday)
|
||||
if token == 'DDD':
|
||||
return str(dt.timetuple().tm_yday)
|
||||
if token == 'DD':
|
||||
return '{0:02d}'.format(dt.day)
|
||||
if token == 'D':
|
||||
return str(dt.day)
|
||||
|
||||
if token == 'Do':
|
||||
return self.locale.ordinal_number(dt.day)
|
||||
|
||||
if token == 'dddd':
|
||||
return self.locale.day_name(dt.isoweekday())
|
||||
if token == 'ddd':
|
||||
return self.locale.day_abbreviation(dt.isoweekday())
|
||||
if token == 'd':
|
||||
return str(dt.isoweekday())
|
||||
|
||||
if token == 'HH':
|
||||
return '{0:02d}'.format(dt.hour)
|
||||
if token == 'H':
|
||||
return str(dt.hour)
|
||||
if token == 'hh':
|
||||
return '{0:02d}'.format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
|
||||
if token == 'h':
|
||||
return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
|
||||
|
||||
if token == 'mm':
|
||||
return '{0:02d}'.format(dt.minute)
|
||||
if token == 'm':
|
||||
return str(dt.minute)
|
||||
|
||||
if token == 'ss':
|
||||
return '{0:02d}'.format(dt.second)
|
||||
if token == 's':
|
||||
return str(dt.second)
|
||||
|
||||
if token == 'SSSSSS':
|
||||
return str('{0:06d}'.format(int(dt.microsecond)))
|
||||
if token == 'SSSSS':
|
||||
return str('{0:05d}'.format(int(dt.microsecond / 10)))
|
||||
if token == 'SSSS':
|
||||
return str('{0:04d}'.format(int(dt.microsecond / 100)))
|
||||
if token == 'SSS':
|
||||
return str('{0:03d}'.format(int(dt.microsecond / 1000)))
|
||||
if token == 'SS':
|
||||
return str('{0:02d}'.format(int(dt.microsecond / 10000)))
|
||||
if token == 'S':
|
||||
return str(int(dt.microsecond / 100000))
|
||||
|
||||
if token == 'X':
|
||||
return str(calendar.timegm(dt.utctimetuple()))
|
||||
|
||||
if token in ['ZZ', 'Z']:
|
||||
separator = ':' if token == 'ZZ' else ''
|
||||
tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo
|
||||
total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60)
|
||||
|
||||
sign = '+' if total_minutes > 0 else '-'
|
||||
total_minutes = abs(total_minutes)
|
||||
hour, minute = divmod(total_minutes, 60)
|
||||
|
||||
return '{0}{1:02d}{2}{3:02d}'.format(sign, hour, separator, minute)
|
||||
|
||||
if token in ('a', 'A'):
|
||||
return self.locale.meridian(dt.hour, token)
|
||||
|
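A short sketch of the token behaviour above, exercised through ``Arrow.format`` (which delegates to ``DateTimeFormatter``); the commented outputs assume the bundled ``en_us`` locale:

```python
from arrow.arrow import Arrow

arw = Arrow(2013, 5, 9, 3, 56, 47)   # tzinfo defaults to UTC

print(arw.format('YYYY-MM-DD HH:mm:ss ZZ'))  # 2013-05-09 03:56:47 -00:00
print(arw.format('MMMM Do, YYYY'))           # May 9th, 2013
print(arw.format('X'))                       # 1368071807 (Unix timestamp)
```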
1703
lib/arrow/locales.py
Normal file
File diff suppressed because it is too large
308
lib/arrow/parser.py
Normal file
@@ -0,0 +1,308 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime
|
||||
from dateutil import tz
|
||||
import re
|
||||
|
||||
from arrow import locales
|
||||
|
||||
|
||||
class ParserError(RuntimeError):
|
||||
pass
|
||||
|
||||
|
||||
class DateTimeParser(object):
|
||||
|
||||
_FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X)')
|
||||
|
||||
_ONE_THROUGH_SIX_DIGIT_RE = re.compile('\d{1,6}')
|
||||
_ONE_THROUGH_FIVE_DIGIT_RE = re.compile('\d{1,5}')
|
||||
_ONE_THROUGH_FOUR_DIGIT_RE = re.compile('\d{1,4}')
|
||||
_ONE_TWO_OR_THREE_DIGIT_RE = re.compile('\d{1,3}')
|
||||
_ONE_OR_TWO_DIGIT_RE = re.compile('\d{1,2}')
|
||||
_FOUR_DIGIT_RE = re.compile('\d{4}')
|
||||
_TWO_DIGIT_RE = re.compile('\d{2}')
|
||||
_TZ_RE = re.compile('[+\-]?\d{2}:?\d{2}')
|
||||
_TZ_NAME_RE = re.compile('\w[\w+\-/]+')
|
||||
|
||||
|
||||
_BASE_INPUT_RE_MAP = {
|
||||
'YYYY': _FOUR_DIGIT_RE,
|
||||
'YY': _TWO_DIGIT_RE,
|
||||
'MM': _TWO_DIGIT_RE,
|
||||
'M': _ONE_OR_TWO_DIGIT_RE,
|
||||
'DD': _TWO_DIGIT_RE,
|
||||
'D': _ONE_OR_TWO_DIGIT_RE,
|
||||
'HH': _TWO_DIGIT_RE,
|
||||
'H': _ONE_OR_TWO_DIGIT_RE,
|
||||
'hh': _TWO_DIGIT_RE,
|
||||
'h': _ONE_OR_TWO_DIGIT_RE,
|
||||
'mm': _TWO_DIGIT_RE,
|
||||
'm': _ONE_OR_TWO_DIGIT_RE,
|
||||
'ss': _TWO_DIGIT_RE,
|
||||
's': _ONE_OR_TWO_DIGIT_RE,
|
||||
'X': re.compile('\d+'),
|
||||
'ZZZ': _TZ_NAME_RE,
|
||||
'ZZ': _TZ_RE,
|
||||
'Z': _TZ_RE,
|
||||
'SSSSSS': _ONE_THROUGH_SIX_DIGIT_RE,
|
||||
'SSSSS': _ONE_THROUGH_FIVE_DIGIT_RE,
|
||||
'SSSS': _ONE_THROUGH_FOUR_DIGIT_RE,
|
||||
'SSS': _ONE_TWO_OR_THREE_DIGIT_RE,
|
||||
'SS': _ONE_OR_TWO_DIGIT_RE,
|
||||
'S': re.compile('\d'),
|
||||
}
|
||||
|
||||
MARKERS = ['YYYY', 'MM', 'DD']
|
||||
SEPARATORS = ['-', '/', '.']
|
||||
|
||||
def __init__(self, locale='en_us'):
|
||||
|
||||
self.locale = locales.get_locale(locale)
|
||||
self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
|
||||
self._input_re_map.update({
|
||||
'MMMM': self._choice_re(self.locale.month_names[1:], re.IGNORECASE),
|
||||
'MMM': self._choice_re(self.locale.month_abbreviations[1:],
|
||||
re.IGNORECASE),
|
||||
'Do': re.compile(self.locale.ordinal_day_re),
|
||||
'a': self._choice_re(
|
||||
(self.locale.meridians['am'], self.locale.meridians['pm'])
|
||||
),
|
||||
# note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to
|
||||
# ensure backwards compatibility of this token
|
||||
'A': self._choice_re(self.locale.meridians.values())
|
||||
})
|
||||
|
||||
def parse_iso(self, string):
|
||||
|
||||
has_time = 'T' in string or ' ' in string.strip()
|
||||
space_divider = ' ' in string.strip()
|
||||
|
||||
if has_time:
|
||||
if space_divider:
|
||||
date_string, time_string = string.split(' ', 1)
|
||||
else:
|
||||
date_string, time_string = string.split('T', 1)
|
||||
time_parts = re.split('[+-]', time_string, 1)
|
||||
has_tz = len(time_parts) > 1
|
||||
has_seconds = time_parts[0].count(':') > 1
|
||||
has_subseconds = '.' in time_parts[0]
|
||||
|
||||
if has_subseconds:
|
||||
subseconds_token = 'S' * min(len(re.split('\D+', time_parts[0].split('.')[1], 1)[0]), 6)
|
||||
formats = ['YYYY-MM-DDTHH:mm:ss.%s' % subseconds_token]
|
||||
elif has_seconds:
|
||||
formats = ['YYYY-MM-DDTHH:mm:ss']
|
||||
else:
|
||||
formats = ['YYYY-MM-DDTHH:mm']
|
||||
else:
|
||||
has_tz = False
|
||||
# generate required formats: YYYY-MM-DD, YYYY-MM, YYYY
|
||||
# using various separators: -, /, .
|
||||
l = len(self.MARKERS)
|
||||
formats = [separator.join(self.MARKERS[:l-i])
|
||||
for i in range(l)
|
||||
for separator in self.SEPARATORS]
|
||||
|
||||
if has_time and has_tz:
|
||||
formats = [f + 'Z' for f in formats]
|
||||
|
||||
if space_divider:
|
||||
formats = [item.replace('T', ' ', 1) for item in formats]
|
||||
|
||||
return self._parse_multiformat(string, formats)
|
||||
|
||||
def parse(self, string, fmt):
|
||||
|
||||
if isinstance(fmt, list):
|
||||
return self._parse_multiformat(string, fmt)
|
||||
|
||||
# fmt is a string of tokens like 'YYYY-MM-DD'
|
||||
# we construct a new string by replacing each
|
||||
# token by its pattern:
|
||||
# 'YYYY-MM-DD' -> '(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>\d{2})'
|
||||
fmt_pattern = fmt
|
||||
tokens = []
|
||||
offset = 0
|
||||
for m in self._FORMAT_RE.finditer(fmt):
|
||||
token = m.group(0)
|
||||
try:
|
||||
input_re = self._input_re_map[token]
|
||||
except KeyError:
|
||||
raise ParserError('Unrecognized token \'{0}\''.format(token))
|
||||
input_pattern = '(?P<{0}>{1})'.format(token, input_re.pattern)
|
||||
tokens.append(token)
|
||||
# a pattern doesn't have the same length as the token
|
||||
# it replaces! We keep the difference in the offset variable.
|
||||
# This works because the string is scanned left-to-right and matches
|
||||
# are returned in the order found by finditer.
|
||||
fmt_pattern = fmt_pattern[:m.start() + offset] + input_pattern + fmt_pattern[m.end() + offset:]
|
||||
offset += len(input_pattern) - (m.end() - m.start())
|
||||
match = re.search(fmt_pattern, string, flags=re.IGNORECASE)
|
||||
if match is None:
|
||||
raise ParserError('Failed to match \'{0}\' when parsing \'{1}\''.format(fmt_pattern, string))
|
||||
parts = {}
|
||||
for token in tokens:
|
||||
if token == 'Do':
|
||||
value = match.group('value')
|
||||
else:
|
||||
value = match.group(token)
|
||||
self._parse_token(token, value, parts)
|
||||
return self._build_datetime(parts)
|
||||
|
||||
def _parse_token(self, token, value, parts):
|
||||
|
||||
if token == 'YYYY':
|
||||
parts['year'] = int(value)
|
||||
elif token == 'YY':
|
||||
value = int(value)
|
||||
parts['year'] = 1900 + value if value > 68 else 2000 + value
|
||||
|
||||
elif token in ['MMMM', 'MMM']:
|
||||
parts['month'] = self.locale.month_number(value.lower())
|
||||
|
||||
elif token in ['MM', 'M']:
|
||||
parts['month'] = int(value)
|
||||
|
||||
elif token in ['DD', 'D']:
|
||||
parts['day'] = int(value)
|
||||
|
||||
elif token in ['Do']:
|
||||
parts['day'] = int(value)
|
||||
|
||||
elif token.upper() in ['HH', 'H']:
|
||||
parts['hour'] = int(value)
|
||||
|
||||
elif token in ['mm', 'm']:
|
||||
parts['minute'] = int(value)
|
||||
|
||||
elif token in ['ss', 's']:
|
||||
parts['second'] = int(value)
|
||||
|
||||
elif token == 'SSSSSS':
|
||||
parts['microsecond'] = int(value)
|
||||
elif token == 'SSSSS':
|
||||
parts['microsecond'] = int(value) * 10
|
||||
elif token == 'SSSS':
|
||||
parts['microsecond'] = int(value) * 100
|
||||
elif token == 'SSS':
|
||||
parts['microsecond'] = int(value) * 1000
|
||||
elif token == 'SS':
|
||||
parts['microsecond'] = int(value) * 10000
|
||||
elif token == 'S':
|
||||
parts['microsecond'] = int(value) * 100000
|
||||
|
||||
elif token == 'X':
|
||||
parts['timestamp'] = int(value)
|
||||
|
||||
elif token in ['ZZZ', 'ZZ', 'Z']:
|
||||
parts['tzinfo'] = TzinfoParser.parse(value)
|
||||
|
||||
elif token in ['a', 'A']:
|
||||
if value in (
|
||||
self.locale.meridians['am'],
|
||||
self.locale.meridians['AM']
|
||||
):
|
||||
parts['am_pm'] = 'am'
|
||||
elif value in (
|
||||
self.locale.meridians['pm'],
|
||||
self.locale.meridians['PM']
|
||||
):
|
||||
parts['am_pm'] = 'pm'
|
||||
|
||||
@staticmethod
|
||||
def _build_datetime(parts):
|
||||
|
||||
timestamp = parts.get('timestamp')
|
||||
|
||||
if timestamp:
|
||||
tz_utc = tz.tzutc()
|
||||
return datetime.fromtimestamp(timestamp, tz=tz_utc)
|
||||
|
||||
am_pm = parts.get('am_pm')
|
||||
hour = parts.get('hour', 0)
|
||||
|
||||
if am_pm == 'pm' and hour < 12:
|
||||
hour += 12
|
||||
elif am_pm == 'am' and hour == 12:
|
||||
hour = 0
|
||||
|
||||
return datetime(year=parts.get('year', 1), month=parts.get('month', 1),
|
||||
day=parts.get('day', 1), hour=hour, minute=parts.get('minute', 0),
|
||||
second=parts.get('second', 0), microsecond=parts.get('microsecond', 0),
|
||||
tzinfo=parts.get('tzinfo'))
|
||||
|
||||
def _parse_multiformat(self, string, formats):
|
||||
|
||||
_datetime = None
|
||||
|
||||
for fmt in formats:
|
||||
try:
|
||||
_datetime = self.parse(string, fmt)
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
if _datetime is None:
|
||||
raise ParserError('Could not match input to any of {0} on \'{1}\''.format(formats, string))
|
||||
|
||||
return _datetime
|
||||
|
||||
@staticmethod
|
||||
def _map_lookup(input_map, key):
|
||||
|
||||
try:
|
||||
return input_map[key]
|
||||
except KeyError:
|
||||
raise ParserError('Could not match "{0}" to {1}'.format(key, input_map))
|
||||
|
||||
@staticmethod
|
||||
def _try_timestamp(string):
|
||||
|
||||
try:
|
||||
return float(string)
|
||||
except:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _choice_re(choices, flags=0):
|
||||
return re.compile('({0})'.format('|'.join(choices)), flags=flags)
|
||||
|
||||
|
||||
class TzinfoParser(object):
|
||||
|
||||
_TZINFO_RE = re.compile('([+\-])?(\d\d):?(\d\d)')
|
||||
|
||||
@classmethod
|
||||
def parse(cls, string):
|
||||
|
||||
tzinfo = None
|
||||
|
||||
if string == 'local':
|
||||
tzinfo = tz.tzlocal()
|
||||
|
||||
elif string in ['utc', 'UTC']:
|
||||
tzinfo = tz.tzutc()
|
||||
|
||||
else:
|
||||
|
||||
iso_match = cls._TZINFO_RE.match(string)
|
||||
|
||||
if iso_match:
|
||||
sign, hours, minutes = iso_match.groups()
|
||||
seconds = int(hours) * 3600 + int(minutes) * 60
|
||||
|
||||
if sign == '-':
|
||||
seconds *= -1
|
||||
|
||||
tzinfo = tz.tzoffset(None, seconds)
|
||||
|
||||
else:
|
||||
tzinfo = tz.gettz(string)
|
||||
|
||||
if tzinfo is None:
|
||||
raise ParserError('Could not parse timezone expression "{0}"'.format(string))
|
||||
|
||||
return tzinfo
|
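A minimal sketch of the two parsers defined above; note that the format tokens come from the ``_input_re_map`` table, not from ``strptime`` directives:

```python
from arrow.parser import DateTimeParser, TzinfoParser

p = DateTimeParser(locale='en_us')

# Explicit token format.
dt = p.parse('23 January 2016, 07:45 pm', 'DD MMMM YYYY, HH:mm a')
# -> datetime.datetime(2016, 1, 23, 19, 45)

# ISO-8601-style input takes the dedicated parse_iso() path.
iso = p.parse_iso('2016-01-23T07:45:30.5')
# -> datetime.datetime(2016, 1, 23, 7, 45, 30, 500000)

# Timezone expressions resolve to dateutil tzinfo objects.
offset = TzinfoParser.parse('+02:00')
print(dt, iso, offset)
```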
45
lib/arrow/util.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
|
||||
# python 2.6 / 2.7 definitions for total_seconds function.
|
||||
|
||||
def _total_seconds_27(td): # pragma: no cover
|
||||
return td.total_seconds()
|
||||
|
||||
def _total_seconds_26(td):
|
||||
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6
|
||||
|
||||
|
||||
# get version info and assign correct total_seconds function.
|
||||
|
||||
version = '{0}.{1}.{2}'.format(*sys.version_info[:3])
|
||||
|
||||
if version < '2.7': # pragma: no cover
|
||||
total_seconds = _total_seconds_26
|
||||
else: # pragma: no cover
|
||||
total_seconds = _total_seconds_27
|
||||
|
||||
def is_timestamp(value):
|
||||
try:
|
||||
float(value)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
# python 2.7 / 3.0+ definitions for isstr function.
|
||||
|
||||
try: # pragma: no cover
|
||||
basestring
|
||||
|
||||
def isstr(s):
|
||||
return isinstance(s, basestring)
|
||||
|
||||
except NameError: #pragma: no cover
|
||||
|
||||
def isstr(s):
|
||||
return isinstance(s, str)
|
||||
|
||||
|
||||
__all__ = ['total_seconds', 'is_timestamp', 'isstr']
|
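A quick sketch of the three exported helpers; the behaviour follows directly from the definitions above:

```python
from datetime import timedelta
from arrow.util import total_seconds, is_timestamp, isstr

print(total_seconds(timedelta(minutes=2, seconds=5)))  # 125.0

# is_timestamp() only checks float() convertibility, so numeric strings qualify.
print(is_timestamp('1367992474.293378'))  # True
print(is_timestamp('not a number'))       # False

# isstr() papers over the basestring/str split between Python 2 and 3.
print(isstr(u'plexpy'), isstr(42))        # True False
```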
2
lib/dateutil/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
__version__ = "2.4.2"
|
89
lib/dateutil/easter.py
Normal file
@@ -0,0 +1,89 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module offers a generic easter computing method for any given year, using
|
||||
Western, Orthodox or Julian algorithms.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
|
||||
__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]
|
||||
|
||||
EASTER_JULIAN = 1
|
||||
EASTER_ORTHODOX = 2
|
||||
EASTER_WESTERN = 3
|
||||
|
||||
|
||||
def easter(year, method=EASTER_WESTERN):
|
||||
"""
|
||||
This method was ported from the work done by GM Arts,
|
||||
on top of the algorithm by Claus Tondering, which was
|
||||
based in part on the algorithm of Ouding (1940), as
|
||||
quoted in "Explanatory Supplement to the Astronomical
|
||||
Almanac", P. Kenneth Seidelmann, editor.
|
||||
|
||||
This algorithm implements three different easter
|
||||
calculation methods:
|
||||
|
||||
1 - Original calculation in Julian calendar, valid in
|
||||
dates after 326 AD
|
||||
2 - Original method, with date converted to Gregorian
|
||||
calendar, valid in years 1583 to 4099
|
||||
3 - Revised method, in Gregorian calendar, valid in
|
||||
years 1583 to 4099 as well
|
||||
|
||||
These methods are represented by the constants:
|
||||
|
||||
EASTER_JULIAN = 1
|
||||
EASTER_ORTHODOX = 2
|
||||
EASTER_WESTERN = 3
|
||||
|
||||
The default method is method 3.
|
||||
|
||||
More about the algorithm may be found at:
|
||||
|
||||
http://users.chariot.net.au/~gmarts/eastalg.htm
|
||||
|
||||
and
|
||||
|
||||
http://www.tondering.dk/claus/calendar.html
|
||||
|
||||
"""
|
||||
|
||||
if not (1 <= method <= 3):
|
||||
raise ValueError("invalid method")
|
||||
|
||||
# g - Golden year - 1
|
||||
# c - Century
|
||||
# h - (23 - Epact) mod 30
|
||||
# i - Number of days from March 21 to Paschal Full Moon
|
||||
# j - Weekday for PFM (0=Sunday, etc)
|
||||
# p - Number of days from March 21 to Sunday on or before PFM
|
||||
# (-6 to 28 methods 1 & 3, to 56 for method 2)
|
||||
# e - Extra days to add for method 2 (converting Julian
|
||||
# date to Gregorian date)
|
||||
|
||||
y = year
|
||||
g = y % 19
|
||||
e = 0
|
||||
if method < 3:
|
||||
# Old method
|
||||
i = (19*g + 15) % 30
|
||||
j = (y + y//4 + i) % 7
|
||||
if method == 2:
|
||||
# Extra dates to convert Julian to Gregorian date
|
||||
e = 10
|
||||
if y > 1600:
|
||||
e = e + y//100 - 16 - (y//100 - 16)//4
|
||||
else:
|
||||
# New method
|
||||
c = y//100
|
||||
h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30
|
||||
i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11))
|
||||
j = (y + y//4 + i + 2 - c + c//4) % 7
|
||||
|
||||
# p can be from -6 to 56 corresponding to dates 22 March to 23 May
|
||||
# (later dates apply to method 2, although 23 May never actually occurs)
|
||||
p = i - j + e
|
||||
d = 1 + (p + 27 + (p + 6)//40) % 31
|
||||
m = 3 + (p + 26)//30
|
||||
return datetime.date(int(y), int(m), int(d))
|
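A short usage sketch; the dates in the comments are the well-known 2016 results for the Western and Orthodox methods, while the Julian result is left unstated since it is expressed in the Julian calendar itself:

```python
from dateutil.easter import easter, EASTER_JULIAN, EASTER_ORTHODOX, EASTER_WESTERN

print(easter(2016))                    # 2016-03-27 (Western method, the default)
print(easter(2016, EASTER_ORTHODOX))   # 2016-05-01 (Julian reckoning converted to Gregorian)
print(easter(2016, EASTER_JULIAN))     # the same feast as a Julian-calendar date
```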
1205
lib/dateutil/parser.py
Normal file
File diff suppressed because it is too large
450
lib/dateutil/relativedelta.py
Normal file
@@ -0,0 +1,450 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import calendar
|
||||
|
||||
from six import integer_types
|
||||
|
||||
__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
|
||||
|
||||
|
||||
class weekday(object):
|
||||
__slots__ = ["weekday", "n"]
|
||||
|
||||
def __init__(self, weekday, n=None):
|
||||
self.weekday = weekday
|
||||
self.n = n
|
||||
|
||||
def __call__(self, n):
|
||||
if n == self.n:
|
||||
return self
|
||||
else:
|
||||
return self.__class__(self.weekday, n)
|
||||
|
||||
def __eq__(self, other):
|
||||
try:
|
||||
if self.weekday != other.weekday or self.n != other.n:
|
||||
return False
|
||||
except AttributeError:
|
||||
return False
|
||||
return True
|
||||
|
||||
def __repr__(self):
|
||||
s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
|
||||
if not self.n:
|
||||
return s
|
||||
else:
|
||||
return "%s(%+d)" % (s, self.n)
|
||||
|
||||
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
|
||||
|
||||
|
||||
class relativedelta(object):
|
||||
"""
|
||||
The relativedelta type is based on the specification of the excellent
|
||||
work done by M.-A. Lemburg in his
|
||||
`mx.DateTime <http://www.egenix.com/files/python/mxDateTime.html>`_ extension.
|
||||
However, notice that this type does *NOT* implement the same algorithm as
|
||||
his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
|
||||
|
||||
There are two different ways to build a relativedelta instance. The
|
||||
first one is passing it two date/datetime classes::
|
||||
|
||||
relativedelta(datetime1, datetime2)
|
||||
|
||||
The second one is passing it any number of the following keyword arguments::
|
||||
|
||||
relativedelta(arg1=x,arg2=y,arg3=z...)
|
||||
|
||||
year, month, day, hour, minute, second, microsecond:
|
||||
Absolute information (argument is singular); adding or subtracting a
|
||||
relativedelta with absolute information does not perform an arithmetic
|
||||
operation, but rather REPLACES the corresponding value in the
|
||||
original datetime with the value(s) in relativedelta.
|
||||
|
||||
years, months, weeks, days, hours, minutes, seconds, microseconds:
|
||||
Relative information, may be negative (argument is plural); adding
|
||||
or subtracting a relativedelta with relative information performs
|
||||
the corresponding arithmetic operation on the original datetime value
|
||||
with the information in the relativedelta.
|
||||
|
||||
weekday:
|
||||
One of the weekday instances (MO, TU, etc). These instances may
|
||||
receive a parameter N, specifying the Nth weekday, which could
|
||||
be positive or negative (like MO(+1) or MO(-2)). Not specifying
|
||||
it is the same as specifying +1. You can also use an integer,
|
||||
where 0=MO.
|
||||
|
||||
leapdays:
|
||||
Will add given days to the date found, if year is a leap
|
||||
year, and the date found is after February 28.
|
||||
|
||||
yearday, nlyearday:
|
||||
Set the yearday or the non-leap year day (jump leap days).
|
||||
These are converted to day/month/leapdays information.
|
||||
|
||||
Here is the behavior of operations with relativedelta:
|
||||
|
||||
1. Calculate the absolute year, using the 'year' argument, or the
|
||||
original datetime year, if the argument is not present.
|
||||
|
||||
2. Add the relative 'years' argument to the absolute year.
|
||||
|
||||
3. Do steps 1 and 2 for month/months.
|
||||
|
||||
4. Calculate the absolute day, using the 'day' argument, or the
|
||||
original datetime day, if the argument is not present. Then,
|
||||
subtract from the day until it fits in the year and month
|
||||
found after their operations.
|
||||
|
||||
5. Add the relative 'days' argument to the absolute day. Notice
|
||||
that the 'weeks' argument is multiplied by 7 and added to
|
||||
'days'.
|
||||
|
||||
6. Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds,
|
||||
microsecond/microseconds.
|
||||
|
||||
7. If the 'weekday' argument is present, calculate the weekday,
|
||||
with the given (wday, nth) tuple. wday is the index of the
|
||||
weekday (0-6, 0=Mon), and nth is the number of weeks to add
|
||||
forward or backward, depending on its sign. Notice that if
|
||||
the calculated date is already Monday, for example, using
|
||||
(0, 1) or (0, -1) won't change the day.
|
||||
"""
|
||||
|
||||
def __init__(self, dt1=None, dt2=None,
|
||||
years=0, months=0, days=0, leapdays=0, weeks=0,
|
||||
hours=0, minutes=0, seconds=0, microseconds=0,
|
||||
year=None, month=None, day=None, weekday=None,
|
||||
yearday=None, nlyearday=None,
|
||||
hour=None, minute=None, second=None, microsecond=None):
|
||||
if dt1 and dt2:
|
||||
# datetime is a subclass of date. So both must be date
|
||||
if not (isinstance(dt1, datetime.date) and
|
||||
isinstance(dt2, datetime.date)):
|
||||
raise TypeError("relativedelta only diffs datetime/date")
|
||||
# We allow two dates, or two datetimes, so we coerce them to be
|
||||
# of the same type
|
||||
if (isinstance(dt1, datetime.datetime) !=
|
||||
isinstance(dt2, datetime.datetime)):
|
||||
if not isinstance(dt1, datetime.datetime):
|
||||
dt1 = datetime.datetime.fromordinal(dt1.toordinal())
|
||||
elif not isinstance(dt2, datetime.datetime):
|
||||
dt2 = datetime.datetime.fromordinal(dt2.toordinal())
|
||||
self.years = 0
|
||||
self.months = 0
|
||||
self.days = 0
|
||||
self.leapdays = 0
|
||||
self.hours = 0
|
||||
self.minutes = 0
|
||||
self.seconds = 0
|
||||
self.microseconds = 0
|
||||
self.year = None
|
||||
self.month = None
|
||||
self.day = None
|
||||
self.weekday = None
|
||||
self.hour = None
|
||||
self.minute = None
|
||||
self.second = None
|
||||
self.microsecond = None
|
||||
self._has_time = 0
|
||||
|
||||
months = (dt1.year*12+dt1.month)-(dt2.year*12+dt2.month)
|
||||
self._set_months(months)
|
||||
dtm = self.__radd__(dt2)
|
||||
if dt1 < dt2:
|
||||
while dt1 > dtm:
|
||||
months += 1
|
||||
self._set_months(months)
|
||||
dtm = self.__radd__(dt2)
|
||||
else:
|
||||
while dt1 < dtm:
|
||||
months -= 1
|
||||
self._set_months(months)
|
||||
dtm = self.__radd__(dt2)
|
||||
delta = dt1 - dtm
|
||||
self.seconds = delta.seconds+delta.days*86400
|
||||
self.microseconds = delta.microseconds
|
||||
else:
|
||||
self.years = years
|
||||
self.months = months
|
||||
self.days = days+weeks*7
|
||||
self.leapdays = leapdays
|
||||
self.hours = hours
|
||||
self.minutes = minutes
|
||||
self.seconds = seconds
|
||||
self.microseconds = microseconds
|
||||
self.year = year
|
||||
self.month = month
|
||||
self.day = day
|
||||
self.hour = hour
|
||||
self.minute = minute
|
||||
self.second = second
|
||||
self.microsecond = microsecond
|
||||
|
||||
if isinstance(weekday, integer_types):
|
||||
self.weekday = weekdays[weekday]
|
||||
else:
|
||||
self.weekday = weekday
|
||||
|
||||
yday = 0
|
||||
if nlyearday:
|
||||
yday = nlyearday
|
||||
elif yearday:
|
||||
yday = yearday
|
||||
if yearday > 59:
|
||||
self.leapdays = -1
|
||||
if yday:
|
||||
ydayidx = [31, 59, 90, 120, 151, 181, 212,
|
||||
243, 273, 304, 334, 366]
|
||||
for idx, ydays in enumerate(ydayidx):
|
||||
if yday <= ydays:
|
||||
self.month = idx+1
|
||||
if idx == 0:
|
||||
self.day = yday
|
||||
else:
|
||||
self.day = yday-ydayidx[idx-1]
|
||||
break
|
||||
else:
|
||||
raise ValueError("invalid year day (%d)" % yday)
|
||||
|
||||
self._fix()
|
||||
|
||||
def _fix(self):
|
||||
if abs(self.microseconds) > 999999:
|
||||
s = self.microseconds//abs(self.microseconds)
|
||||
div, mod = divmod(self.microseconds*s, 1000000)
|
||||
self.microseconds = mod*s
|
||||
self.seconds += div*s
|
||||
if abs(self.seconds) > 59:
|
||||
s = self.seconds//abs(self.seconds)
|
||||
div, mod = divmod(self.seconds*s, 60)
|
||||
self.seconds = mod*s
|
||||
self.minutes += div*s
|
||||
if abs(self.minutes) > 59:
|
||||
s = self.minutes//abs(self.minutes)
|
||||
div, mod = divmod(self.minutes*s, 60)
|
||||
self.minutes = mod*s
|
||||
self.hours += div*s
|
||||
if abs(self.hours) > 23:
|
||||
s = self.hours//abs(self.hours)
|
||||
div, mod = divmod(self.hours*s, 24)
|
||||
self.hours = mod*s
|
||||
self.days += div*s
|
||||
if abs(self.months) > 11:
|
||||
s = self.months//abs(self.months)
|
||||
div, mod = divmod(self.months*s, 12)
|
||||
self.months = mod*s
|
||||
self.years += div*s
|
||||
if (self.hours or self.minutes or self.seconds or self.microseconds
|
||||
or self.hour is not None or self.minute is not None or
|
||||
self.second is not None or self.microsecond is not None):
|
||||
self._has_time = 1
|
||||
else:
|
||||
self._has_time = 0
|
||||
|
||||
def _set_months(self, months):
|
||||
self.months = months
|
||||
if abs(self.months) > 11:
|
||||
s = self.months//abs(self.months)
|
||||
div, mod = divmod(self.months*s, 12)
|
||||
self.months = mod*s
|
||||
self.years = div*s
|
||||
else:
|
||||
self.years = 0
|
||||
|
||||
def __add__(self, other):
|
||||
if isinstance(other, relativedelta):
|
||||
return relativedelta(years=other.years+self.years,
|
||||
months=other.months+self.months,
|
||||
days=other.days+self.days,
|
||||
hours=other.hours+self.hours,
|
||||
minutes=other.minutes+self.minutes,
|
||||
seconds=other.seconds+self.seconds,
|
||||
microseconds=(other.microseconds +
|
||||
self.microseconds),
|
||||
leapdays=other.leapdays or self.leapdays,
|
||||
year=other.year or self.year,
|
||||
month=other.month or self.month,
|
||||
day=other.day or self.day,
|
||||
weekday=other.weekday or self.weekday,
|
||||
hour=other.hour or self.hour,
|
||||
minute=other.minute or self.minute,
|
||||
second=other.second or self.second,
|
||||
microsecond=(other.microsecond or
|
||||
self.microsecond))
|
||||
if not isinstance(other, datetime.date):
|
||||
raise TypeError("unsupported type for add operation")
|
||||
elif self._has_time and not isinstance(other, datetime.datetime):
|
||||
other = datetime.datetime.fromordinal(other.toordinal())
|
||||
year = (self.year or other.year)+self.years
|
||||
month = self.month or other.month
|
||||
if self.months:
|
||||
assert 1 <= abs(self.months) <= 12
|
||||
month += self.months
|
||||
if month > 12:
|
||||
year += 1
|
||||
month -= 12
|
||||
elif month < 1:
|
||||
year -= 1
|
||||
month += 12
|
||||
day = min(calendar.monthrange(year, month)[1],
|
||||
self.day or other.day)
|
||||
repl = {"year": year, "month": month, "day": day}
|
||||
for attr in ["hour", "minute", "second", "microsecond"]:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
repl[attr] = value
|
||||
days = self.days
|
||||
if self.leapdays and month > 2 and calendar.isleap(year):
|
||||
days += self.leapdays
|
||||
ret = (other.replace(**repl)
|
||||
+ datetime.timedelta(days=days,
|
||||
hours=self.hours,
|
||||
minutes=self.minutes,
|
||||
seconds=self.seconds,
|
||||
microseconds=self.microseconds))
|
||||
if self.weekday:
|
||||
weekday, nth = self.weekday.weekday, self.weekday.n or 1
|
||||
jumpdays = (abs(nth)-1)*7
|
||||
if nth > 0:
|
||||
jumpdays += (7-ret.weekday()+weekday) % 7
|
||||
else:
|
||||
jumpdays += (ret.weekday()-weekday) % 7
|
||||
jumpdays *= -1
|
||||
ret += datetime.timedelta(days=jumpdays)
|
||||
return ret
|
||||
|
||||
def __radd__(self, other):
|
||||
return self.__add__(other)
|
||||
|
||||
def __rsub__(self, other):
|
||||
return self.__neg__().__radd__(other)
|
||||
|
||||
def __sub__(self, other):
|
||||
if not isinstance(other, relativedelta):
|
||||
raise TypeError("unsupported type for sub operation")
|
||||
return relativedelta(years=self.years-other.years,
|
||||
months=self.months-other.months,
|
||||
days=self.days-other.days,
|
||||
hours=self.hours-other.hours,
|
||||
minutes=self.minutes-other.minutes,
|
||||
seconds=self.seconds-other.seconds,
|
||||
microseconds=self.microseconds-other.microseconds,
|
||||
leapdays=self.leapdays or other.leapdays,
|
||||
year=self.year or other.year,
|
||||
month=self.month or other.month,
|
||||
day=self.day or other.day,
|
||||
weekday=self.weekday or other.weekday,
|
||||
hour=self.hour or other.hour,
|
||||
minute=self.minute or other.minute,
|
||||
second=self.second or other.second,
|
||||
microsecond=self.microsecond or other.microsecond)
|
||||
|
||||
def __neg__(self):
|
||||
return relativedelta(years=-self.years,
|
||||
months=-self.months,
|
||||
days=-self.days,
|
||||
hours=-self.hours,
|
||||
minutes=-self.minutes,
|
||||
seconds=-self.seconds,
|
||||
microseconds=-self.microseconds,
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
def __bool__(self):
|
||||
return not (not self.years and
|
||||
not self.months and
|
||||
not self.days and
|
||||
not self.hours and
|
||||
not self.minutes and
|
||||
not self.seconds and
|
||||
not self.microseconds and
|
||||
not self.leapdays and
|
||||
self.year is None and
|
||||
self.month is None and
|
||||
self.day is None and
|
||||
self.weekday is None and
|
||||
self.hour is None and
|
||||
self.minute is None and
|
||||
self.second is None and
|
||||
self.microsecond is None)
|
||||
# Compatibility with Python 2.x
|
||||
__nonzero__ = __bool__
|
||||
|
||||
def __mul__(self, other):
|
||||
f = float(other)
|
||||
return relativedelta(years=int(self.years*f),
|
||||
months=int(self.months*f),
|
||||
days=int(self.days*f),
|
||||
hours=int(self.hours*f),
|
||||
minutes=int(self.minutes*f),
|
||||
seconds=int(self.seconds*f),
|
||||
microseconds=int(self.microseconds*f),
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
__rmul__ = __mul__
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, relativedelta):
|
||||
return False
|
||||
if self.weekday or other.weekday:
|
||||
if not self.weekday or not other.weekday:
|
||||
return False
|
||||
if self.weekday.weekday != other.weekday.weekday:
|
||||
return False
|
||||
n1, n2 = self.weekday.n, other.weekday.n
|
||||
if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
|
||||
return False
|
||||
return (self.years == other.years and
|
||||
self.months == other.months and
|
||||
self.days == other.days and
|
||||
self.hours == other.hours and
|
||||
self.minutes == other.minutes and
|
||||
self.seconds == other.seconds and
|
||||
self.leapdays == other.leapdays and
|
||||
self.year == other.year and
|
||||
self.month == other.month and
|
||||
self.day == other.day and
|
||||
self.hour == other.hour and
|
||||
self.minute == other.minute and
|
||||
self.second == other.second and
|
||||
self.microsecond == other.microsecond)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __div__(self, other):
|
||||
return self.__mul__(1/float(other))
|
||||
|
||||
__truediv__ = __div__
|
||||
|
||||
def __repr__(self):
|
||||
l = []
|
||||
for attr in ["years", "months", "days", "leapdays",
|
||||
"hours", "minutes", "seconds", "microseconds"]:
|
||||
value = getattr(self, attr)
|
||||
if value:
|
||||
l.append("%s=%+d" % (attr, value))
|
||||
for attr in ["year", "month", "day", "weekday",
|
||||
"hour", "minute", "second", "microsecond"]:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
l.append("%s=%s" % (attr, repr(value)))
|
||||
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
|
||||
|
||||
# vim:ts=4:sw=4:et
|
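A short sketch of the two construction styles described in the relativedelta docstring above (dates are illustrative; the bundled six module must also be importable):

from datetime import date
from dateutil.relativedelta import relativedelta, SU

# Relative (plural) arguments shift the date: 2016-01-23 + 1 month -> 2016-02-23
print(date(2016, 1, 23) + relativedelta(months=+1))

# Absolute (singular) arguments replace fields: first Sunday of the month -> 2016-01-03
print(date(2016, 1, 23) + relativedelta(day=1, weekday=SU(+1)))

# The two-date form expresses the difference as a relativedelta -> relativedelta(days=+10)
print(relativedelta(date(2016, 2, 2), date(2016, 1, 23)))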
1375
lib/dateutil/rrule.py
Normal file
File diff suppressed because it is too large
986
lib/dateutil/tz.py
Normal file
@@ -0,0 +1,986 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module offers timezone implementations subclassing the abstract
|
||||
:py:`datetime.tzinfo` type. There are classes to handle tzfile format files
|
||||
(usually found in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, etc.), TZ
|
||||
environment string (in all known formats), given ranges (with help from
|
||||
relative deltas), local machine timezone, fixed offset timezone, and UTC
|
||||
timezone.
|
||||
"""
|
||||
import datetime
|
||||
import struct
|
||||
import time
|
||||
import sys
|
||||
import os
|
||||
|
||||
from six import string_types, PY3
|
||||
|
||||
try:
|
||||
from dateutil.tzwin import tzwin, tzwinlocal
|
||||
except ImportError:
|
||||
tzwin = tzwinlocal = None
|
||||
|
||||
relativedelta = None
|
||||
parser = None
|
||||
rrule = None
|
||||
|
||||
__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
|
||||
"tzstr", "tzical", "tzwin", "tzwinlocal", "gettz"]
|
||||
|
||||
|
||||
def tzname_in_python2(myfunc):
|
||||
"""Change unicode output into bytestrings in Python 2
|
||||
|
||||
tzname() API changed in Python 3. It used to return bytes, but was changed
|
||||
to unicode strings
|
||||
"""
|
||||
def inner_func(*args, **kwargs):
|
||||
if PY3:
|
||||
return myfunc(*args, **kwargs)
|
||||
else:
|
||||
return myfunc(*args, **kwargs).encode()
|
||||
return inner_func
|
||||
|
||||
ZERO = datetime.timedelta(0)
|
||||
EPOCHORDINAL = datetime.datetime.utcfromtimestamp(0).toordinal()
|
||||
|
||||
|
||||
class tzutc(datetime.tzinfo):
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return ZERO
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
return "UTC"
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, tzutc) or
|
||||
(isinstance(other, tzoffset) and other._offset == ZERO))
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s()" % self.__class__.__name__
|
||||
|
||||
__reduce__ = object.__reduce__
|
||||
|
||||
|
||||
class tzoffset(datetime.tzinfo):
|
||||
|
||||
def __init__(self, name, offset):
|
||||
self._name = name
|
||||
self._offset = datetime.timedelta(seconds=offset)
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self._offset
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
return self._name
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, tzoffset) and
|
||||
self._offset == other._offset)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%s, %s)" % (self.__class__.__name__,
|
||||
repr(self._name),
|
||||
self._offset.days*86400+self._offset.seconds)
|
||||
|
||||
__reduce__ = object.__reduce__
|
||||
|
||||
|
||||
class tzlocal(datetime.tzinfo):
|
||||
|
||||
_std_offset = datetime.timedelta(seconds=-time.timezone)
|
||||
if time.daylight:
|
||||
_dst_offset = datetime.timedelta(seconds=-time.altzone)
|
||||
else:
|
||||
_dst_offset = _std_offset
|
||||
|
||||
def utcoffset(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dst_offset
|
||||
else:
|
||||
return self._std_offset
|
||||
|
||||
def dst(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dst_offset-self._std_offset
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
return time.tzname[self._isdst(dt)]
|
||||
|
||||
def _isdst(self, dt):
|
||||
# We can't use mktime here. It is unstable when deciding if
|
||||
# the hour near to a change is DST or not.
|
||||
#
|
||||
# timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour,
|
||||
# dt.minute, dt.second, dt.weekday(), 0, -1))
|
||||
# return time.localtime(timestamp).tm_isdst
|
||||
#
|
||||
# The code above yields the following result:
|
||||
#
|
||||
# >>> import tz, datetime
|
||||
# >>> t = tz.tzlocal()
|
||||
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
|
||||
# 'BRDT'
|
||||
# >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname()
|
||||
# 'BRST'
|
||||
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
|
||||
# 'BRST'
|
||||
# >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname()
|
||||
# 'BRDT'
|
||||
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
|
||||
# 'BRDT'
|
||||
#
|
||||
# Here is a more stable implementation:
|
||||
#
|
||||
timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400
|
||||
+ dt.hour * 3600
|
||||
+ dt.minute * 60
|
||||
+ dt.second)
|
||||
return time.localtime(timestamp+time.timezone).tm_isdst
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, tzlocal):
|
||||
return False
|
||||
return (self._std_offset == other._std_offset and
|
||||
self._dst_offset == other._dst_offset)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s()" % self.__class__.__name__
|
||||
|
||||
__reduce__ = object.__reduce__
|
||||
|
||||
|
||||
class _ttinfo(object):
|
||||
__slots__ = ["offset", "delta", "isdst", "abbr", "isstd", "isgmt"]
|
||||
|
||||
def __init__(self):
|
||||
for attr in self.__slots__:
|
||||
setattr(self, attr, None)
|
||||
|
||||
def __repr__(self):
|
||||
l = []
|
||||
for attr in self.__slots__:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
l.append("%s=%s" % (attr, repr(value)))
|
||||
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, _ttinfo):
|
||||
return False
|
||||
return (self.offset == other.offset and
|
||||
self.delta == other.delta and
|
||||
self.isdst == other.isdst and
|
||||
self.abbr == other.abbr and
|
||||
self.isstd == other.isstd and
|
||||
self.isgmt == other.isgmt)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __getstate__(self):
|
||||
state = {}
|
||||
for name in self.__slots__:
|
||||
state[name] = getattr(self, name, None)
|
||||
return state
|
||||
|
||||
def __setstate__(self, state):
|
||||
for name in self.__slots__:
|
||||
if name in state:
|
||||
setattr(self, name, state[name])
|
||||
|
||||
|
||||
class tzfile(datetime.tzinfo):
|
||||
|
||||
# http://www.twinsun.com/tz/tz-link.htm
|
||||
# ftp://ftp.iana.org/tz/tz*.tar.gz
|
||||
|
||||
def __init__(self, fileobj, filename=None):
|
||||
file_opened_here = False
|
||||
if isinstance(fileobj, string_types):
|
||||
self._filename = fileobj
|
||||
fileobj = open(fileobj, 'rb')
|
||||
file_opened_here = True
|
||||
elif filename is not None:
|
||||
self._filename = filename
|
||||
elif hasattr(fileobj, "name"):
|
||||
self._filename = fileobj.name
|
||||
else:
|
||||
self._filename = repr(fileobj)
|
||||
|
||||
# From tzfile(5):
|
||||
#
|
||||
# The time zone information files used by tzset(3)
|
||||
# begin with the magic characters "TZif" to identify
|
||||
# them as time zone information files, followed by
|
||||
# sixteen bytes reserved for future use, followed by
|
||||
# six four-byte values of type long, written in a
|
||||
# ``standard'' byte order (the high-order byte
|
||||
# of the value is written first).
|
||||
try:
|
||||
if fileobj.read(4).decode() != "TZif":
|
||||
raise ValueError("magic not found")
|
||||
|
||||
fileobj.read(16)
|
||||
|
||||
(
|
||||
# The number of UTC/local indicators stored in the file.
|
||||
ttisgmtcnt,
|
||||
|
||||
# The number of standard/wall indicators stored in the file.
|
||||
ttisstdcnt,
|
||||
|
||||
# The number of leap seconds for which data is
|
||||
# stored in the file.
|
||||
leapcnt,
|
||||
|
||||
# The number of "transition times" for which data
|
||||
# is stored in the file.
|
||||
timecnt,
|
||||
|
||||
# The number of "local time types" for which data
|
||||
# is stored in the file (must not be zero).
|
||||
typecnt,
|
||||
|
||||
# The number of characters of "time zone
|
||||
# abbreviation strings" stored in the file.
|
||||
charcnt,
|
||||
|
||||
) = struct.unpack(">6l", fileobj.read(24))
|
||||
|
||||
# The above header is followed by tzh_timecnt four-byte
|
||||
# values of type long, sorted in ascending order.
|
||||
# These values are written in ``standard'' byte order.
|
||||
# Each is used as a transition time (as returned by
|
||||
# time(2)) at which the rules for computing local time
|
||||
# change.
|
||||
|
||||
if timecnt:
|
||||
self._trans_list = struct.unpack(">%dl" % timecnt,
|
||||
fileobj.read(timecnt*4))
|
||||
else:
|
||||
self._trans_list = []
|
||||
|
||||
# Next come tzh_timecnt one-byte values of type unsigned
|
||||
# char; each one tells which of the different types of
|
||||
# ``local time'' types described in the file is associated
|
||||
# with the same-indexed transition time. These values
|
||||
# serve as indices into an array of ttinfo structures that
|
||||
# appears next in the file.
|
||||
|
||||
if timecnt:
|
||||
self._trans_idx = struct.unpack(">%dB" % timecnt,
|
||||
fileobj.read(timecnt))
|
||||
else:
|
||||
self._trans_idx = []
|
||||
|
||||
# Each ttinfo structure is written as a four-byte value
|
||||
# for tt_gmtoff of type long, in a standard byte
|
||||
# order, followed by a one-byte value for tt_isdst
|
||||
# and a one-byte value for tt_abbrind. In each
|
||||
# structure, tt_gmtoff gives the number of
|
||||
# seconds to be added to UTC, tt_isdst tells whether
|
||||
# tm_isdst should be set by localtime(3), and
|
||||
# tt_abbrind serves as an index into the array of
|
||||
# time zone abbreviation characters that follow the
|
||||
# ttinfo structure(s) in the file.
|
||||
|
||||
ttinfo = []
|
||||
|
||||
for i in range(typecnt):
|
||||
ttinfo.append(struct.unpack(">lbb", fileobj.read(6)))
|
||||
|
||||
abbr = fileobj.read(charcnt).decode()
|
||||
|
||||
# Then there are tzh_leapcnt pairs of four-byte
|
||||
# values, written in standard byte order; the
|
||||
# first value of each pair gives the time (as
|
||||
# returned by time(2)) at which a leap second
|
||||
# occurs; the second gives the total number of
|
||||
# leap seconds to be applied after the given time.
|
||||
# The pairs of values are sorted in ascending order
|
||||
# by time.
|
||||
|
||||
# Not used, for now
|
||||
# if leapcnt:
|
||||
# leap = struct.unpack(">%dl" % (leapcnt*2),
|
||||
# fileobj.read(leapcnt*8))
|
||||
|
||||
# Then there are tzh_ttisstdcnt standard/wall
|
||||
# indicators, each stored as a one-byte value;
|
||||
# they tell whether the transition times associated
|
||||
# with local time types were specified as standard
|
||||
# time or wall clock time, and are used when
|
||||
# a time zone file is used in handling POSIX-style
|
||||
# time zone environment variables.
|
||||
|
||||
if ttisstdcnt:
|
||||
isstd = struct.unpack(">%db" % ttisstdcnt,
|
||||
fileobj.read(ttisstdcnt))
|
||||
|
||||
# Finally, there are tzh_ttisgmtcnt UTC/local
|
||||
# indicators, each stored as a one-byte value;
|
||||
# they tell whether the transition times associated
|
||||
# with local time types were specified as UTC or
|
||||
# local time, and are used when a time zone file
|
||||
# is used in handling POSIX-style time zone envi-
|
||||
# ronment variables.
|
||||
|
||||
if ttisgmtcnt:
|
||||
isgmt = struct.unpack(">%db" % ttisgmtcnt,
|
||||
fileobj.read(ttisgmtcnt))
|
||||
|
||||
# ** Everything has been read **
|
||||
finally:
|
||||
if file_opened_here:
|
||||
fileobj.close()
|
||||
|
||||
# Build ttinfo list
|
||||
self._ttinfo_list = []
|
||||
for i in range(typecnt):
|
||||
gmtoff, isdst, abbrind = ttinfo[i]
|
||||
# Round to full-minutes if that's not the case. Python's
|
||||
# datetime doesn't accept sub-minute timezones. Check
|
||||
# http://python.org/sf/1447945 for some information.
|
||||
gmtoff = (gmtoff+30)//60*60
|
||||
tti = _ttinfo()
|
||||
tti.offset = gmtoff
|
||||
tti.delta = datetime.timedelta(seconds=gmtoff)
|
||||
tti.isdst = isdst
|
||||
tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)]
|
||||
tti.isstd = (ttisstdcnt > i and isstd[i] != 0)
|
||||
tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0)
|
||||
self._ttinfo_list.append(tti)
|
||||
|
||||
# Replace ttinfo indexes for ttinfo objects.
|
||||
trans_idx = []
|
||||
for idx in self._trans_idx:
|
||||
trans_idx.append(self._ttinfo_list[idx])
|
||||
self._trans_idx = tuple(trans_idx)
|
||||
|
||||
# Set standard, dst, and before ttinfos. before will be
|
||||
# used when a given time is before any transitions,
|
||||
# and will be set to the first non-dst ttinfo, or to
|
||||
# the first dst, if all of them are dst.
|
||||
self._ttinfo_std = None
|
||||
self._ttinfo_dst = None
|
||||
self._ttinfo_before = None
|
||||
if self._ttinfo_list:
|
||||
if not self._trans_list:
|
||||
self._ttinfo_std = self._ttinfo_first = self._ttinfo_list[0]
|
||||
else:
|
||||
for i in range(timecnt-1, -1, -1):
|
||||
tti = self._trans_idx[i]
|
||||
if not self._ttinfo_std and not tti.isdst:
|
||||
self._ttinfo_std = tti
|
||||
elif not self._ttinfo_dst and tti.isdst:
|
||||
self._ttinfo_dst = tti
|
||||
if self._ttinfo_std and self._ttinfo_dst:
|
||||
break
|
||||
else:
|
||||
if self._ttinfo_dst and not self._ttinfo_std:
|
||||
self._ttinfo_std = self._ttinfo_dst
|
||||
|
||||
for tti in self._ttinfo_list:
|
||||
if not tti.isdst:
|
||||
self._ttinfo_before = tti
|
||||
break
|
||||
else:
|
||||
self._ttinfo_before = self._ttinfo_list[0]
|
||||
|
||||
# Now fix transition times to become relative to wall time.
|
||||
#
|
||||
# I'm not sure about this. In my tests, the tz source file
|
||||
# is setup to wall time, and in the binary file isstd and
|
||||
# isgmt are off, so it should be in wall time. OTOH, it's
|
||||
# always in gmt time. Let me know if you have comments
|
||||
# about this.
|
||||
laststdoffset = 0
|
||||
self._trans_list = list(self._trans_list)
|
||||
for i in range(len(self._trans_list)):
|
||||
tti = self._trans_idx[i]
|
||||
if not tti.isdst:
|
||||
# This is std time.
|
||||
self._trans_list[i] += tti.offset
|
||||
laststdoffset = tti.offset
|
||||
else:
|
||||
# This is dst time. Convert to std.
|
||||
self._trans_list[i] += laststdoffset
|
||||
self._trans_list = tuple(self._trans_list)
|
||||
|
||||
def _find_ttinfo(self, dt, laststd=0):
|
||||
timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400
|
||||
+ dt.hour * 3600
|
||||
+ dt.minute * 60
|
||||
+ dt.second)
|
||||
idx = 0
|
||||
for trans in self._trans_list:
|
||||
if timestamp < trans:
|
||||
break
|
||||
idx += 1
|
||||
else:
|
||||
return self._ttinfo_std
|
||||
if idx == 0:
|
||||
return self._ttinfo_before
|
||||
if laststd:
|
||||
while idx > 0:
|
||||
tti = self._trans_idx[idx-1]
|
||||
if not tti.isdst:
|
||||
return tti
|
||||
idx -= 1
|
||||
else:
|
||||
return self._ttinfo_std
|
||||
else:
|
||||
return self._trans_idx[idx-1]
|
||||
|
||||
def utcoffset(self, dt):
|
||||
if not self._ttinfo_std:
|
||||
return ZERO
|
||||
return self._find_ttinfo(dt).delta
|
||||
|
||||
def dst(self, dt):
|
||||
if not self._ttinfo_dst:
|
||||
return ZERO
|
||||
tti = self._find_ttinfo(dt)
|
||||
if not tti.isdst:
|
||||
return ZERO
|
||||
|
||||
# The documentation says that utcoffset()-dst() must
|
||||
# be constant for every dt.
|
||||
return tti.delta-self._find_ttinfo(dt, laststd=1).delta
|
||||
|
||||
# An alternative for that would be:
|
||||
#
|
||||
# return self._ttinfo_dst.offset-self._ttinfo_std.offset
|
||||
#
|
||||
# However, this class stores historical changes in the
|
||||
# dst offset, so I believe that this wouldn't be the right
|
||||
# way to implement this.
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
if not self._ttinfo_std:
|
||||
return None
|
||||
return self._find_ttinfo(dt).abbr
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, tzfile):
|
||||
return False
|
||||
return (self._trans_list == other._trans_list and
|
||||
self._trans_idx == other._trans_idx and
|
||||
self._ttinfo_list == other._ttinfo_list)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%s)" % (self.__class__.__name__, repr(self._filename))
|
||||
|
||||
def __reduce__(self):
|
||||
if not os.path.isfile(self._filename):
|
||||
raise ValueError("Unpickable %s class" % self.__class__.__name__)
|
||||
return (self.__class__, (self._filename,))
|
||||
|
||||
|
||||
class tzrange(datetime.tzinfo):
|
||||
def __init__(self, stdabbr, stdoffset=None,
|
||||
dstabbr=None, dstoffset=None,
|
||||
start=None, end=None):
|
||||
global relativedelta
|
||||
if not relativedelta:
|
||||
from dateutil import relativedelta
|
||||
self._std_abbr = stdabbr
|
||||
self._dst_abbr = dstabbr
|
||||
if stdoffset is not None:
|
||||
self._std_offset = datetime.timedelta(seconds=stdoffset)
|
||||
else:
|
||||
self._std_offset = ZERO
|
||||
if dstoffset is not None:
|
||||
self._dst_offset = datetime.timedelta(seconds=dstoffset)
|
||||
elif dstabbr and stdoffset is not None:
|
||||
self._dst_offset = self._std_offset+datetime.timedelta(hours=+1)
|
||||
else:
|
||||
self._dst_offset = ZERO
|
||||
if dstabbr and start is None:
|
||||
self._start_delta = relativedelta.relativedelta(
|
||||
hours=+2, month=4, day=1, weekday=relativedelta.SU(+1))
|
||||
else:
|
||||
self._start_delta = start
|
||||
if dstabbr and end is None:
|
||||
self._end_delta = relativedelta.relativedelta(
|
||||
hours=+1, month=10, day=31, weekday=relativedelta.SU(-1))
|
||||
else:
|
||||
self._end_delta = end
|
||||
|
||||
def utcoffset(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dst_offset
|
||||
else:
|
||||
return self._std_offset
|
||||
|
||||
def dst(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dst_offset-self._std_offset
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dst_abbr
|
||||
else:
|
||||
return self._std_abbr
|
||||
|
||||
def _isdst(self, dt):
|
||||
if not self._start_delta:
|
||||
return False
|
||||
year = datetime.datetime(dt.year, 1, 1)
|
||||
start = year+self._start_delta
|
||||
end = year+self._end_delta
|
||||
dt = dt.replace(tzinfo=None)
|
||||
if start < end:
|
||||
return dt >= start and dt < end
|
||||
else:
|
||||
return dt >= start or dt < end
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, tzrange):
|
||||
return False
|
||||
return (self._std_abbr == other._std_abbr and
|
||||
self._dst_abbr == other._dst_abbr and
|
||||
self._std_offset == other._std_offset and
|
||||
self._dst_offset == other._dst_offset and
|
||||
self._start_delta == other._start_delta and
|
||||
self._end_delta == other._end_delta)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(...)" % self.__class__.__name__
|
||||
|
||||
__reduce__ = object.__reduce__
|
||||
|
||||
|
||||
class tzstr(tzrange):
|
||||
|
||||
def __init__(self, s):
|
||||
global parser
|
||||
if not parser:
|
||||
from dateutil import parser
|
||||
self._s = s
|
||||
|
||||
res = parser._parsetz(s)
|
||||
if res is None:
|
||||
raise ValueError("unknown string format")
|
||||
|
||||
# Here we break the compatibility with the TZ variable handling.
|
||||
# GMT-3 actually *means* the timezone -3.
|
||||
if res.stdabbr in ("GMT", "UTC"):
|
||||
res.stdoffset *= -1
|
||||
|
||||
# We must initialize it first, since _delta() needs
|
||||
# _std_offset and _dst_offset set. Use False in start/end
|
||||
# to avoid building it two times.
|
||||
tzrange.__init__(self, res.stdabbr, res.stdoffset,
|
||||
res.dstabbr, res.dstoffset,
|
||||
start=False, end=False)
|
||||
|
||||
if not res.dstabbr:
|
||||
self._start_delta = None
|
||||
self._end_delta = None
|
||||
else:
|
||||
self._start_delta = self._delta(res.start)
|
||||
if self._start_delta:
|
||||
self._end_delta = self._delta(res.end, isend=1)
|
||||
|
||||
def _delta(self, x, isend=0):
|
||||
kwargs = {}
|
||||
if x.month is not None:
|
||||
kwargs["month"] = x.month
|
||||
if x.weekday is not None:
|
||||
kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week)
|
||||
if x.week > 0:
|
||||
kwargs["day"] = 1
|
||||
else:
|
||||
kwargs["day"] = 31
|
||||
elif x.day:
|
||||
kwargs["day"] = x.day
|
||||
elif x.yday is not None:
|
||||
kwargs["yearday"] = x.yday
|
||||
elif x.jyday is not None:
|
||||
kwargs["nlyearday"] = x.jyday
|
||||
if not kwargs:
|
||||
# Default is to start on first sunday of april, and end
|
||||
# on last sunday of october.
|
||||
if not isend:
|
||||
kwargs["month"] = 4
|
||||
kwargs["day"] = 1
|
||||
kwargs["weekday"] = relativedelta.SU(+1)
|
||||
else:
|
||||
kwargs["month"] = 10
|
||||
kwargs["day"] = 31
|
||||
kwargs["weekday"] = relativedelta.SU(-1)
|
||||
if x.time is not None:
|
||||
kwargs["seconds"] = x.time
|
||||
else:
|
||||
# Default is 2AM.
|
||||
kwargs["seconds"] = 7200
|
||||
if isend:
|
||||
# Convert to standard time, to follow the documented way
|
||||
# of working with the extra hour. See the documentation
|
||||
# of the tzinfo class.
|
||||
delta = self._dst_offset-self._std_offset
|
||||
kwargs["seconds"] -= delta.seconds+delta.days*86400
|
||||
return relativedelta.relativedelta(**kwargs)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%s)" % (self.__class__.__name__, repr(self._s))
|
||||
|
||||
|
||||
class _tzicalvtzcomp(object):
|
||||
def __init__(self, tzoffsetfrom, tzoffsetto, isdst,
|
||||
tzname=None, rrule=None):
|
||||
self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom)
|
||||
self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto)
|
||||
self.tzoffsetdiff = self.tzoffsetto-self.tzoffsetfrom
|
||||
self.isdst = isdst
|
||||
self.tzname = tzname
|
||||
self.rrule = rrule
|
||||
|
||||
|
||||
class _tzicalvtz(datetime.tzinfo):
|
||||
def __init__(self, tzid, comps=[]):
|
||||
self._tzid = tzid
|
||||
self._comps = comps
|
||||
self._cachedate = []
|
||||
self._cachecomp = []
|
||||
|
||||
def _find_comp(self, dt):
|
||||
if len(self._comps) == 1:
|
||||
return self._comps[0]
|
||||
dt = dt.replace(tzinfo=None)
|
||||
try:
|
||||
return self._cachecomp[self._cachedate.index(dt)]
|
||||
except ValueError:
|
||||
pass
|
||||
lastcomp = None
|
||||
lastcompdt = None
|
||||
for comp in self._comps:
|
||||
if not comp.isdst:
|
||||
# Handle the extra hour in DST -> STD
|
||||
compdt = comp.rrule.before(dt-comp.tzoffsetdiff, inc=True)
|
||||
else:
|
||||
compdt = comp.rrule.before(dt, inc=True)
|
||||
if compdt and (not lastcompdt or lastcompdt < compdt):
|
||||
lastcompdt = compdt
|
||||
lastcomp = comp
|
||||
if not lastcomp:
|
||||
# RFC says nothing about what to do when a given
|
||||
# time is before the first onset date. We'll look for the
|
||||
# first standard component, or the first component, if
|
||||
# none is found.
|
||||
for comp in self._comps:
|
||||
if not comp.isdst:
|
||||
lastcomp = comp
|
||||
break
|
||||
else:
|
||||
lastcomp = self._comps[0]
|
||||
self._cachedate.insert(0, dt)
|
||||
self._cachecomp.insert(0, lastcomp)
|
||||
if len(self._cachedate) > 10:
|
||||
self._cachedate.pop()
|
||||
self._cachecomp.pop()
|
||||
return lastcomp
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self._find_comp(dt).tzoffsetto
|
||||
|
||||
def dst(self, dt):
|
||||
comp = self._find_comp(dt)
|
||||
if comp.isdst:
|
||||
return comp.tzoffsetdiff
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
return self._find_comp(dt).tzname
|
||||
|
||||
def __repr__(self):
|
||||
return "<tzicalvtz %s>" % repr(self._tzid)
|
||||
|
||||
__reduce__ = object.__reduce__
|
||||
|
||||
|
||||
class tzical(object):
|
||||
def __init__(self, fileobj):
|
||||
global rrule
|
||||
if not rrule:
|
||||
from dateutil import rrule
|
||||
|
||||
if isinstance(fileobj, string_types):
|
||||
self._s = fileobj
|
||||
# ical should be encoded in UTF-8 with CRLF
|
||||
fileobj = open(fileobj, 'r')
|
||||
elif hasattr(fileobj, "name"):
|
||||
self._s = fileobj.name
|
||||
else:
|
||||
self._s = repr(fileobj)
|
||||
|
||||
self._vtz = {}
|
||||
|
||||
self._parse_rfc(fileobj.read())
|
||||
|
||||
def keys(self):
|
||||
return list(self._vtz.keys())
|
||||
|
||||
def get(self, tzid=None):
|
||||
if tzid is None:
|
||||
keys = list(self._vtz.keys())
|
||||
if len(keys) == 0:
|
||||
raise ValueError("no timezones defined")
|
||||
elif len(keys) > 1:
|
||||
raise ValueError("more than one timezone available")
|
||||
tzid = keys[0]
|
||||
return self._vtz.get(tzid)
|
||||
|
||||
def _parse_offset(self, s):
|
||||
s = s.strip()
|
||||
if not s:
|
||||
raise ValueError("empty offset")
|
||||
if s[0] in ('+', '-'):
|
||||
signal = (-1, +1)[s[0] == '+']
|
||||
s = s[1:]
|
||||
else:
|
||||
signal = +1
|
||||
if len(s) == 4:
|
||||
return (int(s[:2])*3600+int(s[2:])*60)*signal
|
||||
elif len(s) == 6:
|
||||
return (int(s[:2])*3600+int(s[2:4])*60+int(s[4:]))*signal
|
||||
else:
|
||||
raise ValueError("invalid offset: "+s)
|
||||
|
||||
def _parse_rfc(self, s):
|
||||
lines = s.splitlines()
|
||||
if not lines:
|
||||
raise ValueError("empty string")
|
||||
|
||||
# Unfold
|
||||
i = 0
|
||||
while i < len(lines):
|
||||
line = lines[i].rstrip()
|
||||
if not line:
|
||||
del lines[i]
|
||||
elif i > 0 and line[0] == " ":
|
||||
lines[i-1] += line[1:]
|
||||
del lines[i]
|
||||
else:
|
||||
i += 1
|
||||
|
||||
tzid = None
|
||||
comps = []
|
||||
invtz = False
|
||||
comptype = None
|
||||
for line in lines:
|
||||
if not line:
|
||||
continue
|
||||
name, value = line.split(':', 1)
|
||||
parms = name.split(';')
|
||||
if not parms:
|
||||
raise ValueError("empty property name")
|
||||
name = parms[0].upper()
|
||||
parms = parms[1:]
|
||||
if invtz:
|
||||
if name == "BEGIN":
|
||||
if value in ("STANDARD", "DAYLIGHT"):
|
||||
# Process component
|
||||
pass
|
||||
else:
|
||||
raise ValueError("unknown component: "+value)
|
||||
comptype = value
|
||||
founddtstart = False
|
||||
tzoffsetfrom = None
|
||||
tzoffsetto = None
|
||||
rrulelines = []
|
||||
tzname = None
|
||||
elif name == "END":
|
||||
if value == "VTIMEZONE":
|
||||
if comptype:
|
||||
raise ValueError("component not closed: "+comptype)
|
||||
if not tzid:
|
||||
raise ValueError("mandatory TZID not found")
|
||||
if not comps:
|
||||
raise ValueError(
|
||||
"at least one component is needed")
|
||||
# Process vtimezone
|
||||
self._vtz[tzid] = _tzicalvtz(tzid, comps)
|
||||
invtz = False
|
||||
elif value == comptype:
|
||||
if not founddtstart:
|
||||
raise ValueError("mandatory DTSTART not found")
|
||||
if tzoffsetfrom is None:
|
||||
raise ValueError(
|
||||
"mandatory TZOFFSETFROM not found")
|
||||
if tzoffsetto is None:
|
||||
raise ValueError(
|
||||
"mandatory TZOFFSETFROM not found")
|
||||
# Process component
|
||||
rr = None
|
||||
if rrulelines:
|
||||
rr = rrule.rrulestr("\n".join(rrulelines),
|
||||
compatible=True,
|
||||
ignoretz=True,
|
||||
cache=True)
|
||||
comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto,
|
||||
(comptype == "DAYLIGHT"),
|
||||
tzname, rr)
|
||||
comps.append(comp)
|
||||
comptype = None
|
||||
else:
|
||||
raise ValueError("invalid component end: "+value)
|
||||
elif comptype:
|
||||
if name == "DTSTART":
|
||||
rrulelines.append(line)
|
||||
founddtstart = True
|
||||
elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"):
|
||||
rrulelines.append(line)
|
||||
elif name == "TZOFFSETFROM":
|
||||
if parms:
|
||||
raise ValueError(
|
||||
"unsupported %s parm: %s " % (name, parms[0]))
|
||||
tzoffsetfrom = self._parse_offset(value)
|
||||
elif name == "TZOFFSETTO":
|
||||
if parms:
|
||||
raise ValueError(
|
||||
"unsupported TZOFFSETTO parm: "+parms[0])
|
||||
tzoffsetto = self._parse_offset(value)
|
||||
elif name == "TZNAME":
|
||||
if parms:
|
||||
raise ValueError(
|
||||
"unsupported TZNAME parm: "+parms[0])
|
||||
tzname = value
|
||||
elif name == "COMMENT":
|
||||
pass
|
||||
else:
|
||||
raise ValueError("unsupported property: "+name)
|
||||
else:
|
||||
if name == "TZID":
|
||||
if parms:
|
||||
raise ValueError(
|
||||
"unsupported TZID parm: "+parms[0])
|
||||
tzid = value
|
||||
elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"):
|
||||
pass
|
||||
else:
|
||||
raise ValueError("unsupported property: "+name)
|
||||
elif name == "BEGIN" and value == "VTIMEZONE":
|
||||
tzid = None
|
||||
comps = []
|
||||
invtz = True
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%s)" % (self.__class__.__name__, repr(self._s))
|
||||
|
||||
if sys.platform != "win32":
|
||||
TZFILES = ["/etc/localtime", "localtime"]
|
||||
TZPATHS = ["/usr/share/zoneinfo", "/usr/lib/zoneinfo", "/etc/zoneinfo"]
|
||||
else:
|
||||
TZFILES = []
|
||||
TZPATHS = []
|
||||
|
||||
|
||||
def gettz(name=None):
|
||||
tz = None
|
||||
if not name:
|
||||
try:
|
||||
name = os.environ["TZ"]
|
||||
except KeyError:
|
||||
pass
|
||||
if name is None or name == ":":
|
||||
for filepath in TZFILES:
|
||||
if not os.path.isabs(filepath):
|
||||
filename = filepath
|
||||
for path in TZPATHS:
|
||||
filepath = os.path.join(path, filename)
|
||||
if os.path.isfile(filepath):
|
||||
break
|
||||
else:
|
||||
continue
|
||||
if os.path.isfile(filepath):
|
||||
try:
|
||||
tz = tzfile(filepath)
|
||||
break
|
||||
except (IOError, OSError, ValueError):
|
||||
pass
|
||||
else:
|
||||
tz = tzlocal()
|
||||
else:
|
||||
if name.startswith(":"):
|
||||
name = name[1:]
|
||||
if os.path.isabs(name):
|
||||
if os.path.isfile(name):
|
||||
tz = tzfile(name)
|
||||
else:
|
||||
tz = None
|
||||
else:
|
||||
for path in TZPATHS:
|
||||
filepath = os.path.join(path, name)
|
||||
if not os.path.isfile(filepath):
|
||||
filepath = filepath.replace(' ', '_')
|
||||
if not os.path.isfile(filepath):
|
||||
continue
|
||||
try:
|
||||
tz = tzfile(filepath)
|
||||
break
|
||||
except (IOError, OSError, ValueError):
|
||||
pass
|
||||
else:
|
||||
tz = None
|
||||
if tzwin is not None:
|
||||
try:
|
||||
tz = tzwin(name)
|
||||
except WindowsError:
|
||||
tz = None
|
||||
if not tz:
|
||||
from dateutil.zoneinfo import gettz
|
||||
tz = gettz(name)
|
||||
if not tz:
|
||||
for c in name:
|
||||
# name must have at least one offset to be a tzstr
|
||||
if c in "0123456789":
|
||||
try:
|
||||
tz = tzstr(name)
|
||||
except ValueError:
|
||||
pass
|
||||
break
|
||||
else:
|
||||
if name in ("GMT", "UTC"):
|
||||
tz = tzutc()
|
||||
elif name in time.tzname:
|
||||
tz = tzlocal()
|
||||
return tz
|
||||
|
||||
# vim:ts=4:sw=4:et
|
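A brief sketch of the timezone helpers defined above (the zone name is illustrative; gettz() may return None when no matching zone data can be found):

from datetime import datetime
from dateutil import tz

utc = tz.tzutc()
fixed = tz.tzoffset("CET", 3600)          # fixed +01:00 offset
local = tz.tzlocal()                      # the machine's own rules
eastern = tz.gettz("America/New_York")    # tzfile/zoneinfo lookup, may be None

now = datetime(2016, 2, 2, 12, 0, tzinfo=utc)
print(now.astimezone(local))              # same instant in local time
print(fixed.utcoffset(now))               # 1:00:00
if eastern is not None:
    print(now.astimezone(eastern))        # 2016-02-02 07:00:00-05:00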
184
lib/dateutil/tzwin.py
Normal file
@@ -0,0 +1,184 @@
|
||||
# This code was originally contributed by Jeffrey Harris.
|
||||
import datetime
|
||||
import struct
|
||||
|
||||
from six.moves import winreg
|
||||
|
||||
__all__ = ["tzwin", "tzwinlocal"]
|
||||
|
||||
ONEWEEK = datetime.timedelta(7)
|
||||
|
||||
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
|
||||
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
|
||||
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
|
||||
|
||||
|
||||
def _settzkeyname():
|
||||
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
|
||||
try:
|
||||
winreg.OpenKey(handle, TZKEYNAMENT).Close()
|
||||
TZKEYNAME = TZKEYNAMENT
|
||||
except WindowsError:
|
||||
TZKEYNAME = TZKEYNAME9X
|
||||
handle.Close()
|
||||
return TZKEYNAME
|
||||
|
||||
TZKEYNAME = _settzkeyname()
|
||||
|
||||
|
||||
class tzwinbase(datetime.tzinfo):
|
||||
"""tzinfo class based on win32's timezones available in the registry."""
|
||||
|
||||
def utcoffset(self, dt):
|
||||
if self._isdst(dt):
|
||||
return datetime.timedelta(minutes=self._dstoffset)
|
||||
else:
|
||||
return datetime.timedelta(minutes=self._stdoffset)
|
||||
|
||||
def dst(self, dt):
|
||||
if self._isdst(dt):
|
||||
minutes = self._dstoffset - self._stdoffset
|
||||
return datetime.timedelta(minutes=minutes)
|
||||
else:
|
||||
return datetime.timedelta(0)
|
||||
|
||||
def tzname(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dstname
|
||||
else:
|
||||
return self._stdname
|
||||
|
||||
def list():
|
||||
"""Return a list of all time zones known to the system."""
|
||||
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
|
||||
tzkey = winreg.OpenKey(handle, TZKEYNAME)
|
||||
result = [winreg.EnumKey(tzkey, i)
|
||||
for i in range(winreg.QueryInfoKey(tzkey)[0])]
|
||||
tzkey.Close()
|
||||
handle.Close()
|
||||
return result
|
||||
list = staticmethod(list)
|
||||
|
||||
def display(self):
|
||||
return self._display
|
||||
|
||||
def _isdst(self, dt):
|
||||
if not self._dstmonth:
|
||||
# dstmonth == 0 signals the zone has no daylight saving time
|
||||
return False
|
||||
dston = picknthweekday(dt.year, self._dstmonth, self._dstdayofweek,
|
||||
self._dsthour, self._dstminute,
|
||||
self._dstweeknumber)
|
||||
dstoff = picknthweekday(dt.year, self._stdmonth, self._stddayofweek,
|
||||
self._stdhour, self._stdminute,
|
||||
self._stdweeknumber)
|
||||
if dston < dstoff:
|
||||
return dston <= dt.replace(tzinfo=None) < dstoff
|
||||
else:
|
||||
return not dstoff <= dt.replace(tzinfo=None) < dston
|
||||
|
||||
|
||||
class tzwin(tzwinbase):
|
||||
|
||||
def __init__(self, name):
|
||||
self._name = name
|
||||
|
||||
# multiple contexts only possible in 2.7 and 3.1, we still support 2.6
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
with winreg.OpenKey(handle,
|
||||
"%s\%s" % (TZKEYNAME, name)) as tzkey:
|
||||
keydict = valuestodict(tzkey)
|
||||
|
||||
self._stdname = keydict["Std"].encode("iso-8859-1")
|
||||
self._dstname = keydict["Dlt"].encode("iso-8859-1")
|
||||
|
||||
self._display = keydict["Display"]
|
||||
|
||||
# See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
|
||||
tup = struct.unpack("=3l16h", keydict["TZI"])
|
||||
self._stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1
|
||||
self._dstoffset = self._stdoffset-tup[2] # + DaylightBias * -1
|
||||
|
||||
# for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
|
||||
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
|
||||
(self._stdmonth,
|
||||
self._stddayofweek, # Sunday = 0
|
||||
self._stdweeknumber, # Last = 5
|
||||
self._stdhour,
|
||||
self._stdminute) = tup[4:9]
|
||||
|
||||
(self._dstmonth,
|
||||
self._dstdayofweek, # Sunday = 0
|
||||
self._dstweeknumber, # Last = 5
|
||||
self._dsthour,
|
||||
self._dstminute) = tup[12:17]
|
||||
|
||||
def __repr__(self):
|
||||
return "tzwin(%s)" % repr(self._name)
|
||||
|
||||
def __reduce__(self):
|
||||
return (self.__class__, (self._name,))
|
||||
|
||||
|
||||
class tzwinlocal(tzwinbase):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
|
||||
with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
|
||||
keydict = valuestodict(tzlocalkey)
|
||||
|
||||
self._stdname = keydict["StandardName"].encode("iso-8859-1")
|
||||
self._dstname = keydict["DaylightName"].encode("iso-8859-1")
|
||||
|
||||
try:
|
||||
with winreg.OpenKey(
|
||||
handle, "%s\%s" % (TZKEYNAME, self._stdname)) as tzkey:
|
||||
_keydict = valuestodict(tzkey)
|
||||
self._display = _keydict["Display"]
|
||||
except OSError:
|
||||
self._display = None
|
||||
|
||||
self._stdoffset = -keydict["Bias"]-keydict["StandardBias"]
|
||||
self._dstoffset = self._stdoffset-keydict["DaylightBias"]
|
||||
|
||||
# See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
|
||||
tup = struct.unpack("=8h", keydict["StandardStart"])
|
||||
|
||||
(self._stdmonth,
|
||||
self._stddayofweek, # Sunday = 0
|
||||
self._stdweeknumber, # Last = 5
|
||||
self._stdhour,
|
||||
self._stdminute) = tup[1:6]
|
||||
|
||||
tup = struct.unpack("=8h", keydict["DaylightStart"])
|
||||
|
||||
(self._dstmonth,
|
||||
self._dstdayofweek, # Sunday = 0
|
||||
self._dstweeknumber, # Last = 5
|
||||
self._dsthour,
|
||||
self._dstminute) = tup[1:6]
|
||||
|
||||
def __reduce__(self):
|
||||
return (self.__class__, ())
|
||||
|
||||
|
||||
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
|
||||
"""dayofweek == 0 means Sunday, whichweek 5 means last instance"""
|
||||
first = datetime.datetime(year, month, 1, hour, minute)
|
||||
weekdayone = first.replace(day=((dayofweek-first.isoweekday()) % 7+1))
|
||||
for n in range(whichweek):
|
||||
dt = weekdayone+(whichweek-n)*ONEWEEK
|
||||
if dt.month == month:
|
||||
return dt
|
||||
|
||||
|
||||
def valuestodict(key):
|
||||
"""Convert a registry key's values to a dictionary."""
|
||||
dict = {}
|
||||
size = winreg.QueryInfoKey(key)[1]
|
||||
for i in range(size):
|
||||
data = winreg.EnumValue(key, i)
|
||||
dict[data[0]] = data[1]
|
||||
return dict
|
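The registry-backed classes above only import on Windows (they require six.moves.winreg); a hedged sketch of how they might be exercised there, with an illustrative registry zone name:

from datetime import datetime
from dateutil.tzwin import tzwin, tzwinlocal

print(tzwin.list()[:3])                        # zone key names known to Windows
eastern = tzwin("Eastern Standard Time")       # illustrative key name
print(eastern.tzname(datetime(2016, 7, 1)))    # daylight name in summer
print(tzwinlocal().display())                  # the machine's own zone description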
@@ -176,7 +176,7 @@ def initialize(config_file):
|
||||
plextv.refresh_users()
|
||||
|
||||
# Refresh the libraries list on startup
|
||||
if CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
|
||||
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP:
|
||||
pmsconnect.refresh_libraries()
|
||||
|
||||
# Store the original umask
|
||||
@@ -282,7 +282,7 @@ def initialize_scheduler():
|
||||
else:
|
||||
seconds = 0
|
||||
|
||||
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.UPDATE_SECTION_IDS != -1:
|
||||
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
|
||||
schedule_job(plextv.get_real_pms_url, 'Refresh Plex Server URLs',
|
||||
hours=12, minutes=0, seconds=0)
|
||||
schedule_job(pmsconnect.get_server_friendly_name, 'Refresh Plex Server Name',
|
||||
@@ -713,8 +713,8 @@ def dbcheck():
|
||||
|
||||
# Upgrade library_sections table from earlier versions (remove UNIQUE constraint on section_id)
|
||||
try:
|
||||
result = c_db.execute('PRAGMA index_xinfo("sqlite_autoindex_library_sections_1")')
|
||||
if result and 'server_id' not in [row[2] for row in result]:
|
||||
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="library_sections"').fetchone()
|
||||
if 'section_id INTEGER UNIQUE' in result[0]:
|
||||
logger.debug(u"Altering database. Removing unique constraint on section_id from library_sections table.")
|
||||
c_db.execute(
|
||||
'CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||
@@ -739,7 +739,7 @@ def dbcheck():
|
||||
'ALTER TABLE library_sections_temp RENAME TO library_sections'
|
||||
)
|
||||
except sqlite3.OperationalError:
|
||||
logger.debug(u"Unable to remove section_id unique constraint from library_sections.")
|
||||
logger.warn(u"Unable to remove section_id unique constraint from library_sections.")
|
||||
try:
|
||||
c_db.execute(
|
||||
'DROP TABLE library_sections_temp'
|
||||
@@ -747,10 +747,21 @@ def dbcheck():
|
||||
except:
|
||||
pass
|
||||
|
||||
# Upgrade library_sections table from earlier versions (remove duplicated libraries)
|
||||
try:
|
||||
result = c_db.execute('SELECT * FROM library_sections WHERE server_id = ""')
|
||||
if result.rowcount > 0:
|
||||
logger.debug(u"Altering database. Removing duplicate libraries from library_sections table.")
|
||||
c_db.execute(
|
||||
'DELETE FROM library_sections WHERE server_id = ""'
|
||||
)
|
||||
except sqlite3.OperationalError:
|
||||
logger.warn(u"Unable to remove duplicate libraries from library_sections table.")
|
||||
|
||||
# Upgrade users table from earlier versions (remove UNIQUE constraint on username)
|
||||
try:
|
||||
result = c_db.execute('PRAGMA index_xinfo("sqlite_autoindex_users_2")')
|
||||
if result and 'username' in [row[2] for row in result]:
|
||||
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="users"').fetchone()
|
||||
if 'username TEXT NOT NULL UNIQUE' in result[0]:
|
||||
logger.debug(u"Altering database. Removing unique constraint on username from users table.")
|
||||
c_db.execute(
|
||||
'CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||
@@ -773,7 +784,7 @@ def dbcheck():
|
||||
'ALTER TABLE users_temp RENAME TO users'
|
||||
)
|
||||
except sqlite3.OperationalError:
|
||||
logger.debug(u"Unable to remove username unique constraint from users.")
|
||||
logger.warn(u"Unable to remove username unique constraint from users.")
|
||||
try:
|
||||
c_db.execute(
|
||||
'DROP TABLE users_temp'
|
||||
|
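SQLite cannot drop a UNIQUE constraint in place, which is why the dbcheck() upgrades above rebuild the affected tables; the general pattern, with illustrative table and column names and only two columns shown, is:

import sqlite3

conn = sqlite3.connect('plexpy.db')
c = conn.cursor()
# 1. Create a replacement table without the UNIQUE constraint
c.execute('CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, section_id INTEGER)')
# 2. Copy the existing rows across
c.execute('INSERT INTO library_sections_temp (id, section_id) SELECT id, section_id FROM library_sections')
# 3. Swap the rebuilt table into place
c.execute('DROP TABLE library_sections')
c.execute('ALTER TABLE library_sections_temp RENAME TO library_sections')
conn.commit()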
@@ -156,8 +156,8 @@ class ActivityHandler(object):
|
||||
(self.get_session_key(), buffer_last_triggered))
|
||||
time_since_last_trigger = int(time.time()) - int(buffer_last_triggered)
|
||||
|
||||
if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
|
||||
time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
|
||||
if plexpy.CONFIG.BUFFER_THRESHOLD > 0 and (current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and \
|
||||
time_since_last_trigger == 0 or time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT):
|
||||
ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=db_stream, notify_action='buffer')).start()
|
||||
|
@@ -360,9 +360,10 @@ _CONFIG_DEFINITIONS = {
    'TV_NOTIFY_ON_STOP': (int, 'Monitoring', 0),
    'TV_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
    'TWITTER_ENABLED': (int, 'Twitter', 0),
    'TWITTER_PASSWORD': (str, 'Twitter', ''),
    'TWITTER_PREFIX': (str, 'Twitter', 'PlexPy'),
    'TWITTER_USERNAME': (str, 'Twitter', ''),
    'TWITTER_ACCESS_TOKEN': (str, 'Twitter', ''),
    'TWITTER_ACCESS_TOKEN_SECRET': (str, 'Twitter', ''),
    'TWITTER_CONSUMER_KEY': (str, 'Twitter', ''),
    'TWITTER_CONSUMER_SECRET': (str, 'Twitter', ''),
    'TWITTER_ON_PLAY': (int, 'Twitter', 0),
    'TWITTER_ON_STOP': (int, 'Twitter', 0),
    'TWITTER_ON_PAUSE': (int, 'Twitter', 0),
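These definitions map to the [Twitter] section of config.ini: the old username/password pair goes away and four credential keys take its place. Read back inside a running PlexPy process they would look roughly like this; the values stay empty until filled in under the Twitter notification agent settings:

```python
# Sketch only: plexpy.CONFIG is populated after plexpy.initialize() runs,
# so this is meaningful only inside a running PlexPy process.
import plexpy

for key in ('TWITTER_CONSUMER_KEY', 'TWITTER_CONSUMER_SECRET',
            'TWITTER_ACCESS_TOKEN', 'TWITTER_ACCESS_TOKEN_SECRET'):
    print('%s = %s' % (key, getattr(plexpy.CONFIG, key)))
```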
@@ -511,6 +512,7 @@ class Config(object):
|
||||
self.MOVIE_LOGGING_ENABLE = 0
|
||||
self.TV_LOGGING_ENABLE = 0
|
||||
self.CONFIG_VERSION = '1'
|
||||
|
||||
if self.CONFIG_VERSION == '1':
|
||||
# Change home_stats_cards to list
|
||||
if self.HOME_STATS_CARDS:
|
||||
@@ -524,4 +526,20 @@ class Config(object):
|
||||
if 'library_statistics' in home_library_cards:
|
||||
home_library_cards.remove('library_statistics')
|
||||
self.HOME_LIBRARY_CARDS = home_library_cards
|
||||
self.CONFIG_VERSION = '2'
|
||||
self.CONFIG_VERSION = '2'
|
||||
|
||||
if self.CONFIG_VERSION == '2':
|
||||
self.NOTIFY_ON_START_SUBJECT_TEXT = self.NOTIFY_ON_START_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_START_BODY_TEXT = self.NOTIFY_ON_START_BODY_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_STOP_SUBJECT_TEXT = self.NOTIFY_ON_STOP_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_STOP_BODY_TEXT = self.NOTIFY_ON_STOP_BODY_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_PAUSE_SUBJECT_TEXT = self.NOTIFY_ON_PAUSE_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_PAUSE_BODY_TEXT = self.NOTIFY_ON_PAUSE_BODY_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_RESUME_SUBJECT_TEXT = self.NOTIFY_ON_RESUME_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_RESUME_BODY_TEXT = self.NOTIFY_ON_RESUME_BODY_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_BUFFER_SUBJECT_TEXT = self.NOTIFY_ON_BUFFER_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_BUFFER_BODY_TEXT = self.NOTIFY_ON_BUFFER_BODY_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_WATCHED_SUBJECT_TEXT = self.NOTIFY_ON_WATCHED_SUBJECT_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_ON_WATCHED_BODY_TEXT = self.NOTIFY_ON_WATCHED_BODY_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.NOTIFY_SCRIPTS_ARGS_TEXT = self.NOTIFY_SCRIPTS_ARGS_TEXT.replace('{progress}','{progress_duration}')
|
||||
self.CONFIG_VERSION = '3'
|
||||
|
@@ -14,9 +14,9 @@
|
||||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from plexpy import logger, database, helpers, common
|
||||
import plexpy
|
||||
|
||||
import datetime
|
||||
import locale
|
||||
|
||||
|
||||
class Graphs(object):
|
||||
@@ -321,7 +321,7 @@ class Graphs(object):
|
||||
dt = datetime.datetime(*month_item[:6])
|
||||
date_string = dt.strftime('%Y-%m')
|
||||
|
||||
categories.append(dt.strftime('%b %Y').decode(locale.getlocale()[1]))
|
||||
categories.append(dt.strftime('%b %Y').decode(plexpy.SYS_ENCODING, 'replace'))
|
||||
series_1_value = 0
|
||||
series_2_value = 0
|
||||
series_3_value = 0
|
||||
|
@@ -135,6 +135,15 @@ def convert_seconds(s):

    return minutes


def convert_seconds_to_minutes(s):

    if str(s).isdigit():
        minutes = round(float(s) / 60, 0)

        return math.trunc(minutes)

    return 0


def today():
    today = datetime.date.today()

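The new helper truncates to whole minutes and falls back to 0 for anything that is not a plain digit string. A few illustrative calls, assuming the plexpy package from this diff is importable:

```python
# Illustrative calls; assumes the plexpy package is on the Python path.
from plexpy import helpers

print(helpers.convert_seconds_to_minutes(90))     # 2  (1.5 minutes rounded, then truncated)
print(helpers.convert_seconds_to_minutes('300'))  # 5
print(helpers.convert_seconds_to_minutes('abc'))  # 0  (non-digit input falls back to 0)
```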
@@ -446,85 +455,4 @@ def sanitize(string):
|
||||
if string:
|
||||
return unicode(string).replace('<','<').replace('>','>')
|
||||
else:
|
||||
return ''
|
||||
|
||||
def parse_js_date(date):
|
||||
"""
|
||||
Taken from moment library.
|
||||
|
||||
Translate the easy-to-use JavaScript format strings to Python's cumbersome
|
||||
strftime format. Also, this is some ugly code -- and it's completely
|
||||
order-dependent.
|
||||
"""
|
||||
# AM/PM
|
||||
if 'A' in date:
|
||||
date = date.replace('A', '%p')
|
||||
elif 'a' in date:
|
||||
date = date.replace('a', '%P')
|
||||
# 24 hours
|
||||
if 'HH' in date:
|
||||
date = date.replace('HH', '%H')
|
||||
elif 'H' in date:
|
||||
date = date.replace('H', '%k')
|
||||
# 12 hours
|
||||
elif 'hh' in date:
|
||||
date = date.replace('hh', '%I')
|
||||
elif 'h' in date:
|
||||
date = date.replace('h', '%l')
|
||||
# Minutes
|
||||
if 'mm' in date:
|
||||
date = date.replace('mm', '%min')
|
||||
elif 'm' in date:
|
||||
date = date.replace('m', '%min')
|
||||
# Seconds
|
||||
if 'ss' in date:
|
||||
date = date.replace('ss', '%S')
|
||||
elif 's' in date:
|
||||
date = date.replace('s', '%S')
|
||||
# Milliseconds
|
||||
if 'SSS' in date:
|
||||
date = date.replace('SSS', '%3')
|
||||
# Years
|
||||
if 'YYYY' in date:
|
||||
date = date.replace('YYYY', '%Y')
|
||||
elif 'YY' in date:
|
||||
date = date.replace('YY', '%y')
|
||||
# Months
|
||||
if 'MMMM' in date:
|
||||
date = date.replace('MMMM', '%B')
|
||||
elif 'MMM' in date:
|
||||
date = date.replace('MMM', '%b')
|
||||
elif 'MM' in date:
|
||||
date = date.replace('MM', '%m')
|
||||
elif 'M' in date:
|
||||
date = date.replace('M', '%m')
|
||||
# Days of the week
|
||||
if 'dddd' in date:
|
||||
date = date.replace('dddd', '%A')
|
||||
elif 'ddd' in date:
|
||||
date = date.replace('ddd', '%a')
|
||||
elif 'dd' in date:
|
||||
date = date.replace('dd', '%w')
|
||||
elif 'd' in date:
|
||||
date = date.replace('d', '%u')
|
||||
# Days of the year
|
||||
if 'DDDD' in date:
|
||||
date = date.replace('DDDD', '%j')
|
||||
elif 'DDD' in date:
|
||||
date = date.replace('DDD', '%j')
|
||||
# Days of the month
|
||||
elif 'DD' in date:
|
||||
date = date.replace('DD', '%d')
|
||||
# 'Do' not valid python time format
|
||||
elif 'Do' in date:
|
||||
date = date.replace('Do', '')
|
||||
elif 'D' in date:
|
||||
date = date.replace('D', '%d')
|
||||
# Timezone
|
||||
if 'zz' in date:
|
||||
date = date.replace('zz', '%Z')
|
||||
# A necessary evil right now...
|
||||
if '%min' in date:
|
||||
date = date.replace('%min', '%M')
|
||||
|
||||
return date
|
||||
return ''
|
@@ -44,7 +44,8 @@ class HTTPHandler(object):
                     headers=None,
                     output_format='raw',
                     return_type=False,
                     no_token=False):
                     no_token=False,
                     timeout=20):

        valid_request_types = ['GET', 'POST', 'PUT', 'DELETE']

@@ -56,12 +57,12 @@ class HTTPHandler(object):
        if proto.upper() == 'HTTPS':
            if not self.ssl_verify and hasattr(ssl, '_create_unverified_context'):
                context = ssl._create_unverified_context()
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=20, context=context)
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout, context=context)
                logger.warn(u"PlexPy HTTP Handler :: Unverified HTTPS request made. This connection is not secure.")
            else:
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=20)
                handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout)
        else:
            handler = HTTPConnection(host=self.host, port=self.port, timeout=20)
            handler = HTTPConnection(host=self.host, port=self.port, timeout=timeout)

        token_string = ''
        if not no_token:

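The new timeout keyword lets callers shorten the connection timeout per request instead of always waiting the hard-coded 20 seconds. The unpublished-server check added to the web interface later in this diff uses it roughly like this; the host and port values here are placeholders:

```python
from plexpy import http_handler

# Placeholders: substitute the real PMS address and port.
request_handler = http_handler.HTTPHandler(host='192.168.1.10',
                                           port=32400,
                                           token=None)
request = request_handler.make_request(uri='/identity',
                                       proto='http',
                                       request_type='GET',
                                       output_format='xml',
                                       no_token=True,
                                       timeout=10)  # fail fast instead of waiting the default 20s
```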
@@ -18,62 +18,86 @@ import plexpy
|
||||
|
||||
def update_section_ids():
|
||||
from plexpy import pmsconnect, activity_pinger
|
||||
import threading
|
||||
#import threading
|
||||
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = -1
|
||||
|
||||
logger.info(u"PlexPy Libraries :: Updating section_id's in database.")
|
||||
|
||||
logger.debug(u"PlexPy Libraries :: Disabling monitoring while update in progress.")
|
||||
plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
plexpy.schedule_job(activity_pinger.check_server_response, 'Check for server response',
|
||||
hours=0, minutes=0, seconds=0)
|
||||
#logger.debug(u"PlexPy Libraries :: Disabling monitoring while update in progress.")
|
||||
#plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
|
||||
# hours=0, minutes=0, seconds=0)
|
||||
#plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items',
|
||||
# hours=0, minutes=0, seconds=0)
|
||||
#plexpy.schedule_job(activity_pinger.check_server_response, 'Check for server response',
|
||||
# hours=0, minutes=0, seconds=0)
|
||||
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
try:
|
||||
query = 'SELECT id, rating_key FROM session_history_metadata WHERE section_id IS NULL'
|
||||
result = monitor_db.select(query=query)
|
||||
query = 'SELECT id, rating_key, grandparent_rating_key, media_type ' \
|
||||
'FROM session_history_metadata WHERE section_id IS NULL'
|
||||
history_results = monitor_db.select(query=query)
|
||||
query = 'SELECT section_id, section_type FROM library_sections'
|
||||
library_results = monitor_db.select(query=query)
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy Libraries :: Unable to execute database query for update_section_ids: %s." % e)
|
||||
|
||||
logger.warn(u"PlexPy Libraries :: Unable to update section_id's in database.")
|
||||
plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 1)
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = 1
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
|
||||
plexpy.initialize_scheduler()
|
||||
#logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
|
||||
#plexpy.initialize_scheduler()
|
||||
return None
|
||||
|
||||
if not history_results:
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = 0
|
||||
plexpy.CONFIG.write()
|
||||
return None
|
||||
|
||||
logger.info(u"PlexPy Libraries :: Updating section_id's in database.")
|
||||
|
||||
# Add thread filter to the logger
|
||||
logger.debug(u"PlexPy Libraries :: Disabling logging in the current thread while update in progress.")
|
||||
thread_filter = logger.NoThreadFilter(threading.current_thread().name)
|
||||
for handler in logger.logger.handlers:
|
||||
handler.addFilter(thread_filter)
|
||||
#logger.debug(u"PlexPy Libraries :: Disabling logging in the current thread while update in progress.")
|
||||
#thread_filter = logger.NoThreadFilter(threading.current_thread().name)
|
||||
#for handler in logger.logger.handlers:
|
||||
# handler.addFilter(thread_filter)
|
||||
|
||||
# Get rating_key: section_id mapping pairs
|
||||
key_mappings = {}
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
for library in library_results:
|
||||
section_id = library['section_id']
|
||||
section_type = library['section_type']
|
||||
|
||||
if section_type != 'photo':
|
||||
library_children = pms_connect.get_library_children_details(section_id=section_id,
|
||||
section_type=section_type)
|
||||
if library_children:
|
||||
children_list = library_children['childern_list']
|
||||
key_mappings.update({child['rating_key']:child['section_id'] for child in children_list})
|
||||
else:
|
||||
logger.warn(u"PlexPy Libraries :: Unable to get a list of library items for section_id %s." % section_id)
|
||||
|
||||
error_keys = set()
|
||||
for item in result:
|
||||
id = item['id']
|
||||
rating_key = item['rating_key']
|
||||
metadata = pms_connect.get_metadata_details(rating_key=rating_key)
|
||||
|
||||
if metadata:
|
||||
metadata = metadata['metadata']
|
||||
section_keys = {'id': id}
|
||||
section_values = {'section_id': metadata['section_id']}
|
||||
monitor_db.upsert('session_history_metadata', key_dict=section_keys, value_dict=section_values)
|
||||
for item in history_results:
|
||||
rating_key = item['grandparent_rating_key'] if item['media_type'] != 'movie' else item['rating_key']
|
||||
section_id = key_mappings.get(str(rating_key), None)
|
||||
|
||||
if section_id:
|
||||
try:
|
||||
section_keys = {'id': item['id']}
|
||||
section_values = {'section_id': section_id}
|
||||
monitor_db.upsert('session_history_metadata', key_dict=section_keys, value_dict=section_values)
|
||||
except:
|
||||
error_keys.add(item['rating_key'])
|
||||
else:
|
||||
error_keys.add(rating_key)
|
||||
error_keys.add(item['rating_key'])
|
||||
|
||||
# Remove thread filter from the logger
|
||||
for handler in logger.logger.handlers:
|
||||
handler.removeFilter(thread_filter)
|
||||
logger.debug(u"PlexPy Libraries :: Re-enabling logging in the current thread.")
|
||||
#for handler in logger.logger.handlers:
|
||||
# handler.removeFilter(thread_filter)
|
||||
#logger.debug(u"PlexPy Libraries :: Re-enabling logging in the current thread.")
|
||||
|
||||
if error_keys:
|
||||
logger.info(u"PlexPy Libraries :: Updated all section_id's in database except for rating_keys: %s." %
|
||||
@@ -81,11 +105,11 @@ def update_section_ids():
|
||||
else:
|
||||
logger.info(u"PlexPy Libraries :: Updated all section_id's in database.")
|
||||
|
||||
plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 0)
|
||||
plexpy.CONFIG.UPDATE_SECTION_IDS = 0
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
|
||||
plexpy.initialize_scheduler()
|
||||
#logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.")
|
||||
#plexpy.initialize_scheduler()
|
||||
|
||||
return True
|
||||
|
||||
@@ -111,12 +135,16 @@ class Libraries(object):
|
||||
'COUNT(session_history.id) AS plays',
|
||||
'MAX(session_history.started) AS last_accessed',
|
||||
'MAX(session_history.id) AS id',
|
||||
'session_history_metadata.full_title AS last_watched',
|
||||
'session_history_metadata.full_title AS last_played',
|
||||
'session_history.rating_key',
|
||||
'session_history_metadata.media_type',
|
||||
'session_history_metadata.thumb',
|
||||
'session_history_metadata.parent_thumb',
|
||||
'session_history_metadata.grandparent_thumb',
|
||||
'session_history_metadata.media_type',
|
||||
'session_history.rating_key',
|
||||
'session_history_metadata.parent_title',
|
||||
'session_history_metadata.year',
|
||||
'session_history_metadata.media_index',
|
||||
'session_history_metadata.parent_media_index',
|
||||
'session_history_media_info.video_decision',
|
||||
'library_sections.do_notify',
|
||||
'library_sections.do_notify_created',
|
||||
@@ -174,11 +202,14 @@ class Libraries(object):
|
||||
'plays': item['plays'],
|
||||
'last_accessed': item['last_accessed'],
|
||||
'id': item['id'],
|
||||
'last_watched': item['last_watched'],
|
||||
'thumb': thumb,
|
||||
'media_type': item['media_type'],
|
||||
'last_played': item['last_played'],
|
||||
'rating_key': item['rating_key'],
|
||||
'video_decision': item['video_decision'],
|
||||
'media_type': item['media_type'],
|
||||
'thumb': thumb,
|
||||
'parent_title': item['parent_title'],
|
||||
'year': item['year'],
|
||||
'media_index': item['media_index'],
|
||||
'parent_media_index': item['parent_media_index'],
|
||||
'do_notify': helpers.checked(item['do_notify']),
|
||||
'do_notify_created': helpers.checked(item['do_notify_created']),
|
||||
'keep_history': helpers.checked(item['keep_history'])
|
||||
@@ -236,7 +267,7 @@ class Libraries(object):
|
||||
group_by = 'rating_key'
|
||||
|
||||
try:
|
||||
query = 'SELECT MAX(session_history.started) AS last_watched, COUNT(DISTINCT session_history.%s) AS play_count, ' \
|
||||
query = 'SELECT MAX(session_history.started) AS last_played, COUNT(DISTINCT session_history.%s) AS play_count, ' \
|
||||
'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \
|
||||
'FROM session_history ' \
|
||||
'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \
|
||||
@@ -249,7 +280,7 @@ class Libraries(object):
|
||||
|
||||
watched_list = {}
|
||||
for item in result:
|
||||
watched_list[str(item[group_by])] = {'last_watched': item['last_watched'],
|
||||
watched_list[str(item[group_by])] = {'last_played': item['last_played'],
|
||||
'play_count': item['play_count']}
|
||||
|
||||
rows = []
|
||||
@@ -344,14 +375,14 @@ class Libraries(object):
|
||||
except IOError as e:
|
||||
logger.debug(u"PlexPy Libraries :: Unable to create cache file for section_id %s." % section_id)
|
||||
|
||||
# Update the last_watched and play_count
|
||||
# Update the last_played and play_count
|
||||
for item in rows:
|
||||
watched_item = watched_list.get(item['rating_key'], None)
|
||||
if watched_item:
|
||||
item['last_watched'] = watched_item['last_watched']
|
||||
item['last_played'] = watched_item['last_played']
|
||||
item['play_count'] = watched_item['play_count']
|
||||
else:
|
||||
item['last_watched'] = None
|
||||
item['last_played'] = None
|
||||
item['play_count'] = None
|
||||
|
||||
results = []
|
||||
@@ -546,11 +577,10 @@ class Libraries(object):
|
||||
return library_details
|
||||
else:
|
||||
logger.warn(u"PlexPy Libraries :: Unable to retrieve library from local database. Requesting library list refresh.")
|
||||
# Let's first refresh the user list to make sure the user isn't newly added and not in the db yet
|
||||
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
|
||||
pmsconnect.refresh_libraries()
|
||||
try:
|
||||
if section_id:
|
||||
# Refresh libraries
|
||||
pmsconnect.refresh_libraries()
|
||||
query = 'SELECT section_id, section_name, section_type, count, parent_count, child_count, ' \
|
||||
'thumb AS library_thumb, custom_thumb_url AS custom_thumb, art, ' \
|
||||
'do_notify, do_notify_created, keep_history ' \
|
||||
@@ -849,3 +879,21 @@ class Libraries(object):
                return 'Unable to delete media info table cache, section_id not valid.'
        except Exception as e:
            logger.warn(u"PlexPy Libraries :: Unable to delete media info table cache: %s." % e)

    def delete_duplicate_libraries(self):
        from plexpy import plextv

        monitor_db = database.MonitorDatabase()

        # Refresh the PMS_URL to make sure the server_id is updated
        plextv.get_real_pms_url()

        server_id = plexpy.CONFIG.PMS_IDENTIFIER

        try:
            logger.debug(u"PlexPy Libraries :: Deleting libraries where server_id does not match %s." % server_id)
            monitor_db.action('DELETE FROM library_sections WHERE server_id != ?', [server_id])

            return 'Deleted duplicate libraries from the database.'
        except Exception as e:
            logger.warn(u"PlexPy Libraries :: Unable to delete duplicate libraries: %s." % e)
@@ -16,6 +16,7 @@
|
||||
|
||||
import re
|
||||
import time
|
||||
import arrow
|
||||
|
||||
from plexpy import logger, config, notifiers, database, helpers, plextv, pmsconnect
|
||||
import plexpy
|
||||
@@ -210,23 +211,17 @@ def notify(stream_data=None, notify_action=None):
|
||||
|
||||
def notify_timeline(timeline_data=None, notify_action=None):
|
||||
if timeline_data and notify_action:
|
||||
if (timeline_data['media_type'] == 'movie' and plexpy.CONFIG.MOVIE_NOTIFY_ENABLE) \
|
||||
or ((timeline_data['media_type'] == 'show' or timeline_data['media_type'] == 'episode') \
|
||||
and plexpy.CONFIG.TV_NOTIFY_ENABLE) \
|
||||
or ((timeline_data['media_type'] == 'artist' or timeline_data['media_type'] == 'track') \
|
||||
and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE):
|
||||
|
||||
for agent in notifiers.available_notification_agents():
|
||||
if agent['on_created'] and notify_action == 'created':
|
||||
# Build and send notification
|
||||
notify_strings = build_notify_text(timeline=timeline_data, state=notify_action)
|
||||
notifiers.send_notification(config_id=agent['id'],
|
||||
subject=notify_strings[0],
|
||||
body=notify_strings[1],
|
||||
notify_action=notify_action,
|
||||
script_args=notify_strings[2])
|
||||
# Set the notification state in the db
|
||||
set_notify_state(session=timeline_data, state=notify_action, agent_info=agent)
|
||||
for agent in notifiers.available_notification_agents():
|
||||
if agent['on_created'] and notify_action == 'created':
|
||||
# Build and send notification
|
||||
notify_strings = build_notify_text(timeline=timeline_data, state=notify_action)
|
||||
notifiers.send_notification(config_id=agent['id'],
|
||||
subject=notify_strings[0],
|
||||
body=notify_strings[1],
|
||||
notify_action=notify_action,
|
||||
script_args=notify_strings[2])
|
||||
# Set the notification state in the db
|
||||
set_notify_state(session=timeline_data, state=notify_action, agent_info=agent)
|
||||
|
||||
elif not timeline_data and notify_action:
|
||||
for agent in notifiers.available_notification_agents():
|
||||
@@ -345,6 +340,11 @@ def set_notify_state(session, state, agent_info):
|
||||
|
||||
|
||||
def build_notify_text(session=None, timeline=None, state=None):
|
||||
# Get time formats
|
||||
date_format = plexpy.CONFIG.DATE_FORMAT.replace('Do','').replace('zz','')
|
||||
time_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','').replace('zz','')
|
||||
duration_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','').replace('zz','').replace('a','').replace('A','')
|
||||
|
||||
# Get the server name
|
||||
server_name = plexpy.CONFIG.PMS_NAME
|
||||
|
||||
@@ -433,78 +433,30 @@ def build_notify_text(session=None, timeline=None, state=None):
|
||||
else:
|
||||
full_title = metadata['title']
|
||||
|
||||
duration = helpers.convert_milliseconds_to_minutes(metadata['duration'])
|
||||
|
||||
# Default values
|
||||
user = ''
|
||||
platform = ''
|
||||
player = ''
|
||||
ip_address = 'N/A'
|
||||
stream_duration = 0
|
||||
view_offset = 0
|
||||
container = ''
|
||||
video_codec = ''
|
||||
video_bitrate = ''
|
||||
video_width = ''
|
||||
video_height = ''
|
||||
video_resolution = ''
|
||||
video_framerate = ''
|
||||
aspect_ratio = ''
|
||||
audio_codec = ''
|
||||
audio_channels = ''
|
||||
transcode_decision = ''
|
||||
video_decision = ''
|
||||
audio_decision = ''
|
||||
transcode_container = ''
|
||||
transcode_video_codec = ''
|
||||
transcode_video_width = ''
|
||||
transcode_video_height = ''
|
||||
transcode_audio_codec = ''
|
||||
transcode_audio_channels = ''
|
||||
|
||||
# Session values
|
||||
if session:
|
||||
# Generate a combined transcode decision value
|
||||
video_decision = session['video_decision'].title()
|
||||
audio_decision = session['audio_decision'].title()
|
||||
if session is None:
|
||||
session = {}
|
||||
|
||||
if session['video_decision'] == 'transcode' or session['audio_decision'] == 'transcode':
|
||||
transcode_decision = 'Transcode'
|
||||
elif session['video_decision'] == 'copy' or session['audio_decision'] == 'copy':
|
||||
transcode_decision = 'Direct Stream'
|
||||
else:
|
||||
transcode_decision = 'Direct Play'
|
||||
|
||||
if state != 'play':
|
||||
if session['paused_counter']:
|
||||
stream_duration = int((time.time() - helpers.cast_to_float(session['started']) -
|
||||
helpers.cast_to_float(session['paused_counter'])) / 60)
|
||||
else:
|
||||
stream_duration = int((time.time() - helpers.cast_to_float(session['started'])) / 60)
|
||||
|
||||
view_offset = helpers.convert_milliseconds_to_minutes(session['view_offset'])
|
||||
user = session['friendly_name']
|
||||
platform = session['platform']
|
||||
player = session['player']
|
||||
ip_address = session['ip_address'] if session['ip_address'] else 'N/A'
|
||||
container = session['container']
|
||||
video_codec = session['video_codec']
|
||||
video_bitrate = session['bitrate']
|
||||
video_width = session['width']
|
||||
video_height = session['height']
|
||||
video_resolution = session['video_resolution']
|
||||
video_framerate = session['video_framerate']
|
||||
aspect_ratio = session['aspect_ratio']
|
||||
audio_codec = session['audio_codec']
|
||||
audio_channels = session['audio_channels']
|
||||
transcode_container = session['transcode_container']
|
||||
transcode_video_codec = session['transcode_video_codec']
|
||||
transcode_video_width = session['transcode_width']
|
||||
transcode_video_height = session['transcode_height']
|
||||
transcode_audio_codec = session['transcode_audio_codec']
|
||||
transcode_audio_channels = session['transcode_audio_channels']
|
||||
# Generate a combined transcode decision value
|
||||
if session.get('video_decision','') == 'transcode' or session.get('audio_decision','') == 'transcode':
|
||||
transcode_decision = 'Transcode'
|
||||
elif session.get('video_decision','') == 'copy' or session.get('audio_decision','') == 'copy':
|
||||
transcode_decision = 'Direct Stream'
|
||||
else:
|
||||
transcode_decision = 'Direct Play'
|
||||
|
||||
if state != 'play':
|
||||
stream_duration = helpers.convert_seconds_to_minutes(
|
||||
time.time() -
|
||||
helpers.cast_to_float(session.get('started', 0)) -
|
||||
helpers.cast_to_float(session.get('paused_counter', 0)))
|
||||
else:
|
||||
stream_duration = 0
|
||||
|
||||
view_offset = helpers.convert_milliseconds_to_minutes(session.get('view_offset', 0))
|
||||
duration = helpers.convert_milliseconds_to_minutes(metadata['duration'])
|
||||
progress_percent = helpers.get_percent(view_offset, duration)
|
||||
remaining_duration = duration - view_offset
|
||||
|
||||
# Fix metadata params for notify recently added grandparent
|
||||
if state == 'created' and plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT:
|
||||
@@ -520,40 +472,48 @@ def build_notify_text(session=None, timeline=None, state=None):
|
||||
album_name = metadata['parent_title']
|
||||
track_name = metadata['title']
|
||||
|
||||
available_params = {'server_name': server_name,
|
||||
available_params = {# Global paramaters
|
||||
'server_name': server_name,
|
||||
'server_uptime': server_uptime,
|
||||
'action': state.title(),
|
||||
'datestamp': arrow.now().format(date_format),
|
||||
'timestamp': arrow.now().format(time_format),
|
||||
# Stream parameters
|
||||
'streams': stream_count,
|
||||
'action': state,
|
||||
'datestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.DATE_FORMAT)),
|
||||
'timestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.TIME_FORMAT)),
|
||||
'user': user,
|
||||
'platform': platform,
|
||||
'player': player,
|
||||
'ip_address': ip_address,
|
||||
'media_type': metadata['media_type'],
|
||||
'user': session.get('friendly_name',''),
|
||||
'platform': session.get('platform',''),
|
||||
'player': session.get('player',''),
|
||||
'ip_address': session.get('ip_address','N/A'),
|
||||
'stream_duration': stream_duration,
|
||||
'remaining_duration': duration - view_offset,
|
||||
'progress': view_offset,
|
||||
'stream_time': arrow.get(stream_duration * 60).format(duration_format),
|
||||
'remaining_duration': remaining_duration,
|
||||
'remaining_time': arrow.get(remaining_duration * 60).format(duration_format),
|
||||
'progress_duration': view_offset,
|
||||
'progress_time': arrow.get(view_offset * 60).format(duration_format),
|
||||
'progress_percent': progress_percent,
|
||||
'container': container,
|
||||
'video_codec': video_codec,
|
||||
'video_bitrate': video_bitrate,
|
||||
'video_width': video_width,
|
||||
'video_height': video_height,
|
||||
'video_resolution': video_resolution,
|
||||
'video_framerate': video_framerate,
|
||||
'aspect_ratio': aspect_ratio,
|
||||
'audio_codec': audio_codec,
|
||||
'audio_channels': audio_channels,
|
||||
'container': session.get('container',''),
|
||||
'video_codec': session.get('video_codec',''),
|
||||
'video_bitrate': session.get('bitrate',''),
|
||||
'video_width': session.get('width',''),
|
||||
'video_height': session.get('height',''),
|
||||
'video_resolution': session.get('video_resolution',''),
|
||||
'video_framerate': session.get('video_framerate',''),
|
||||
'aspect_ratio': session.get('aspect_ratio',''),
|
||||
'audio_codec': session.get('audio_codec',''),
|
||||
'audio_channels': session.get('audio_channels',''),
|
||||
'transcode_decision': transcode_decision,
|
||||
'video_decision': video_decision,
|
||||
'audio_decision': audio_decision,
|
||||
'transcode_container': transcode_container,
|
||||
'transcode_video_codec': transcode_video_codec,
|
||||
'transcode_video_width': transcode_video_width,
|
||||
'transcode_video_height': transcode_video_height,
|
||||
'transcode_audio_codec': transcode_audio_codec,
|
||||
'transcode_audio_channels': transcode_audio_channels,
|
||||
'video_decision': session.get('video_decision','').title(),
|
||||
'audio_decision': session.get('audio_decision','').title(),
|
||||
'transcode_container': session.get('transcode_container',''),
|
||||
'transcode_video_codec': session.get('transcode_video_codec',''),
|
||||
'transcode_video_width': session.get('transcode_width',''),
|
||||
'transcode_video_height': session.get('transcode_height',''),
|
||||
'transcode_audio_codec': session.get('transcode_audio_codec',''),
|
||||
'transcode_audio_channels': session.get('transcode_audio_channels',''),
|
||||
'session_key': session.get('session_key',''),
|
||||
'user_id': session.get('user_id',''),
|
||||
# Metadata parameters
|
||||
'media_type': metadata['media_type'],
|
||||
'title': full_title,
|
||||
'library_name': metadata['library_name'],
|
||||
'show_name': show_name,
|
||||
@@ -565,6 +525,8 @@ def build_notify_text(session=None, timeline=None, state=None):
|
||||
'season_num00': metadata['parent_media_index'].zfill(2),
|
||||
'episode_num': metadata['media_index'].zfill(1),
|
||||
'episode_num00': metadata['media_index'].zfill(2),
|
||||
'track_num': metadata['media_index'].zfill(1),
|
||||
'track_num00': metadata['media_index'].zfill(2),
|
||||
'year': metadata['year'],
|
||||
'studio': metadata['studio'],
|
||||
'content_rating': metadata['content_rating'],
|
||||
@@ -575,7 +537,11 @@ def build_notify_text(session=None, timeline=None, state=None):
|
||||
'summary': metadata['summary'],
|
||||
'tagline': metadata['tagline'],
|
||||
'rating': metadata['rating'],
|
||||
'duration': duration
|
||||
'duration': metadata['duration'],
|
||||
'section_id': metadata['section_id'],
|
||||
'rating_key': metadata['rating_key'],
|
||||
'parent_rating_key': metadata['parent_rating_key'],
|
||||
'grandparent_rating_key': metadata['grandparent_rating_key']
|
||||
}
|
||||
|
||||
# Default subject text
|
||||
@@ -584,10 +550,6 @@ def build_notify_text(session=None, timeline=None, state=None):
|
||||
# Default scripts args
|
||||
script_args = []
|
||||
|
||||
# Regex to match {param} but not "{param}"
|
||||
params_to_quote = re.compile(r'(?<!\")([\{][^}]+[\}])(?!\"\})')
|
||||
script_args_text = re.sub(params_to_quote, r'"\g<0>"', script_args_text)
|
||||
|
||||
if script_args_text:
|
||||
try:
|
||||
script_args = [unicode(arg).format(**available_params) for arg in script_args_text.split()]
|
||||
@@ -598,9 +560,9 @@ def build_notify_text(session=None, timeline=None, state=None):
|
||||
|
||||
if state == 'play':
|
||||
# Default body text
|
||||
body_text = '%s (%s) is watching %s' % (session['friendly_name'],
|
||||
session['player'],
|
||||
full_title)
|
||||
body_text = '%s (%s) started playing %s' % (session['friendly_name'],
|
||||
session['player'],
|
||||
full_title)
|
||||
|
||||
if on_start_subject and on_start_body:
|
||||
try:
|
||||
@@ -767,6 +729,10 @@ def build_notify_text(session=None, timeline=None, state=None):
|
||||
|
||||
|
||||
def build_server_notify_text(state=None):
|
||||
# Get time formats
|
||||
date_format = plexpy.CONFIG.DATE_FORMAT.replace('Do','').replace('zz','')
|
||||
time_format = plexpy.CONFIG.TIME_FORMAT.replace('Do','').replace('zz','')
|
||||
|
||||
# Get the server name
|
||||
server_name = plexpy.CONFIG.PMS_NAME
|
||||
|
||||
@@ -791,11 +757,12 @@ def build_server_notify_text(state=None):
|
||||
on_intup_body = plexpy.CONFIG.NOTIFY_ON_INTUP_BODY_TEXT
|
||||
script_args_text = plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT
|
||||
|
||||
available_params = {'server_name': server_name,
|
||||
available_params = {# Global paramaters
|
||||
'server_name': server_name,
|
||||
'server_uptime': server_uptime,
|
||||
'action': state,
|
||||
'datestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.DATE_FORMAT)),
|
||||
'timestamp': time.strftime(helpers.parse_js_date(plexpy.CONFIG.TIME_FORMAT))}
|
||||
'action': state.title(),
|
||||
'datestamp': arrow.now().format(date_format),
|
||||
'timestamp': arrow.now().format(time_format)}
|
||||
|
||||
# Default text
|
||||
subject_text = 'PlexPy (%s)' % server_name
|
||||
@@ -803,10 +770,6 @@ def build_server_notify_text(state=None):
|
||||
# Default scripts args
|
||||
script_args = []
|
||||
|
||||
# Regex to match {param} but not "{param}"
|
||||
params_to_quote = re.compile(r'(?<!\")([\{][^}]+[\}])(?!\"\})')
|
||||
script_args_text = re.sub(params_to_quote, r'"\g<0>"', script_args_text)
|
||||
|
||||
if script_args_text:
|
||||
try:
|
||||
script_args = [unicode(arg).format(**available_params) for arg in script_args_text.split()]
|
||||
@@ -911,4 +874,4 @@ def build_server_notify_text(state=None):
|
||||
|
||||
def strip_tag(data):
|
||||
p = re.compile(r'<.*?>')
|
||||
return p.sub('', data)
|
||||
return p.sub('', data)
|
@@ -34,7 +34,7 @@ from pynma import pynma
|
||||
import gntp.notifier
|
||||
import oauth2 as oauth
|
||||
import pythontwitter as twitter
|
||||
import pythonfacebook as facebook
|
||||
import pythonfacebook as facebook
|
||||
|
||||
import plexpy
|
||||
from plexpy import logger, helpers, request
|
||||
@@ -58,7 +58,7 @@ AGENT_IDS = {"Growl": 0,
|
||||
"Scripts": 15,
|
||||
"Facebook": 16}
|
||||
|
||||
|
||||
|
||||
def available_notification_agents():
|
||||
agents = [{'name': 'Growl',
|
||||
'id': AGENT_IDS['Growl'],
|
||||
@@ -1165,8 +1165,10 @@ class TwitterNotifier(object):
|
||||
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
|
||||
|
||||
def __init__(self):
|
||||
self.consumer_key = "2LdJKXHDUwJtjYBsdwJisIOsh"
|
||||
self.consumer_secret = "QWbUcZzAIiL4zbDCIhy2EdUkV8yEEav3qMdo5y3FugxCFelWrA"
|
||||
self.access_token = plexpy.CONFIG.TWITTER_ACCESS_TOKEN
|
||||
self.access_token_secret = plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET
|
||||
self.consumer_key = plexpy.CONFIG.TWITTER_CONSUMER_KEY
|
||||
self.consumer_secret = plexpy.CONFIG.TWITTER_CONSUMER_SECRET
|
||||
|
||||
def notify(self, subject, message):
|
||||
if not subject or not message:
|
||||
@@ -1191,16 +1193,16 @@ class TwitterNotifier(object):
|
||||
else:
|
||||
request_token = dict(parse_qsl(content))
|
||||
|
||||
plexpy.CONFIG.TWITTER_USERNAME = request_token['oauth_token']
|
||||
plexpy.CONFIG.TWITTER_PASSWORD = request_token['oauth_token_secret']
|
||||
plexpy.CONFIG.TWITTER_ACCESS_TOKEN = request_token['oauth_token']
|
||||
plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET = request_token['oauth_token_secret']
|
||||
|
||||
return self.AUTHORIZATION_URL + "?oauth_token=" + request_token['oauth_token']
|
||||
|
||||
def _get_credentials(self, key):
|
||||
request_token = {}
|
||||
|
||||
request_token['oauth_token'] = plexpy.CONFIG.TWITTER_USERNAME
|
||||
request_token['oauth_token_secret'] = plexpy.CONFIG.TWITTER_PASSWORD
|
||||
request_token['oauth_token'] = plexpy.CONFIG.TWITTER_ACCESS_TOKEN
|
||||
request_token['oauth_token_secret'] = plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET
|
||||
request_token['oauth_callback_confirmed'] = 'true'
|
||||
|
||||
token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
|
||||
@@ -1225,20 +1227,20 @@ class TwitterNotifier(object):
|
||||
else:
|
||||
# logger.info(u"PlexPy Notifiers :: Your Twitter Access Token key: %s" % access_token['oauth_token'])
|
||||
# logger.info(u"PlexPy Notifiers :: Access Token secret: %s" % access_token['oauth_token_secret'])
|
||||
plexpy.CONFIG.TWITTER_USERNAME = access_token['oauth_token']
|
||||
plexpy.CONFIG.TWITTER_PASSWORD = access_token['oauth_token_secret']
|
||||
plexpy.CONFIG.TWITTER_ACCESS_TOKEN = access_token['oauth_token']
|
||||
plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET = access_token['oauth_token_secret']
|
||||
plexpy.CONFIG.write()
|
||||
return True
|
||||
|
||||
def _send_tweet(self, message=None):
|
||||
username = self.consumer_key
|
||||
password = self.consumer_secret
|
||||
access_token_key = plexpy.CONFIG.TWITTER_USERNAME
|
||||
access_token_secret = plexpy.CONFIG.TWITTER_PASSWORD
|
||||
consumer_key = self.consumer_key
|
||||
consumer_secret = self.consumer_secret
|
||||
access_token = self.access_token
|
||||
access_token_secret = self.access_token_secret
|
||||
|
||||
# logger.info(u"PlexPy Notifiers :: Sending tweet: " + message)
|
||||
|
||||
api = twitter.Api(username, password, access_token_key, access_token_secret)
|
||||
api = twitter.Api(consumer_key, consumer_secret, access_token, access_token_secret)
|
||||
|
||||
try:
|
||||
api.PostUpdate(message)
|
||||
@@ -1251,30 +1253,37 @@ class TwitterNotifier(object):
|
||||
|
||||
def return_config_options(self):
|
||||
config_option = [{'label': 'Instructions',
|
||||
'description': 'Step 1: Click the <strong>Request Authorization</strong> button below.<br>\
|
||||
Step 2: Input the <strong>Authorization Key</strong> you received from Step 1 below.<br>\
|
||||
Step 3: Click the <strong>Verify Key</strong> button below.',
|
||||
'description': 'Step 1: Visit <a href="https://apps.twitter.com/" target="_blank"> \
|
||||
Twitter Apps</a> to <strong>Create New App</strong>. A vaild "Website" is not required.<br>\
|
||||
Step 2: Go to <strong>Keys and Access Tokens</strong> and click \
|
||||
<strong>Create my access token</strong>.<br>\
|
||||
Step 3: Fill in the <strong>Consumer Key</strong>, <strong>Consumer Secret</strong>, \
|
||||
<strong>Access Token</strong>, and <strong>Access Token Secret</strong> below.',
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'Request Authorization',
|
||||
'value': 'Request Authorization',
|
||||
'name': 'twitterStep1',
|
||||
'description': 'Request Twitter authorization. (Ensure you allow the browser pop-up).',
|
||||
'input_type': 'button'
|
||||
},
|
||||
{'label': 'Authorization Key',
|
||||
'value': '',
|
||||
'name': 'twitter_key',
|
||||
'description': 'Your Twitter authorization key.',
|
||||
{'label': 'Twitter Consumer Key',
|
||||
'value': self.consumer_key,
|
||||
'name': 'twitter_consumer_key',
|
||||
'description': 'Your Twitter consumer key.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Verify Key',
|
||||
'value': 'Verify Key',
|
||||
'name': 'twitterStep2',
|
||||
'description': 'Verify your Twitter authorization key.',
|
||||
'input_type': 'button'
|
||||
{'label': 'Twitter Consumer Secret',
|
||||
'value': self.consumer_secret,
|
||||
'name': 'twitter_consumer_secret',
|
||||
'description': 'Your Twitter consumer secret.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'input_type': 'nosave'
|
||||
{'label': 'Twitter Access Token',
|
||||
'value': self.access_token,
|
||||
'name': 'twitter_access_token',
|
||||
'description': 'Your Twitter access token.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Twitter Access Token Secret',
|
||||
'value': self.access_token_secret,
|
||||
'name': 'twitter_access_token_secret',
|
||||
'description': 'Your Twitter access token secret.',
|
||||
'input_type': 'text'
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1668,10 +1677,10 @@ class TELEGRAM(object):
|
||||
'description': 'Your Telegram bot token. Contact <a href="http://telegram.me/BotFather" target="_blank">@BotFather</a> on Telegram to get one.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Telegram Chat ID',
|
||||
{'label': 'Telegram Chat ID, Group ID, or Channel Username',
|
||||
'value': self.chat_id,
|
||||
'name': 'telegram_chat_id',
|
||||
'description': 'Your Telegram Chat ID, Group ID, or channel username. Contact <a href="http://telegram.me/myidbot" target="_blank">@myidbot</a> on Telegram to get an ID.',
|
||||
'description': 'Your Telegram Chat ID, Group ID, or @channelusername. Contact <a href="http://telegram.me/myidbot" target="_blank">@myidbot</a> on Telegram to get an ID.',
|
||||
'input_type': 'text'
|
||||
}
|
||||
]
|
||||
@@ -1768,7 +1777,7 @@ class SLACK(object):
class Scripts(object):

    def __init__(self, **kwargs):
        self.script_exts = ('.bat', '.cmd', '.exe', '.php', '.pl', '.py', '.pyw', '.rb', '.sh')
        self.script_exts = ('.bat', '.cmd', '.exe', '.php', '.pl', '.ps1', '.py', '.pyw', '.rb', '.sh')

    def conf(self, options):
        return cherrypy.config['config'].get('Scripts', options)

@@ -1798,7 +1807,7 @@ class Scripts(object):

        return scripts

    def notify(self, subject='', message='', notify_action='', script_args=[], *args, **kwargs):
    def notify(self, subject='', message='', notify_action='', script_args=None, *args, **kwargs):
        """
            Args:
                subject(string, optional): Head text,

@@ -1808,7 +1817,10 @@ class Scripts(object):
        """
        logger.debug(u"PlexPy Notifiers :: Trying to run notify script, action: %s, arguments: %s" %
                     (notify_action if notify_action else None, script_args if script_args else None))


        if script_args is None:
            script_args = []

        if not plexpy.CONFIG.SCRIPTS_FOLDER:
            return

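Switching the default from `script_args=[]` to `script_args=None` avoids Python's shared mutable default pitfall: a default list is created once when the function is defined, so anything appended to it in one call survives into later calls. A minimal demonstration, independent of PlexPy:

```python
def bad(args=[]):
    args.append('x')
    return args

def good(args=None):
    if args is None:
        args = []
    args.append('x')
    return args

print(bad())   # ['x']
print(bad())   # ['x', 'x']  the same list object is reused between calls
print(good())  # ['x']
print(good())  # ['x']       a fresh list is created on every call
```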
@@ -1860,14 +1872,16 @@ class Scripts(object):

        name, ext = os.path.splitext(script)

        if ext == '.py':
            prefix = 'python'
        elif ext == '.pyw':
            prefix = 'pythonw'
        elif ext == '.php':
        if ext == '.php':
            prefix = 'php'
        elif ext == '.pl':
            prefix = 'perl'
        elif ext == '.ps1':
            prefix = 'powershell -executionPolicy bypass -file'
        elif ext == '.py':
            prefix = 'python'
        elif ext == '.pyw':
            prefix = 'pythonw'
        elif ext == '.rb':
            prefix = 'ruby'
        else:

@@ -1877,7 +1891,7 @@ class Scripts(object):
        script = script.encode(plexpy.SYS_ENCODING, 'ignore')

        if prefix:
            script = [prefix, script]
            script = prefix.split() + [script]
        else:
            script = [script]

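Because the PowerShell prefix is a multi-word string, the old `[prefix, script]` form would have handed the whole string to the OS as a single argument; splitting it first produces a proper argument vector. Roughly what the command list looks like for a PowerShell script (the path is illustrative; the notifier's script_args are appended later, outside this hunk):

```python
prefix = 'powershell -executionPolicy bypass -file'
script = 'C:\\scripts\\on_play.ps1'  # illustrative path inside SCRIPTS_FOLDER

command = prefix.split() + [script]
print(command)
# ['powershell', '-executionPolicy', 'bypass', '-file', 'C:\\scripts\\on_play.ps1']
```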
@@ -2016,7 +2030,7 @@ class Scripts(object):
|
||||
|
||||
return config_option
|
||||
|
||||
|
||||
|
||||
class FacebookNotifier(object):
|
||||
|
||||
def __init__(self):
|
||||
@@ -2041,7 +2055,7 @@ class FacebookNotifier(object):
|
||||
|
||||
def _get_credentials(self, code):
|
||||
logger.info(u"PlexPy Notifiers :: Requesting access token from Facebook")
|
||||
|
||||
|
||||
try:
|
||||
# Request user access token
|
||||
api = facebook.GraphAPI(version='2.5')
|
||||
@@ -2050,19 +2064,19 @@ class FacebookNotifier(object):
|
||||
app_id=self.app_id,
|
||||
app_secret=self.app_secret)
|
||||
access_token = response['access_token']
|
||||
|
||||
|
||||
# Request extended user access token
|
||||
api = facebook.GraphAPI(access_token=access_token, version='2.5')
|
||||
response = api.extend_access_token(app_id=self.app_id,
|
||||
app_secret=self.app_secret)
|
||||
access_token = response['access_token']
|
||||
|
||||
|
||||
plexpy.CONFIG.FACEBOOK_TOKEN = access_token
|
||||
plexpy.CONFIG.write()
|
||||
except Exception as e:
|
||||
logger.error(u"PlexPy Notifiers :: Error requesting Facebook access token: %s" % e)
|
||||
return False
|
||||
|
||||
|
||||
return True
|
||||
|
||||
def _post_facebook(self, message=None):
|
||||
@@ -2088,12 +2102,16 @@ class FacebookNotifier(object):
|
||||
config_option = [{'label': 'Instructions',
|
||||
'description': '<strong>Facebook notifications are currently experimental!</strong><br><br> \
|
||||
Step 1: Visit <a href="https://developers.facebook.com/apps/" target="_blank"> \
|
||||
Facebook Developers</a> to create a new app using <strong>advanced setup</strong>.<br>\
|
||||
Step 2: Go to <strong>Settings > Advanced</strong> and fill in \
|
||||
Facebook Developers</a> to add a new app using <strong>basic setup</strong>.<br>\
|
||||
Step 2: Go to <strong>Settings > Basic</strong> and fill in a \
|
||||
<strong>Contact Email</strong>.<br>\
|
||||
Step 3: Go to <strong>Settings > Advanced</strong> and fill in \
|
||||
<strong>Valid OAuth redirect URIs</strong> with your PlexPy URL (i.e. http://localhost:8181).<br>\
|
||||
Step 3: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 2.<br>\
|
||||
Step 4: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
|
||||
Step 5: Click the <strong>Request Authorization</strong> button below.',
|
||||
Step 4: Go to <strong>App Review</strong> and toggle public to <strong>Yes</strong>.<br>\
|
||||
Step 5: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 3.<br>\
|
||||
Step 6: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
|
||||
Step 7: Click the <strong>Request Authorization</strong> button below.<br> \
|
||||
Step 8: Fill in the <strong>Group ID</strong> below.',
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'PlexPy URL',
|
||||
|
@@ -383,7 +383,6 @@ class PlexTV(object):
|
||||
return []
|
||||
|
||||
plextv_resources = self.get_plextv_resources(include_https=include_https)
|
||||
server_urls = []
|
||||
|
||||
try:
|
||||
xml_parse = minidom.parseString(plextv_resources)
|
||||
@@ -400,36 +399,51 @@ class PlexTV(object):
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
|
||||
return []
|
||||
|
||||
# Function to get all connections for a device
|
||||
def get_connections(device):
|
||||
conn = []
|
||||
connections = device.getElementsByTagName('Connection')
|
||||
|
||||
for c in connections:
|
||||
server_details = {"protocol": helpers.get_xml_attr(c, 'protocol'),
|
||||
"address": helpers.get_xml_attr(c, 'address'),
|
||||
"port": helpers.get_xml_attr(c, 'port'),
|
||||
"uri": helpers.get_xml_attr(c, 'uri'),
|
||||
"local": helpers.get_xml_attr(c, 'local')
|
||||
}
|
||||
conn.append(server_details)
|
||||
|
||||
return conn
|
||||
|
||||
server_urls = []
|
||||
|
||||
# Try to match the device
|
||||
for a in xml_head:
|
||||
if helpers.get_xml_attr(a, 'clientIdentifier') == server_id:
|
||||
connections = a.getElementsByTagName('Connection')
|
||||
for connection in connections:
|
||||
server_details = {"protocol": helpers.get_xml_attr(connection, 'protocol'),
|
||||
"address": helpers.get_xml_attr(connection, 'address'),
|
||||
"port": helpers.get_xml_attr(connection, 'port'),
|
||||
"uri": helpers.get_xml_attr(connection, 'uri'),
|
||||
"local": helpers.get_xml_attr(connection, 'local')
|
||||
}
|
||||
server_urls = get_connections(a)
|
||||
break
|
||||
|
||||
# Else no device match found
|
||||
if not server_urls:
|
||||
# Try to match the PMS_IP and PMS_PORT
|
||||
for a in xml_head:
|
||||
if helpers.get_xml_attr(a, 'provides') == 'server':
|
||||
connections = a.getElementsByTagName('Connection')
|
||||
|
||||
server_urls.append(server_details)
|
||||
# Else try to match the PMS_IP and PMS_PORT
|
||||
else:
|
||||
connections = a.getElementsByTagName('Connection')
|
||||
for connection in connections:
|
||||
if helpers.get_xml_attr(connection, 'address') == plexpy.CONFIG.PMS_IP and \
|
||||
int(helpers.get_xml_attr(connection, 'port')) == plexpy.CONFIG.PMS_PORT:
|
||||
for connection in connections:
|
||||
if helpers.get_xml_attr(connection, 'address') == plexpy.CONFIG.PMS_IP and \
|
||||
int(helpers.get_xml_attr(connection, 'port')) == plexpy.CONFIG.PMS_PORT:
|
||||
|
||||
plexpy.CONFIG.PMS_IDENTIFIER = helpers.get_xml_attr(a, 'clientIdentifier')
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s." % \
|
||||
(server_id, plexpy.CONFIG.PMS_IDENTIFIER))
|
||||
|
||||
server_urls = get_connections(a)
|
||||
break
|
||||
|
||||
plexpy.CONFIG.PMS_IDENTIFIER = helpers.get_xml_attr(a, 'clientIdentifier')
|
||||
|
||||
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s." % \
|
||||
(server_id, plexpy.CONFIG.PMS_IDENTIFIER))
|
||||
|
||||
server_details = {"protocol": helpers.get_xml_attr(connection, 'protocol'),
|
||||
"address": helpers.get_xml_attr(connection, 'address'),
|
||||
"port": helpers.get_xml_attr(connection, 'port'),
|
||||
"uri": helpers.get_xml_attr(connection, 'uri'),
|
||||
"local": helpers.get_xml_attr(connection, 'local')
|
||||
}
|
||||
if server_urls:
|
||||
break
|
||||
|
||||
return server_urls
|
||||
|
@@ -40,15 +40,19 @@ def get_server_friendly_name():
|
||||
|
||||
def refresh_libraries():
|
||||
logger.info(u"PlexPy Pmsconnect :: Requesting libraries list refresh...")
|
||||
library_sections = PmsConnect().get_library_details()
|
||||
|
||||
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
if not server_id:
|
||||
logger.error(u"PlexPy Pmsconnect :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
|
||||
return
|
||||
|
||||
library_keys = []
|
||||
library_sections = PmsConnect().get_library_details()
|
||||
|
||||
if library_sections:
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
library_keys = []
|
||||
|
||||
for section in library_sections:
|
||||
section_keys = {'server_id': server_id,
|
||||
'section_id': section['section_id']}
|
||||
@@ -72,10 +76,11 @@ def refresh_libraries():
|
||||
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
if plexpy.CONFIG.UPDATE_SECTION_IDS == 1:
|
||||
if plexpy.CONFIG.UPDATE_SECTION_IDS == 1 or plexpy.CONFIG.UPDATE_SECTION_IDS == -1:
|
||||
from plexpy import libraries
|
||||
import threading
|
||||
|
||||
# Start library section_id update on it's own thread
|
||||
threading.Thread(target=libraries.update_section_ids).start()
|
||||
|
||||
logger.info(u"PlexPy Pmsconnect :: Libraries list refreshed.")
|
||||
@@ -1588,9 +1593,9 @@ class PmsConnect(object):
|
||||
sort_type = ''
|
||||
|
||||
if str(section_id).isdigit():
|
||||
library_data = self.get_library_list(section_id, list_type, count, sort_type, output_format='xml')
|
||||
library_data = self.get_library_list(str(section_id), list_type, count, sort_type, output_format='xml')
|
||||
elif str(rating_key).isdigit():
|
||||
library_data = self.get_children_list(rating_key, output_format='xml')
|
||||
library_data = self.get_children_list(str(rating_key), output_format='xml')
|
||||
else:
|
||||
logger.warn(u"PlexPy Pmsconnect :: get_library_children called by invalid section_id or rating_key provided.")
|
||||
return []
|
||||
@@ -1681,7 +1686,7 @@ class PmsConnect(object):
|
||||
|
||||
server_library_stats = []
|
||||
|
||||
if server_libraries['libraries_count'] != '0':
|
||||
if server_libraries and server_libraries['libraries_count'] != '0':
|
||||
libraries_list = server_libraries['libraries_list']
|
||||
|
||||
for library in libraries_list:
|
||||
@@ -1689,8 +1694,8 @@ class PmsConnect(object):
|
||||
section_id = library['section_id']
|
||||
children_list = self.get_library_children_details(section_id=section_id, section_type=section_type, count='1')
|
||||
|
||||
if children_list and children_list['library_count'] != '0':
|
||||
library_stats = {'section_id': library['section_id'],
|
||||
if children_list:
|
||||
library_stats = {'section_id': section_id,
|
||||
'section_name': library['section_name'],
|
||||
'section_type': section_type,
|
||||
'thumb': library['thumb'],
|
||||
|
@@ -34,15 +34,19 @@ class Users(object):
|
||||
'COUNT(session_history.id) AS plays',
|
||||
'MAX(session_history.started) AS last_seen',
|
||||
'MAX(session_history.id) AS id',
|
||||
'session_history_metadata.full_title AS last_watched',
|
||||
'session_history_metadata.full_title AS last_played',
|
||||
'session_history.ip_address',
|
||||
'session_history.platform',
|
||||
'session_history.player',
|
||||
'session_history.rating_key',
|
||||
'session_history_metadata.media_type',
|
||||
'session_history_metadata.thumb',
|
||||
'session_history_metadata.parent_thumb',
|
||||
'session_history_metadata.grandparent_thumb',
|
||||
'session_history_metadata.media_type',
|
||||
'session_history.rating_key',
|
||||
'session_history_metadata.parent_title',
|
||||
'session_history_metadata.year',
|
||||
'session_history_metadata.media_index',
|
||||
'session_history_metadata.parent_media_index',
|
||||
'session_history_media_info.video_decision',
|
||||
'session_history_media_info.audio_decision',
|
||||
'users.do_notify as do_notify',
|
||||
@@ -97,14 +101,18 @@ class Users(object):
|
||||
'user_thumb': user_thumb,
|
||||
'plays': item['plays'],
|
||||
'last_seen': item['last_seen'],
|
||||
'last_watched': item['last_watched'],
|
||||
'last_played': item['last_played'],
|
||||
'id': item['id'],
|
||||
'ip_address': item['ip_address'],
|
||||
'platform': platform,
|
||||
'player': item['player'],
|
||||
'thumb': thumb,
|
||||
'media_type': item['media_type'],
|
||||
'rating_key': item['rating_key'],
|
||||
'media_type': item['media_type'],
|
||||
'thumb': thumb,
|
||||
'parent_title': item['parent_title'],
|
||||
'year': item['year'],
|
||||
'media_index': item['media_index'],
|
||||
'parent_media_index': item['parent_media_index'],
|
||||
'video_decision': item['video_decision'],
|
||||
'audio_decision': item['audio_decision'],
|
||||
'do_notify': helpers.checked(item['do_notify']),
|
||||
@@ -132,13 +140,18 @@ class Users(object):
|
||||
'COUNT(session_history.id) AS play_count',
|
||||
'session_history.platform',
|
||||
'session_history.player',
|
||||
'session_history_metadata.full_title AS last_watched',
|
||||
'session_history.rating_key',
|
||||
'session_history_metadata.full_title AS last_played',
|
||||
'session_history_metadata.thumb',
|
||||
'session_history_metadata.parent_thumb',
|
||||
'session_history_metadata.grandparent_thumb',
|
||||
'session_history_metadata.media_type',
|
||||
'session_history.rating_key',
|
||||
'session_history_metadata.parent_title',
|
||||
'session_history_metadata.year',
|
||||
'session_history_metadata.media_index',
|
||||
'session_history_metadata.parent_media_index',
|
||||
'session_history_media_info.video_decision',
|
||||
'session_history_media_info.audio_decision',
|
||||
'session_history.user',
|
||||
'session_history.user_id as custom_user_id',
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE \
|
||||
@@ -188,11 +201,16 @@ class Users(object):
|
||||
'play_count': item['play_count'],
|
||||
'platform': platform,
|
||||
'player': item['player'],
|
||||
'last_watched': item['last_watched'],
|
||||
'last_played': item['last_played'],
|
||||
'rating_key': item['rating_key'],
|
||||
'thumb': thumb,
|
||||
'media_type': item['media_type'],
|
||||
'rating_key': item['rating_key'],
|
||||
'parent_title': item['parent_title'],
|
||||
'year': item['year'],
|
||||
'media_index': item['media_index'],
|
||||
'parent_media_index': item['parent_media_index'],
|
||||
'video_decision': item['video_decision'],
|
||||
'audio_decision': item['audio_decision'],
|
||||
'friendly_name': item['friendly_name']
|
||||
}
|
||||
|
||||
@@ -274,10 +292,9 @@ class Users(object):
|
||||
else:
|
||||
logger.warn(u"PlexPy Users :: Unable to retrieve user from local database. Requesting user list refresh.")
|
||||
# Let's first refresh the user list to make sure the user isn't newly added and not in the db yet
|
||||
plextv.refresh_users()
|
||||
try:
|
||||
if str(user_id).isdigit():
|
||||
# Refresh users
|
||||
plextv.refresh_users()
|
||||
query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
|
||||
'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \
|
||||
'FROM users ' \
|
||||
|
@@ -1,2 +1,2 @@
PLEXPY_VERSION = "master"
PLEXPY_RELEASE_VERSION = "1.3.0"
PLEXPY_RELEASE_VERSION = "1.3.5"

@@ -492,7 +492,20 @@ class WebInterface(object):
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps({'message': 'no data received'})
        else:
                return json.dumps({'message': 'Cannot refresh library while getting file sizes.'})
            return json.dumps({'message': 'Cannot refresh library while getting file sizes.'})

    @cherrypy.expose
    def delete_duplicate_libraries(self):
        library_data = libraries.Libraries()

        result = library_data.delete_duplicate_libraries()

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps({'message': result})
        else:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps({'message': 'Unable to delete duplicate libraries from the database.'})

    ##### Users #####

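The new endpoint exposes the Libraries.delete_duplicate_libraries() method added earlier in this diff, so the cleanup can also be triggered over HTTP once PlexPy is running. A sketch against a default local install; the host, port, and absence of authentication are assumptions:

```python
# Python 2, matching the PlexPy codebase. Assumes the default bind address,
# port 8181, no HTTP root prefix, and no authentication enabled.
import urllib2

response = urllib2.urlopen('http://localhost:8181/delete_duplicate_libraries')
print(response.read())  # e.g. {"message": "Deleted duplicate libraries from the database."}
```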
@@ -1151,6 +1164,7 @@ class WebInterface(object):
                del kwargs[use_config]

        # Check if we should refresh our data
        server_changed = False
        refresh_libraries = False
        refresh_users = False
        reschedule = False
@@ -1173,11 +1187,6 @@ class WebInterface(object):
            (kwargs['monitor_remote_access'] != plexpy.CONFIG.MONITOR_REMOTE_ACCESS):
            reschedule = True

        if 'pms_ip' in kwargs:
            if kwargs['pms_ip'] != plexpy.CONFIG.PMS_IP:
                refresh_libraries = True
                refresh_users = True

        # Remove config with 'hscard-' prefix and change home_stats_cards to list
        if 'home_stats_cards' in kwargs:
            for k in kwargs.keys():
@@ -1198,16 +1207,24 @@ class WebInterface(object):
            if kwargs['home_library_cards'] == ['first_run_wizard']:
                refresh_libraries = True

        if 'server_changed' in kwargs:
            del kwargs['server_changed']
            server_changed = True
            refresh_users = True
            refresh_libraries = True

        plexpy.CONFIG.process_kwargs(kwargs)

        # Write the config
        plexpy.CONFIG.write()

        # Get new server URLs for SSL communications.
        plextv.get_real_pms_url()
        if server_changed:
            plextv.get_real_pms_url()

        # Get new server friendly name.
        pmsconnect.get_server_friendly_name()
        if server_changed:
            pmsconnect.get_server_friendly_name()

        # Reconfigure scheduler if intervals changed
        if reschedule:
@@ -1380,27 +1397,39 @@ class WebInterface(object):
    def get_server_id(self, hostname=None, port=None, identifier=None, ssl=0, remote=0, **kwargs):
        from plexpy import http_handler

        if hostname and port:
            # Set PMS attributes to get the real PMS url
            plexpy.CONFIG.__setattr__('PMS_IP', hostname)
            plexpy.CONFIG.__setattr__('PMS_PORT', port)
            plexpy.CONFIG.__setattr__('PMS_IDENTIFIER', identifier)
            plexpy.CONFIG.__setattr__('PMS_SSL', ssl)
            plexpy.CONFIG.__setattr__('PMS_IS_REMOTE', remote)
            plexpy.CONFIG.write()

            plextv.get_real_pms_url()

            pms_connect = pmsconnect.PmsConnect()
            request = pms_connect.get_local_server_identity()

            if request:
                cherrypy.response.headers['Content-type'] = 'application/xml'
                return request
            else:
                logger.warn(u"Unable to retrieve data for get_server_id.")
                return None
        # Attempt to get the pms_identifier from plex.tv if the server is published
        # Works for all PMS SSL settings
        if not identifier and hostname and port:
            plex_tv = plextv.PlexTV()
            servers = plex_tv.discover()

            for server in servers:
                if server['ip'] == hostname and server['port'] == port:
                    identifier = server['clientIdentifier']
                    break

        # Fallback to checking the /identity endpoint if the server is unpublished
        # Cannot set SSL settings on the PMS if unpublished so 'http' is okay
        if not identifier:
            request_handler = http_handler.HTTPHandler(host=hostname,
                                                       port=port,
                                                       token=None)
            uri = '/identity'
            request = request_handler.make_request(uri=uri,
                                                   proto='http',
                                                   request_type='GET',
                                                   output_format='xml',
                                                   no_token=True,
                                                   timeout=10)
            if request:
                xml_head = request.getElementsByTagName('MediaContainer')[0]
                identifier = xml_head.getAttribute('machineIdentifier')

        if identifier:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(identifier)
        else:
            logger.warn('Unable to retrieve the PMS identifier.')
            return None

    @cherrypy.expose
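For reference, the /identity fallback above can be reproduced with nothing but the standard library. This is a hedged sketch, not PlexPy's code: it swaps PlexPy's `http_handler.HTTPHandler` wrapper for `urllib2`, and the hostname and port in the usage comment are placeholders:

```python
import urllib2
from xml.dom import minidom

def get_unpublished_identifier(hostname, port, timeout=10):
    """Fetch the machineIdentifier directly from the PMS /identity endpoint."""
    # Unpublished servers cannot have SSL configured, so plain http is acceptable here.
    url = 'http://%s:%s/identity' % (hostname, port)
    try:
        response = urllib2.urlopen(url, timeout=timeout)
        xml_data = minidom.parseString(response.read())
        media_container = xml_data.getElementsByTagName('MediaContainer')[0]
        return media_container.getAttribute('machineIdentifier') or None
    except Exception:
        return None

# Example usage (placeholder address): get_unpublished_identifier('127.0.0.1', 32400)
```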