Compare commits

...

66 Commits

Author SHA1 Message Date
JonnyWong16
6e3147c5f5 v2.1.37 2019-10-11 20:26:54 -07:00
JonnyWong16
1b09f225ff Remove blank line 2019-10-11 20:14:25 -07:00
JonnyWong16
3cf8c4f8a8 Fix rare case when HTTP_ROOT is None when retrieving URL 2019-10-11 20:11:13 -07:00
JonnyWong16
30be4b473f Log individual custom conditions 2019-10-11 19:54:48 -07:00
JonnyWong16
6908034a86 Add ability to delete MusicBrainz lookup info 2019-10-06 16:50:26 -07:00
JonnyWong16
cba43f675a Save MusicBrainz lookups in the database 2019-10-06 15:16:19 -07:00
JonnyWong16
6ff826bc3a Fix Last.fm URLs linking to the artist page instead of the album page 2019-10-05 23:58:17 -07:00
JonnyWong16
c7afd10ec0 Add notification parameters for MusicBrainz lookup 2019-10-05 23:57:48 -07:00
JonnyWong16
b39d5174f9 Add 3rd party MusicBrainz lookup setting 2019-10-05 23:44:13 -07:00
JonnyWong16
501bc0ab3f Add musicbrainzngs v0.7dev 2019-10-05 23:27:16 -07:00
JonnyWong16
688d28b5ea Limit Discord description to 2048 characters 2019-10-05 23:26:41 -07:00
JonnyWong16
27d2c7b078 v2.1.36-beta 2019-10-05 21:59:14 -07:00
JonnyWong16
2fb12ccf65 Fix updating activity card when rating key changes for the same session key (Fixes Tautulli/Tautulli-Issues#96) 2019-10-05 21:48:11 -07:00
JonnyWong16
cb92d159c1 Add parent_guid and grandparent_guid to get_activity and get_metadata API commands 2019-10-05 20:21:15 -07:00
JonnyWong16
64bdf4237c Separate progressive vs. interlaced video on stream type graphs 2019-09-27 13:44:08 -07:00
JonnyWong16
fd7b4ec7e3 Get source video_full_resolution from Plex metadata 2019-09-27 13:26:55 -07:00
JonnyWong16
57eb57d4d7 Use video_full_resolution in stream data modal 2019-09-27 13:04:40 -07:00
JonnyWong16
7974e9505b Save session video_scan_type and video_full_resolution to the database 2019-09-27 13:04:17 -07:00
JonnyWong16
7498fb37b5 Update database with video_scan_type and video_full_resolution 2019-09-27 13:03:34 -07:00
JonnyWong16
2cc3e88e6c Fix Albums typo on media info table 2019-09-26 20:14:48 -07:00
JonnyWong16
5fd8cfeb80 v2.1.35-beta 2019-09-24 18:09:05 -07:00
JonnyWong16
b295566a4e Clear metadata cache before retrieving new sessions 2019-09-19 21:01:37 -07:00
JonnyWong16
e0943a2d55 Clear metadata cache on startup check for active sessions 2019-09-19 20:55:19 -07:00
JonnyWong16
3015740c3e Merge pull request #1354 from samwiseg0/fix/blank_audio
Fix blank audio on activity card when changing audio streams during direct play
2019-09-19 19:40:14 -07:00
JonnyWong16
ec9ff2f803 Merge pull request #1353 from samwiseg0/fix/video_scan_type
Add video scan type and standardize video resolution. Fixes Tautulli/Tautulli-Issues#194
2019-09-19 19:39:40 -07:00
JonnyWong16
ec8aae9122 Fix Tautulli logging out after saving settings and restarting 2019-09-19 19:37:31 -07:00
samwiseg0
52e608cc43 Fix blank audio on activity card when changing audio streams 2019-09-19 22:29:44 -04:00
samwiseg0
8213f270e5 Move full resolution outside if statement 2019-09-19 22:26:43 -04:00
JonnyWong16
7085042b0d Merge pull request #1352 from samwiseg0/fix/watch_stats
Add year to SQL query for movie watch stats
2019-09-19 19:04:06 -07:00
JonnyWong16
6a411d2458 Merge pull request #1351 from samwiseg0/fix/release_date
Fix release date to be a string vs integer
2019-09-19 19:03:57 -07:00
JonnyWong16
38e2fbabb8 Merge pull request #1350 from samwiseg0/fix/lin_hw_decode
Fix hardware decode not reflected in GUI
2019-09-19 19:03:48 -07:00
samwiseg0
85709f754a Update API docs to reflect changes 2019-09-19 21:22:37 -04:00
samwiseg0
623a1e8a91 Update webui to utilize video full resolution 2019-09-19 21:22:02 -04:00
samwiseg0
de69945ebe Define notification parameters 2019-09-19 20:57:08 -04:00
samwiseg0
7095fa6ac6 Remove overrides 2019-09-19 20:53:18 -04:00
samwiseg0
a59e8298fd Use video_full_resolution for optimized versions in the activity card 2019-09-19 20:50:17 -04:00
samwiseg0
2737d52279 Set the full resolution of the source video and stream video 2019-09-19 20:47:39 -04:00
samwiseg0
0ac1ad4386 Create video_scan_type and stream_video_scan_type 2019-09-19 20:46:38 -04:00
samwiseg0
2db328ac31 Standardize videoResolution to be lowercase 2019-09-19 20:45:53 -04:00
samwiseg0
b6de4ad054 Add year to SQL query watch stats. Fixes Tautulli/Tautulli-Issues#195 2019-09-16 23:44:48 -04:00
samwiseg0
cfea7164b7 Fix release date to be a str vs int 2019-09-16 22:50:31 -04:00
samwiseg0
7e7e5a6be4 Add nvdec for linux decode support. Fixes Tautulli/Tautulli-Issues#193 2019-09-16 21:30:09 -04:00
samwiseg0
df57f4c009 Remove duplicate nvenc 2019-09-16 21:24:57 -04:00
JonnyWong16
c2185c4ce5 Fix notification parameter prefix and suffix not being substituted correctly 2019-09-07 16:37:38 -07:00
JonnyWong16
08714436c3 v2.1.34 2019-09-03 21:46:48 -07:00
JonnyWong16
f65f5d07c0 Add product to get_history API command 2019-09-03 19:52:54 -07:00
JonnyWong16
a9b10c4560 Add Product column to history tables 2019-09-03 19:52:33 -07:00
JonnyWong16
589fbd3158 Add TVMaze and TMDB IDs to notification parameters after lookup 2019-08-27 19:54:54 -07:00
JonnyWong16
0ffc8c5d19 Update JWT secret instead of UUID 2019-08-24 22:00:31 -07:00
JonnyWong16
7498617b74 Flag update JWT UUID after restarting 2019-08-24 21:13:39 -07:00
JonnyWong16
f21d505ab8 Force logout all clients when changing the admin password 2019-08-24 20:49:56 -07:00
JonnyWong16
7b16af0585 Fix verifying PMS with unpublished hostnames (Fixes Tautulli/Tautulli-Issues#190) 2019-08-16 21:27:34 -07:00
JonnyWong16
a83108282a Fix add title to searchable media info fields 2019-08-14 22:50:23 -07:00
JonnyWong16
1c4d01d6ec Fix libraries/users table respect grouping setting 2019-08-09 19:15:53 -07:00
JonnyWong16
22e6d4067d Missing space for episode titles on tables 2019-08-09 19:01:27 -07:00
JonnyWong16
1046b29c1a v2.1.33 2019-07-27 08:44:26 -07:00
JonnyWong16
d6127e28f3 Remove email sort 2019-07-22 09:03:20 -07:00
JonnyWong16
25a949356d Allow searching by email address in dropdown menu 2019-07-21 09:46:17 -07:00
JonnyWong16
72a012b817 Update Plex OAuth headers 2019-07-18 11:39:16 -07:00
JonnyWong16
f439bd639c Make sure config has name and value when masking passwords 2019-07-10 21:37:32 -07:00
JonnyWong16
91476a420a Mask notifier and newsletter config passwords (Fixes Tautulli/Tautulli-Issues#172) 2019-07-08 23:50:48 -07:00
JonnyWong16
96c0f9cad5 Add if Docker container to platform 2019-07-08 22:30:58 -07:00
JonnyWong16
df50559495 Fix overwriting version number with None 2019-07-02 08:58:56 -07:00
JonnyWong16
6d35bd7947 v2.1.32 2019-06-26 19:14:30 -07:00
JonnyWong16
d27356bbba Fix timezone error with newsletter scheduler because QNAP devices use a stupid "local" timezone (Fixes Tautulli/Tautulli-Issues#183) 2019-06-26 19:11:33 -07:00
JonnyWong16
3054a824ce v2.1.31 2019-06-24 21:45:57 -07:00
44 changed files with 3115 additions and 212 deletions

API.md
View File

@@ -174,7 +174,7 @@ Delete the 3rd party API lookup info.
```
Required parameters:
rating_key (int): 1234
(Note: Must be the movie, show, or artist rating key)
(Note: Must be the movie, show, artist, album, or track rating key)
Optional parameters:
None
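
A minimal sketch of calling this command through the Tautulli v2 API (the `delete_lookup_info` command name, host, port, and API key below are assumptions for illustration, not shown in this excerpt):

```python
# Hypothetical example: delete the 3rd party API lookup info for a rating key.
# The /api/v2?apikey=...&cmd=... endpoint layout and the command name are assumed here.
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # adjust for your host, port, and HTTP root
API_KEY = "your_api_key"                       # placeholder

resp = requests.get(TAUTULLI_URL, params={
    "apikey": API_KEY,
    "cmd": "delete_lookup_info",  # assumed command name for this section of API.md
    "rating_key": 1234,           # movie, show, artist, album, or track rating key
})
print(resp.json())
```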
@@ -416,6 +416,7 @@ Returns:
"Drama",
"Fantasy"
],
"grandparent_guid": "com.plexapp.agents.thetvdb://121361?lang=en",
"grandparent_rating_key": "1219",
"grandparent_thumb": "/library/metadata/1219/thumb/1503306930",
"grandparent_title": "Game of Thrones",
@@ -443,6 +444,7 @@ Returns:
"optimized_version_title": "",
"originally_available_at": "2016-04-24",
"original_title": "",
"parent_guid": "com.plexapp.agents.thetvdb://121361/6?lang=en",
"parent_media_index": "6",
"parent_rating_key": "153036",
"parent_thumb": "/library/metadata/153036/thumb/1503889210",
@@ -507,7 +509,9 @@ Returns:
"stream_video_language": "",
"stream_video_language_code": "",
"stream_video_ref_frames": "4",
"stream_video_full_resolution": "1080p",
"stream_video_resolution": "1080",
"stream_video_scan_type": "progressive",
"stream_video_width": "1920",
"studio": "HBO",
"subtitle_codec": "",
@@ -560,12 +564,14 @@ Returns:
"video_decision": "direct play",
"video_frame_rate": "23.976",
"video_framerate": "24p",
"video_full_resolution": "1080p",
"video_height": "1078",
"video_language": "",
"video_language_code": "",
"video_profile": "high",
"video_ref_frames": "4",
"video_resolution": "1080",
"video_scan_type": "progressive",
"video_width": "1920",
"view_offset": "1000",
"width": "1920",
@@ -700,8 +706,9 @@ Returns:
"parent_title": "",
"paused_counter": 0,
"percent_complete": 84,
"platform": "Chrome",
"player": "Plex Web (Chrome)",
"platform": "Windows",
"product": "Plex for Windows",
"player": "Castle-PC",
"rating_key": 4348,
"reference_id": 1123,
"session_key": null,
@@ -833,6 +840,7 @@ Required parameters:
None
Optional parameters:
grouping (int): 0 or 1
order_column (str): "library_thumb", "section_name", "section_type", "count", "parent_count",
"child_count", "last_accessed", "last_played", "plays", "duration"
order_dir (str): "desc" or "asc"
@@ -1108,6 +1116,7 @@ Returns:
"Drama",
"Fantasy"
],
"grandparent_guid": "com.plexapp.agents.thetvdb://121361?lang=en",
"grandparent_rating_key": "1219",
"grandparent_thumb": "/library/metadata/1219/thumb/1462175063",
"grandparent_title": "Game of Thrones",
@@ -1148,6 +1157,7 @@ Returns:
"video_language_code": "",
"video_profile": "high",
"video_ref_frames": "4",
"video_scan_type": "progressive",
"video_width": "1920",
"selected": 0
},
@@ -1182,6 +1192,7 @@ Returns:
],
"video_codec": "h264",
"video_framerate": "24p",
"video_full_resolution": "1080p",
"video_profile": "high",
"video_resolution": "1080",
"width": "1920"
@@ -1190,6 +1201,7 @@ Returns:
"media_type": "episode",
"original_title": "",
"originally_available_at": "2016-04-24",
"parent_guid": "com.plexapp.agents.thetvdb://121361/6?lang=en",
"parent_media_index": "6",
"parent_rating_key": "153036",
"parent_thumb": "/library/metadata/153036/thumb/1462175062",
@@ -2341,6 +2353,7 @@ Required parameters:
None
Optional parameters:
grouping (int): 0 or 1
order_column (str): "user_thumb", "friendly_name", "last_seen", "ip_address", "platform",
"player", "last_played", "plays", "duration"
order_dir (str): "desc" or "asc"
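
To illustrate the additions above, a short hedged sketch that reads the new `product` field returned by `get_history` and passes a `grouping` option (the endpoint, API key, and response envelope keys are assumptions, not taken from this excerpt):

```python
# Hypothetical example: read the new "product" field from get_history.
import requests

TAUTULLI_URL = "http://localhost:8181/api/v2"  # assumed address
API_KEY = "your_api_key"                       # placeholder

history = requests.get(TAUTULLI_URL, params={
    "apikey": API_KEY,
    "cmd": "get_history",
    "grouping": 1,  # assumed to mirror the new grouping option on the table commands
}).json()

# The "response" -> "data" -> "data" nesting is assumed from the documented return values.
for row in history.get("response", {}).get("data", {}).get("data", []):
    print(row.get("player"), "-", row.get("product"))  # e.g. "Castle-PC - Plex for Windows"
```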

View File

@@ -1,5 +1,82 @@
# Changelog
## v2.1.37 (2019-10-11)
* Notifications:
* Fix: Last.fm URLs linking to artist page instead of the album page.
* New: Added a MusicBrainz lookup option for music notifications. The option must be enabled under 3rd Party APIs in the settings.
* New: Added MusicBrainz ID and MusicBrainz URL notification parameters.
* Change: Automatically truncate Discord description summary to 2048 characters.
## v2.1.36-beta (2019-10-05)
* Monitoring:
* Fix: Activity card title not updating after pre-rolls or auto-play.
* History:
* Fix: Display correct interlaced or progressive video scan type on stream data modal.
* Graphs:
* New: Separate interlaced and progressive video scan type on source and stream resolution graphs.
* API:
* New: Added parent_guid and grandparent_guid to get_activity and get_metadata commands.
## v2.1.35-beta (2019-09-24)
* Monitoring:
* Fix: Audio shown as blank on activity cards when changing audio tracks during direct play.
* Fix: Display correct interlaced or progressive video scan type on activity cards.
* New: Added flag for Nvidia hardware decoding on activity cards.
* Notifications:
* Fix: Notification parameter prefix and suffix were not substituted correctly.
* Fix: Release Date notification parameter was incorrectly cast to an integer instead of a string.
* New: Added video scan type and full resolution notification parameters.
* UI:
* Fix: Movies with the same title but different year being grouped on the homepage stats cards.
* API:
* New: Added video scan type and full resolution values to get_activity command.
* Other:
* Fix: Tautulli logging out every time after saving settings and restarting.
## v2.1.34 (2019-09-03)
* History:
* New: Added Product column to history tables.
* Notifications:
* Fix: IMDB/TMDb/TVDB/TVmaze ID notification parameters showing blank values after lookup.
* UI:
* Fix: Libraries and Users tables did not respect the group history setting.
* API:
* Fix: Title field was not searchable in get_library_media_info command.
* New: Added grouping option to get_libraries_table and get_users_table commands.
* New: Added product value to get_history command.
* Other:
* Fix: Could not verify Plex Media Server with unpublished hostnames.
* Change: Automatically log out all Tautulli instances when changing the admin password.
## v2.1.33 (2019-07-27)
* Notifications:
* Change: Mask notification agent password fields.
* Change: Enable searching by email address in dropdown menu.
* Other:
* Fix: Version number being overwritten with "None" which prevented updating in some instances.
* Change: Update Plex OAuth request headers.
## v2.1.32 (2019-06-26)
* Newsletters:
* Fix: Newsletter scheduler issue for QNAP devices using an invalid "local" timezone, which prevented Tautulli from starting.
## v2.1.31 (2019-06-24)
* No additional changes from v2.1.31-beta.
## v2.1.31-beta (2019-06-13)
* Monitoring:
@@ -23,6 +100,7 @@
## v2.1.29 (2019-05-11)
* No additional changes from v2.1.29-beta.
@@ -167,6 +245,7 @@
## v2.1.20 (2018-09-05)
* No additional changes from v2.1.20-beta.

View File

@@ -69,7 +69,7 @@ DOCUMENTATION :: END
% endif
<tr>
<td>Platform:</td>
<td>${common.PLATFORM} ${common.PLATFORM_RELEASE} (${common.PLATFORM_VERSION + (' - {}'.format(common.PLATFORM_LINUX_DISTRO) if common.PLATFORM_LINUX_DISTRO else '')})</td>
<td>${'[Docker] ' if plexpy.DOCKER else ''}${common.PLATFORM} ${common.PLATFORM_RELEASE} (${common.PLATFORM_VERSION + (' - {}'.format(common.PLATFORM_LINUX_DISTRO) if common.PLATFORM_LINUX_DISTRO else '')})</td>
</tr>
<tr>
<td>System Timezone:</td>

View File

@@ -232,11 +232,11 @@ DOCUMENTATION :: END
hw_d = ' (HW)' if data['transcode_hw_decoding'] else ''
hw_e = ' (HW)' if data['transcode_hw_encoding'] else ''
%>
Transcode (${data['video_codec'].upper()}${hw_d} ${VIDEO_RESOLUTION_OVERRIDES.get(data['video_resolution'], data['video_resolution'])} <i class="fa fa-long-arrow-right"></i> ${data['stream_video_codec'].upper()}${hw_e} ${VIDEO_RESOLUTION_OVERRIDES.get(data['stream_video_resolution'], data['stream_video_resolution'])})
Transcode (${data['video_codec'].upper()}${hw_d} ${data['video_full_resolution']} <i class="fa fa-long-arrow-right"></i> ${data['stream_video_codec'].upper()}${hw_e} ${data['stream_video_full_resolution']})
% elif data['stream_video_decision'] == 'copy':
Direct Stream (${data['stream_video_codec'].upper()} ${VIDEO_RESOLUTION_OVERRIDES.get(data['stream_video_resolution'], data['stream_video_resolution'])})
Direct Stream (${data['stream_video_codec'].upper()} ${data['stream_video_full_resolution']})
% else:
Direct Play (${data['stream_video_codec'].upper()} ${VIDEO_RESOLUTION_OVERRIDES.get(data['stream_video_resolution'], data['stream_video_resolution'])})
Direct Play (${data['stream_video_codec'].upper()} ${data['stream_video_full_resolution']})
% endif
% elif data['media_type'] == 'photo':
Direct Play (${data['width']}x${data['height']})

View File

@@ -60,7 +60,8 @@
<th align="left" id="friendly_name">User</th>
<th align="left" id="ip_address">IP Address</th>
<th align="left" id="platform">Platform</th>
<th align="left" id="device">Player</th>
<th align="left" id="product">Product</th>
<th align="left" id="player">Player</th>
<th align="left" id="title">Title</th>
<th align="left" id="started">Started</th>
<th align="left" id="paused_counter">Paused</th>
@@ -143,7 +144,7 @@
var colvis = new $.fn.dataTable.ColVis(history_table, {
buttonText: '<i class="fa fa-columns"></i> Select columns',
buttonClass: 'btn btn-dark',
exclude: [0, 11]
exclude: [0, 12]
});
$(colvis.button()).appendTo('div.colvis-button-bar');

View File

@@ -26,6 +26,7 @@
<th align="left" id="friendly_name">User</th>
<th align="left" id="ip_address">IP Address</th>
<th align="left" id="platform">Platform</th>
<th align="left" id="product">Product</th>
<th align="left" id="device">Player</th>
<th align="left" id="title">Title</th>
<th align="left" id="started">Started</th>
@@ -61,7 +62,7 @@
};
history_table = $('#history_table_modal').DataTable(history_table_options);
history_table.columns([0, 3, 4, 8, 10, 11]).visible(false);
history_table.columns([0, 3, 4, 5, 9, 11, 12]).visible(false);
clearSearchButton('history_table_modal', history_table);

View File

@@ -355,8 +355,9 @@
var session_id = s.session_id;
var instance = $('#activity-instance-' + key);
// Create a new instance if it doesn't exist
if (!(instance.length)) {
// Create a new instance if it doesn't exist or recreate the entire instance
// if the rating key changed (for movies or episodes) with the same session key
if (!(instance.length) || (s.media_type !== 'track' && s.rating_key !== instance.data('rating_key').toString())) {
create_instances.push(key);
getActivityInstance(key);
return;
@@ -382,7 +383,7 @@
// Switching tracks can be under the same session key, so need to update the info.
if (s.media_type === 'track') {
// Update if artist changed
if (s.grandparent_rating_key !== instance.data('grandparent_rating_key')) {
if (s.grandparent_rating_key !== instance.data('grandparent_rating_key').toString()) {
$('#background-' + key).css('background-image', 'url(pms_image_proxy?img=' + s.art + '&width=500&height=280&opacity=40&background=282828&blur=3&fallback=art&refresh=true)');
$('#metadata-grandparent_title-' + key)
.attr('href', 'info?rating_key=' + s.grandparent_rating_key)
@@ -390,7 +391,7 @@
.text(s.original_title || s.grandparent_title);
}
// Update cover if album changed
if (s.parent_rating_key !== instance.data('parent_rating_key')) {
if (s.parent_rating_key !== instance.data('parent_rating_key').toString()) {
$('#poster-' + key).css('background-image', 'url(pms_image_proxy?img=' + s.parent_thumb + '&width=300&height=300&fallback=poster&refresh=true)');
$('#poster-' + key + '-bg').css('background-image', 'url(pms_image_proxy?img=' + s.parent_thumb + '&width=300&height=300&opacity=60&background=282828&blur=3&fallback=poster&refresh=true)');
$('#poster-url-' + key)
@@ -402,7 +403,7 @@
.text(s.parent_title);
}
// Update cover if track changed
if (s.rating_key !== instance.data('rating_key')) {
if (s.rating_key !== instance.data('rating_key').toString()) {
$('#metadata-grandparent_title-' + key)
.attr('href', 'info?rating_key=' + s.grandparent_rating_key)
.attr('title', s.original_title || s.grandparent_title)
@@ -445,7 +446,7 @@
v_res = '4k';
break;
default:
v_res = s.video_resolution + 'p'
v_res = s.video_full_resolution;
}
var sv_res = '';
switch (s.stream_video_resolution.toLowerCase()) {
@@ -456,7 +457,7 @@
sv_res = '4k';
break;
default:
sv_res = s.stream_video_resolution + 'p'
sv_res = s.stream_video_full_resolution;
}
if (s.stream_video_decision === 'transcode') {
var hw_d = (s.transcode_hw_decoding === 1) ? ' (HW)' : '';
@@ -580,7 +581,14 @@
session_key: session_key
},
complete: function(xhr, status) {
var instance = $('#activity-instance-' + session_key);
if (instance.length) {
instance.replaceWith(xhr.responseText);
} else {
$('#currentActivity').append(xhr.responseText);
}
$('#activity-instance-' + session_key + ' .dashboard-activity-info-scroller').scrollbar();
$('#activity-instance-' + session_key + ' [data-toggle=tooltip]').tooltip({ container: 'body', placement: 'right', delay: 50 });
$('#terminate-button-' + session_key).tooltip('destroy').tooltip({ container: 'body', placement: 'left', delay: 50 });

View File

@@ -405,11 +405,11 @@ DOCUMENTATION :: END
</a>
</div>
% endif
% if data.get('tvmaze_id') or data.get('themoviedb_id'):
% if data.get('tvmaze_id') or data.get('themoviedb_id') or data.get('musicbrainz_id'):
<div class="btn-group">
<button class="btn btn-danger btn-edit" data-toggle="modal" aria-pressed="false" autocomplete="off" id="delete-lookup-info"
data-id="${data['grandparent_rating_key'] if data['media_type'] in ('episode', 'track') else data['parent_rating_key'] if data['media_type'] in ('season', 'album') else data['rating_key']}"
data-title="${data['grandparent_title'] if data['media_type'] in ('episode', 'track') else data['parent_title'] if data['media_type'] in ('season', 'album') else data['title']}">
data-id="${data['grandparent_rating_key'] if data['media_type'] == 'episode' else data['parent_rating_key'] if data['media_type'] == 'season' else data['rating_key']}"
data-title="${data['grandparent_title'] if data['media_type'] == 'episode' else data['parent_title'] if data['media_type'] == 'season' else data['title']}">
<i class="fa fa-search"></i> Delete Lookup Info
</button>
</div>
@@ -451,6 +451,7 @@ DOCUMENTATION :: END
<th align="left" id="friendly_name">User</th>
<th align="left" id="ip_address">IP Address</th>
<th align="left" id="platform">Platform</th>
<th align="left" id="product">Product</th>
<th align="left" id="player">Player</th>
<th align="left" id="title">Title</th>
<th align="left" id="started">Started</th>
@@ -613,7 +614,7 @@ DOCUMENTATION :: END
$(document).ready(function () {
get_history();
history_table = $('#history_table-RK-${data["rating_key"]}').DataTable(history_table_options);
var colvis = new $.fn.dataTable.ColVis(history_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 11] });
var colvis = new $.fn.dataTable.ColVis(history_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 12] });
$(colvis.button()).appendTo('div.colvis-button-bar');
clearSearchButton('history_table-RK-${data["rating_key"]}', history_table);
@@ -750,7 +751,7 @@ DOCUMENTATION :: END
});
</script>
% endif
% if data.get('tvmaze_id') or data.get('themoviedb_id'):
% if data.get('tvmaze_id') or data.get('themoviedb_id') or data.get('musicbrainz_id'):
<script>
$('#delete-lookup-info').on('click', function () {
var msg = 'Are you sure you want to delete the 3rd party API lookup for <strong>' + $(this).data('title') + '</strong>?<br><br>' +

View File

@@ -568,8 +568,11 @@ function getPlexHeaders() {
'X-Plex-Client-Identifier': getLocalStorage('Tautulli_ClientID', uuidv4(), false),
'X-Plex-Platform': p.name,
'X-Plex-Platform-Version': p.version,
'X-Plex-Model': 'Plex OAuth',
'X-Plex-Device': p.os,
'X-Plex-Device-Name': p.name
'X-Plex-Device-Name': p.name,
'X-Plex-Device-Screen-Resolution': window.screen.width + 'x' + window.screen.height,
'X-Plex-Language': 'en'
};
}
@@ -655,7 +658,21 @@ function PlexOAuth(success, error, pre) {
const pin = data.pin;
const code = data.code;
plex_oauth_window.location = 'https://app.plex.tv/auth/#!?clientID=' + x_plex_headers['X-Plex-Client-Identifier'] + '&code=' + code;
var oauth_params = {
'clientID': x_plex_headers['X-Plex-Client-Identifier'],
'context[device][product]': x_plex_headers['X-Plex-Product'],
'context[device][version]': x_plex_headers['X-Plex-Version'],
'context[device][platform]': x_plex_headers['X-Plex-Platform'],
'context[device][platformVersion]': x_plex_headers['X-Plex-Platform-Version'],
'context[device][device]': x_plex_headers['X-Plex-Device'],
'context[device][deviceName]': x_plex_headers['X-Plex-Device-Name'],
'context[device][model]': x_plex_headers['X-Plex-Model'],
'context[device][screenResolution]': x_plex_headers['X-Plex-Device-Screen-Resolution'],
'context[device][layout]': 'desktop',
'code': code
}
plex_oauth_window.location = 'https://app.plex.tv/auth/#!?' + encodeData(oauth_params);
polling = pin;
(function poll() {
@@ -694,3 +711,9 @@ function PlexOAuth(success, error, pre) {
}
});
}
function encodeData(data) {
return Object.keys(data).map(function(key) {
return [key, data[key]].map(encodeURIComponent).join("=");
}).join("&");
}

View File

@@ -115,7 +115,7 @@ history_table_options = {
"data": "platform",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(cellData);
$(td).html(capitalizeFirstLetter(cellData));
}
},
"width": "10%",
@@ -123,6 +123,17 @@ history_table_options = {
},
{
"targets": [5],
"data": "product",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
$(td).html(cellData);
}
},
"width": "10%",
"className": "no-wrap"
},
{
"targets": [6],
"data": "player",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
@@ -137,11 +148,11 @@ history_table_options = {
$(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + '&nbsp;' + cellData + '</div></a></div>');
}
},
"width": "12%",
"width": "10%",
"className": "no-wrap modal-control"
},
{
"targets": [6],
"targets": [7],
"data": "full_title",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
@@ -171,11 +182,11 @@ history_table_options = {
}
}
},
"width": "33%",
"width": "25%",
"className": "datatable-wrap"
},
{
"targets": [7],
"targets": [8],
"data": "started",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData === null) {
@@ -189,7 +200,7 @@ history_table_options = {
"className": "no-wrap"
},
{
"targets": [8],
"targets": [9],
"data": "paused_counter",
"render": function (data, type, full) {
if (data !== null) {
@@ -203,7 +214,7 @@ history_table_options = {
"className": "no-wrap"
},
{
"targets": [9],
"targets": [10],
"data": "stopped",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData === null || (rowData['state'] != null && rowData['state'] != "stopped")) {
@@ -217,7 +228,7 @@ history_table_options = {
"className": "no-wrap"
},
{
"targets": [10],
"targets": [11],
"data": "duration",
"render": function (data, type, full) {
if (data !== null) {
@@ -231,7 +242,7 @@ history_table_options = {
"className": "no-wrap"
},
{
"targets": [11],
"targets": [12],
"data": "watched_status",
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData == 1) {
@@ -489,7 +500,8 @@ function childTableFormat(rowData) {
'<th align="left" id="friendly_name">User</th>' +
'<th align="left" id="ip_address">IP Address</th>' +
'<th align="left" id="platform">Platform</th>' +
'<th align="left" id="platform">Player</th>' +
'<th align="left" id="product">Product</th>' +
'<th align="left" id="player">Player</th>' +
'<th align="left" id="title">Title</th>' +
'<th align="left" id="started">Started</th>' +
'<th align="left" id="paused_counter">Paused</th>' +

View File

@@ -50,7 +50,7 @@ media_info_table_options = {
expand_details = '<span class="expand-media-info-tooltip" data-toggle="tooltip" title="Show Episodes"><i class="fa fa-plus-circle fa-fw"></i></span>';
$(td).html('<div><a href="#"><div style="float: left;">' + expand_details + '&nbsp;' + date + '</div></a></div>');
} else if (rowData['media_type'] === 'artist') {
expand_details = '<span class="expand-media-info-tooltip" data-toggle="tooltip" title="Show Albumns"><i class="fa fa-plus-circle fa-fw"></i></span>';
expand_details = '<span class="expand-media-info-tooltip" data-toggle="tooltip" title="Show Albums"><i class="fa fa-plus-circle fa-fw"></i></span>';
$(td).html('<div><a href="#"><div style="float: left;">' + expand_details + '&nbsp;' + date + '</div></a></div>');
} else if (rowData['media_type'] === 'album') {
expand_details = '<span class="expand-media-info-tooltip" data-toggle="tooltip" title="Show Tracks"><i class="fa fa-plus-circle fa-fw"></i></span>';

View File

@@ -205,6 +205,7 @@ DOCUMENTATION :: END
<th align="left" id="friendly_name">User</th>
<th align="left" id="ip_address">IP Address</th>
<th align="left" id="platform">Platform</th>
<th align="left" id="product">Product</th>
<th align="left" id="player">Player</th>
<th align="left" id="title">Title</th>
<th align="left" id="started">Started</th>
@@ -385,7 +386,7 @@ DOCUMENTATION :: END
};
history_table = $('#history_table-SID-${data["section_id"]}').DataTable(history_table_options);
var colvis = new $.fn.dataTable.ColVis(history_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 11] });
var colvis = new $.fn.dataTable.ColVis(history_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 12] });
$(colvis.button()).appendTo('#button-bar-history');
clearSearchButton('history_table-SID-${data["section_id"]}', history_table);

View File

@@ -584,6 +584,7 @@
var $email_selectors = $('#newsletter_email_to, #newsletter_email_cc, #newsletter_email_bcc').selectize({
plugins: ['remove_button'],
maxItems: null,
searchField: ['text', 'value'],
render: {
item: function(item, escape) {
return '<div>' +

View File

@@ -566,6 +566,7 @@
var $email_selectors = $('#email_to, #email_cc, #email_bcc').selectize({
plugins: ['remove_button'],
maxItems: null,
searchField: ['text', 'value'],
render: {
item: function(item, escape) {
return '<div>' +

View File

@@ -1163,6 +1163,12 @@
</label>
<p class="help-block">Enable to lookup links to TVmaze (and IMDb if needed) for TV shows when available.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox" name="musicbrainz_lookup" id="musicbrainz_lookup" value="1" ${config['musicbrainz_lookup']}> Lookup MusicBrainz Links
</label>
<p class="help-block">Enable to lookup links to MusicBrainz for music when available.</p>
</div>
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>

View File

@@ -96,8 +96,8 @@ DOCUMENTATION :: END
% if data['media_type'] != 'track':
<tr>
<td>Resolution</td>
<td>${VIDEO_RESOLUTION_OVERRIDES.get(data['stream_video_resolution'], data['stream_video_resolution'])}</td>
<td>${VIDEO_RESOLUTION_OVERRIDES.get(data['video_resolution'], data['video_resolution'])}</td>
<td>${data['stream_video_full_resolution']}</td>
<td>${data['video_full_resolution']}</td>
</tr>
% endif
<tr>

View File

@@ -184,6 +184,7 @@ DOCUMENTATION :: END
<th align="left" id="friendly_name">User</th>
<th align="left" id="ip_address">IP Address</th>
<th align="left" id="platform">Platform</th>
<th align="left" id="product">Product</th>
<th align="left" id="player">Player</th>
<th align="left" id="title">Title</th>
<th align="left" id="started">Started</th>
@@ -425,7 +426,7 @@ DOCUMENTATION :: END
history_table = $('#history_table-UID-${data["user_id"]}').DataTable(history_table_options);
history_table.column(2).visible(false);
var colvis = new $.fn.dataTable.ColVis(history_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 11] });
var colvis = new $.fn.dataTable.ColVis(history_table, { buttonText: '<i class="fa fa-columns"></i> Select columns', buttonClass: 'btn btn-dark', exclude: [0, 12] });
$(colvis.button()).appendTo('#button-bar-history');
clearSearchButton('history_table-UID-${data["user_id"]}', history_table);

View File

@@ -0,0 +1,2 @@
from musicbrainzngs.musicbrainz import *
from musicbrainzngs.caa import *

lib/musicbrainzngs/caa.py Normal file
View File

@@ -0,0 +1,187 @@
# This file is part of the musicbrainzngs library
# Copyright (C) Alastair Porter, Wieland Hoffmann, and others
# This file is distributed under a BSD-2-Clause type license.
# See the COPYING file for more information.
__all__ = [
'set_caa_hostname', 'get_image_list', 'get_release_group_image_list',
'get_release_group_image_front', 'get_image_front', 'get_image_back',
'get_image'
]
import json
from musicbrainzngs import compat
from musicbrainzngs import musicbrainz
from musicbrainzngs.util import _unicode
hostname = "coverartarchive.org"
https = True
def set_caa_hostname(new_hostname, use_https=False):
"""Set the base hostname for Cover Art Archive requests.
Defaults to 'coverartarchive.org', accessing over https.
For backwards compatibility, `use_https` is False by default.
:param str new_hostname: The hostname (and port) of the CAA server to connect to
:param bool use_https: `True` if the host should be accessed using https. Default is `False`
"""
global hostname
global https
hostname = new_hostname
https = use_https
def _caa_request(mbid, imageid=None, size=None, entitytype="release"):
""" Make a CAA request.
:param imageid: ``front``, ``back`` or a number from the listing obtained
with :meth:`get_image_list`.
:type imageid: str
:param size: "250", "500", "1200"
:type size: str or None
:param entitytype: ``release`` or ``release-group``
:type entitytype: str
"""
# Construct the full URL for the request, including hostname and
# query string.
path = [entitytype, mbid]
if imageid and size:
path.append("%s-%s" % (imageid, size))
elif imageid:
path.append(imageid)
url = compat.urlunparse((
'https' if https else 'http',
hostname,
'/%s' % '/'.join(path),
'',
'',
''
))
musicbrainz._log.debug("GET request for %s" % (url, ))
# Set up HTTP request handler and URL opener.
httpHandler = compat.HTTPHandler(debuglevel=0)
handlers = [httpHandler]
opener = compat.build_opener(*handlers)
# Make request.
req = musicbrainz._MusicbrainzHttpRequest("GET", url, None)
# Useragent isn't needed for CAA, but we'll add it if it exists
if musicbrainz._useragent != "":
req.add_header('User-Agent', musicbrainz._useragent)
musicbrainz._log.debug("requesting with UA %s" % musicbrainz._useragent)
resp = musicbrainz._safe_read(opener, req, None)
# TODO: The content type declared by the CAA for JSON files is
# 'application/octet-stream'. This is not useful to detect whether the
# content is JSON, so default to decoding JSON if no imageid was supplied.
# http://tickets.musicbrainz.org/browse/CAA-75
if imageid:
# If we asked for an image, return the image
return resp
else:
# Otherwise it's json
data = _unicode(resp)
return json.loads(data)
def get_image_list(releaseid):
"""Get the list of cover art associated with a release.
The return value is the deserialized response of the `JSON listing
<http://musicbrainz.org/doc/Cover_Art_Archive/API#.2Frelease.2F.7Bmbid.7D.2F>`_
returned by the Cover Art Archive API.
If an error occurs then a :class:`~musicbrainzngs.ResponseError` will
be raised with one of the following HTTP codes:
* 400: `Releaseid` is not a valid UUID
* 404: No release exists with an MBID of `releaseid`
* 503: Ratelimit exceeded
"""
return _caa_request(releaseid)
def get_release_group_image_list(releasegroupid):
"""Get the list of cover art associated with a release group.
The return value is the deserialized response of the `JSON listing
<http://musicbrainz.org/doc/Cover_Art_Archive/API#.2Frelease-group.2F.7Bmbid.7D.2F>`_
returned by the Cover Art Archive API.
If an error occurs then a :class:`~musicbrainzngs.ResponseError` will
be raised with one of the following HTTP codes:
* 400: `Releaseid` is not a valid UUID
* 404: No release exists with an MBID of `releaseid`
* 503: Ratelimit exceeded
"""
return _caa_request(releasegroupid, entitytype="release-group")
def get_release_group_image_front(releasegroupid, size=None):
"""Download the front cover art for a release group.
The `size` argument and the possible error conditions are the same as for
:meth:`get_image`.
"""
return get_image(releasegroupid, "front", size=size,
entitytype="release-group")
def get_image_front(releaseid, size=None):
"""Download the front cover art for a release.
The `size` argument and the possible error conditions are the same as for
:meth:`get_image`.
"""
return get_image(releaseid, "front", size=size)
def get_image_back(releaseid, size=None):
"""Download the back cover art for a release.
The `size` argument and the possible error conditions are the same as for
:meth:`get_image`.
"""
return get_image(releaseid, "back", size=size)
def get_image(mbid, coverid, size=None, entitytype="release"):
"""Download cover art for a release. The coverart file to download
is specified by the `coverid` argument.
If `size` is not specified, download the largest copy present, which can be
very large.
If an error occurs then a :class:`~musicbrainzngs.ResponseError`
will be raised with one of the following HTTP codes:
* 400: `Releaseid` is not a valid UUID or `coverid` is invalid
* 404: No release exists with an MBID of `releaseid`
* 503: Ratelimit exceeded
:param coverid: ``front``, ``back`` or a number from the listing obtained with
:meth:`get_image_list`
:type coverid: int or str
:param size: "250", "500", "1200" or None. If it is None, the largest
available picture will be downloaded. If the image originally
uploaded to the Cover Art Archive was smaller than the
requested size, only the original image will be returned.
:type size: str or None
:param entitytype: The type of entity for which to download the cover art.
This is either ``release`` or ``release-group``.
:type entitytype: str
:return: The binary image data
:type: str
"""
if isinstance(coverid, int):
coverid = "%d" % (coverid, )
if isinstance(size, int):
size = "%d" % (size, )
return _caa_request(mbid, coverid, size=size, entitytype=entitytype)
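
For orientation, a hedged usage sketch of the Cover Art Archive helpers defined in this file (the MBID is a made-up placeholder; `set_useragent` comes from the main musicbrainz module re-exported by the package's `__init__`):

```python
# Hypothetical example using the CAA helpers above; the MBID is a placeholder, not a real release.
import musicbrainzngs
from musicbrainzngs import caa

# A user agent is optional for CAA requests (see _caa_request above) but is
# required for regular MusicBrainz web service calls, so set it up front.
musicbrainzngs.set_useragent("ExampleApp", "0.1", "admin@example.com")

release_mbid = "00000000-0000-0000-0000-000000000000"  # placeholder MBID

images = caa.get_image_list(release_mbid)                   # deserialized JSON listing
front_jpeg = caa.get_image_front(release_mbid, size="250")  # binary image data
with open("cover.jpg", "wb") as f:
    f.write(front_jpeg)
```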

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Kenneth Reitz.
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
pythoncompat
"""
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
# ---------
# Specifics
# ---------
if is_py2:
from StringIO import StringIO
from urllib2 import HTTPPasswordMgr, HTTPDigestAuthHandler, Request,\
HTTPHandler, build_opener, HTTPError, URLError
from httplib import BadStatusLine, HTTPException
from urlparse import urlunparse
from urllib import urlencode, quote_plus
bytes = str
unicode = unicode
basestring = basestring
elif is_py3:
from io import StringIO
from urllib.request import HTTPPasswordMgr, HTTPDigestAuthHandler, Request,\
HTTPHandler, build_opener
from urllib.error import HTTPError, URLError
from http.client import HTTPException, BadStatusLine
from urllib.parse import urlunparse, urlencode, quote_plus
unicode = str
bytes = bytes
basestring = (str,bytes)

lib/musicbrainzngs/mbxml.py Normal file
View File

@@ -0,0 +1,817 @@
# This file is part of the musicbrainzngs library
# Copyright (C) Alastair Porter, Adrian Sampson, and others
# This file is distributed under a BSD-2-Clause type license.
# See the COPYING file for more information.
import re
import xml.etree.ElementTree as ET
import logging
from . import util
def fixtag(tag, namespaces):
# given a decorated tag (of the form {uri}tag), return prefixed
# tag and namespace declaration, if any
if isinstance(tag, ET.QName):
tag = tag.text
namespace_uri, tag = tag[1:].split("}", 1)
prefix = namespaces.get(namespace_uri)
if prefix is None:
prefix = "ns%d" % len(namespaces)
namespaces[namespace_uri] = prefix
if prefix == "xml":
xmlns = None
else:
xmlns = ("xmlns:%s" % prefix, namespace_uri)
else:
xmlns = None
return "%s:%s" % (prefix, tag), xmlns
NS_MAP = {"http://musicbrainz.org/ns/mmd-2.0#": "ws2",
"http://musicbrainz.org/ns/ext#-2.0": "ext"}
_log = logging.getLogger("musicbrainzngs")
def get_error_message(error):
""" Given an error XML message from the webservice containing
<error><text>x</text><text>y</text></error>, return a list
of [x, y]"""
try:
tree = util.bytes_to_elementtree(error)
root = tree.getroot()
errors = []
if root.tag == "error":
for ch in root:
if ch.tag == "text":
errors.append(ch.text)
return errors
except ET.ParseError:
return None
def make_artist_credit(artists):
names = []
for artist in artists:
if isinstance(artist, dict):
if "name" in artist:
names.append(artist.get("name", ""))
else:
names.append(artist.get("artist", {}).get("name", ""))
else:
names.append(artist)
return "".join(names)
def parse_elements(valid_els, inner_els, element):
""" Extract single level subelements from an element.
For example, given the element:
<element>
<subelement>Text</subelement>
</element>
and a list valid_els that contains "subelement",
return a dict {'subelement': 'Text'}
Delegate the parsing of multi-level subelements to another function.
For example, given the element:
<element>
<subelement>
<a>Foo</a><b>Bar</b>
</subelement>
</element>
and a dictionary {'subelement': parse_subelement},
call parse_subelement(<subelement>) and
return a dict {'subelement': <result>}
if parse_subelement returns a tuple of the form
(True, {'subelement-key': <result>})
then merge the second element of the tuple into the
result (which may have a key other than 'subelement' or
more than 1 key)
"""
result = {}
for sub in element:
t = fixtag(sub.tag, NS_MAP)[0]
if ":" in t:
t = t.split(":")[1]
if t in valid_els:
result[t] = sub.text or ""
elif t in inner_els.keys():
inner_result = inner_els[t](sub)
if isinstance(inner_result, tuple) and inner_result[0]:
result.update(inner_result[1])
else:
result[t] = inner_result
# add counts for lists when available
m = re.match(r'([a-z0-9-]+)-list', t)
if m and "count" in sub.attrib:
result["%s-count" % m.group(1)] = int(sub.attrib["count"])
else:
_log.info("in <%s>, uncaught <%s>",
fixtag(element.tag, NS_MAP)[0], t)
return result
def parse_attributes(attributes, element):
""" Extract attributes from an element.
For example, given the element:
<element type="Group" />
and a list attributes that contains "type",
return a dict {'type': 'Group'}
"""
result = {}
for attr in element.attrib:
if "{" in attr:
a = fixtag(attr, NS_MAP)[0]
else:
a = attr
if a in attributes:
result[a] = element.attrib[attr]
else:
_log.info("in <%s>, uncaught attribute %s", fixtag(element.tag, NS_MAP)[0], attr)
return result
def parse_message(message):
tree = util.bytes_to_elementtree(message)
root = tree.getroot()
result = {}
valid_elements = {"area": parse_area,
"artist": parse_artist,
"instrument": parse_instrument,
"label": parse_label,
"place": parse_place,
"event": parse_event,
"release": parse_release,
"release-group": parse_release_group,
"series": parse_series,
"recording": parse_recording,
"work": parse_work,
"url": parse_url,
"disc": parse_disc,
"cdstub": parse_cdstub,
"isrc": parse_isrc,
"annotation-list": parse_annotation_list,
"area-list": parse_area_list,
"artist-list": parse_artist_list,
"label-list": parse_label_list,
"place-list": parse_place_list,
"event-list": parse_event_list,
"instrument-list": parse_instrument_list,
"release-list": parse_release_list,
"release-group-list": parse_release_group_list,
"series-list": parse_series_list,
"recording-list": parse_recording_list,
"work-list": parse_work_list,
"url-list": parse_url_list,
"collection-list": parse_collection_list,
"collection": parse_collection,
"message": parse_response_message
}
result.update(parse_elements([], valid_elements, root))
return result
def parse_response_message(message):
return parse_elements(["text"], {}, message)
def parse_collection_list(cl):
return [parse_collection(c) for c in cl]
def parse_collection(collection):
result = {}
attribs = ["id", "type", "entity-type"]
elements = ["name", "editor"]
inner_els = {"release-list": parse_release_list,
"artist-list": parse_artist_list,
"event-list": parse_event_list,
"place-list": parse_place_list,
"recording-list": parse_recording_list,
"work-list": parse_work_list}
result.update(parse_attributes(attribs, collection))
result.update(parse_elements(elements, inner_els, collection))
return result
def parse_annotation_list(al):
return [parse_annotation(a) for a in al]
def parse_annotation(annotation):
result = {}
attribs = ["type", "ext:score"]
elements = ["entity", "name", "text"]
result.update(parse_attributes(attribs, annotation))
result.update(parse_elements(elements, {}, annotation))
return result
def parse_lifespan(lifespan):
parts = parse_elements(["begin", "end", "ended"], {}, lifespan)
return parts
def parse_area_list(al):
return [parse_area(a) for a in al]
def parse_area(area):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["name", "sort-name", "disambiguation"]
inner_els = {"life-span": parse_lifespan,
"alias-list": parse_alias_list,
"relation-list": parse_relation_list,
"annotation": parse_annotation,
"iso-3166-1-code-list": parse_element_list,
"iso-3166-2-code-list": parse_element_list,
"iso-3166-3-code-list": parse_element_list}
result.update(parse_attributes(attribs, area))
result.update(parse_elements(elements, inner_els, area))
return result
def parse_artist_list(al):
return [parse_artist(a) for a in al]
def parse_artist(artist):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["name", "sort-name", "country", "user-rating",
"disambiguation", "gender", "ipi"]
inner_els = {"area": parse_area,
"begin-area": parse_area,
"end-area": parse_area,
"life-span": parse_lifespan,
"recording-list": parse_recording_list,
"relation-list": parse_relation_list,
"release-list": parse_release_list,
"release-group-list": parse_release_group_list,
"work-list": parse_work_list,
"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"rating": parse_rating,
"ipi-list": parse_element_list,
"isni-list": parse_element_list,
"alias-list": parse_alias_list,
"annotation": parse_annotation}
result.update(parse_attributes(attribs, artist))
result.update(parse_elements(elements, inner_els, artist))
return result
def parse_coordinates(c):
return parse_elements(['latitude', 'longitude'], {}, c)
def parse_place_list(pl):
return [parse_place(p) for p in pl]
def parse_place(place):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["name", "address",
"ipi", "disambiguation"]
inner_els = {"area": parse_area,
"coordinates": parse_coordinates,
"life-span": parse_lifespan,
"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"alias-list": parse_alias_list,
"relation-list": parse_relation_list,
"annotation": parse_annotation}
result.update(parse_attributes(attribs, place))
result.update(parse_elements(elements, inner_els, place))
return result
def parse_event_list(el):
return [parse_event(e) for e in el]
def parse_event(event):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["name", "time", "setlist", "cancelled", "disambiguation", "user-rating"]
inner_els = {"life-span": parse_lifespan,
"relation-list": parse_relation_list,
"alias-list": parse_alias_list,
"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"rating": parse_rating}
result.update(parse_attributes(attribs, event))
result.update(parse_elements(elements, inner_els, event))
return result
def parse_instrument(instrument):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["name", "description", "disambiguation"]
inner_els = {"relation-list": parse_relation_list,
"tag-list": parse_tag_list,
"alias-list": parse_alias_list,
"annotation": parse_annotation}
result.update(parse_attributes(attribs, instrument))
result.update(parse_elements(elements, inner_els, instrument))
return result
def parse_label_list(ll):
return [parse_label(l) for l in ll]
def parse_label(label):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["name", "sort-name", "country", "label-code", "user-rating",
"ipi", "disambiguation"]
inner_els = {"area": parse_area,
"life-span": parse_lifespan,
"release-list": parse_release_list,
"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"rating": parse_rating,
"ipi-list": parse_element_list,
"alias-list": parse_alias_list,
"relation-list": parse_relation_list,
"annotation": parse_annotation}
result.update(parse_attributes(attribs, label))
result.update(parse_elements(elements, inner_els, label))
return result
def parse_relation_target(tgt):
attributes = parse_attributes(['id'], tgt)
if 'id' in attributes:
return (True, {'target-id': attributes['id']})
else:
return (True, {'target-id': tgt.text})
def parse_relation_list(rl):
attribs = ["target-type"]
ttype = parse_attributes(attribs, rl)
key = "%s-relation-list" % ttype["target-type"]
return (True, {key: [parse_relation(r) for r in rl]})
def parse_relation(relation):
result = {}
attribs = ["type", "type-id"]
elements = ["target", "direction", "begin", "end", "ended", "ordering-key"]
inner_els = {"area": parse_area,
"artist": parse_artist,
"instrument": parse_instrument,
"label": parse_label,
"place": parse_place,
"event": parse_event,
"recording": parse_recording,
"release": parse_release,
"release-group": parse_release_group,
"series": parse_series,
"attribute-list": parse_element_list,
"work": parse_work,
"target": parse_relation_target
}
result.update(parse_attributes(attribs, relation))
result.update(parse_elements(elements, inner_els, relation))
# We parse attribute-list again to get attributes that have both
# text and attribute values
result.update(parse_elements(['target-credit'], {"attribute-list": parse_relation_attribute_list}, relation))
return result
def parse_relation_attribute_list(attributelist):
ret = []
for attribute in attributelist:
ret.append(parse_relation_attribute_element(attribute))
return (True, {"attributes": ret})
def parse_relation_attribute_element(element):
# Parses an attribute into a dictionary containing an element
# {"attribute": <text value>} and also an additional element
# containing any xml attributes.
# e.g <attribute value="BuxWV 1">number</attribute>
# -> {"attribute": "number", "value": "BuxWV 1"}
result = {}
for attr in element.attrib:
if "{" in attr:
a = fixtag(attr, NS_MAP)[0]
else:
a = attr
result[a] = element.attrib[attr]
result["attribute"] = element.text
return result
def parse_release(release):
result = {}
attribs = ["id", "ext:score"]
elements = ["title", "status", "disambiguation", "quality", "country",
"barcode", "date", "packaging", "asin"]
inner_els = {"text-representation": parse_text_representation,
"artist-credit": parse_artist_credit,
"label-info-list": parse_label_info_list,
"medium-list": parse_medium_list,
"release-group": parse_release_group,
"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"relation-list": parse_relation_list,
"annotation": parse_annotation,
"cover-art-archive": parse_caa,
"release-event-list": parse_release_event_list}
result.update(parse_attributes(attribs, release))
result.update(parse_elements(elements, inner_els, release))
if "artist-credit" in result:
result["artist-credit-phrase"] = make_artist_credit(
result["artist-credit"])
return result
def parse_medium_list(ml):
"""medium-list results from search have an additional
<track-count> element containing the number of tracks
over all mediums. Optionally add this"""
medium_list = []
track_count = None
for m in ml:
tag = fixtag(m.tag, NS_MAP)[0]
if tag == "ws2:medium":
medium_list.append(parse_medium(m))
elif tag == "ws2:track-count":
track_count = int(m.text)
ret = {"medium-list": medium_list}
if track_count is not None:
ret["medium-track-count"] = track_count
return (True, ret)
def parse_release_event_list(rel):
return [parse_release_event(re) for re in rel]
def parse_release_event(event):
result = {}
elements = ["date"]
inner_els = {"area": parse_area}
result.update(parse_elements(elements, inner_els, event))
return result
def parse_medium(medium):
result = {}
elements = ["position", "format", "title"]
inner_els = {"disc-list": parse_disc_list,
"pregap": parse_track,
"track-list": parse_track_list,
"data-track-list": parse_track_list}
result.update(parse_elements(elements, inner_els, medium))
return result
def parse_disc_list(dl):
return [parse_disc(d) for d in dl]
def parse_text_representation(textr):
return parse_elements(["language", "script"], {}, textr)
def parse_release_group(rg):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["title", "user-rating", "first-release-date", "primary-type",
"disambiguation"]
inner_els = {"artist-credit": parse_artist_credit,
"release-list": parse_release_list,
"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"secondary-type-list": parse_element_list,
"relation-list": parse_relation_list,
"rating": parse_rating,
"annotation": parse_annotation}
result.update(parse_attributes(attribs, rg))
result.update(parse_elements(elements, inner_els, rg))
if "artist-credit" in result:
result["artist-credit-phrase"] = make_artist_credit(result["artist-credit"])
return result
def parse_recording(recording):
result = {}
attribs = ["id", "ext:score"]
elements = ["title", "length", "user-rating", "disambiguation", "video"]
inner_els = {"artist-credit": parse_artist_credit,
"release-list": parse_release_list,
"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"rating": parse_rating,
"isrc-list": parse_external_id_list,
"relation-list": parse_relation_list,
"annotation": parse_annotation}
result.update(parse_attributes(attribs, recording))
result.update(parse_elements(elements, inner_els, recording))
if "artist-credit" in result:
result["artist-credit-phrase"] = make_artist_credit(result["artist-credit"])
return result
def parse_series_list(sl):
return [parse_series(s) for s in sl]
def parse_series(series):
result = {}
attribs = ["id", "type", "ext:score"]
elements = ["name", "disambiguation"]
inner_els = {"alias-list": parse_alias_list,
"relation-list": parse_relation_list,
"annotation": parse_annotation}
result.update(parse_attributes(attribs, series))
result.update(parse_elements(elements, inner_els, series))
return result
def parse_external_id_list(pl):
return [parse_attributes(["id"], p)["id"] for p in pl]
def parse_element_list(el):
return [e.text for e in el]
def parse_work_list(wl):
return [parse_work(w) for w in wl]
def parse_work(work):
result = {}
attribs = ["id", "ext:score", "type"]
elements = ["title", "user-rating", "language", "iswc", "disambiguation"]
inner_els = {"tag-list": parse_tag_list,
"user-tag-list": parse_tag_list,
"rating": parse_rating,
"alias-list": parse_alias_list,
"iswc-list": parse_element_list,
"relation-list": parse_relation_list,
"annotation": parse_response_message,
"attribute-list": parse_work_attribute_list
}
result.update(parse_attributes(attribs, work))
result.update(parse_elements(elements, inner_els, work))
return result
def parse_work_attribute_list(wal):
return [parse_work_attribute(wa) for wa in wal]
def parse_work_attribute(wa):
attribs = ["type"]
typeinfo = parse_attributes(attribs, wa)
result = {}
if typeinfo:
result = {"attribute": typeinfo["type"],
"value": wa.text}
return result
def parse_url_list(ul):
return [parse_url(u) for u in ul]
def parse_url(url):
result = {}
attribs = ["id"]
elements = ["resource"]
inner_els = {"relation-list": parse_relation_list}
result.update(parse_attributes(attribs, url))
result.update(parse_elements(elements, inner_els, url))
return result
def parse_disc(disc):
result = {}
attribs = ["id"]
elements = ["sectors"]
inner_els = {"release-list": parse_release_list,
"offset-list": parse_offset_list
}
result.update(parse_attributes(attribs, disc))
result.update(parse_elements(elements, inner_els, disc))
return result
def parse_cdstub(cdstub):
result = {}
attribs = ["id"]
elements = ["title", "artist", "barcode"]
inner_els = {"track-list": parse_track_list}
result.update(parse_attributes(attribs, cdstub))
result.update(parse_elements(elements, inner_els, cdstub))
return result
def parse_offset_list(ol):
return [int(o.text) for o in ol]
def parse_instrument_list(rl):
result = []
for r in rl:
result.append(parse_instrument(r))
return result
def parse_release_list(rl):
result = []
for r in rl:
result.append(parse_release(r))
return result
def parse_release_group_list(rgl):
result = []
for rg in rgl:
result.append(parse_release_group(rg))
return result
def parse_isrc(isrc):
result = {}
attribs = ["id"]
inner_els = {"recording-list": parse_recording_list}
result.update(parse_attributes(attribs, isrc))
result.update(parse_elements([], inner_els, isrc))
return result
def parse_recording_list(recs):
result = []
for r in recs:
result.append(parse_recording(r))
return result
def parse_artist_credit(ac):
result = []
for namecredit in ac:
result.append(parse_name_credit(namecredit))
join = parse_attributes(["joinphrase"], namecredit)
if "joinphrase" in join:
result.append(join["joinphrase"])
return result
def parse_name_credit(nc):
result = {}
elements = ["name"]
inner_els = {"artist": parse_artist}
result.update(parse_elements(elements, inner_els, nc))
return result
def parse_label_info_list(lil):
result = []
for li in lil:
result.append(parse_label_info(li))
return result
def parse_label_info(li):
result = {}
elements = ["catalog-number"]
inner_els = {"label": parse_label}
result.update(parse_elements(elements, inner_els, li))
return result
def parse_track_list(tl):
result = []
for t in tl:
result.append(parse_track(t))
return result
def parse_track(track):
result = {}
attribs = ["id"]
elements = ["number", "position", "title", "length"]
inner_els = {"recording": parse_recording,
"artist-credit": parse_artist_credit}
result.update(parse_attributes(attribs, track))
result.update(parse_elements(elements, inner_els, track))
if "artist-credit" in result.get("recording", {}) and "artist-credit" not in result:
result["artist-credit"] = result["recording"]["artist-credit"]
if "artist-credit" in result:
result["artist-credit-phrase"] = make_artist_credit(result["artist-credit"])
# Make a length field that contains track length or recording length
track_or_recording = None
if "length" in result:
track_or_recording = result["length"]
elif result.get("recording", {}).get("length"):
track_or_recording = result.get("recording", {}).get("length")
if track_or_recording:
result["track_or_recording_length"] = track_or_recording
return result
def parse_tag_list(tl):
return [parse_tag(t) for t in tl]
def parse_tag(tag):
result = {}
attribs = ["count"]
elements = ["name"]
result.update(parse_attributes(attribs, tag))
result.update(parse_elements(elements, {}, tag))
return result
def parse_rating(rating):
result = {}
attribs = ["votes-count"]
result.update(parse_attributes(attribs, rating))
result["rating"] = rating.text
return result
def parse_alias_list(al):
return [parse_alias(a) for a in al]
def parse_alias(alias):
result = {}
attribs = ["locale", "sort-name", "type", "primary",
"begin-date", "end-date"]
result.update(parse_attributes(attribs, alias))
result["alias"] = alias.text
return result
def parse_caa(caa_element):
result = {}
elements = ["artwork", "count", "front", "back", "darkened"]
result.update(parse_elements(elements, {}, caa_element))
return result
###
def make_barcode_request(release2barcode):
NS = "http://musicbrainz.org/ns/mmd-2.0#"
root = ET.Element("{%s}metadata" % NS)
rel_list = ET.SubElement(root, "{%s}release-list" % NS)
for release, barcode in release2barcode.items():
rel_xml = ET.SubElement(rel_list, "{%s}release" % NS)
bar_xml = ET.SubElement(rel_xml, "{%s}barcode" % NS)
rel_xml.set("{%s}id" % NS, release)
bar_xml.text = barcode
return ET.tostring(root, "utf-8")
def make_tag_request(**kwargs):
NS = "http://musicbrainz.org/ns/mmd-2.0#"
root = ET.Element("{%s}metadata" % NS)
for entity_type in ['artist', 'label', 'place', 'recording', 'release', 'release_group', 'work']:
entity_tags = kwargs.pop(entity_type + '_tags', None)
if entity_tags is not None:
e_list = ET.SubElement(root, "{%s}%s-list" % (NS, entity_type.replace('_', '-')))
for e, tags in entity_tags.items():
e_xml = ET.SubElement(e_list, "{%s}%s" % (NS, entity_type.replace('_', '-')))
e_xml.set("{%s}id" % NS, e)
taglist = ET.SubElement(e_xml, "{%s}user-tag-list" % NS)
for tag in tags:
usertag_xml = ET.SubElement(taglist, "{%s}user-tag" % NS)
name_xml = ET.SubElement(usertag_xml, "{%s}name" % NS)
name_xml.text = tag
if kwargs.keys():
raise TypeError("make_tag_request() got an unexpected keyword argument '%s'" % kwargs.popitem()[0])
return ET.tostring(root, "utf-8")
def make_rating_request(**kwargs):
NS = "http://musicbrainz.org/ns/mmd-2.0#"
root = ET.Element("{%s}metadata" % NS)
for entity_type in ['artist', 'label', 'recording', 'release_group', 'work']:
entity_ratings = kwargs.pop(entity_type + '_ratings', None)
if entity_ratings is not None:
e_list = ET.SubElement(root, "{%s}%s-list" % (NS, entity_type.replace('_', '-')))
for e, rating in entity_ratings.items():
e_xml = ET.SubElement(e_list, "{%s}%s" % (NS, entity_type.replace('_', '-')))
e_xml.set("{%s}id" % NS, e)
rating_xml = ET.SubElement(e_xml, "{%s}user-rating" % NS)
rating_xml.text = str(rating)
if kwargs.keys():
raise TypeError("make_rating_request() got an unexpected keyword argument '%s'" % kwargs.popitem()[0])
return ET.tostring(root, "utf-8")
def make_isrc_request(recording2isrcs):
NS = "http://musicbrainz.org/ns/mmd-2.0#"
root = ET.Element("{%s}metadata" % NS)
rec_list = ET.SubElement(root, "{%s}recording-list" % NS)
for rec, isrcs in recording2isrcs.items():
if len(isrcs) > 0:
rec_xml = ET.SubElement(rec_list, "{%s}recording" % NS)
rec_xml.set("{%s}id" % NS, rec)
isrc_list_xml = ET.SubElement(rec_xml, "{%s}isrc-list" % NS)
isrc_list_xml.set("{%s}count" % NS, str(len(isrcs)))
for isrc in isrcs:
isrc_xml = ET.SubElement(isrc_list_xml, "{%s}isrc" % NS)
isrc_xml.set("{%s}id" % NS, isrc)
return ET.tostring(root, "utf-8")
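As a quick orientation for the submission helpers above, a hedged usage sketch follows; the release MBID and barcode are placeholders, not values from this changeset.
# Hypothetical usage of make_barcode_request(): the dict maps a release MBID to
# its barcode, and the helper returns the UTF-8 XML body for the submission.
release2barcode = {"00000000-0000-0000-0000-000000000000": "5099749534728"}
xml_body = make_barcode_request(release2barcode)
# xml_body is a bytestring containing a <metadata>/<release-list> document in the
# http://musicbrainz.org/ns/mmd-2.0# namespace.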

File diff suppressed because it is too large

View File

@@ -0,0 +1,44 @@
# This file is part of the musicbrainzngs library
# Copyright (C) Alastair Porter, Adrian Sampson, and others
# This file is distributed under a BSD-2-Clause type license.
# See the COPYING file for more information.
import sys
import locale
import xml.etree.ElementTree as ET
from . import compat
def _unicode(string, encoding=None):
"""Try to decode byte strings to unicode.
This can only be a guess, but this might be better than failing.
It is safe to use this on numbers or strings that are already unicode.
"""
if isinstance(string, compat.unicode):
unicode_string = string
elif isinstance(string, compat.bytes):
# try the given encoding, then stdin's encoding, then the locale's preferred encoding, until one is not None
if encoding is None:
encoding = sys.stdin.encoding
if encoding is None:
encoding = locale.getpreferredencoding()
unicode_string = string.decode(encoding, "ignore")
else:
unicode_string = compat.unicode(string)
return unicode_string.replace('\x00', '').strip()
def bytes_to_elementtree(bytes_or_file):
"""Given a bytestring or a file-like object that will produce them,
parse and return an ElementTree.
"""
if isinstance(bytes_or_file, compat.basestring):
s = bytes_or_file
else:
s = bytes_or_file.read()
if compat.is_py3:
s = _unicode(s, "utf-8")
f = compat.StringIO(s)
tree = ET.ElementTree(file=f)
return tree
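A minimal sketch of how the helper above is exercised, assuming a small hand-written XML payload in place of a real web service response:
# Hypothetical usage: parse a stand-in response body the same way the library
# parses real replies (bytes in, ElementTree out).
payload = b'<?xml version="1.0" encoding="UTF-8"?>' \
          b'<metadata xmlns="http://musicbrainz.org/ns/mmd-2.0#"/>'
tree = bytes_to_elementtree(payload)
root = tree.getroot()   # tag == '{http://musicbrainz.org/ns/mmd-2.0#}metadata'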

View File

@@ -158,8 +158,8 @@ def initialize(config_file):
logger.info(u"Starting Tautulli {}".format(
common.RELEASE
))
logger.info(u"{} {} ({}{})".format(
common.PLATFORM, common.PLATFORM_RELEASE, common.PLATFORM_VERSION,
logger.info(u"{}{} {} ({}{})".format(
'[Docker] ' if DOCKER else '', common.PLATFORM, common.PLATFORM_RELEASE, common.PLATFORM_VERSION,
' - {}'.format(common.PLATFORM_LINUX_DISTRO) if common.PLATFORM_LINUX_DISTRO else ''
))
logger.info(u"{} (UTC{})".format(
@@ -217,9 +217,10 @@ def initialize(config_file):
CONFIG.write()
# Check if Tautulli has a jwt_secret
if CONFIG.JWT_SECRET == '' or not CONFIG.JWT_SECRET:
if CONFIG.JWT_SECRET == '' or not CONFIG.JWT_SECRET or CONFIG.JWT_UPDATE_SECRET:
logger.debug(u"Generating JWT secret...")
CONFIG.JWT_SECRET = generate_uuid()
CONFIG.JWT_UPDATE_SECRET = False
CONFIG.write()
# Get the previous version from the file
@@ -253,7 +254,7 @@ def initialize(config_file):
# Check for new versions
if CONFIG.CHECK_GITHUB_ON_STARTUP and CONFIG.CHECK_GITHUB:
try:
LATEST_VERSION = versioncheck.check_update()
versioncheck.check_update()
except:
logger.exception(u"Unhandled exception")
LATEST_VERSION = CURRENT_VERSION
@@ -581,12 +582,12 @@ def dbcheck():
'view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, '
'transcode_decision TEXT, container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, '
'video_codec TEXT, video_bitrate INTEGER, video_resolution TEXT, video_width INTEGER, video_height INTEGER, '
'video_framerate TEXT, aspect_ratio TEXT, '
'video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, aspect_ratio TEXT, '
'audio_codec TEXT, audio_bitrate INTEGER, audio_channels INTEGER, subtitle_codec TEXT, '
'stream_bitrate INTEGER, stream_video_resolution TEXT, quality_profile TEXT, '
'stream_container_decision TEXT, stream_container TEXT, '
'stream_video_decision TEXT, stream_video_codec TEXT, stream_video_bitrate INTEGER, stream_video_width INTEGER, '
'stream_video_height INTEGER, stream_video_framerate TEXT, '
'stream_video_height INTEGER, stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, '
'stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, '
'subtitles INTEGER, stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, '
'transcode_protocol TEXT, transcode_container TEXT, '
@@ -616,7 +617,7 @@ def dbcheck():
'video_decision TEXT, audio_decision TEXT, transcode_decision TEXT, duration INTEGER DEFAULT 0, '
'container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, video_bitrate INTEGER, video_bit_depth INTEGER, '
'video_codec TEXT, video_codec_level TEXT, video_width INTEGER, video_height INTEGER, video_resolution TEXT, '
'video_framerate TEXT, aspect_ratio TEXT, '
'video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, aspect_ratio TEXT, '
'audio_bitrate INTEGER, audio_codec TEXT, audio_channels INTEGER, transcode_protocol TEXT, '
'transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, '
'transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER, '
@@ -626,7 +627,7 @@ def dbcheck():
'stream_container TEXT, stream_container_decision TEXT, stream_bitrate INTEGER, '
'stream_video_decision TEXT, stream_video_bitrate INTEGER, stream_video_codec TEXT, stream_video_codec_level TEXT, '
'stream_video_bit_depth INTEGER, stream_video_height INTEGER, stream_video_width INTEGER, stream_video_resolution TEXT, '
'stream_video_framerate TEXT, '
'stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, '
'stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, '
'stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, stream_subtitle_container TEXT, stream_subtitle_forced INTEGER, '
'subtitles INTEGER, subtitle_codec TEXT, synced_version INTEGER, synced_version_profile TEXT, '
@@ -750,6 +751,13 @@ def dbcheck():
'themoviedb_id INTEGER, themoviedb_url TEXT, themoviedb_json TEXT)'
)
# musicbrainz_lookup table :: This table keeps record of the MusicBrainz lookups
c_db.execute(
'CREATE TABLE IF NOT EXISTS musicbrainz_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'rating_key INTEGER, musicbrainz_id INTEGER, musicbrainz_url TEXT, musicbrainz_type TEXT, '
'musicbrainz_json TEXT)'
)
# image_hash_lookup table :: This table keeps record of the image hash lookups
c_db.execute(
'CREATE TABLE IF NOT EXISTS image_hash_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, '
@@ -1174,6 +1182,24 @@ def dbcheck():
'ALTER TABLE sessions ADD COLUMN rating_key_websocket TEXT'
)
# Upgrade sessions table from earlier versions
try:
c_db.execute('SELECT video_scan_type FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN video_scan_type TEXT'
)
c_db.execute(
'ALTER TABLE sessions ADD COLUMN video_full_resolution TEXT'
)
c_db.execute(
'ALTER TABLE sessions ADD COLUMN stream_video_scan_type TEXT'
)
c_db.execute(
'ALTER TABLE sessions ADD COLUMN stream_video_full_resolution TEXT'
)
# Upgrade session_history table from earlier versions
try:
c_db.execute('SELECT reference_id FROM session_history')
@@ -1473,6 +1499,45 @@ def dbcheck():
except sqlite3.OperationalError:
logger.warn(u"Unable to remove NULL values from session_history_media_info table.")
# Upgrade session_history_media_info table from earlier versions
try:
c_db.execute('SELECT video_scan_type FROM session_history_media_info')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_media_info.")
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN video_scan_type TEXT'
)
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN video_full_resolution TEXT'
)
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN stream_video_scan_type TEXT'
)
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN stream_video_full_resolution TEXT'
)
c_db.execute(
'UPDATE session_history_media_info SET video_scan_type = "progressive" '
'WHERE video_resolution != ""'
)
c_db.execute(
'UPDATE session_history_media_info SET stream_video_scan_type = "progressive" '
'WHERE stream_video_resolution != "" AND stream_video_resolution IS NOT NULL'
)
c_db.execute(
'UPDATE session_history_media_info SET video_full_resolution = (CASE '
'WHEN video_resolution = "" OR video_resolution = "SD" OR video_resolution = "4k" THEN video_resolution '
'WHEN video_resolution = "sd" THEN "SD" '
'ELSE video_resolution || "p" END)'
)
c_db.execute(
'UPDATE session_history_media_info SET stream_video_full_resolution = ( '
'CASE WHEN stream_video_resolution = "" OR stream_video_resolution = "SD" OR stream_video_resolution = "4k" '
'THEN stream_video_resolution '
'WHEN stream_video_resolution = "sd" THEN "SD" '
'ELSE stream_video_resolution || "p" END)'
)
# Upgrade users table from earlier versions
try:
c_db.execute('SELECT do_notify FROM users')
@@ -1870,6 +1935,9 @@ def dbcheck():
c_db.execute(
'CREATE UNIQUE INDEX IF NOT EXISTS idx_themoviedb_lookup ON themoviedb_lookup (rating_key)'
)
c_db.execute(
'CREATE UNIQUE INDEX IF NOT EXISTS idx_musicbrainz_lookup ON musicbrainz_lookup (rating_key)'
)
conn_db.commit()
c_db.close()
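The video_full_resolution CASE expressions in the session_history_media_info upgrade above reduce to a small mapping; a hedged Python equivalent (the function name is illustrative only, not part of the schema):
# Illustrative equivalent of the SQL CASE expression used in the migration.
def full_resolution(video_resolution):
    if video_resolution in ("", "SD", "4k"):
        return video_resolution            # already in display form
    if video_resolution == "sd":
        return "SD"
    return video_resolution + "p"          # e.g. "1080" -> "1080p", "720" -> "720p"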

View File

@@ -17,13 +17,13 @@ import threading
import time
import plexpy
import activity_handler
import activity_processor
import database
import helpers
import libraries
import logger
import notification_handler
import notifiers
import plextv
import pmsconnect
import web_socket
@@ -37,17 +37,23 @@ int_ping_count = 0
def check_active_sessions(ws_request=False):
with monitor_lock:
pms_connect = pmsconnect.PmsConnect()
session_list = pms_connect.get_current_activity()
monitor_db = database.MonitorDatabase()
monitor_process = activity_processor.ActivityProcessor()
db_streams = monitor_process.get_sessions()
# Clear the metadata cache
for stream in db_streams:
activity_handler.delete_metadata_cache(stream['session_key'])
pms_connect = pmsconnect.PmsConnect()
session_list = pms_connect.get_current_activity()
logger.debug(u"Tautulli Monitor :: Checking for active streams.")
if session_list:
media_container = session_list['sessions']
# Check our temp table for what we must do with the new streams
db_streams = monitor_process.get_sessions()
for stream in db_streams:
if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
for d in media_container):

View File

@@ -76,6 +76,8 @@ class ActivityProcessor(object):
'video_height': session.get('video_height', ''),
'video_resolution': session.get('video_resolution', ''),
'video_framerate': session.get('video_framerate', ''),
'video_scan_type': session.get('video_scan_type', ''),
'video_full_resolution': session.get('video_full_resolution', ''),
'aspect_ratio': session.get('aspect_ratio', ''),
'audio_codec': session.get('audio_codec', ''),
'audio_bitrate': session.get('audio_bitrate', ''),
@@ -106,6 +108,8 @@ class ActivityProcessor(object):
'stream_video_width': session.get('stream_video_width', ''),
'stream_video_height': session.get('stream_video_height', ''),
'stream_video_framerate': session.get('stream_video_framerate', ''),
'stream_video_scan_type': session.get('stream_video_scan_type', ''),
'stream_video_full_resolution': session.get('stream_video_full_resolution', ''),
'stream_audio_decision': session.get('stream_audio_decision', ''),
'stream_audio_codec': session.get('stream_audio_codec', ''),
'stream_audio_bitrate': session.get('stream_audio_bitrate', ''),
@@ -347,6 +351,8 @@ class ActivityProcessor(object):
'video_height': session['video_height'],
'video_resolution': session['video_resolution'],
'video_framerate': session['video_framerate'],
'video_scan_type': session['video_scan_type'],
'video_full_resolution': session['video_full_resolution'],
'aspect_ratio': session['aspect_ratio'],
'audio_codec': session['audio_codec'],
'audio_bitrate': session['audio_bitrate'],
@@ -379,6 +385,8 @@ class ActivityProcessor(object):
'stream_video_width': session['stream_video_width'],
'stream_video_resolution': session['stream_video_resolution'],
'stream_video_framerate': session['stream_video_framerate'],
'stream_video_scan_type': session['stream_video_scan_type'],
'stream_video_full_resolution': session['stream_video_full_resolution'],
'stream_audio_decision': session['stream_audio_decision'],
'stream_audio_codec': session['stream_audio_codec'],
'stream_audio_bitrate': session['stream_audio_bitrate'],

View File

@@ -119,11 +119,6 @@ AUDIO_CODEC_OVERRIDES = {
VIDEO_RESOLUTION_OVERRIDES = {
'sd': 'SD',
'480': '480p',
'540': '540p',
'576': '576p',
'720': '720p',
'1080': '1080p',
'4k': '4k'
}
@@ -168,11 +163,11 @@ HW_DECODERS = [
'dxva2',
'videotoolbox',
'mediacodecndk',
'vaapi'
'vaapi',
'nvdec'
]
HW_ENCODERS = [
'qsv',
'nvenc',
'mf',
'videotoolbox',
'mediacodecndk',
@@ -366,8 +361,10 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Stream Video Bitrate', 'type': 'int', 'value': 'stream_video_bitrate', 'description': 'The video bitrate (in kbps) of the stream.'},
{'name': 'Stream Video Bit Depth', 'type': 'int', 'value': 'stream_video_bit_depth', 'description': 'The video bit depth of the stream.'},
{'name': 'Stream Video Framerate', 'type': 'str', 'value': 'stream_video_framerate', 'description': 'The video framerate of the stream.'},
{'name': 'Stream Video Full Resolution', 'type': 'str', 'value': 'stream_video_full_resolution', 'description': 'The video resolution of the stream with scan type.'},
{'name': 'Stream Video Ref Frames', 'type': 'int', 'value': 'stream_video_ref_frames', 'description': 'The video reference frames of the stream.'},
{'name': 'Stream Video Resolution', 'type': 'str', 'value': 'stream_video_resolution', 'description': 'The video resolution of the stream.'},
{'name': 'Stream Video Scan Type', 'type': 'str', 'value': 'stream_video_scan_type', 'description': 'The video scan type of the stream.'},
{'name': 'Stream Video Height', 'type': 'int', 'value': 'stream_video_height', 'description': 'The video height of the stream.'},
{'name': 'Stream Video Width', 'type': 'int', 'value': 'stream_video_width', 'description': 'The video width of the stream.'},
{'name': 'Stream Video Language', 'type': 'str', 'value': 'stream_video_language', 'description': 'The video language of the stream.'},
@@ -430,7 +427,7 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Album Count', 'type': 'int', 'value': 'album_count', 'description': 'The number of albums.'},
{'name': 'Track Count', 'type': 'int', 'value': 'track_count', 'description': 'The number of tracks.'},
{'name': 'Year', 'type': 'int', 'value': 'year', 'description': 'The release year for the item.'},
{'name': 'Release Date', 'type': 'int', 'value': 'release_date', 'description': 'The release date (in date format) for the item.'},
{'name': 'Release Date', 'type': 'str', 'value': 'release_date', 'description': 'The release date (in date format) for the item.'},
{'name': 'Air Date', 'type': 'str', 'value': 'air_date', 'description': 'The air date (in date format) for the item.'},
{'name': 'Added Date', 'type': 'str', 'value': 'added_date', 'description': 'The date (in date format) the item was added to Plex.'},
{'name': 'Updated Date', 'type': 'str', 'value': 'updated_date', 'description': 'The date (in date format) the item was updated on Plex.'},
@@ -459,7 +456,9 @@ NOTIFICATION_PARAMETERS = [
{'name': 'TMDB URL', 'type': 'str', 'value': 'themoviedb_url', 'description': 'The TMDb URL for the movie or TV show.'},
{'name': 'TVmaze ID', 'type': 'int', 'value': 'tvmaze_id', 'description': 'The TVmaze ID for the TV show.', 'example': 'e.g. 290'},
{'name': 'TVmaze URL', 'type': 'str', 'value': 'tvmaze_url', 'description': 'The TVmaze URL for the TV show.'},
{'name': 'Last.fm URL', 'type': 'str', 'value': 'lastfm_url', 'description': 'The Last.fm URL for the album.'},
{'name': 'MusicBrainz ID', 'type': 'str', 'value': 'musicbrainz_id', 'description': 'The MusicBrainz ID for the artist, album, or track.', 'example': 'e.g. b670dfcf-9824-4309-a57e-03595aaba286'},
{'name': 'MusicBrainz URL', 'type': 'str', 'value': 'musicbrainz_url', 'description': 'The MusicBrainz URL for the artist, album, or track.'},
{'name': 'Last.fm URL', 'type': 'str', 'value': 'lastfm_url', 'description': 'The Last.fm URL for the album.', 'help_text': 'Music library agent must be Last.fm'},
{'name': 'Trakt.tv URL', 'type': 'str', 'value': 'trakt_url', 'description': 'The trakt.tv URL for the movie or TV show.'},
{'name': 'Container', 'type': 'str', 'value': 'container', 'description': 'The media container of the original media.'},
{'name': 'Bitrate', 'type': 'int', 'value': 'bitrate', 'description': 'The bitrate of the original media.'},
@@ -469,8 +468,10 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Video Bitrate', 'type': 'int', 'value': 'video_bitrate', 'description': 'The video bitrate of the original media.'},
{'name': 'Video Bit Depth', 'type': 'int', 'value': 'video_bit_depth', 'description': 'The video bit depth of the original media.'},
{'name': 'Video Framerate', 'type': 'str', 'value': 'video_framerate', 'description': 'The video framerate of the original media.'},
{'name': 'Video Full Resolution', 'type': 'str', 'value': 'video_full_resolution', 'description': 'The video resolution of the original media with scan type.'},
{'name': 'Video Ref Frames', 'type': 'int', 'value': 'video_ref_frames', 'description': 'The video reference frames of the original media.'},
{'name': 'Video Resolution', 'type': 'str', 'value': 'video_resolution', 'description': 'The video resolution of the original media.'},
{'name': 'Video Scan Type', 'type': 'str', 'value': 'video_scan_type', 'description': 'The video scan type of the original media.'},
{'name': 'Video Height', 'type': 'int', 'value': 'video_height', 'description': 'The video height of the original media.'},
{'name': 'Video Width', 'type': 'int', 'value': 'video_width', 'description': 'The video width of the original media.'},
{'name': 'Video Language', 'type': 'str', 'value': 'video_language', 'description': 'The video language of the original media.'},

View File

@@ -302,6 +302,7 @@ _CONFIG_DEFINITIONS = {
'MUSIC_NOTIFY_ON_STOP': (int, 'Monitoring', 0),
'MUSIC_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
'MUSIC_WATCHED_PERCENT': (int, 'Monitoring', 85),
'MUSICBRAINZ_LOOKUP': (int, 'General', 0),
'MONITOR_PMS_UPDATES': (int, 'Monitoring', 0),
'MONITOR_REMOTE_ACCESS': (int, 'Monitoring', 0),
'MONITORING_INTERVAL': (int, 'Monitoring', 60),
@@ -624,6 +625,7 @@ _CONFIG_DEFINITIONS = {
'XBMC_ON_CONCURRENT': (int, 'XBMC', 0),
'XBMC_ON_NEWDEVICE': (int, 'XBMC', 0),
'JWT_SECRET': (str, 'Advanced', ''),
'JWT_UPDATE_SECRET': (bool_int, 'Advanced', 0),
'SYSTEM_ANALYTICS': (int, 'Advanced', 1),
'WIN_SYS_TRAY': (int, 'General', 1)
}
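Each _CONFIG_DEFINITIONS entry above is a (type, ini-section, default) tuple; a hedged reading of the two new keys (names taken from the diff, interpretation assumed):
# 'MUSICBRAINZ_LOOKUP': (int, 'General', 0)       -> integer setting, [General] section, off by default
# 'JWT_UPDATE_SECRET':  (bool_int, 'Advanced', 0) -> boolean-as-int flag, [Advanced] section, off by default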

View File

@@ -76,6 +76,7 @@ class DataFactory(object):
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name',
'platform',
'product',
'player',
'ip_address',
'session_history.media_type',
@@ -123,6 +124,7 @@ class DataFactory(object):
'(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = "" \
THEN user ELSE friendly_name END) AS friendly_name',
'platform',
'product',
'player',
'ip_address',
'media_type',
@@ -225,6 +227,7 @@ class DataFactory(object):
'user': item['user'],
'friendly_name': item['friendly_name'],
'platform': platform,
'product': item['product'],
'player': item['player'],
'ip_address': item['ip_address'],
'media_type': item['media_type'],
@@ -294,7 +297,7 @@ class DataFactory(object):
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "movie" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.full_title ' \
'GROUP BY t.full_title, t.year ' \
'ORDER BY %s DESC, started DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
@@ -345,7 +348,7 @@ class DataFactory(object):
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "movie" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.full_title ' \
'GROUP BY t.full_title, t.year ' \
'ORDER BY users_watched DESC, %s DESC, started DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
@@ -872,12 +875,12 @@ class DataFactory(object):
user_cond = 'AND %s.user_id = %s ' % (table, session.get_session_user_id())
if row_id:
query = 'SELECT bitrate, video_resolution, ' \
query = 'SELECT bitrate, video_full_resolution, ' \
'optimized_version, optimized_version_profile, optimized_version_title, ' \
'synced_version, synced_version_profile, ' \
'container, video_codec, video_bitrate, video_width, video_height, video_framerate, aspect_ratio, ' \
'audio_codec, audio_bitrate, audio_channels, subtitle_codec, ' \
'stream_bitrate, stream_video_resolution, quality_profile, stream_container_decision, stream_container, ' \
'stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, ' \
'stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, ' \
'stream_video_framerate, ' \
'stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, ' \
@@ -893,12 +896,12 @@ class DataFactory(object):
'WHERE session_history_media_info.id = ? %s' % user_cond
result = monitor_db.select(query, args=[row_id])
elif session_key:
query = 'SELECT bitrate, video_resolution, ' \
query = 'SELECT bitrate, video_full_resolution, ' \
'optimized_version, optimized_version_profile, optimized_version_title, ' \
'synced_version, synced_version_profile, ' \
'container, video_codec, video_bitrate, video_width, video_height, video_framerate, aspect_ratio, ' \
'audio_codec, audio_bitrate, audio_channels, subtitle_codec, ' \
'stream_bitrate, stream_video_resolution, quality_profile, stream_container_decision, stream_container, ' \
'stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, ' \
'stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, ' \
'stream_video_framerate, ' \
'stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, ' \
@@ -921,7 +924,7 @@ class DataFactory(object):
# For backwards compatibility. Pick one new Tautulli key to check and override with old values.
if not item['stream_container']:
item['stream_video_resolution'] = item['video_resolution']
item['stream_video_full_resolution'] = item['video_full_resolution']
item['stream_container'] = item['transcode_container'] or item['container']
item['stream_video_decision'] = item['video_decision']
item['stream_video_codec'] = item['transcode_video_codec'] or item['video_codec']
@@ -935,7 +938,7 @@ class DataFactory(object):
pre_tautulli = 1
stream_output = {'bitrate': item['bitrate'],
'video_resolution': item['video_resolution'],
'video_full_resolution': item['video_full_resolution'],
'optimized_version': item['optimized_version'],
'optimized_version_profile': item['optimized_version_profile'],
'optimized_version_title': item['optimized_version_title'],
@@ -953,7 +956,7 @@ class DataFactory(object):
'audio_channels': item['audio_channels'],
'subtitle_codec': item['subtitle_codec'],
'stream_bitrate': item['stream_bitrate'],
'stream_video_resolution': item['stream_video_resolution'],
'stream_video_full_resolution': item['stream_video_full_resolution'],
'quality_profile': item['quality_profile'],
'stream_container_decision': item['stream_container_decision'],
'stream_container': item['stream_container'],
@@ -1313,15 +1316,16 @@ class DataFactory(object):
if str(rating_key).isdigit():
lookup_key = rating_key
elif metadata:
if metadata['media_type'] in ('movie', 'show', 'artist'):
if metadata['media_type'] in ('movie', 'show', 'artist', 'album', 'track'):
lookup_key = metadata['rating_key']
elif metadata['media_type'] in ('season', 'album'):
elif metadata['media_type'] == 'season':
lookup_key = metadata['parent_rating_key']
elif metadata['media_type'] in ('episode', 'track'):
elif metadata['media_type'] == 'episode':
lookup_key = metadata['grandparent_rating_key']
lookup_info = {'tvmaze_id': '',
'themoviedb_id': ''}
'themoviedb_id': '',
'musicbrainz_id': ''}
if lookup_key:
try:
@@ -1336,6 +1340,13 @@ class DataFactory(object):
themoviedb_info = monitor_db.select_single(query, args=[lookup_key])
if themoviedb_info:
lookup_info['themoviedb_id'] = themoviedb_info['themoviedb_id']
query = 'SELECT musicbrainz_id FROM musicbrainz_lookup ' \
'WHERE rating_key = ?'
musicbrainz_info = monitor_db.select_single(query, args=[lookup_key])
if musicbrainz_info:
lookup_info['musicbrainz_id'] = musicbrainz_info['musicbrainz_id']
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_lookup_info: %s." % e)
@@ -1349,7 +1360,8 @@ class DataFactory(object):
% (title, rating_key))
result_tvmaze = monitor_db.action('DELETE FROM tvmaze_lookup WHERE rating_key = ?', [rating_key])
result_themoviedb = monitor_db.action('DELETE FROM themoviedb_lookup WHERE rating_key = ?', [rating_key])
return True if (result_tvmaze or result_themoviedb) else False
result_musicbrainz = monitor_db.action('DELETE FROM musicbrainz_lookup WHERE rating_key = ?', [rating_key])
return True if (result_tvmaze or result_themoviedb or result_musicbrainz) else False
def get_search_query(self, rating_key=''):
monitor_db = database.MonitorDatabase()

View File

@@ -698,7 +698,7 @@ class Graphs(object):
try:
if y_axis == 'plays':
query = 'SELECT UPPER(session_history_media_info.video_resolution) AS resolution, ' \
query = 'SELECT session_history_media_info.video_full_resolution AS resolution, ' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
'THEN 1 ELSE 0 END) AS dp_count, ' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "copy" ' \
@@ -717,7 +717,7 @@ class Graphs(object):
result = monitor_db.select(query)
else:
query = 'SELECT UPPER(session_history_media_info.video_resolution) AS resolution,' \
query = 'SELECT session_history_media_info.video_full_resolution AS resolution,' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \
' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \
@@ -749,10 +749,6 @@ class Graphs(object):
series_3 = []
for item in result:
if item['resolution'] not in ('4k', 'unknown'):
item['resolution'] = item['resolution'].upper()
if item['resolution'].isdigit():
item['resolution'] += 'p'
categories.append(item['resolution'])
series_1.append(item['dp_count'])
series_2.append(item['ds_count'])
@@ -789,7 +785,7 @@ class Graphs(object):
try:
if y_axis == 'plays':
query = 'SELECT ' \
'(CASE WHEN session_history_media_info.stream_video_resolution IS NULL THEN ' \
'(CASE WHEN session_history_media_info.stream_video_full_resolution IS NULL THEN ' \
'(CASE WHEN session_history_media_info.video_decision = "transcode" THEN ' \
'(CASE ' \
'WHEN session_history_media_info.transcode_height <= 360 THEN "SD" ' \
@@ -799,8 +795,8 @@ class Graphs(object):
'WHEN session_history_media_info.transcode_height <= 1080 THEN "1080" ' \
'WHEN session_history_media_info.transcode_height <= 1440 THEN "QHD" ' \
'WHEN session_history_media_info.transcode_height <= 2160 THEN "4k" ' \
'ELSE "unknown" END) ELSE UPPER(session_history_media_info.video_resolution) END) ' \
'ELSE UPPER(session_history_media_info.stream_video_resolution) END) AS resolution, ' \
'ELSE "unknown" END) ELSE session_history_media_info.video_full_resolution END) ' \
'ELSE session_history_media_info.stream_video_full_resolution END) AS resolution, ' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
'THEN 1 ELSE 0 END) AS dp_count, ' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "copy" ' \
@@ -820,7 +816,7 @@ class Graphs(object):
result = monitor_db.select(query)
else:
query = 'SELECT ' \
'(CASE WHEN session_history_media_info.stream_video_resolution IS NULL THEN ' \
'(CASE WHEN session_history_media_info.stream_video_full_resolution IS NULL THEN ' \
'(CASE WHEN session_history_media_info.video_decision = "transcode" THEN ' \
'(CASE ' \
'WHEN session_history_media_info.transcode_height <= 360 THEN "SD" ' \
@@ -830,8 +826,8 @@ class Graphs(object):
'WHEN session_history_media_info.transcode_height <= 1080 THEN "1080" ' \
'WHEN session_history_media_info.transcode_height <= 1440 THEN "QHD" ' \
'WHEN session_history_media_info.transcode_height <= 2160 THEN "4k" ' \
'ELSE "unknown" END) ELSE UPPER(session_history_media_info.video_resolution) END) ' \
'ELSE UPPER(session_history_media_info.stream_video_resolution) END) AS resolution, ' \
'ELSE "unknown" END) ELSE session_history_media_info.video_full_resolution END) ' \
'ELSE session_history_media_info.stream_video_full_resolution END) AS resolution, ' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \
' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \
@@ -863,10 +859,6 @@ class Graphs(object):
series_3 = []
for item in result:
if item['resolution'] not in ('4k', 'unknown'):
item['resolution'] = item['resolution'].upper()
if item['resolution'].isdigit():
item['resolution'] += 'p'
categories.append(item['resolution'])
series_1.append(item['dp_count'])
series_2.append(item['ds_count'])

View File

@@ -1130,7 +1130,7 @@ def get_plexpy_url(hostname=None):
else:
port = ''
if plexpy.HTTP_ROOT.strip('/'):
if plexpy.HTTP_ROOT is not None and plexpy.HTTP_ROOT.strip('/'):
root = '/' + plexpy.HTTP_ROOT.strip('/')
else:
root = ''
@@ -1178,3 +1178,18 @@ def split_args(args=None):
return [arg.decode(plexpy.SYS_ENCODING, 'ignore')
for arg in shlex.split(args.encode(plexpy.SYS_ENCODING, 'ignore'))]
return []
def mask_config_passwords(config):
if isinstance(config, list):
for cfg in config:
if 'password' in cfg.get('name', '') and cfg.get('value', '') != '':
cfg['value'] = ' '
elif isinstance(config, dict):
for cfg, val in config.iteritems():
# Check for password config keys and make sure the password is not blank
if 'password' in cfg and val != '':
# Set the password to blank so it is not exposed in the HTML form
config[cfg] = ' '
return config
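A short, hypothetical example of the helper above; the option names and values are made up:
# Hypothetical input: a list of config options as rendered in the settings form.
config_options = [{'name': 'email_smtp_password', 'value': 'hunter2'},
                  {'name': 'email_smtp_server', 'value': 'smtp.example.com'}]
mask_config_passwords(config_options)
# The password entry's value is now ' ' (a single space); the server entry is unchanged.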

View File

@@ -239,7 +239,7 @@ class Libraries(object):
def __init__(self):
pass
def get_datatables_list(self, kwargs=None):
def get_datatables_list(self, kwargs=None, grouping=None):
default_return = {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
@@ -250,9 +250,14 @@ class Libraries(object):
custom_where = [['library_sections.deleted_section', 0]]
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
if session.get_session_shared_libraries():
custom_where.append(['library_sections.section_id', session.get_session_shared_libraries()])
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
columns = ['library_sections.section_id',
'library_sections.section_name',
'library_sections.section_type',
@@ -262,7 +267,7 @@ class Libraries(object):
'library_sections.thumb AS library_thumb',
'library_sections.custom_thumb_url AS custom_thumb',
'library_sections.art',
'COUNT(session_history.id) AS plays',
'COUNT(DISTINCT %s) AS plays' % group_by,
'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
session_history.paused_counter END) AS duration',
@@ -533,7 +538,7 @@ class Libraries(object):
# Search results
search_value = json_data['search']['value'].lower()
if search_value:
searchable_columns = [d['data'] for d in json_data['columns'] if d['searchable']]
searchable_columns = [d['data'] for d in json_data['columns'] if d['searchable']] + ['title']
for row in rows:
for k,v in row.iteritems():
if k in searchable_columns and search_value in v.lower():

View File

@@ -61,13 +61,11 @@ def schedule_newsletter_job(newsletter_job_id, name='', func=None, remove_job=Fa
logger.info(u"Tautulli NewsletterHandler :: Removed scheduled newsletter: %s" % name)
else:
NEWSLETTER_SCHED.reschedule_job(
newsletter_job_id, args=args, trigger=CronTrigger().from_crontab(
cron, timezone=plexpy.SYS_TIMEZONE))
newsletter_job_id, args=args, trigger=CronTrigger.from_crontab(cron))
logger.info(u"Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
elif not remove_job:
NEWSLETTER_SCHED.add_job(
func, args=args, id=newsletter_job_id, trigger=CronTrigger().from_crontab(
cron, timezone=plexpy.SYS_TIMEZONE))
func, args=args, id=newsletter_job_id, trigger=CronTrigger.from_crontab(cron))
logger.info(u"Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)

View File

@@ -125,7 +125,7 @@ def delete_newsletter(newsletter_id=None):
return False
def get_newsletter_config(newsletter_id=None):
def get_newsletter_config(newsletter_id=None, mask_passwords=False):
if str(newsletter_id).isdigit():
newsletter_id = int(newsletter_id)
else:
@@ -153,13 +153,16 @@ def get_newsletter_config(newsletter_id=None):
logger.error(u"Tautulli Newsletters :: Failed to get newsletter config options: %s." % e)
return
if mask_passwords:
newsletter_agent.email_config = helpers.mask_config_passwords(newsletter_agent.email_config)
result['subject'] = newsletter_agent.subject
result['body'] = newsletter_agent.body
result['message'] = newsletter_agent.message
result['config'] = newsletter_agent.config
result['email_config'] = newsletter_agent.email_config
result['config_options'] = newsletter_agent.return_config_options()
result['email_config_options'] = newsletter_agent.return_email_config_options()
result['config_options'] = newsletter_agent.return_config_options(mask_passwords=mask_passwords)
result['email_config_options'] = newsletter_agent.return_email_config_options(mask_passwords=mask_passwords)
return result
@@ -230,6 +233,13 @@ def set_newsletter_config(newsletter_id=None, agent_id=None, **kwargs):
email_config = {k[len(email_config_prefix):]: kwargs.pop(k)
for k in kwargs.keys() if k.startswith(email_config_prefix)}
for cfg, val in email_config.iteritems():
# Check for password config keys and a blank password from the HTML form
if 'password' in cfg and val == ' ':
# Get the previous password so we don't overwrite it with a blank value
old_newsletter_config = get_newsletter_config(newsletter_id=newsletter_id)
email_config[cfg] = old_newsletter_config['email_config'][cfg]
subject = kwargs.pop('subject')
body = kwargs.pop('body')
message = kwargs.pop('message')
@@ -647,16 +657,21 @@ class Newsletter(object):
return filename
def return_config_options(self):
return self._return_config_options()
def return_config_options(self, mask_passwords=False):
config_options = self._return_config_options()
def _return_config_options(self):
config_options = []
# Mask password config options
if mask_passwords:
helpers.mask_config_passwords(config_options)
return config_options
def return_email_config_options(self):
config_options = EMAIL(self.email_config).return_config_options()
def _return_config_options(self):
config_options = []
return config_options
def return_email_config_options(self, mask_passwords=False):
config_options = EMAIL(self.email_config).return_config_options(mask_passwords=mask_passwords)
for c in config_options:
c['name'] = 'newsletter_' + c['name']
return config_options
@@ -926,10 +941,8 @@ class RecentlyAdded(Newsletter):
return parameters
def return_config_options(self):
config_options = self._return_config_options()
additional_config = [
def _return_config_options(self):
config_options = [
{'label': 'Included Libraries',
'value': self.config['incl_libraries'],
'description': 'Select the libraries to include in the newsletter.',
@@ -939,4 +952,4 @@ class RecentlyAdded(Newsletter):
}
]
return additional_config + config_options
return config_options

View File

@@ -27,6 +27,8 @@ from string import Formatter
import threading
import time
import musicbrainzngs
import plexpy
import activity_processor
import common
@@ -230,7 +232,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
evaluated_conditions = [None] # Set condition {0} to None
for condition in custom_conditions:
for i, condition in enumerate(custom_conditions):
parameter = condition['parameter']
operator = condition['operator']
values = condition['value']
@@ -239,7 +241,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
# Set blank conditions to True (skip)
if not parameter or not operator or not values:
evaluated_conditions.append(True)
evaluated = True
evaluated_conditions.append(evaluated)
logger.debug(u"Tautulli NotificationHandler :: {%s} Blank condition > %s" % (i+1, evaluated))
continue
# Make sure the condition values is in a list
@@ -258,8 +262,8 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
values = [helpers.cast_to_float(v) for v in values]
except ValueError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to cast condition '%s', values '%s', to type '%s'."
% (parameter, values, parameter_type))
logger.error(u"Tautulli NotificationHandler :: {%s} Unable to cast condition '%s', values '%s', to type '%s'."
% (i+1, parameter, values, parameter_type))
return False
# Cast the parameter value to the correct type
@@ -274,50 +278,59 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
parameter_value = helpers.cast_to_float(parameter_value)
except ValueError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to cast parameter '%s', value '%s', to type '%s'."
% (parameter, parameter_value, parameter_type))
logger.error(u"Tautulli NotificationHandler :: {%s} Unable to cast parameter '%s', value '%s', to type '%s'."
% (i+1, parameter, parameter_value, parameter_type))
return False
# Check each condition
if operator == 'contains':
evaluated_conditions.append(any(c in parameter_value for c in values))
evaluated = any(c in parameter_value for c in values)
elif operator == 'does not contain':
evaluated_conditions.append(all(c not in parameter_value for c in values))
evaluated = all(c not in parameter_value for c in values)
elif operator == 'is':
evaluated_conditions.append(any(parameter_value == c for c in values))
evaluated = any(parameter_value == c for c in values)
elif operator == 'is not':
evaluated_conditions.append(all(parameter_value != c for c in values))
evaluated = all(parameter_value != c for c in values)
elif operator == 'begins with':
evaluated_conditions.append(parameter_value.startswith(tuple(values)))
evaluated = parameter_value.startswith(tuple(values))
elif operator == 'ends with':
evaluated_conditions.append(parameter_value.endswith(tuple(values)))
evaluated = parameter_value.endswith(tuple(values))
elif operator == 'is greater than':
evaluated_conditions.append(any(parameter_value > c for c in values))
evaluated = any(parameter_value > c for c in values)
elif operator == 'is less than':
evaluated_conditions.append(any(parameter_value < c for c in values))
evaluated = any(parameter_value < c for c in values)
else:
logger.warn(u"Tautulli NotificationHandler :: Invalid condition operator '%s'." % operator)
evaluated_conditions.append(None)
evaluated = None
logger.warn(u"Tautulli NotificationHandler :: {%s} Invalid condition operator '%s' > %s."
% (i+1, operator, evaluated))
evaluated_conditions.append(evaluated)
logger.debug(u"Tautulli NotificationHandler :: {%s} %s | %s | %s > '%s' > %s"
% (i+1, parameter, operator, ' or '.join(["'%s'" % v for v in values]), parameter_value, evaluated))
if logic_groups:
# Format and evaluate the logic string
try:
evaluated_logic = helpers.eval_logic_groups_to_bool(logic_groups, evaluated_conditions)
logger.debug(u"Tautulli NotificationHandler :: Condition logic: %s > %s"
% (custom_conditions_logic, evaluated_logic))
except Exception as e:
logger.error(u"Tautulli NotificationHandler :: Unable to evaluate custom condition logic: %s." % e)
return False
else:
evaluated_logic = all(evaluated_conditions[1:])
logger.debug(u"Tautulli NotificationHandler :: Condition logic [blank]: %s > %s"
% (' and '.join(['{%s}' % (i+1) for i in range(len(custom_conditions))]), evaluated_logic))
logger.debug(u"Tautulli NotificationHandler :: Custom condition evaluated to '{}'. Conditions: {}.".format(
logger.debug(u"Tautulli NotificationHandler :: Custom conditions evaluated to '{}'. Conditions: {}.".format(
evaluated_logic, evaluated_conditions[1:]))
return evaluated_logic
@@ -575,7 +588,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
notify_params['trakt_url'] = 'https://trakt.tv/search/tmdb/' + notify_params['themoviedb_id'] + '?id_type=show'
if 'lastfm://' in notify_params['guid']:
notify_params['lastfm_id'] = notify_params['guid'].split('lastfm://')[1].rsplit('/', 1)[0]
notify_params['lastfm_id'] = '/'.join(notify_params['guid'].split('lastfm://')[1].split('?')[0].split('/')[:2])
notify_params['lastfm_url'] = 'https://www.last.fm/music/' + notify_params['lastfm_id']
# Get TheMovieDB info
@@ -622,6 +635,30 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
if tvmaze_info.get('imdb_id'):
notify_params['imdb_url'] = 'https://www.imdb.com/title/' + tvmaze_info['imdb_id']
# Get MusicBrainz info (for music only)
if plexpy.CONFIG.MUSICBRAINZ_LOOKUP and notify_params['media_type'] in ('artist', 'album', 'track'):
artist = release = recording = tracks = tnum = None
if notify_params['media_type'] == 'artist':
musicbrainz_type = 'artist'
artist = notify_params['title']
elif notify_params['media_type'] == 'album':
musicbrainz_type = 'release'
artist = notify_params['parent_title']
release = notify_params['title']
tracks = notify_params['children_count']
else:
musicbrainz_type = 'recording'
artist = notify_params['original_title']
release = notify_params['parent_title']
recording = notify_params['title']
tracks = notify_params['children_count']
tnum = notify_params['media_index']
musicbrainz_info = lookup_musicbrainz_info(musicbrainz_type=musicbrainz_type, rating_key=rating_key,
artist=artist, release=release, recording=recording, tracks=tracks,
tnum=tnum)
notify_params.update(musicbrainz_info)
if notify_params['media_type'] in ('movie', 'show', 'artist'):
poster_thumb = notify_params['thumb']
poster_key = notify_params['rating_key']
@@ -784,8 +821,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'stream_video_bitrate': notify_params['stream_video_bitrate'],
'stream_video_bit_depth': notify_params['stream_video_bit_depth'],
'stream_video_framerate': notify_params['stream_video_framerate'],
'stream_video_full_resolution': notify_params['stream_video_full_resolution'],
'stream_video_ref_frames': notify_params['stream_video_ref_frames'],
'stream_video_resolution': notify_params['stream_video_resolution'],
'stream_video_scan_type': notify_params['stream_video_scan_type'],
'stream_video_height': notify_params['stream_video_height'],
'stream_video_width': notify_params['stream_video_width'],
'stream_video_language': notify_params['stream_video_language'],
@@ -880,6 +919,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'themoviedb_url': notify_params['themoviedb_url'],
'tvmaze_id': notify_params['tvmaze_id'],
'tvmaze_url': notify_params['tvmaze_url'],
'musicbrainz_id': notify_params['musicbrainz_id'],
'musicbrainz_url': notify_params['musicbrainz_url'],
'lastfm_url': notify_params['lastfm_url'],
'trakt_url': notify_params['trakt_url'],
'container': notify_params['container'],
@@ -890,8 +931,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'video_bitrate': notify_params['video_bitrate'],
'video_bit_depth': notify_params['video_bit_depth'],
'video_framerate': notify_params['video_framerate'],
'video_full_resolution': notify_params['video_full_resolution'],
'video_ref_frames': notify_params['video_ref_frames'],
'video_resolution': notify_params['video_resolution'],
'video_scan_type': notify_params['video_scan_type'],
'video_height': notify_params['height'],
'video_width': notify_params['width'],
'video_language': notify_params['video_language'],
@@ -1101,6 +1144,8 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
def strip_tag(data, agent_id=None):
# Substitute temporary tokens for < and > in parameter prefix and suffix
data = re.sub(r'{.+?}', lambda m: m.group().replace('<', '%temp_lt_token%').replace('>', '%temp_gt_token%'), data)
if agent_id == 7:
# Allow tags b, i, u, a[href], font[color] for Pushover
@@ -1109,11 +1154,11 @@ def strip_tag(data, agent_id=None):
'u': [],
'a': ['href'],
'font': ['color']}
return bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
data = bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
elif agent_id in (10, 14, 20):
# Don't remove tags for Email, Slack, and Discord
return data
pass
elif agent_id == 13:
# Allow tags b, i, code, pre, a[href] for Telegram
@@ -1122,11 +1167,14 @@ def strip_tag(data, agent_id=None):
'code': [],
'pre': [],
'a': ['href']}
return bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
data = bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
else:
whitelist = {}
return bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
data = bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
# Resubstitute temporary tokens for < and > in parameter prefix and suffix
return data.replace('%temp_lt_token%', '<').replace('%temp_gt_token%', '>')
def format_group_index(group_keys):
@@ -1332,6 +1380,7 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
'tvmaze_json': json.dumps(tvmaze_json)}
db.upsert(table_name='tvmaze_lookup', key_dict=keys, value_dict=tvmaze_info)
tvmaze_info.update(keys)
tvmaze_info.pop('tvmaze_json')
else:
@@ -1394,6 +1443,7 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
db.upsert(table_name='themoviedb_lookup', key_dict=keys, value_dict=themoviedb_info)
themoviedb_info.update(keys)
themoviedb_info.pop('themoviedb_json')
else:
@@ -1447,6 +1497,8 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None):
db.upsert(table_name='themoviedb_lookup', key_dict=keys, value_dict=themoviedb_info)
themoviedb_info.update(keys)
else:
if err_msg:
logger.error(u"Tautulli NotificationHandler :: {}".format(err_msg))
@@ -1457,6 +1509,69 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None):
return themoviedb_json
def lookup_musicbrainz_info(musicbrainz_type=None, rating_key=None, artist=None, release=None, recording=None,
tracks=None, tnum=None):
db = database.MonitorDatabase()
try:
query = 'SELECT musicbrainz_id, musicbrainz_url, musicbrainz_type FROM musicbrainz_lookup ' \
'WHERE rating_key = ?'
musicbrainz_info = db.select_single(query, args=[rating_key])
except Exception as e:
logger.warn(u"Tautulli NotificationHandler :: Unable to execute database query for lookup_musicbrainz: %s." % e)
return {}
if not musicbrainz_info:
musicbrainzngs.set_useragent(
common.PRODUCT,
common.RELEASE,
"https://tautulli.com",
)
if musicbrainz_type == 'artist':
logger.debug(u"Tautulli NotificationHandler :: Looking up MusicBrainz info for "
u"{} '{}'.".format(musicbrainz_type, artist))
result = musicbrainzngs.search_artists(artist=artist, strict=True, limit=1)
if result['artist-list']:
musicbrainz_info = result['artist-list'][0]
elif musicbrainz_type == 'release':
logger.debug(u"Tautulli NotificationHandler :: Looking up MusicBrainz info for "
u"{} '{} - {}'.".format(musicbrainz_type, artist, release))
result = musicbrainzngs.search_releases(artist=artist, release=release, tracks=tracks,
strict=True, limit=1)
if result['release-list']:
musicbrainz_info = result['release-list'][0]
elif musicbrainz_type == 'recording':
logger.debug(u"Tautulli NotificationHandler :: Looking up MusicBrainz info for "
u"{} '{} - {} - {}'.".format(musicbrainz_type, artist, release, recording))
result = musicbrainzngs.search_recordings(artist=artist, release=release, recording=recording,
tracks=tracks, tnum=tnum,
strict=True, limit=1)
if result['recording-list']:
musicbrainz_info = result['recording-list'][0]
if musicbrainz_info:
musicbrainz_id = musicbrainz_info['id']
musicbrainz_url = 'https://musicbrainz.org/' + musicbrainz_type + '/' + musicbrainz_id
keys = {'musicbrainz_id': musicbrainz_id}
musicbrainz_info = {'rating_key': rating_key,
'musicbrainz_url': musicbrainz_url,
'musicbrainz_type': musicbrainz_type,
'musicbrainz_json': json.dumps(musicbrainz_info)}
db.upsert(table_name='musicbrainz_lookup', key_dict=keys, value_dict=musicbrainz_info)
musicbrainz_info.update(keys)
musicbrainz_info.pop('musicbrainz_json')
else:
logger.warning(u"Tautulli NotificationHandler :: No match found on MusicBrainz.")
return musicbrainz_info
class CustomFormatter(Formatter):
def __init__(self, default='{{{0}}}'):
self.default = default
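The Last.fm change earlier in this file swaps an rsplit for a split on '?' plus a two-segment join; a worked example with a made-up album guid (the guid format here is an assumption, not taken from the diff):
# Illustrative only: a fabricated Last.fm agent guid for an album.
guid = 'com.plexapp.agents.lastfm://Some+Artist/Some+Album?lang=en'
old_id = guid.split('lastfm://')[1].rsplit('/', 1)[0]                        # 'Some+Artist' (artist page)
new_id = '/'.join(guid.split('lastfm://')[1].split('?')[0].split('/')[:2])   # 'Some+Artist/Some+Album' (album page)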

View File

@@ -455,7 +455,7 @@ def delete_notifier(notifier_id=None):
return False
def get_notifier_config(notifier_id=None):
def get_notifier_config(notifier_id=None, mask_passwords=False):
if str(notifier_id).isdigit():
notifier_id = int(notifier_id)
else:
@@ -472,11 +472,13 @@ def get_notifier_config(notifier_id=None):
try:
config = json.loads(result.pop('notifier_config', '{}'))
notifier_agent = get_agent_class(agent_id=result['agent_id'], config=config)
notifier_config = notifier_agent.return_config_options()
except Exception as e:
logger.error(u"Tautulli Notifiers :: Failed to get notifier config options: %s." % e)
return
if mask_passwords:
notifier_agent.config = helpers.mask_config_passwords(notifier_agent.config)
notify_actions = get_notify_actions(return_dict=True)
notifier_actions = {}
@@ -503,8 +505,8 @@ def get_notifier_config(notifier_id=None):
if not result['custom_conditions_logic']:
result['custom_conditions_logic'] = ''
result['config'] = config
result['config_options'] = notifier_config
result['config'] = notifier_agent.config
result['config_options'] = notifier_agent.return_config_options(mask_passwords=mask_passwords)
result['actions'] = notifier_actions
result['notify_text'] = notifier_text
@@ -587,6 +589,13 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):
notifier_config = {k[len(config_prefix):]: kwargs.pop(k)
for k in kwargs.keys() if k.startswith(config_prefix)}
for cfg, val in notifier_config.iteritems():
# Check for password config keys and a blank password from the HTML form
if 'password' in cfg and val == ' ':
# Get the previous password so we don't overwrite it with a blank value
old_notifier_config = get_notifier_config(notifier_id=notifier_id)
notifier_config[cfg] = old_notifier_config['config'][cfg]
agent_class = get_agent_class(agent_id=agent['id'], config=notifier_config)
keys = {'id': notifier_id}
@@ -670,7 +679,8 @@ class PrettyMetadata(object):
def get_music_providers():
return {'': '',
'plexweb': 'Plex Web',
'lastfm': 'Last.fm'
'lastfm': 'Last.fm',
'musicbrainz': 'MusicBrainz'
}
def get_poster_url(self):
@@ -698,6 +708,8 @@ class PrettyMetadata(object):
provider_name = 'Trakt.tv'
elif provider == 'lastfm':
provider_name = 'Last.fm'
elif provider == 'musicbrainz':
provider_name = 'MusicBrainz'
# else:
# if self.media_type == 'movie':
# provider_name = 'IMDb'
@@ -835,7 +847,16 @@ class Notifier(object):
return False
def return_config_options(self):
def return_config_options(self, mask_passwords=False):
config_options = self._return_config_options()
# Mask password config options
if mask_passwords:
helpers.mask_config_passwords(config_options)
return config_options
def _return_config_options(self):
config_options = []
return config_options
@@ -942,7 +963,7 @@ class ANDROIDAPP(Notifier):
return devices
def return_config_options(self):
def _return_config_options(self):
config_option = []
if not CRYPTODOME:
@@ -1058,7 +1079,7 @@ class BOXCAR(Notifier):
return sounds
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Boxcar Access Token',
'value': self.config['token'],
'name': 'boxcar_token',
@@ -1089,7 +1110,7 @@ class BROWSER(Notifier):
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Allow Notifications',
'value': 'Allow Notifications',
'name': 'browser_allow_browser',
@@ -1180,7 +1201,7 @@ class DISCORD(Notifier):
attachment['image'] = {'url': poster_url}
if self.config['incl_description'] or pretty_metadata.media_type in ('artist', 'album', 'track'):
attachment['description'] = description
attachment['description'] = description[:2045] + (description[2045:] and '...')
fields = []
if provider_link:
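Discord caps embed descriptions at 2048 characters; the replacement line keeps the first 2045 characters and appends an ellipsis only when text was actually cut, so the result never exceeds the limit. A quick demonstration of the slicing idiom (clamp_description is an illustrative helper, not part of the codebase):

```python
def clamp_description(description, limit=2048):
    # description[limit - 3:] is an empty (falsy) string when nothing was cut,
    # so '...' is only appended to text that was actually truncated.
    return description[:limit - 3] + (description[limit - 3:] and '...')

short_text = 'A' * 100
long_text = 'B' * 5000
assert clamp_description(short_text) == short_text
assert len(clamp_description(long_text)) == 2048
assert clamp_description(long_text).endswith('...')
```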
@@ -1202,7 +1223,7 @@ class DISCORD(Notifier):
return self.make_request(self.config['hook'], params=params, headers=headers, json=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Discord Webhook URL',
'value': self.config['hook'],
'name': 'discord_hook',
@@ -1389,7 +1410,7 @@ class EMAIL(Notifier):
return user_emails_to, user_emails_cc, user_emails_bcc
def return_config_options(self):
def _return_config_options(self):
user_emails_to, user_emails_cc, user_emails_bcc = self.get_user_emails()
config_option = [{'label': 'From Name',
@@ -1569,7 +1590,7 @@ class FACEBOOK(Notifier):
return self._post_facebook(**data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'OAuth Redirect URI',
'value': self.config['redirect_uri'],
'name': 'facebook_redirect_uri',
@@ -1699,7 +1720,7 @@ class GROUPME(Notifier):
return self.make_request('https://api.groupme.com/v3/bots/post', json=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'GroupMe Access Token',
'value': self.config['access_token'],
'name': 'groupme_access_token',
@@ -1796,7 +1817,7 @@ class GROWL(Notifier):
logger.error(u"Tautulli Notifiers :: {name} notification failed: network error".format(name=self.NAME))
return False
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Growl Host',
'value': self.config['host'],
'name': 'growl_host',
@@ -1901,7 +1922,7 @@ class HIPCHAT(Notifier):
return self.make_request(self.config['hook'], headers=headers, json=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Hipchat Custom Integrations URL',
'value': self.config['hook'],
'name': 'hipchat_hook',
@@ -2012,7 +2033,7 @@ class IFTTT(Notifier):
return self.make_request('https://maker.ifttt.com/trigger/{}/with/key/{}'.format(event, self.config['key']),
headers=headers, json=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'IFTTT Webhook Key',
'value': self.config['key'],
'name': 'ifttt_key',
@@ -2131,7 +2152,7 @@ class JOIN(Notifier):
return devices
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Join API Key',
'value': self.config['api_key'],
'name': 'join_api_key',
@@ -2233,7 +2254,7 @@ class MQTT(Notifier):
return True
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Broker',
'value': self.config['broker'],
'name': 'mqtt_broker',
@@ -2335,7 +2356,7 @@ class NMA(Notifier):
logger.error(u"Tautulli Notifiers :: {name} notification failed.".format(name=self.NAME))
return False
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'NotifyMyAndroid API Key',
'value': self.config['api_key'],
'name': 'nma_api_key',
@@ -2437,7 +2458,7 @@ class OSX(Notifier):
logger.error(u"Tautulli Notifiers :: {name} failed: {e}".format(name=self.NAME, e=e))
return False
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Register Notify App',
'value': self.config['notify_app'],
'name': 'osx_notify_app',
@@ -2530,7 +2551,7 @@ class PLEX(Notifier):
return True
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Plex Home Theater Host Address',
'value': self.config['hosts'],
'name': 'plex_hosts',
@@ -2586,7 +2607,7 @@ class PROWL(Notifier):
return self.make_request('https://api.prowlapp.com/publicapi/add', headers=headers, data=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Prowl API Key',
'value': self.config['key'],
'name': 'prowl_key',
@@ -2622,7 +2643,7 @@ class PUSHALOT(Notifier):
return self.make_request('https://pushalot.com/api/sendmessage', headers=headers, data=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Pushalot API Key',
'value': self.config['api_key'],
'name': 'pushalot_api_key',
@@ -2722,7 +2743,7 @@ class PUSHBULLET(Notifier):
return devices
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Pushbullet Access Token',
'value': self.config['api_key'],
'name': 'pushbullet_api_key',
@@ -2888,7 +2909,7 @@ class PUSHOVER(Notifier):
# else:
# return {'': ''}
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Pushover API Token',
'value': self.config['api_token'],
'name': 'pushover_api_token',
@@ -3163,7 +3184,7 @@ class SCRIPTS(Notifier):
return True
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Supported File Types',
'description': '<span class="inline-pre">' + \
', '.join(self.script_exts.keys()) + '</span>',
@@ -3286,7 +3307,7 @@ class SLACK(Notifier):
return self.make_request(self.config['hook'], headers=headers, json=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Slack Webhook URL',
'value': self.config['hook'],
'name': 'slack_hook',
@@ -3439,7 +3460,7 @@ class TELEGRAM(Notifier):
return self.make_request('https://api.telegram.org/bot{}/sendMessage'.format(self.config['bot_token']),
headers=headers, data=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Telegram Bot Token',
'value': self.config['bot_token'],
'name': 'telegram_bot_token',
@@ -3537,7 +3558,7 @@ class TWITTER(Notifier):
else:
return self._send_tweet(body, attachment=poster_url)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Twitter Consumer Key',
'value': self.config['consumer_key'],
'name': 'twitter_consumer_key',
@@ -3606,7 +3627,7 @@ class WEBHOOK(Notifier):
return self.make_request(self.config['hook'], method=self.config['method'], headers=headers, json=webhook_data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Webhook URL',
'value': self.config['hook'],
'name': 'webhook_hook',
@@ -3703,7 +3724,7 @@ class XBMC(Notifier):
return True
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Kodi Host Address',
'value': self.config['hosts'],
'name': 'xbmc_hosts',
@@ -3796,7 +3817,7 @@ class ZAPIER(Notifier):
return self.make_request(self.config['hook'], headers=headers, json=data)
def return_config_options(self):
def _return_config_options(self):
config_option = [{'label': 'Zapier Webhook URL',
'value': self.config['hook'],
'name': 'zapier_hook',

View File

@@ -710,6 +710,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -760,6 +762,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -807,6 +811,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': show_details['directors'],
'writers': show_details['writers'],
'actors': show_details['actors'],
@@ -871,6 +877,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': show_details['actors'],
@@ -917,6 +925,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -964,6 +974,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -1014,6 +1026,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -1060,6 +1074,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -1107,6 +1123,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -1156,6 +1174,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'child_count': helpers.get_xml_attr(metadata_main, 'childCount'),
'directors': directors,
'writers': writers,
@@ -1202,6 +1222,8 @@ class PmsConnect(object):
'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'),
'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'),
'guid': helpers.get_xml_attr(metadata_main, 'guid'),
'parent_guid': helpers.get_xml_attr(metadata_main, 'parentGuid'),
'grandparent_guid': helpers.get_xml_attr(metadata_main, 'grandparentGuid'),
'directors': directors,
'writers': writers,
'actors': actors,
@@ -1220,6 +1242,7 @@ class PmsConnect(object):
medias = []
media_items = metadata_main.getElementsByTagName('Media')
for media in media_items:
video_full_resolution_scan_type = None
parts = []
part_items = media.getElementsByTagName('Part')
@@ -1229,6 +1252,10 @@ class PmsConnect(object):
stream_items = part.getElementsByTagName('Stream')
for stream in stream_items:
if helpers.get_xml_attr(stream, 'streamType') == '1':
video_scan_type = helpers.get_xml_attr(stream, 'scanType')
if video_full_resolution_scan_type is None:
video_full_resolution_scan_type = video_scan_type
streams.append({'id': helpers.get_xml_attr(stream, 'id'),
'type': helpers.get_xml_attr(stream, 'streamType'),
'video_codec': helpers.get_xml_attr(stream, 'codec'),
@@ -1242,6 +1269,7 @@ class PmsConnect(object):
'video_language': helpers.get_xml_attr(stream, 'language'),
'video_language_code': helpers.get_xml_attr(stream, 'languageCode'),
'video_profile': helpers.get_xml_attr(stream, 'profile'),
'video_scan_type': helpers.get_xml_attr(stream, 'scanType'),
'selected': int(helpers.get_xml_attr(stream, 'selected') == '1')
})
@@ -1281,6 +1309,13 @@ class PmsConnect(object):
'selected': int(helpers.get_xml_attr(part, 'selected') == '1')
})
video_resolution = helpers.get_xml_attr(media, 'videoResolution').lower()
video_full_resolution = ''
if video_full_resolution_scan_type is not None:
video_full_resolution = common.VIDEO_RESOLUTION_OVERRIDES.get(
video_resolution, video_resolution + (video_full_resolution_scan_type[:1] or 'p')
)
audio_channels = helpers.get_xml_attr(media, 'audioChannels')
medias.append({'id': helpers.get_xml_attr(media, 'id'),
@@ -1290,7 +1325,8 @@ class PmsConnect(object):
'width': helpers.get_xml_attr(media, 'width'),
'aspect_ratio': helpers.get_xml_attr(media, 'aspectRatio'),
'video_codec': helpers.get_xml_attr(media, 'videoCodec'),
'video_resolution': helpers.get_xml_attr(media, 'videoResolution'),
'video_resolution': video_resolution,
'video_full_resolution': video_full_resolution,
'video_framerate': helpers.get_xml_attr(media, 'videoFrameRate'),
'video_profile': helpers.get_xml_attr(media, 'videoProfile'),
'audio_codec': helpers.get_xml_attr(media, 'audioCodec'),
@@ -1301,6 +1337,8 @@ class PmsConnect(object):
'parts': parts
})
video_full_resolution = helpers.get_xml_attr(media, 'videoResolution').lower()
metadata['media_info'] = medias
if metadata:
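The added block derives video_full_resolution by appending the first letter of the video stream's scan type ('p' or 'i', defaulting to 'p') to the lowercased videoResolution, with common.VIDEO_RESOLUTION_OVERRIDES covering values such as 'sd' where a suffix makes no sense. A hedged sketch; the override table below is illustrative and may not match the real constant:

```python
VIDEO_RESOLUTION_OVERRIDES = {'sd': 'SD', '4k': '4k'}  # illustrative only

def full_resolution(video_resolution, scan_type):
    # Combine resolution and scan type, e.g. '1080' + 'p' -> '1080p'.
    video_resolution = video_resolution.lower()
    return VIDEO_RESOLUTION_OVERRIDES.get(
        video_resolution,
        video_resolution + (scan_type[:1] or 'p'))

assert full_resolution('1080', 'progressive') == '1080p'
assert full_resolution('480', 'interlaced') == '480i'
assert full_resolution('720', '') == '720p'  # unknown scan type falls back to progressive
assert full_resolution('4k', 'progressive') == '4k'
```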
@@ -1644,12 +1682,15 @@ class PmsConnect(object):
video_stream_info = audio_stream_info = subtitle_stream_info = None
for stream in stream_media_parts_info.getElementsByTagName('Stream'):
if helpers.get_xml_attr(stream, 'streamType') == '1':
if video_stream_info is None or helpers.get_xml_attr(stream, 'selected') == '1':
video_stream_info = stream
elif helpers.get_xml_attr(stream, 'streamType') == '2' and helpers.get_xml_attr(stream, 'selected') == '1':
elif helpers.get_xml_attr(stream, 'streamType') == '2':
if audio_stream_info is None or helpers.get_xml_attr(stream, 'selected') == '1':
audio_stream_info = stream
elif helpers.get_xml_attr(stream, 'streamType') == '3' and helpers.get_xml_attr(stream, 'selected') == '1':
elif helpers.get_xml_attr(stream, 'streamType') == '3':
if subtitle_stream_info is None or helpers.get_xml_attr(stream, 'selected') == '1':
subtitle_stream_info = stream
video_id = audio_id = subtitle_id = None
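The reworked stream loop above keeps the first stream of each type as a fallback and upgrades to the one marked selected if it appears, which is what fixes the blank audio card on direct play where no audio stream carries selected="1". A minimal sketch of that selection rule using plain dicts in place of XML nodes (pick_stream is illustrative only):

```python
def pick_stream(streams, stream_type):
    # Keep the first stream of the wanted type as a fallback, but replace it
    # as soon as a stream flagged selected shows up.
    chosen = None
    for stream in streams:
        if stream.get('streamType') != stream_type:
            continue
        if chosen is None or stream.get('selected') == '1':
            chosen = stream
    return chosen

streams = [
    {'streamType': '2', 'id': 'a1', 'selected': '0'},
    {'streamType': '2', 'id': 'a2', 'selected': '0'},  # direct play: nothing flagged selected
]
assert pick_stream(streams, '2')['id'] == 'a1'
```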
@@ -1661,6 +1702,7 @@ class PmsConnect(object):
'stream_video_ref_frames': helpers.get_xml_attr(video_stream_info, 'refFrames'),
'stream_video_language': helpers.get_xml_attr(video_stream_info, 'language'),
'stream_video_language_code': helpers.get_xml_attr(video_stream_info, 'languageCode'),
'stream_video_scan_type': helpers.get_xml_attr(video_stream_info, 'scanType'),
'stream_video_decision': helpers.get_xml_attr(video_stream_info, 'decision') or 'direct play'
}
else:
@@ -1670,6 +1712,7 @@ class PmsConnect(object):
'stream_video_ref_frames': '',
'stream_video_language': '',
'stream_video_language_code': '',
'stream_video_scan_type': '',
'stream_video_decision': ''
}
@@ -1730,7 +1773,7 @@ class PmsConnect(object):
if helpers.cast_to_int(stream_video_width) >= 3840:
stream_video_resolution = '4k'
else:
stream_video_resolution = helpers.get_xml_attr(stream_media_info, 'videoResolution').rstrip('p')
stream_video_resolution = helpers.get_xml_attr(stream_media_info, 'videoResolution').rstrip('p').lower()
stream_audio_channels = helpers.get_xml_attr(stream_media_info, 'audioChannels')
@@ -1810,7 +1853,7 @@ class PmsConnect(object):
'height': helpers.get_xml_attr(stream_media_info, 'height'),
'width': helpers.get_xml_attr(stream_media_info, 'width'),
'video_codec': helpers.get_xml_attr(stream_media_info, 'videoCodec'),
'video_resolution': helpers.get_xml_attr(stream_media_info, 'videoResolution'),
'video_resolution': helpers.get_xml_attr(stream_media_info, 'videoResolution').lower(),
'audio_codec': helpers.get_xml_attr(stream_media_info, 'audioCodec'),
'audio_channels': audio_channels,
'audio_channel_layout': common.AUDIO_CHANNELS.get(audio_channels, audio_channels),
@@ -1849,6 +1892,7 @@ class PmsConnect(object):
'video_width': '',
'video_language': '',
'video_language_code': '',
'video_scan_type': '',
'video_profile': ''
}
source_audio_details = {'id': '',
@@ -1919,6 +1963,12 @@ class PmsConnect(object):
if transcode_details['transcode_video_codec'] == '*':
transcode_details['transcode_video_codec'] = source_video_details['video_codec']
if media_type in ('movie', 'episode', 'clip'):
# Set the full resolution by combining stream_video_resolution and stream_video_scan_type
stream_details['stream_video_full_resolution'] = common.VIDEO_RESOLUTION_OVERRIDES.get(
stream_details['stream_video_resolution'],
stream_details['stream_video_resolution'] + (video_details['stream_video_scan_type'][:1] or 'p'))
# Get the quality profile
if media_type in ('movie', 'episode', 'clip') and 'stream_bitrate' in stream_details:
if sync_id:
@@ -1945,8 +1995,7 @@ class PmsConnect(object):
if stream_details['optimized_version']:
source_bitrate = helpers.cast_to_int(source_media_details.get('bitrate'))
optimized_version_profile = '{} Mbps {}'.format(round(source_bitrate / 1000.0, 1),
plexpy.common.VIDEO_RESOLUTION_OVERRIDES.get(source_media_details['video_resolution'],
source_media_details['video_resolution']))
source_media_details['video_full_resolution'])
else:
optimized_version_profile = ''
@@ -2467,7 +2516,7 @@ class PmsConnect(object):
media_info = {'container': helpers.get_xml_attr(media, 'container'),
'bitrate': helpers.get_xml_attr(media, 'bitrate'),
'video_codec': helpers.get_xml_attr(media, 'videoCodec'),
'video_resolution': helpers.get_xml_attr(media, 'videoResolution'),
'video_resolution': helpers.get_xml_attr(media, 'videoResolution').lower(),
'video_framerate': helpers.get_xml_attr(media, 'videoFrameRate'),
'audio_codec': helpers.get_xml_attr(media, 'audioCodec'),
'audio_channels': helpers.get_xml_attr(media, 'audioChannels'),

View File

@@ -70,7 +70,7 @@ class Users(object):
def __init__(self):
pass
def get_datatables_list(self, kwargs=None):
def get_datatables_list(self, kwargs=None, grouping=None):
default_return = {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
@@ -81,18 +81,23 @@ class Users(object):
custom_where = [['users.deleted_user', 0]]
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
if session.get_session_user_id():
custom_where.append(['users.user_id', session.get_session_user_id()])
if kwargs.get('user_id'):
custom_where.append(['users.user_id', kwargs.get('user_id')])
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
columns = ['users.user_id',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name',
'users.thumb AS user_thumb',
'users.custom_avatar_url AS custom_thumb',
'COUNT(session_history.id) AS plays',
'COUNT(DISTINCT %s) AS plays' % group_by,
'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
session_history.paused_counter END) AS duration',
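With grouping enabled, plays are now counted as COUNT(DISTINCT session_history.reference_id) instead of COUNT(session_history.id), so a watch that was split across several history rows counts once. A small sqlite3 illustration of the difference, using a simplified schema:

```python
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE session_history '
            '(id INTEGER PRIMARY KEY, user_id INTEGER, reference_id INTEGER)')
# Three rows, but the first two share a reference_id (one resumed watch).
con.executemany('INSERT INTO session_history (user_id, reference_id) VALUES (?, ?)',
                [(1, 10), (1, 10), (1, 11)])

ungrouped = con.execute('SELECT COUNT(id) FROM session_history').fetchone()[0]
grouped = con.execute('SELECT COUNT(DISTINCT reference_id) FROM session_history').fetchone()[0]
assert (ungrouped, grouped) == (3, 2)
```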

View File

@@ -1,2 +1,2 @@
PLEXPY_BRANCH = "beta"
PLEXPY_RELEASE_VERSION = "v2.1.31-beta"
PLEXPY_BRANCH = "master"
PLEXPY_RELEASE_VERSION = "v2.1.37"

View File

@@ -353,7 +353,7 @@ class WebInterface(object):
@requireAuth()
@sanitize_out()
@addtoapi("get_libraries_table")
def get_library_list(self, **kwargs):
def get_library_list(self, grouping=None, **kwargs):
""" Get the data on the Tautulli libraries table.
```
@@ -361,6 +361,7 @@ class WebInterface(object):
None
Optional parameters:
grouping (int): 0 or 1
order_column (str): "library_thumb", "section_name", "section_type", "count", "parent_count",
"child_count", "last_accessed", "last_played", "plays", "duration"
order_dir (str): "desc" or "asc"
@@ -423,7 +424,7 @@ class WebInterface(object):
kwargs['json_data'] = build_datatables_json(kwargs, dt_columns, "section_name")
library_data = libraries.Libraries()
library_list = library_data.get_datatables_list(kwargs=kwargs)
library_list = library_data.get_datatables_list(kwargs=kwargs, grouping=grouping)
return library_list
@@ -1016,7 +1017,7 @@ class WebInterface(object):
@requireAuth()
@sanitize_out()
@addtoapi("get_users_table")
def get_user_list(self, **kwargs):
def get_user_list(self, grouping=None, **kwargs):
""" Get the data on Tautulli users table.
```
@@ -1024,6 +1025,7 @@ class WebInterface(object):
None
Optional parameters:
grouping (int): 0 or 1
order_column (str): "user_thumb", "friendly_name", "last_seen", "ip_address", "platform",
"player", "last_played", "plays", "duration"
order_dir (str): "desc" or "asc"
@@ -1082,7 +1084,7 @@ class WebInterface(object):
kwargs['json_data'] = build_datatables_json(kwargs, dt_columns, "friendly_name")
user_data = users.Users()
user_list = user_data.get_datatables_list(kwargs=kwargs)
user_list = user_data.get_datatables_list(kwargs=kwargs, grouping=grouping)
return user_list
@@ -1628,8 +1630,9 @@ class WebInterface(object):
"parent_title": "",
"paused_counter": 0,
"percent_complete": 84,
"platform": "Chrome",
"player": "Plex Web (Chrome)",
"platform": "Windows",
"product": "Plex for Windows",
"player": "Castle-PC",
"rating_key": 4348,
"reference_id": 1123,
"session_key": null,
@@ -1658,6 +1661,7 @@ class WebInterface(object):
("friendly_name", True, True),
("ip_address", True, True),
("platform", True, True),
("product", True, True),
("player", True, True),
("full_title", True, True),
("started", True, False),
@@ -2806,6 +2810,7 @@ class WebInterface(object):
"music_watched_percent": plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
"themoviedb_lookup": checked(plexpy.CONFIG.THEMOVIEDB_LOOKUP),
"tvmaze_lookup": checked(plexpy.CONFIG.TVMAZE_LOOKUP),
"musicbrainz_lookup": checked(plexpy.CONFIG.MUSICBRAINZ_LOOKUP),
"show_advanced_settings": plexpy.CONFIG.SHOW_ADVANCED_SETTINGS,
"newsletter_dir": plexpy.CONFIG.NEWSLETTER_DIR,
"newsletter_self_hosted": checked(plexpy.CONFIG.NEWSLETTER_SELF_HOSTED),
@@ -2834,7 +2839,7 @@ class WebInterface(object):
"monitor_pms_updates", "monitor_remote_access", "get_file_sizes", "log_blacklist", "http_hash_password",
"allow_guest_access", "cache_images", "http_proxy", "http_basic_auth", "notify_concurrent_by_ip",
"history_table_activity", "plexpy_auto_update",
"themoviedb_lookup", "tvmaze_lookup", "http_plex_admin",
"themoviedb_lookup", "tvmaze_lookup", "musicbrainz_lookup", "http_plex_admin",
"newsletter_self_hosted", "newsletter_inline_styles", "win_sys_tray"
]
for checked_config in checked_configs:
@@ -2857,8 +2862,15 @@ class WebInterface(object):
kwargs['http_password'] = make_hash(kwargs['http_password'])
kwargs['http_hashed_password'] = 1
# Flag to refresh JWT uuid to log out clients
kwargs['jwt_update_secret'] = True
elif not kwargs.get('http_hash_password'):
kwargs['http_hashed_password'] = 0
# Flag to refresh JWT uuid to log out clients
kwargs['jwt_update_secret'] = True
else:
kwargs['http_hashed_password'] = 0
@@ -3168,13 +3180,13 @@ class WebInterface(object):
}
```
"""
result = notifiers.get_notifier_config(notifier_id=notifier_id)
result = notifiers.get_notifier_config(notifier_id=notifier_id, mask_passwords=True)
return result
@cherrypy.expose
@requireAuth(member_of("admin"))
def get_notifier_config_modal(self, notifier_id=None, **kwargs):
result = notifiers.get_notifier_config(notifier_id=notifier_id)
result = notifiers.get_notifier_config(notifier_id=notifier_id, mask_passwords=True)
parameters = [
{'name': param['name'], 'type': param['type'], 'value': param['value']}
@@ -3657,10 +3669,10 @@ class WebInterface(object):
identifier = server['clientIdentifier']
break
# Fallback to checking /identity endpoint is server is unpublished
# Fallback to checking /identity endpoint if the server is unpublished
# Cannot set SSL settings on the PMS if unpublished so 'http' is okay
if not identifier:
scheme = 'https' if ssl else 'http'
scheme = 'https' if helpers.cast_to_int(ssl) else 'http'
url = '{scheme}://{hostname}:{port}'.format(scheme=scheme, hostname=hostname, port=port)
uri = '/identity'
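CherryPy hands query-string values to the handler as strings, so ssl arrives as '0' rather than 0 and the old truthiness test always picked https. Casting to an integer first restores the intended scheme; int() below stands in for helpers.cast_to_int, which presumably also tolerates malformed input:

```python
ssl = '0'                                            # value as received from the query string
assert ('https' if ssl else 'http') == 'https'       # old check: any non-empty string is truthy
assert ('https' if int(ssl) else 'http') == 'http'   # after casting, '0' correctly means plain http
```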
@@ -4290,7 +4302,7 @@ class WebInterface(object):
```
Required parameters:
rating_key (int): 1234
(Note: Must be the movie, show, or artist rating key)
(Note: Must be the movie, show, artist, album, or track rating key)
Optional parameters:
None
@@ -4548,6 +4560,7 @@ class WebInterface(object):
"Drama",
"Fantasy"
],
"grandparent_guid": "com.plexapp.agents.thetvdb://121361?lang=en",
"grandparent_rating_key": "1219",
"grandparent_thumb": "/library/metadata/1219/thumb/1462175063",
"grandparent_title": "Game of Thrones",
@@ -4588,6 +4601,7 @@ class WebInterface(object):
"video_language_code": "",
"video_profile": "high",
"video_ref_frames": "4",
"video_scan_type": "progressive",
"video_width": "1920",
"selected": 0
},
@@ -4622,6 +4636,7 @@ class WebInterface(object):
],
"video_codec": "h264",
"video_framerate": "24p",
"video_full_resolution": "1080p",
"video_profile": "high",
"video_resolution": "1080",
"width": "1920"
@@ -4630,6 +4645,7 @@ class WebInterface(object):
"media_type": "episode",
"original_title": "",
"originally_available_at": "2016-04-24",
"parent_guid": "com.plexapp.agents.thetvdb://121361/6?lang=en",
"parent_media_index": "6",
"parent_rating_key": "153036",
"parent_thumb": "/library/metadata/153036/thumb/1462175062",
@@ -4940,6 +4956,7 @@ class WebInterface(object):
"Drama",
"Fantasy"
],
"grandparent_guid": "com.plexapp.agents.thetvdb://121361?lang=en",
"grandparent_rating_key": "1219",
"grandparent_thumb": "/library/metadata/1219/thumb/1503306930",
"grandparent_title": "Game of Thrones",
@@ -4967,6 +4984,7 @@ class WebInterface(object):
"optimized_version_title": "",
"originally_available_at": "2016-04-24",
"original_title": "",
"parent_guid": "com.plexapp.agents.thetvdb://121361/6?lang=en",
"parent_media_index": "6",
"parent_rating_key": "153036",
"parent_thumb": "/library/metadata/153036/thumb/1503889210",
@@ -5031,7 +5049,9 @@ class WebInterface(object):
"stream_video_language": "",
"stream_video_language_code": "",
"stream_video_ref_frames": "4",
"stream_video_full_resolution": "1080p",
"stream_video_resolution": "1080",
"stream_video_scan_type": "progressive",
"stream_video_width": "1920",
"studio": "HBO",
"subtitle_codec": "",
@@ -5084,12 +5104,14 @@ class WebInterface(object):
"video_decision": "direct play",
"video_frame_rate": "23.976",
"video_framerate": "24p",
"video_full_resolution": "1080p",
"video_height": "1078",
"video_language": "",
"video_language_code": "",
"video_profile": "high",
"video_ref_frames": "4",
"video_resolution": "1080",
"video_scan_type": "progressive",
"video_width": "1920",
"view_offset": "1000",
"width": "1920",
@@ -5667,13 +5689,13 @@ class WebInterface(object):
}
```
"""
result = newsletters.get_newsletter_config(newsletter_id=newsletter_id)
result = newsletters.get_newsletter_config(newsletter_id=newsletter_id, mask_passwords=True)
return result
@cherrypy.expose
@requireAuth(member_of("admin"))
def get_newsletter_config_modal(self, newsletter_id=None, **kwargs):
result = newsletters.get_newsletter_config(newsletter_id=newsletter_id)
result = newsletters.get_newsletter_config(newsletter_id=newsletter_id, mask_passwords=True)
return serve_template(templatename="newsletter_config.html", newsletter=result)
@cherrypy.expose