More rename, more -python2

2021-02-05 17:29:23 +01:00
parent b867dc9be2
commit 1df28243c3
40 changed files with 1424 additions and 1412 deletions


@@ -15,37 +15,21 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import unicode_literals
-from future.builtins import str
-from future.builtins import next
-from future.builtins import object
import json
import os
import jellypy
-if jellypy.PYTHON2:
-    import common
-    import database
-    import datatables
-    import helpers
-    import logger
-    import plextv
-    import pmsconnect
-    import session
-    import users
-    from plex import Plex
-else:
-    from jellypy import common
-    from jellypy import database
-    from jellypy import datatables
-    from jellypy import helpers
-    from jellypy import logger
-    from jellypy import plextv
-    from jellypy import pmsconnect
-    from jellypy import session
-    from jellypy import users
-    from jellypy.plex import Plex
+from jellypy import common
+from jellypy import database
+from jellypy import datatables
+from jellypy import helpers
+from jellypy import logger
+from jellypy import plextv
+from jellypy import pmsconnect
+from jellypy import session
+from jellypy import users
+from jellypy.plex import Plex
def refresh_libraries():
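
The import hunk above is the heart of the "-python2" part of this commit: the `future` shims and the `if jellypy.PYTHON2:` dual-import block are dropped, leaving only the package-qualified Python 3 imports. A minimal sketch of the pattern being removed (how the PYTHON2 flag is defined is an assumption here, and the module list is abbreviated):

    import sys

    PYTHON2 = sys.version_info[0] == 2  # assumed equivalent of jellypy.PYTHON2

    if PYTHON2:
        # Python 2: modules were importable as bare top-level names
        import helpers
        import logger
    else:
        # Python 3: absolute, package-qualified imports
        from jellypy import helpers
        from jellypy import logger

With Python 2 support gone, the guard serves no purpose, so the whole block collapses to the unconditional `from jellypy import ...` lines added above.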
@@ -378,9 +362,10 @@ class Libraries(object):
join_tables=['session_history_metadata',
'session_history',
'session_history_media_info'],
-join_evals=[['session_history_metadata.section_id', 'library_sections.section_id'],
-            ['session_history_metadata.id', 'session_history.id'],
-            ['session_history_metadata.id', 'session_history_media_info.id']],
+join_evals=[
+    ['session_history_metadata.section_id', 'library_sections.section_id'],
+    ['session_history_metadata.id', 'session_history.id'],
+    ['session_history_metadata.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn("Tautulli Libraries :: Unable to execute database query for get_list: %s." % e)
@@ -452,7 +437,8 @@ class Libraries(object):
return dict
-def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None):
+def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False,
+                              kwargs=None):
default_return = {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
@@ -505,7 +491,8 @@ class Libraries(object):
'GROUP BY session_history.%s ' % (count_by, group_by)
result = monitor_db.select(query, args=[section_id])
except Exception as e:
logger.warn("Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
logger.warn(
"Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
return default_return
watched_list = {}
@@ -522,8 +509,8 @@ class Libraries(object):
rows = json.load(inFile)
library_count = len(rows)
except IOError as e:
#logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
# logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
pass
elif section_id:
try:
@@ -532,8 +519,8 @@ class Libraries(object):
rows = json.load(inFile)
library_count = len(rows)
except IOError as e:
#logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
# logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
pass
# If no cache was imported, get all library children items
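
The block above is the cache-read half of the media info flow: look for a per-rating-key or per-section JSON file under CACHE_DIR and only fall through to a full refresh when it is missing. A stripped-down sketch of that read path (the function name and layout are illustrative, not the actual method structure):

    import json
    import os

    def load_media_info_cache(cache_dir, section_id, rating_key=None):
        # File naming follows the paths visible in this diff.
        if rating_key:
            path = os.path.join(cache_dir, 'media_info_%s-%s.json' % (section_id, rating_key))
        else:
            path = os.path.join(cache_dir, 'media_info_%s.json' % section_id)
        try:
            with open(path, 'r') as in_file:
                return json.load(in_file)
        except IOError:
            return []  # no cache yet; the caller refreshes from the server instead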
@@ -594,7 +581,8 @@ class Libraries(object):
# Cache the media info to a json file
if rating_key:
try:
-outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
+outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR,
+                           'media_info_%s-%s.json' % (section_id, rating_key))
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
@@ -622,14 +610,14 @@ class Libraries(object):
# Get datatables JSON data
if kwargs.get('json_data'):
json_data = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
-#print json_data
+# print json_data
# Search results
search_value = json_data['search']['value'].lower()
if search_value:
searchable_columns = [d['data'] for d in json_data['columns'] if d['searchable']] + ['title']
for row in rows:
-for k,v in row.items():
+for k, v in row.items():
if k in searchable_columns and search_value in v.lower():
results.append(row)
break
@@ -649,7 +637,9 @@ class Libraries(object):
elif sort_key in ('file_size', 'bitrate', 'added_at', 'last_played', 'play_count'):
results = sorted(results, key=lambda k: helpers.cast_to_int(k[sort_key]), reverse=reverse)
elif sort_key == 'video_resolution':
-results = sorted(results, key=lambda k: helpers.cast_to_int(k[sort_key].replace('4k', '2160p').rstrip('p')), reverse=reverse)
+results = sorted(results,
+                 key=lambda k: helpers.cast_to_int(k[sort_key].replace('4k', '2160p').rstrip('p')),
+                 reverse=reverse)
else:
results = sorted(results, key=lambda k: k[sort_key].lower(), reverse=reverse)
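
Both the search filter and the sort above operate on the cached rows in memory rather than in SQL. A condensed sketch of the two steps, reusing the '4k' -> 2160p trick so resolutions sort numerically (cast_to_int is a simplified stand-in for helpers.cast_to_int):

    def cast_to_int(value):
        try:
            return int(value)
        except (ValueError, TypeError):
            return 0

    def filter_and_sort(rows, search_value, searchable_columns, reverse=False):
        # Keep a row if the search string appears in any searchable column.
        results = [row for row in rows
                   if any(k in searchable_columns and search_value in v.lower()
                          for k, v in row.items())]
        # '1080p' -> 1080, '4k' -> 2160, so string resolutions compare as numbers.
        return sorted(results,
                      key=lambda r: cast_to_int(r['video_resolution'].replace('4k', '2160p').rstrip('p')),
                      reverse=reverse)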
@@ -692,14 +682,14 @@ class Libraries(object):
rows = []
# Import media info cache from json file
if rating_key:
#logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
# logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
try:
inFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s-%s.json' % (section_id, rating_key))
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
except IOError as e:
#logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
# logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
pass
elif section_id:
logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." % section_id)
@@ -708,8 +698,8 @@ class Libraries(object):
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
except IOError as e:
#logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
# logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
# logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
pass
# Get the total file size for each item
@@ -727,7 +717,7 @@ class Libraries(object):
media_info = media_part_info = {}
if 'media_info' in child_metadata and len(child_metadata['media_info']) > 0:
media_info = child_metadata['media_info'][0]
-if 'parts' in media_info and len (media_info['parts']) > 0:
+if 'parts' in media_info and len(media_info['parts']) > 0:
media_part_info = next((p for p in media_info['parts'] if p['selected']),
media_info['parts'][0])
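
The `next(...)` call above is the idiom for "take the part flagged as selected, otherwise the first part". In isolation, with made-up sample data:

    parts = [{'id': 1, 'selected': False, 'file_size': 700},
             {'id': 2, 'selected': True, 'file_size': 1400}]

    # First part marked selected, falling back to parts[0] if none is.
    media_part_info = next((p for p in parts if p['selected']), parts[0])
    assert media_part_info['id'] == 2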
@@ -742,22 +732,25 @@ class Libraries(object):
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
logger.debug(
"Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
elif section_id:
try:
outFilePath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'media_info_%s.json' % section_id)
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)
logger.debug(
"Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)
if rating_key:
#logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
# logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
pass
elif section_id:
logger.debug("Tautulli Libraries :: File sizes updated for section_id %s." % section_id)
return True
def set_config(self, section_id=None, custom_thumb='', custom_art='',
do_notify=1, keep_history=1, do_notify_created=1):
if section_id:
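
This hunk is the write half of the same cache: once file sizes have been filled in, the rows are serialised back to the per-section or per-rating-key JSON file, and an IOError is only logged. A compact sketch mirroring the hypothetical loader shown earlier:

    import json
    import os

    def save_media_info_cache(cache_dir, rows, section_id, rating_key=None):
        if rating_key:
            path = os.path.join(cache_dir, 'media_info_%s-%s.json' % (section_id, rating_key))
        else:
            path = os.path.join(cache_dir, 'media_info_%s.json' % section_id)
        try:
            with open(path, 'w') as out_file:
                json.dump(rows, out_file)
            return True
        except IOError:
            return False  # the caller just logs a debug message and carries on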
@@ -856,8 +849,9 @@ class Libraries(object):
return library_details
else:
logger.warn("Tautulli Libraries :: Unable to retrieve library %s from database. Requesting library list refresh."
% section_id)
logger.warn(
"Tautulli Libraries :: Unable to retrieve library %s from database. Requesting library list refresh."
% section_id)
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
refresh_libraries()
@@ -1016,36 +1010,36 @@ class Libraries(object):
result = []
for row in result:
-if row['media_type'] == 'episode' and row['parent_thumb']:
-thumb = row['parent_thumb']
-elif row['media_type'] == 'episode':
-thumb = row['grandparent_thumb']
-else:
-thumb = row['thumb']
+if row['media_type'] == 'episode' and row['parent_thumb']:
+thumb = row['parent_thumb']
+elif row['media_type'] == 'episode':
+thumb = row['grandparent_thumb']
+else:
+thumb = row['thumb']
-recent_output = {'row_id': row['id'],
-'media_type': row['media_type'],
-'rating_key': row['rating_key'],
-'parent_rating_key': row['parent_rating_key'],
-'grandparent_rating_key': row['grandparent_rating_key'],
-'title': row['title'],
-'parent_title': row['parent_title'],
-'grandparent_title': row['grandparent_title'],
-'original_title': row['original_title'],
-'thumb': thumb,
-'media_index': row['media_index'],
-'parent_media_index': row['parent_media_index'],
-'year': row['year'],
-'originally_available_at': row['originally_available_at'],
-'live': row['live'],
-'guid': row['guid'],
-'time': row['started'],
-'user': row['user'],
-'section_id': row['section_id'],
-'content_rating': row['content_rating'],
-'labels': row['labels'].split(';') if row['labels'] else (),
-}
-recently_watched.append(recent_output)
+recent_output = {'row_id': row['id'],
+'media_type': row['media_type'],
+'rating_key': row['rating_key'],
+'parent_rating_key': row['parent_rating_key'],
+'grandparent_rating_key': row['grandparent_rating_key'],
+'title': row['title'],
+'parent_title': row['parent_title'],
+'grandparent_title': row['grandparent_title'],
+'original_title': row['original_title'],
+'thumb': thumb,
+'media_index': row['media_index'],
+'parent_media_index': row['parent_media_index'],
+'year': row['year'],
+'originally_available_at': row['originally_available_at'],
+'live': row['live'],
+'guid': row['guid'],
+'time': row['started'],
+'user': row['user'],
+'section_id': row['section_id'],
+'content_rating': row['content_rating'],
+'labels': row['labels'].split(';') if row['labels'] else (),
+}
+recently_watched.append(recent_output)
return session.mask_session_info(recently_watched)
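
The loop body in this last hunk is unchanged apart from re-indentation; the only branching is the thumbnail fallback for episodes, which reads more clearly pulled out on its own (a sketch, not a helper that exists in the codebase):

    def pick_thumb(row):
        # Episodes prefer the season (parent) poster, then the show (grandparent)
        # poster; every other media type uses its own thumb.
        if row['media_type'] == 'episode' and row['parent_thumb']:
            return row['parent_thumb']
        if row['media_type'] == 'episode':
            return row['grandparent_thumb']
        return row['thumb']

The assembled recent_output dicts are then passed through session.mask_session_info(), which, as the name suggests, masks fields the requesting session is not permitted to see.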