Rename; Removed deps from repo

2021-02-05 16:30:46 +01:00
parent 97f80adf0b
commit b867dc9be2
1680 changed files with 1603 additions and 290357 deletions

2405 jellypy/__init__.py (new file)

File diff suppressed because it is too large.

720 jellypy/activity_handler.py (new file)

@@ -0,0 +1,720 @@
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
from future.builtins import object
import datetime
import os
import time
from apscheduler.triggers.date import DateTrigger
import pytz
import jellypy
if jellypy.PYTHON2:
import activity_processor
import datafactory
import helpers
import logger
import notification_handler
import pmsconnect
else:
from jellypy import activity_processor
from jellypy import datafactory
from jellypy import helpers
from jellypy import logger
from jellypy import notification_handler
from jellypy import pmsconnect
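# Module-level scheduler and queue shared by the handlers below.
# ACTIVITY_SCHED is presumably assigned an APScheduler scheduler instance
# elsewhere at startup; schedule_callback() further down adds, reschedules,
# and removes DateTrigger jobs on it. RECENTLY_ADDED_QUEUE maps rating keys
# to sets of child rating keys while recently-added notifications are pending.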
ACTIVITY_SCHED = None
RECENTLY_ADDED_QUEUE = {}
class ActivityHandler(object):
def __init__(self, timeline):
self.timeline = timeline
def is_valid_session(self):
if 'sessionKey' in self.timeline:
if str(self.timeline['sessionKey']).isdigit():
return True
return False
def get_session_key(self):
if self.is_valid_session():
return int(self.timeline['sessionKey'])
return None
def get_rating_key(self):
if self.is_valid_session():
return self.timeline['ratingKey']
return None
def get_metadata(self, skip_cache=False):
cache_key = None if skip_cache else self.get_session_key()
pms_connect = pmsconnect.PmsConnect()
metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key)
if metadata:
return metadata
return None
def get_live_session(self, skip_cache=False):
pms_connect = pmsconnect.PmsConnect()
session_list = pms_connect.get_current_activity(skip_cache=skip_cache)
if session_list:
for session in session_list['sessions']:
if int(session['session_key']) == self.get_session_key():
# Live sessions don't have rating keys in sessions
# Get it from the websocket data
if not session['rating_key']:
session['rating_key'] = self.get_rating_key()
session['rating_key_websocket'] = self.get_rating_key()
return session
return None
def update_db_session(self, session=None, notify=False):
if session is None:
session = self.get_live_session()
if session:
# Update our session temp table values
ap = activity_processor.ActivityProcessor()
ap.write_session(session=session, notify=notify)
self.set_session_state()
def set_session_state(self):
ap = activity_processor.ActivityProcessor()
ap.set_session_state(session_key=self.get_session_key(),
state=self.timeline['state'],
view_offset=self.timeline['viewOffset'],
stopped=helpers.timestamp())
def on_start(self):
if self.is_valid_session():
session = self.get_live_session(skip_cache=True)
if not session:
return
# Some DLNA clients create a new session temporarily when browsing the library
# Wait and get session again to make sure it is an actual session
if session['platform'] == 'DLNA':
time.sleep(1)
session = self.get_live_session()
if not session:
return
logger.debug("Tautulli ActivityHandler :: Session %s started by user %s (%s) with ratingKey %s (%s)%s."
% (str(session['session_key']), str(session['user_id']), session['username'],
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
# Send notification after updating db
#jellypy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
# Write the new session to our temp session table
self.update_db_session(session=session, notify=True)
# Schedule a callback to force stop a stale stream 5 minutes later
schedule_callback('session_key-{}'.format(self.get_session_key()),
func=force_stop_stream,
args=[self.get_session_key(), session['full_title'], session['username']],
minutes=5)
def on_stop(self, force_stop=False):
if self.is_valid_session():
logger.debug("Tautulli ActivityHandler :: Session %s %sstopped."
% (str(self.get_session_key()), 'force ' if force_stop else ''))
# Set the session last_paused timestamp
ap = activity_processor.ActivityProcessor()
ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
# Update the session state and viewOffset
# Set force_stop to true to disable the state set
if not force_stop:
self.set_session_state()
# Retrieve the session data from our temp table
db_session = ap.get_session_by_key(session_key=self.get_session_key())
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'})
# Write it to the history table
monitor_proc = activity_processor.ActivityProcessor()
row_id = monitor_proc.write_session_history(session=db_session)
if row_id:
schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True)
# Remove the session from our temp session table
logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue"
% (str(self.get_session_key()), str(self.get_rating_key())))
ap.delete_session(row_id=row_id)
delete_metadata_cache(self.get_session_key())
else:
schedule_callback('session_key-{}'.format(self.get_session_key()),
func=force_stop_stream,
args=[self.get_session_key(), db_session['full_title'], db_session['user']],
seconds=30)
def on_pause(self, still_paused=False):
if self.is_valid_session():
if not still_paused:
logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key()))
# Set the session last_paused timestamp
ap = activity_processor.ActivityProcessor()
ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp())
# Update the session state and viewOffset
self.update_db_session()
# Retrieve the session data from our temp table
db_session = ap.get_session_by_key(session_key=self.get_session_key())
if not still_paused:
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'})
def on_resume(self):
if self.is_valid_session():
logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key()))
# Set the session last_paused timestamp
ap = activity_processor.ActivityProcessor()
ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
# Update the session state and viewOffset
self.update_db_session()
# Retrieve the session data from our temp table
db_session = ap.get_session_by_key(session_key=self.get_session_key())
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_resume'})
def on_change(self):
if self.is_valid_session():
logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
# Update the session state and viewOffset
self.update_db_session()
# Retrieve the session data from our temp table
ap = activity_processor.ActivityProcessor()
db_session = ap.get_session_by_key(session_key=self.get_session_key())
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_change'})
def on_buffer(self):
if self.is_valid_session():
logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key())
ap = activity_processor.ActivityProcessor()
db_stream = ap.get_session_by_key(session_key=self.get_session_key())
# Increment our buffer count
ap.increment_session_buffer_count(session_key=self.get_session_key())
# Get our current buffer count
current_buffer_count = ap.get_session_buffer_count(self.get_session_key())
logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." %
(self.get_session_key(), current_buffer_count))
# Get our last triggered time
buffer_last_triggered = ap.get_session_buffer_trigger_time(self.get_session_key())
# Update the session state and viewOffset
self.update_db_session()
time_since_last_trigger = 0
if buffer_last_triggered:
logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." %
(self.get_session_key(), buffer_last_triggered))
time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered)
if current_buffer_count >= jellypy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
time_since_last_trigger >= jellypy.CONFIG.BUFFER_WAIT:
ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
# Retrieve the session data from our temp table
db_session = ap.get_session_by_key(session_key=self.get_session_key())
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'})
def on_error(self):
if self.is_valid_session():
logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.get_session_key()))
# Update the session state and viewOffset
self.update_db_session()
# Retrieve the session data from our temp table
ap = activity_processor.ActivityProcessor()
db_session = ap.get_session_by_key(session_key=self.get_session_key())
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_error'})
# This function receives events from our websocket connection
def process(self):
if self.is_valid_session():
ap = activity_processor.ActivityProcessor()
db_session = ap.get_session_by_key(session_key=self.get_session_key())
this_state = self.timeline['state']
this_rating_key = str(self.timeline['ratingKey'])
this_key = self.timeline['key']
this_transcode_key = self.timeline.get('transcodeSession', '')
# Get the live tv session uuid
this_live_uuid = this_key.split('/')[-1] if this_key.startswith('/livetv/sessions') else None
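# For example (illustrative value): a key of '/livetv/sessions/abc123'
# yields this_live_uuid == 'abc123'; non-live keys leave it as None.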
# If we already have this session in the temp table, check for state changes
if db_session:
# Re-schedule the callback to reset the 5 minutes timer
schedule_callback('session_key-{}'.format(self.get_session_key()),
func=force_stop_stream,
args=[self.get_session_key(), db_session['full_title'], db_session['user']],
minutes=5)
last_state = db_session['state']
last_rating_key = str(db_session['rating_key'])
last_live_uuid = db_session['live_uuid']
last_transcode_key = db_session['transcode_key'].split('/')[-1]
last_paused = db_session['last_paused']
last_rating_key_websocket = db_session['rating_key_websocket']
last_guid = db_session['guid']
this_guid = last_guid
# Check guid for live TV metadata every 60 seconds
if db_session['live'] and helpers.timestamp() - db_session['stopped'] > 60:
metadata = self.get_metadata(skip_cache=True)
if metadata:
this_guid = metadata['guid']
# Make sure the same item is being played
if (this_rating_key == last_rating_key
or this_rating_key == last_rating_key_websocket
or this_live_uuid == last_live_uuid) \
and this_guid == last_guid:
# Update the session state and viewOffset
if this_state == 'playing':
# Update the session in our temp session table
# if the last set temporary stopped time exceeds 60 seconds
if helpers.timestamp() - db_session['stopped'] > 60:
self.update_db_session()
# Start our state checks
if this_state != last_state:
if this_state == 'paused':
self.on_pause()
elif last_paused and this_state == 'playing':
self.on_resume()
elif this_state == 'stopped':
self.on_stop()
elif this_state == 'error':
self.on_error()
elif this_state == 'paused':
# Update the session last_paused timestamp
self.on_pause(still_paused=True)
if this_state == 'buffering':
self.on_buffer()
if this_transcode_key != last_transcode_key and this_state != 'stopped':
self.on_change()
# If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed
else:
# Manually stop and start
# Set force_stop so that we don't overwrite our last viewOffset
self.on_stop(force_stop=True)
self.on_start()
# Monitor if the stream has reached the watch percentage for notifications
# The only purpose of this is for notifications
if not db_session['watched'] and this_state != 'buffering':
progress_percent = helpers.get_percent(self.timeline['viewOffset'], db_session['duration'])
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT,
'clip': jellypy.CONFIG.TV_WATCHED_PERCENT
}
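# The default of 101 below effectively excludes media types not listed
# above from the watched check.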
if progress_percent >= watched_percent.get(db_session['media_type'], 101):
logger.debug("Tautulli ActivityHandler :: Session %s watched."
% str(self.get_session_key()))
ap.set_watched(session_key=self.get_session_key())
watched_notifiers = notification_handler.get_notify_state_enabled(
session=db_session, notify_action='on_watched', notified=False)
for d in watched_notifiers:
jellypy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(),
'notifier_id': d['notifier_id'],
'notify_action': 'on_watched'})
else:
# We don't have this session in our table yet, start a new one.
if this_state != 'buffering':
self.on_start()
class TimelineHandler(object):
def __init__(self, timeline):
self.timeline = timeline
def is_item(self):
if 'itemID' in self.timeline:
return True
return False
def get_rating_key(self):
if self.is_item():
return int(self.timeline['itemID'])
return None
def get_metadata(self):
pms_connect = pmsconnect.PmsConnect()
metadata = pms_connect.get_metadata_details(self.get_rating_key())
if metadata:
return metadata
return None
# This function receives events from our websocket connection
def process(self):
if self.is_item():
global RECENTLY_ADDED_QUEUE
rating_key = self.get_rating_key()
media_types = {1: 'movie',
2: 'show',
3: 'season',
4: 'episode',
8: 'artist',
9: 'album',
10: 'track'}
identifier = self.timeline.get('identifier')
state_type = self.timeline.get('state')
media_type = media_types.get(self.timeline.get('type'))
section_id = helpers.cast_to_int(self.timeline.get('sectionID', 0))
title = self.timeline.get('title', 'Unknown')
metadata_state = self.timeline.get('metadataState')
media_state = self.timeline.get('mediaState')
queue_size = self.timeline.get('queueSize')
# Return if it is not a library event (i.e. DVR EPG event)
if identifier != 'com.plexapp.plugins.library':
return
# Add a new media item to the recently added queue
if media_type and section_id > 0 and \
((state_type == 0 and metadata_state == 'created')): # or \
#(jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_UPGRADE and state_type in (1, 5) and \
#media_state == 'analyzing' and queue_size is None)):
if media_type in ('episode', 'track'):
metadata = self.get_metadata()
if metadata:
grandparent_title = metadata['grandparent_title']
grandparent_rating_key = int(metadata['grandparent_rating_key'])
parent_rating_key = int(metadata['parent_rating_key'])
grandparent_set = RECENTLY_ADDED_QUEUE.get(grandparent_rating_key, set())
grandparent_set.add(parent_rating_key)
RECENTLY_ADDED_QUEUE[grandparent_rating_key] = grandparent_set
parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
parent_set.add(rating_key)
RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set
RECENTLY_ADDED_QUEUE[rating_key] = set([grandparent_rating_key])
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) added to recently added queue."
% (title, str(rating_key), str(grandparent_rating_key)))
# Schedule a callback to clear the recently added queue
schedule_callback('rating_key-{}'.format(grandparent_rating_key),
func=clear_recently_added_queue,
args=[grandparent_rating_key, grandparent_title],
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
elif media_type in ('season', 'album'):
metadata = self.get_metadata()
if metadata:
parent_title = metadata['parent_title']
parent_rating_key = int(metadata['parent_rating_key'])
parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
parent_set.add(rating_key)
RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) added to recently added queue."
% (title, str(rating_key), str(parent_rating_key)))
# Schedule a callback to clear the recently added queue
schedule_callback('rating_key-{}'.format(parent_rating_key),
func=clear_recently_added_queue,
args=[parent_rating_key, parent_title],
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
else:
queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set())
RECENTLY_ADDED_QUEUE[rating_key] = queue_set
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) added to recently added queue."
% (title, str(rating_key)))
# Schedule a callback to clear the recently added queue
schedule_callback('rating_key-{}'.format(rating_key),
func=clear_recently_added_queue,
args=[rating_key, title],
seconds=jellypy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
# A movie, show, or artist is done processing
elif media_type in ('movie', 'show', 'artist') and section_id > 0 and \
state_type == 5 and metadata_state is None and queue_size is None and \
rating_key in RECENTLY_ADDED_QUEUE:
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) done processing metadata."
% (title, str(rating_key)))
# An item was deleted, make sure it is removed from the queue
elif state_type == 9 and metadata_state == 'deleted':
if rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[rating_key]:
logger.debug("Tautulli TimelineHandler :: Library item %s removed from recently added queue."
% str(rating_key))
del_keys(rating_key)
# Remove the callback if the item is removed
schedule_callback('rating_key-{}'.format(rating_key), remove_job=True)
class ReachabilityHandler(object):
def __init__(self, data):
self.data = data
def is_reachable(self):
if 'reachability' in self.data:
return self.data['reachability']
return False
def remote_access_enabled(self):
pms_connect = pmsconnect.PmsConnect()
pref = pms_connect.get_server_pref(pref='PublishServerOnPlexOnlineKey')
return helpers.bool_true(pref)
def on_down(self, server_response):
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
def on_up(self, server_response):
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
def process(self):
# Check if remote access is enabled
if not self.remote_access_enabled():
return
# Do nothing if remote access is still up and hasn't changed
if self.is_reachable() and jellypy.PLEX_REMOTE_ACCESS_UP:
return
pms_connect = pmsconnect.PmsConnect()
server_response = pms_connect.get_server_response()
if server_response:
# Waiting for port mapping
if server_response['mapping_state'] == 'waiting':
logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.")
elif jellypy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason'])
logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.")
jellypy.PLEX_REMOTE_ACCESS_UP = False
if not ACTIVITY_SCHED.get_job('on_extdown'):
logger.debug("Tautulli ReachabilityHandler :: Schedule remote access down callback in %d seconds.",
jellypy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
schedule_callback('on_extdown', func=self.on_down, args=[server_response],
seconds=jellypy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
elif jellypy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.")
jellypy.PLEX_REMOTE_ACCESS_UP = True
if ACTIVITY_SCHED.get_job('on_extdown'):
logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.")
schedule_callback('on_extdown', remove_job=True)
else:
self.on_up(server_response)
elif jellypy.PLEX_REMOTE_ACCESS_UP is None:
jellypy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
def del_keys(key):
if isinstance(key, set):
for child_key in key:
del_keys(child_key)
elif key in RECENTLY_ADDED_QUEUE:
del_keys(RECENTLY_ADDED_QUEUE.pop(key))
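# schedule_callback() wraps the module scheduler: it reschedules an existing
# job with the same id, removes it when remove_job=True, or adds a new
# one-shot DateTrigger job offset by the given timedelta kwargs.
# Illustrative call (values are made up):
#   schedule_callback('session_key-123', func=force_stop_stream,
#                     args=[123, 'Some Title', 'someuser'], minutes=5)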
def schedule_callback(id, func=None, remove_job=False, args=None, **kwargs):
if ACTIVITY_SCHED.get_job(id):
if remove_job:
ACTIVITY_SCHED.remove_job(id)
else:
ACTIVITY_SCHED.reschedule_job(
id, args=args, trigger=DateTrigger(
run_date=datetime.datetime.now(pytz.UTC) + datetime.timedelta(**kwargs),
timezone=pytz.UTC))
elif not remove_job:
ACTIVITY_SCHED.add_job(
func, args=args, id=id, trigger=DateTrigger(
run_date=datetime.datetime.now(pytz.UTC) + datetime.timedelta(**kwargs),
timezone=pytz.UTC),
misfire_grace_time=None)
def force_stop_stream(session_key, title, user):
ap = activity_processor.ActivityProcessor()
session = ap.get_session_by_key(session_key=session_key)
row_id = ap.write_session_history(session=session)
if row_id:
# If session is written to the database successfully, remove the session from the session table
logger.info("Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
% (session['session_key'], session['rating_key']))
ap.delete_session(row_id=row_id)
delete_metadata_cache(session_key)
else:
session['write_attempts'] += 1
if session['write_attempts'] < jellypy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
logger.warn("Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
"Will try again in 30 seconds. Write attempt %s."
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
ap.increment_write_attempts(session_key=session_key)
# Reschedule for 30 seconds later
schedule_callback('session_key-{}'.format(session_key), func=force_stop_stream,
args=[session_key, session['full_title'], session['user']], seconds=30)
else:
logger.warn("Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
"Removing session from the database. Write attempt %s."
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
logger.info("Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
% (session['session_key'], session['rating_key']))
ap.delete_session(session_key=session_key)
delete_metadata_cache(session_key)
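# clear_recently_added_queue() below walks RECENTLY_ADDED_QUEUE one level at
# a time (grandparent -> parents -> children), grouping notifications per the
# NOTIFY_GROUP_RECENTLY_ADDED_* settings before emptying the queue via del_keys().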
def clear_recently_added_queue(rating_key, title):
child_keys = RECENTLY_ADDED_QUEUE[rating_key]
if jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT and len(child_keys) > 1:
on_created(rating_key, child_keys=child_keys)
elif child_keys:
for child_key in child_keys:
grandchild_keys = RECENTLY_ADDED_QUEUE.get(child_key, [])
if jellypy.CONFIG.NOTIFY_GROUP_RECENTLY_ADDED_PARENT and len(grandchild_keys) > 1:
on_created(child_key, child_keys=grandchild_keys)
elif grandchild_keys:
for grandchild_key in grandchild_keys:
on_created(grandchild_key)
else:
on_created(child_key)
else:
on_created(rating_key)
# Remove all keys
del_keys(rating_key)
def on_created(rating_key, **kwargs):
logger.debug("Tautulli TimelineHandler :: Library item %s added to Plex." % str(rating_key))
pms_connect = pmsconnect.PmsConnect()
metadata = pms_connect.get_metadata_details(rating_key)
if metadata:
notify = True
# now = helpers.timestamp()
#
# if helpers.cast_to_int(metadata['added_at']) < now - 86400: # Updated more than 24 hours ago
# logger.debug("Tautulli TimelineHandler :: Library item %s added more than 24 hours ago. Not notifying."
# % str(rating_key))
# notify = False
data_factory = datafactory.DataFactory()
if 'child_keys' not in kwargs:
if data_factory.get_recently_added_item(rating_key):
logger.debug("Tautulli TimelineHandler :: Library item %s added already. Not notifying again."
% str(rating_key))
notify = False
if notify:
data = {'timeline_data': metadata, 'notify_action': 'on_created'}
data.update(kwargs)
jellypy.NOTIFY_QUEUE.put(data)
all_keys = [rating_key]
if 'child_keys' in kwargs:
all_keys.extend(kwargs['child_keys'])
for key in all_keys:
data_factory.set_recently_added_item(key)
logger.debug("Added %s items to the recently_added database table." % str(len(all_keys)))
else:
logger.error("Tautulli TimelineHandler :: Unable to retrieve metadata for rating_key %s" % str(rating_key))
def delete_metadata_cache(session_key):
try:
os.remove(os.path.join(jellypy.CONFIG.CACHE_DIR, 'session_metadata/metadata-sessionKey-%s.json' % session_key))
except OSError as e:
logger.error("Tautulli ActivityHandler :: Failed to remove metadata cache file (sessionKey %s): %s"
% (session_key, e))
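
A minimal sketch of how these handlers are typically driven from the Plex websocket feed. The dispatch function name and the payload keys used here are illustrative assumptions, not part of this commit:

    from jellypy import activity_handler

    def dispatch_websocket_message(info):
        # Playback state events feed ActivityHandler; library timeline
        # events feed TimelineHandler. The payload keys below follow the
        # Plex notification format and are assumed for illustration.
        for timeline in info.get('PlaySessionStateNotification', []):
            activity_handler.ActivityHandler(timeline=timeline).process()
        for timeline in info.get('TimelineEntry', []):
            activity_handler.TimelineHandler(timeline=timeline).process()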

275 jellypy/activity_pinger.py (new file)

@@ -0,0 +1,275 @@
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
import threading
import jellypy
if jellypy.PYTHON2:
import activity_handler
import activity_processor
import database
import helpers
import libraries
import logger
import notification_handler
import plextv
import pmsconnect
import web_socket
else:
from jellypy import activity_handler
from jellypy import activity_processor
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import notification_handler
from jellypy import plextv
from jellypy import pmsconnect
from jellypy import web_socket
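# monitor_lock serializes the periodic monitor passes below; the ping
# counters are presumably maintained by the connection-check routines
# elsewhere in this module (not shown in this excerpt).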
monitor_lock = threading.Lock()
ext_ping_count = 0
ext_ping_error = None
int_ping_count = 0
def check_active_sessions(ws_request=False):
with monitor_lock:
monitor_db = database.MonitorDatabase()
monitor_process = activity_processor.ActivityProcessor()
db_streams = monitor_process.get_sessions()
# Clear the metadata cache
for stream in db_streams:
activity_handler.delete_metadata_cache(stream['session_key'])
pms_connect = pmsconnect.PmsConnect()
session_list = pms_connect.get_current_activity()
logger.debug("Tautulli Monitor :: Checking for active streams.")
if session_list:
media_container = session_list['sessions']
# Check our temp table for what we must do with the new streams
for stream in db_streams:
if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
for d in media_container):
# The user's session is still active
for session in media_container:
if session['session_key'] == str(stream['session_key']) and \
session['rating_key'] == str(stream['rating_key']):
# The user is still playing the same media item
# Here we can check the play states
if session['state'] != stream['state']:
if session['state'] == 'paused':
logger.debug("Tautulli Monitor :: Session %s paused." % stream['session_key'])
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_pause'})
if session['state'] == 'playing' and stream['state'] == 'paused':
logger.debug("Tautulli Monitor :: Session %s resumed." % stream['session_key'])
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_resume'})
if session['state'] == 'error':
logger.debug("Tautulli Monitor :: Session %s encountered an error." % stream['session_key'])
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_error'})
if stream['state'] == 'paused' and not ws_request:
# The stream is still paused so we need to increment the paused_counter
# Using the set config parameter as the interval, probably not the most accurate but
# it will have to do for now. If it's a websocket request don't use this method.
paused_counter = int(stream['paused_counter']) + jellypy.CONFIG.MONITORING_INTERVAL
monitor_db.action('UPDATE sessions SET paused_counter = ? '
'WHERE session_key = ? AND rating_key = ?',
[paused_counter, stream['session_key'], stream['rating_key']])
if session['state'] == 'buffering' and jellypy.CONFIG.BUFFER_THRESHOLD > 0:
# The stream is buffering so we need to increment the buffer_count
# We're just going to increment it on every monitor ping,
# would be difficult to keep track otherwise
monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
'WHERE session_key = ? AND rating_key = ?',
[stream['session_key'], stream['rating_key']])
# Check the current buffer count and last buffer to determine if we should notify
buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
'FROM sessions '
'WHERE session_key = ? AND rating_key = ?',
[stream['session_key'], stream['rating_key']])
if buffer_values[0]['buffer_count'] >= jellypy.CONFIG.BUFFER_THRESHOLD:
# Push any notifications -
# Push it on its own thread so we don't hold up our db actions
# Our first buffer notification
if buffer_values[0]['buffer_count'] == jellypy.CONFIG.BUFFER_THRESHOLD:
logger.info("Tautulli Monitor :: User '%s' has triggered a buffer warning."
% stream['user'])
# Set the buffer trigger time
monitor_db.action('UPDATE sessions '
'SET buffer_last_triggered = strftime("%s","now") '
'WHERE session_key = ? AND rating_key = ?',
[stream['session_key'], stream['rating_key']])
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
else:
# Subsequent buffer notifications after wait time
if helpers.timestamp() > buffer_values[0]['buffer_last_triggered'] + \
jellypy.CONFIG.BUFFER_WAIT:
logger.info("Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
% stream['user'])
# Set the buffer trigger time
monitor_db.action('UPDATE sessions '
'SET buffer_last_triggered = strftime("%s","now") '
'WHERE session_key = ? AND rating_key = ?',
[stream['session_key'], stream['rating_key']])
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
logger.debug("Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
% (stream['session_key'],
buffer_values[0]['buffer_count'],
buffer_values[0]['buffer_last_triggered']))
# Check if the user has reached the offset in the media we defined as the "watched" percent
# Don't trigger if state is buffer as some clients push the progress to the end when
# buffering on start.
if session['state'] != 'buffering':
progress_percent = helpers.get_percent(session['view_offset'], session['duration'])
notify_states = notification_handler.get_notify_state(session=session)
if (session['media_type'] == 'movie' and progress_percent >= jellypy.CONFIG.MOVIE_WATCHED_PERCENT or
session['media_type'] == 'episode' and progress_percent >= jellypy.CONFIG.TV_WATCHED_PERCENT or
session['media_type'] == 'track' and progress_percent >= jellypy.CONFIG.MUSIC_WATCHED_PERCENT) \
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
else:
# The user has stopped playing a stream
if stream['state'] != 'stopped':
logger.debug("Tautulli Monitor :: Session %s stopped." % stream['session_key'])
if not stream['stopped']:
# Set the stream stop time
stream['stopped'] = helpers.timestamp()
monitor_db.action('UPDATE sessions SET stopped = ?, state = ? '
'WHERE session_key = ? AND rating_key = ?',
[stream['stopped'], 'stopped', stream['session_key'], stream['rating_key']])
progress_percent = helpers.get_percent(stream['view_offset'], stream['duration'])
notify_states = notification_handler.get_notify_state(session=stream)
if (stream['media_type'] == 'movie' and progress_percent >= jellypy.CONFIG.MOVIE_WATCHED_PERCENT or
stream['media_type'] == 'episode' and progress_percent >= jellypy.CONFIG.TV_WATCHED_PERCENT or
stream['media_type'] == 'track' and progress_percent >= jellypy.CONFIG.MUSIC_WATCHED_PERCENT) \
and not any(d['notify_action'] == 'on_watched' for d in notify_states):
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_watched'})
jellypy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_stop'})
# Write the item history on playback stop
row_id = monitor_process.write_session_history(session=stream)
if row_id:
# If session is written to the database successfully, remove the session from the session table
logger.debug("Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
% (stream['session_key'], stream['rating_key']))
monitor_process.delete_session(row_id=row_id)
else:
stream['write_attempts'] += 1
if stream['write_attempts'] < jellypy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
"Will try again on the next pass. Write attempt %s."
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
monitor_process.increment_write_attempts(session_key=stream['session_key'])
else:
logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
"Removing session from the database. Write attempt %s."
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
logger.debug("Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
% (stream['session_key'], stream['rating_key']))
monitor_process.delete_session(session_key=stream['session_key'])
# Process the newly received session data
for session in media_container:
new_session = monitor_process.write_session(session)
if new_session:
logger.debug("Tautulli Monitor :: Session %s started by user %s (%s) with ratingKey %s (%s)%s."
% (str(session['session_key']), str(session['user_id']), session['username'],
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
else:
logger.debug("Tautulli Monitor :: Unable to read session list.")
def connect_server(log=True, startup=False):
if jellypy.CONFIG.PMS_IS_CLOUD:
if log:
logger.info("Tautulli Monitor :: Checking for Plex Cloud server status...")
plex_tv = plextv.PlexTV()
status = plex_tv.get_cloud_server_status()
if status is True:
logger.info("Tautulli Monitor :: Plex Cloud server is active.")
elif status is False:
if log:
logger.info("Tautulli Monitor :: Plex Cloud server is sleeping.")
else:
if log:
logger.error("Tautulli Monitor :: Failed to retrieve Plex Cloud server status.")
if not status and startup:
web_socket.on_disconnect()
else:
status = True
if status:
if log and not startup:
logger.info("Tautulli Monitor :: Attempting to reconnect Plex server...")
try:
web_socket.start_thread()
except Exception as e:
logger.error("Websocket :: Unable to open connection: %s." % e)
def check_server_updates():
with monitor_lock:
logger.info("Tautulli Monitor :: Checking for PMS updates...")
plex_tv = plextv.PlexTV()
download_info = plex_tv.get_plex_downloads()
if download_info:
logger.info("Tautulli Monitor :: Current PMS version: %s", jellypy.CONFIG.PMS_VERSION)
if download_info['update_available']:
logger.info("Tautulli Monitor :: PMS update available version: %s", download_info['version'])
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_pmsupdate', 'pms_download_info': download_info})
else:
logger.info("Tautulli Monitor :: No PMS update available.")

664 jellypy/activity_processor.py (new file)

@@ -0,0 +1,664 @@
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
from future.builtins import object
from collections import defaultdict
import json
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import libraries
import logger
import pmsconnect
import users
else:
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import pmsconnect
from jellypy import users
class ActivityProcessor(object):
def __init__(self):
self.db = database.MonitorDatabase()
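# ActivityProcessor persists the live 'sessions' temp table (write_session /
# set_session_state / delete_session) and, when playback stops, writes the
# finished play to the session_history, session_history_media_info, and
# session_history_metadata tables via write_session_history().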
def write_session(self, session=None, notify=True):
if session:
values = {'session_key': session.get('session_key', ''),
'session_id': session.get('session_id', ''),
'transcode_key': session.get('transcode_key', ''),
'section_id': session.get('section_id', ''),
'rating_key': session.get('rating_key', ''),
'media_type': session.get('media_type', ''),
'state': session.get('state', ''),
'user_id': session.get('user_id', ''),
'user': session.get('user', ''),
'machine_id': session.get('machine_id', ''),
'title': session.get('title', ''),
'parent_title': session.get('parent_title', ''),
'grandparent_title': session.get('grandparent_title', ''),
'original_title': session.get('original_title', ''),
'full_title': session.get('full_title', ''),
'media_index': session.get('media_index', ''),
'parent_media_index': session.get('parent_media_index', ''),
'thumb': session.get('thumb', ''),
'parent_thumb': session.get('parent_thumb', ''),
'grandparent_thumb': session.get('grandparent_thumb', ''),
'year': session.get('year', ''),
'friendly_name': session.get('friendly_name', ''),
'ip_address': session.get('ip_address', ''),
'bandwidth': session.get('bandwidth', 0),
'location': session.get('location', ''),
'player': session.get('player', ''),
'product': session.get('product', ''),
'platform': session.get('platform', ''),
'parent_rating_key': session.get('parent_rating_key', ''),
'grandparent_rating_key': session.get('grandparent_rating_key', ''),
'originally_available_at': session.get('originally_available_at', ''),
'added_at': session.get('added_at', ''),
'guid': session.get('guid', ''),
'view_offset': session.get('view_offset', ''),
'duration': session.get('duration', ''),
'video_decision': session.get('video_decision', ''),
'audio_decision': session.get('audio_decision', ''),
'transcode_decision': session.get('transcode_decision', ''),
'width': session.get('width', ''),
'height': session.get('height', ''),
'container': session.get('container', ''),
'bitrate': session.get('bitrate', ''),
'video_codec': session.get('video_codec', ''),
'video_bitrate': session.get('video_bitrate', ''),
'video_width': session.get('video_width', ''),
'video_height': session.get('video_height', ''),
'video_resolution': session.get('video_resolution', ''),
'video_framerate': session.get('video_framerate', ''),
'video_scan_type': session.get('video_scan_type', ''),
'video_full_resolution': session.get('video_full_resolution', ''),
'video_dynamic_range': session.get('video_dynamic_range', ''),
'aspect_ratio': session.get('aspect_ratio', ''),
'audio_codec': session.get('audio_codec', ''),
'audio_bitrate': session.get('audio_bitrate', ''),
'audio_channels': session.get('audio_channels', ''),
'subtitle_codec': session.get('subtitle_codec', ''),
'transcode_protocol': session.get('transcode_protocol', ''),
'transcode_container': session.get('transcode_container', ''),
'transcode_video_codec': session.get('transcode_video_codec', ''),
'transcode_audio_codec': session.get('transcode_audio_codec', ''),
'transcode_audio_channels': session.get('transcode_audio_channels', ''),
'transcode_width': session.get('stream_video_width', ''),
'transcode_height': session.get('stream_video_height', ''),
'transcode_hw_decoding': session.get('transcode_hw_decoding', ''),
'transcode_hw_encoding': session.get('transcode_hw_encoding', ''),
'synced_version': session.get('synced_version', ''),
'synced_version_profile': session.get('synced_version_profile', ''),
'optimized_version': session.get('optimized_version', ''),
'optimized_version_profile': session.get('optimized_version_profile', ''),
'optimized_version_title': session.get('optimized_version_title', ''),
'stream_bitrate': session.get('stream_bitrate', ''),
'stream_video_resolution': session.get('stream_video_resolution', ''),
'quality_profile': session.get('quality_profile', ''),
'stream_container_decision': session.get('stream_container_decision', ''),
'stream_container': session.get('stream_container', ''),
'stream_video_decision': session.get('stream_video_decision', ''),
'stream_video_codec': session.get('stream_video_codec', ''),
'stream_video_bitrate': session.get('stream_video_bitrate', ''),
'stream_video_width': session.get('stream_video_width', ''),
'stream_video_height': session.get('stream_video_height', ''),
'stream_video_framerate': session.get('stream_video_framerate', ''),
'stream_video_scan_type': session.get('stream_video_scan_type', ''),
'stream_video_full_resolution': session.get('stream_video_full_resolution', ''),
'stream_video_dynamic_range': session.get('stream_video_dynamic_range', ''),
'stream_audio_decision': session.get('stream_audio_decision', ''),
'stream_audio_codec': session.get('stream_audio_codec', ''),
'stream_audio_bitrate': session.get('stream_audio_bitrate', ''),
'stream_audio_channels': session.get('stream_audio_channels', ''),
'stream_subtitle_decision': session.get('stream_subtitle_decision', ''),
'stream_subtitle_codec': session.get('stream_subtitle_codec', ''),
'subtitles': session.get('subtitles', 0),
'live': session.get('live', 0),
'live_uuid': session.get('live_uuid', ''),
'secure': session.get('secure', None),
'relayed': session.get('relayed', 0),
'rating_key_websocket': session.get('rating_key_websocket', ''),
'raw_stream_info': json.dumps(session),
'channel_call_sign': session.get('channel_call_sign', ''),
'channel_identifier': session.get('channel_identifier', ''),
'channel_thumb': session.get('channel_thumb', ''),
'stopped': helpers.timestamp()
}
keys = {'session_key': session.get('session_key', ''),
'rating_key': session.get('rating_key', '')}
result = self.db.upsert('sessions', values, keys)
if result == 'insert':
# If it's our first write, then timestamp it.
started = helpers.timestamp()
initial_stream = self.is_initial_stream(user_id=values['user_id'],
machine_id=values['machine_id'],
media_type=values['media_type'],
started=started)
timestamp = {'started': started, 'initial_stream': initial_stream}
self.db.upsert('sessions', timestamp, keys)
# Check if any notification agents have notifications enabled
if notify:
session.update(timestamp)
jellypy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
# Add Live TV library if it hasn't been added
if values['live']:
libraries.add_live_tv_library()
return True
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
section_id = session['section_id'] if not is_import else import_metadata['section_id']
if not is_import:
user_data = users.Users()
user_details = user_data.get_details(user_id=session['user_id'])
library_data = libraries.Libraries()
library_details = library_data.get_details(section_id=section_id)
# Return false if failed to retrieve user or library details
if not user_details or not library_details:
return False
if session:
logging_enabled = False
# Reload json from raw stream info
if session.get('raw_stream_info'):
raw_stream_info = json.loads(session['raw_stream_info'])
# Don't overwrite id, session_key, stopped, view_offset
raw_stream_info.pop('id', None)
raw_stream_info.pop('session_key', None)
raw_stream_info.pop('stopped', None)
raw_stream_info.pop('view_offset', None)
session.update(raw_stream_info)
session = defaultdict(str, session)
if is_import:
if str(session['stopped']).isdigit():
stopped = int(session['stopped'])
else:
stopped = helpers.timestamp()
elif session['stopped']:
stopped = int(session['stopped'])
else:
stopped = helpers.timestamp()
self.set_session_state(session_key=session['session_key'],
state='stopped',
stopped=stopped)
if not is_import:
self.write_continued_session(user_id=session['user_id'],
machine_id=session['machine_id'],
media_type=session['media_type'],
stopped=stopped)
if str(session['rating_key']).isdigit() and session['media_type'] in ('movie', 'episode', 'track'):
logging_enabled = True
else:
logger.debug("Tautulli ActivityProcessor :: Session %s ratingKey %s not logged. "
"Does not meet logging criteria. Media type is '%s'" %
(session['session_key'], session['rating_key'], session['media_type']))
return session['id']
real_play_time = stopped - helpers.cast_to_int(session['started']) - helpers.cast_to_int(session['paused_counter'])
if not is_import and jellypy.CONFIG.LOGGING_IGNORE_INTERVAL:
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
(real_play_time < int(jellypy.CONFIG.LOGGING_IGNORE_INTERVAL)):
logging_enabled = False
logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs "
"which is less than %s seconds, so we're not logging it." %
(session['session_key'], session['rating_key'], str(real_play_time),
jellypy.CONFIG.LOGGING_IGNORE_INTERVAL))
if not is_import and session['media_type'] == 'track':
if real_play_time < 15 and helpers.cast_to_int(session['duration']) >= 30:
logging_enabled = False
logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs, "
"looks like it was skipped so we're not logging it" %
(session['session_key'], session['rating_key'], str(real_play_time)))
elif is_import and import_ignore_interval:
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
(real_play_time < int(import_ignore_interval)):
logging_enabled = False
logger.debug("Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
"seconds, so we're not logging it." %
(session['rating_key'], str(real_play_time), import_ignore_interval))
if not is_import and not user_details['keep_history']:
logging_enabled = False
logger.debug("Tautulli ActivityProcessor :: History logging for user '%s' is disabled." % user_details['username'])
elif not is_import and not library_details['keep_history']:
logging_enabled = False
logger.debug("Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name'])
if logging_enabled:
# Fetch metadata first so we can return false if it fails
if not is_import:
logger.debug("Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
pms_connect = pmsconnect.PmsConnect()
if session['live']:
metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']),
cache_key=session['session_key'],
return_cache=True)
else:
metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
if not metadata:
return False
else:
media_info = {}
if 'media_info' in metadata and len(metadata['media_info']) > 0:
media_info = metadata['media_info'][0]
else:
metadata = import_metadata
## TODO: Fix media info from imports. Temporary media info from import session.
media_info = session
# logger.debug("Tautulli ActivityProcessor :: Attempting to write sessionKey %s to session_history table..."
# % session['session_key'])
keys = {'id': None}
values = {'started': session['started'],
'stopped': stopped,
'rating_key': session['rating_key'],
'parent_rating_key': session['parent_rating_key'],
'grandparent_rating_key': session['grandparent_rating_key'],
'media_type': session['media_type'],
'user_id': session['user_id'],
'user': session['user'],
'ip_address': session['ip_address'],
'paused_counter': session['paused_counter'],
'player': session['player'],
'product': session['product'],
'product_version': session['product_version'],
'platform': session['platform'],
'platform_version': session['platform_version'],
'profile': session['profile'],
'machine_id': session['machine_id'],
'bandwidth': session['bandwidth'],
'location': session['location'],
'quality_profile': session['quality_profile'],
'view_offset': session['view_offset'],
'secure': session['secure'],
'relayed': session['relayed']
}
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
# % session['session_key'])
self.db.upsert(table_name='session_history', key_dict=keys, value_dict=values)
# Get the last insert row id
last_id = self.db.last_insert_id()
new_session = prev_session = None
prev_progress_percent = media_watched_percent = 0
if session['live']:
# Check if we should group the session, select the last guid from the user
query = 'SELECT session_history.id, session_history_metadata.guid, session_history.reference_id ' \
'FROM session_history ' \
'JOIN session_history_metadata ON session_history.id == session_history_metadata.id ' \
'WHERE session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 '
args = [session['user_id']]
result = self.db.select(query=query, args=args)
if len(result) > 0:
new_session = {'id': last_id,
'guid': metadata['guid'],
'reference_id': last_id}
prev_session = {'id': result[0]['id'],
'guid': result[0]['guid'],
'reference_id': result[0]['reference_id']}
else:
# Check if we should group the session, select the last two rows from the user
query = 'SELECT id, rating_key, view_offset, reference_id FROM session_history ' \
'WHERE user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 '
args = [session['user_id'], session['rating_key']]
result = self.db.select(query=query, args=args)
if len(result) > 1:
new_session = {'id': result[0]['id'],
'rating_key': result[0]['rating_key'],
'view_offset': result[0]['view_offset'],
'reference_id': result[0]['reference_id']}
prev_session = {'id': result[1]['id'],
'rating_key': result[1]['rating_key'],
'view_offset': result[1]['view_offset'],
'reference_id': result[1]['reference_id']}
watched_percent = {'movie': jellypy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': jellypy.CONFIG.TV_WATCHED_PERCENT,
'track': jellypy.CONFIG.MUSIC_WATCHED_PERCENT
}
prev_progress_percent = helpers.get_percent(prev_session['view_offset'], session['duration'])
media_watched_percent = watched_percent.get(session['media_type'], 0)
query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
# If previous session view offset less than watched percent,
# and new session view offset is greater,
# then set the reference_id to the previous row,
# else set the reference_id to the new id
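# Illustrative numbers (assuming MOVIE_WATCHED_PERCENT = 85): a previous row
# at 40% with a smaller view offset groups into the same reference_id as the
# new row; a previous row already at 90% starts a new group instead.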
if prev_session is None and new_session is None:
args = [last_id, last_id]
elif prev_progress_percent < media_watched_percent and \
prev_session['view_offset'] <= new_session['view_offset'] or \
session['live'] and prev_session['guid'] == new_session['guid']:
args = [prev_session['reference_id'], new_session['id']]
else:
args = [new_session['id'], new_session['id']]
self.db.action(query=query, args=args)
# logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
# % last_id)
# Write the session_history_media_info table
# logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_media_info table..."
# % session['session_key'])
keys = {'id': last_id}
values = {'rating_key': session['rating_key'],
'video_decision': session['video_decision'],
'audio_decision': session['audio_decision'],
'transcode_decision': session['transcode_decision'],
'duration': session['duration'],
'container': session['container'],
'bitrate': session['bitrate'],
'width': session['width'],
'height': session['height'],
'video_bit_depth': session['video_bit_depth'],
'video_bitrate': session['video_bitrate'],
'video_codec': session['video_codec'],
'video_codec_level': session['video_codec_level'],
'video_width': session['video_width'],
'video_height': session['video_height'],
'video_resolution': session['video_resolution'],
'video_framerate': session['video_framerate'],
'video_scan_type': session['video_scan_type'],
'video_full_resolution': session['video_full_resolution'],
'video_dynamic_range': session['video_dynamic_range'],
'aspect_ratio': session['aspect_ratio'],
'audio_codec': session['audio_codec'],
'audio_bitrate': session['audio_bitrate'],
'audio_channels': session['audio_channels'],
'subtitle_codec': session['subtitle_codec'],
'transcode_protocol': session['transcode_protocol'],
'transcode_container': session['transcode_container'],
'transcode_video_codec': session['transcode_video_codec'],
'transcode_audio_codec': session['transcode_audio_codec'],
'transcode_audio_channels': session['transcode_audio_channels'],
'transcode_width': session['transcode_width'],
'transcode_height': session['transcode_height'],
'transcode_hw_requested': session['transcode_hw_requested'],
'transcode_hw_full_pipeline': session['transcode_hw_full_pipeline'],
'transcode_hw_decoding': session['transcode_hw_decoding'],
'transcode_hw_decode': session['transcode_hw_decode'],
'transcode_hw_decode_title': session['transcode_hw_decode_title'],
'transcode_hw_encoding': session['transcode_hw_encoding'],
'transcode_hw_encode': session['transcode_hw_encode'],
'transcode_hw_encode_title': session['transcode_hw_encode_title'],
'stream_container': session['stream_container'],
'stream_container_decision': session['stream_container_decision'],
'stream_bitrate': session['stream_bitrate'],
'stream_video_decision': session['stream_video_decision'],
'stream_video_bitrate': session['stream_video_bitrate'],
'stream_video_codec': session['stream_video_codec'],
'stream_video_codec_level': session['stream_video_codec_level'],
'stream_video_bit_depth': session['stream_video_bit_depth'],
'stream_video_height': session['stream_video_height'],
'stream_video_width': session['stream_video_width'],
'stream_video_resolution': session['stream_video_resolution'],
'stream_video_framerate': session['stream_video_framerate'],
'stream_video_scan_type': session['stream_video_scan_type'],
'stream_video_full_resolution': session['stream_video_full_resolution'],
'stream_video_dynamic_range': session['stream_video_dynamic_range'],
'stream_audio_decision': session['stream_audio_decision'],
'stream_audio_codec': session['stream_audio_codec'],
'stream_audio_bitrate': session['stream_audio_bitrate'],
'stream_audio_channels': session['stream_audio_channels'],
'stream_subtitle_decision': session['stream_subtitle_decision'],
'stream_subtitle_codec': session['stream_subtitle_codec'],
'stream_subtitle_container': session['stream_subtitle_container'],
'stream_subtitle_forced': session['stream_subtitle_forced'],
'subtitles': session['subtitles'],
'synced_version': session['synced_version'],
'synced_version_profile': session['synced_version_profile'],
'optimized_version': session['optimized_version'],
'optimized_version_profile': session['optimized_version_profile'],
'optimized_version_title': session['optimized_version_title']
}
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
# % session['session_key'])
self.db.upsert(table_name='session_history_media_info', key_dict=keys, value_dict=values)
# Write the session_history_metadata table
directors = ";".join(metadata['directors'])
writers = ";".join(metadata['writers'])
actors = ";".join(metadata['actors'])
genres = ";".join(metadata['genres'])
labels = ";".join(metadata['labels'])
# logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
# % session['session_key'])
keys = {'id': last_id}
values = {'rating_key': session['rating_key'],
'parent_rating_key': session['parent_rating_key'],
'grandparent_rating_key': session['grandparent_rating_key'],
'title': session['title'],
'parent_title': session['parent_title'],
'grandparent_title': session['grandparent_title'],
'original_title': session['original_title'],
'full_title': session['full_title'],
'media_index': metadata['media_index'],
'parent_media_index': metadata['parent_media_index'],
'section_id': metadata['section_id'],
'thumb': metadata['thumb'],
'parent_thumb': metadata['parent_thumb'],
'grandparent_thumb': metadata['grandparent_thumb'],
'art': metadata['art'],
'media_type': session['media_type'],
'year': metadata['year'],
'originally_available_at': metadata['originally_available_at'],
'added_at': metadata['added_at'],
'updated_at': metadata['updated_at'],
'last_viewed_at': metadata['last_viewed_at'],
'content_rating': metadata['content_rating'],
'summary': metadata['summary'],
'tagline': metadata['tagline'],
'rating': metadata['rating'],
'duration': metadata['duration'],
'guid': metadata['guid'],
'directors': directors,
'writers': writers,
'actors': actors,
'genres': genres,
'studio': metadata['studio'],
'labels': labels,
'live': session['live'],
'channel_call_sign': media_info.get('channel_call_sign', ''),
'channel_identifier': media_info.get('channel_identifier', ''),
'channel_thumb': media_info.get('channel_thumb', '')
}
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
# % session['session_key'])
self.db.upsert(table_name='session_history_metadata', key_dict=keys, value_dict=values)
# Return the session row id when the session is successfully written to the database
return session['id']
def get_sessions(self, user_id=None, ip_address=None):
query = 'SELECT * FROM sessions'
args = []
if str(user_id).isdigit():
ip = ' GROUP BY ip_address' if ip_address else ''
query += ' WHERE user_id = ?' + ip
args.append(user_id)
sessions = self.db.select(query, args)
return sessions
def get_session_by_key(self, session_key=None):
if str(session_key).isdigit():
session = self.db.select_single('SELECT * FROM sessions '
'WHERE session_key = ? ',
args=[session_key])
if session:
return session
return None
def get_session_by_id(self, session_id=None):
if session_id:
session = self.db.select_single('SELECT * FROM sessions '
'WHERE session_id = ? ',
args=[session_id])
if session:
return session
return None
def set_session_state(self, session_key=None, state=None, **kwargs):
if str(session_key).isdigit():
values = {}
if state:
values['state'] = state
for k, v in kwargs.items():
values[k] = v
keys = {'session_key': session_key}
result = self.db.upsert('sessions', values, keys)
return result
return None
def delete_session(self, session_key=None, row_id=None):
if str(session_key).isdigit():
self.db.action('DELETE FROM sessions WHERE session_key = ?', [session_key])
elif str(row_id).isdigit():
self.db.action('DELETE FROM sessions WHERE id = ?', [row_id])
def set_session_last_paused(self, session_key=None, timestamp=None):
if str(session_key).isdigit():
result = self.db.select('SELECT last_paused, paused_counter '
'FROM sessions '
'WHERE session_key = ?', args=[session_key])
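# Accumulate paused time: if the session already has a last_paused timestamp,
# add the time elapsed since then to the running paused_counter before the new
# pause state is written below.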
paused_counter = None
for session in result:
if session['last_paused']:
paused_offset = helpers.timestamp() - int(session['last_paused'])
if session['paused_counter']:
paused_counter = int(session['paused_counter']) + int(paused_offset)
else:
paused_counter = int(paused_offset)
values = {'last_paused': timestamp}
if paused_counter:
values['paused_counter'] = paused_counter
keys = {'session_key': session_key}
self.db.upsert('sessions', values, keys)
def increment_session_buffer_count(self, session_key=None):
if str(session_key).isdigit():
self.db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
'WHERE session_key = ?',
[session_key])
def get_session_buffer_count(self, session_key=None):
if str(session_key).isdigit():
buffer_count = self.db.select_single('SELECT buffer_count '
'FROM sessions '
'WHERE session_key = ?',
[session_key])
if buffer_count:
return buffer_count['buffer_count']
return 0
def set_session_buffer_trigger_time(self, session_key=None):
if str(session_key).isdigit():
self.db.action('UPDATE sessions SET buffer_last_triggered = strftime("%s","now") '
'WHERE session_key = ?',
[session_key])
def get_session_buffer_trigger_time(self, session_key=None):
if str(session_key).isdigit():
last_time = self.db.select_single('SELECT buffer_last_triggered '
'FROM sessions '
'WHERE session_key = ?',
[session_key])
if last_time:
return last_time['buffer_last_triggered']
return None
def set_temp_stopped(self):
stopped_time = helpers.timestamp()
self.db.action('UPDATE sessions SET stopped = ?', [stopped_time])
def increment_write_attempts(self, session_key=None):
if str(session_key).isdigit():
session = self.get_session_by_key(session_key=session_key)
self.db.action('UPDATE sessions SET write_attempts = ? WHERE session_key = ?',
[session['write_attempts'] + 1, session_key])
def set_watched(self, session_key=None):
self.db.action('UPDATE sessions SET watched = ?'
'WHERE session_key = ?',
[1, session_key])
def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None):
keys = {'user_id': user_id, 'machine_id': machine_id, 'media_type': media_type}
values = {'stopped': stopped}
self.db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values)
def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None):
last_session = self.db.select_single('SELECT stopped '
'FROM sessions_continued '
'WHERE user_id = ? AND machine_id = ? AND media_type = ? '
'ORDER BY stopped DESC',
[user_id, machine_id, media_type])
return int(started - last_session.get('stopped', 0) >= jellypy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD)
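A minimal sketch of the comparison made in is_initial_stream() above, assuming a hypothetical threshold of 3600 seconds standing in for jellypy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD: a stream only counts as "initial" when it starts at least the threshold after the same user last stopped playback on the same device and media type.

# Editor's illustration, not part of the module; THRESHOLD is a hypothetical value.
THRESHOLD = 3600  # seconds

def is_initial(started, last_stopped):
    # 1 if the new stream started at least THRESHOLD seconds after the last stop
    return int(started - last_stopped >= THRESHOLD)

print(is_initial(started=1612540000, last_stopped=1612539000))  # gap 1000 s -> 0 (continued)
print(is_initial(started=1612545000, last_stopped=1612539000))  # gap 6000 s -> 1 (initial)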

835
jellypy/api2.py Normal file
View File

@@ -0,0 +1,835 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
from future.builtins import object
from hashing_passwords import check_hash
from io import open
import hashlib
import inspect
import json
import os
import random
import re
import time
import traceback
import cherrypy
import xmltodict
import jellypy
if jellypy.PYTHON2:
import common
import config
import database
import helpers
import libraries
import logger
import mobile_app
import notification_handler
import notifiers
import newsletter_handler
import newsletters
import plextv
import users
else:
from jellypy import common
from jellypy import config
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import mobile_app
from jellypy import notification_handler
from jellypy import notifiers
from jellypy import newsletter_handler
from jellypy import newsletters
from jellypy import plextv
from jellypy import users
class API2(object):
def __init__(self, **kwargs):
self._api_valid_methods = self._api_docs().keys()
self._api_authenticated = False
self._api_out_type = 'json' # default
self._api_msg = None
self._api_debug = None
self._api_cmd = None
self._api_apikey = None
self._api_callback = None # JSONP
self._api_result_type = 'error'
self._api_response_code = None
self._api_profileme = None # For profiling the api call
self._api_kwargs = None # Cleaned kwargs
self._api_app = False
def _api_docs(self, md=False):
""" Makes the api docs. """
docs = {}
for f, _ in inspect.getmembers(self, predicate=inspect.ismethod):
if not f.startswith('_') and not f.startswith('_api'):
if md is True:
docs[f] = inspect.getdoc(getattr(self, f)) if inspect.getdoc(getattr(self, f)) else None
else:
docs[f] = ' '.join(inspect.getdoc(getattr(self, f)).split()) if inspect.getdoc(getattr(self, f)) else None
return docs
def docs_md(self):
""" Return the api docs formatted with markdown."""
return self._api_make_md()
def docs(self):
""" Return the api docs as a dict where commands are keys, docstring are value."""
return self._api_docs()
def _api_validate(self, *args, **kwargs):
""" Sets class vars and remove unneeded parameters. """
if not jellypy.CONFIG.API_ENABLED:
self._api_msg = 'API not enabled'
self._api_response_code = 404
elif not jellypy.CONFIG.API_KEY:
self._api_msg = 'API key not generated'
self._api_response_code = 401
elif len(jellypy.CONFIG.API_KEY) != 32:
self._api_msg = 'API key not generated correctly'
self._api_response_code = 401
elif 'apikey' not in kwargs:
self._api_msg = 'Parameter apikey is required'
self._api_response_code = 401
elif 'cmd' not in kwargs:
self._api_msg = 'Parameter cmd is required. Possible commands are: %s' % ', '.join(self._api_valid_methods)
self._api_response_code = 400
elif 'cmd' in kwargs and kwargs.get('cmd') not in self._api_valid_methods:
self._api_msg = 'Unknown command: %s. Possible commands are: %s' % (kwargs.get('cmd', ''), ', '.join(sorted(self._api_valid_methods)))
self._api_response_code = 400
self._api_callback = kwargs.pop('callback', None)
self._api_apikey = kwargs.pop('apikey', None)
self._api_cmd = kwargs.pop('cmd', None)
self._api_debug = kwargs.pop('debug', False)
self._api_profileme = kwargs.pop('profileme', None)
# Allow override for the api.
self._api_out_type = kwargs.pop('out_type', 'json')
if 'app' in kwargs and helpers.bool_true(kwargs.pop('app')):
self._api_app = True
if jellypy.CONFIG.API_ENABLED and not self._api_msg or self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
if not self._api_app and self._api_apikey == jellypy.CONFIG.API_KEY:
self._api_authenticated = True
elif self._api_app and self._api_apikey == mobile_app.get_temp_device_token() and \
self._api_cmd == 'register_device':
self._api_authenticated = True
elif self._api_app and mobile_app.get_mobile_device_by_token(self._api_apikey):
mobile_app.set_last_seen(self._api_apikey)
self._api_authenticated = True
else:
self._api_msg = 'Invalid apikey'
self._api_response_code = 401
if self._api_authenticated and self._api_cmd in self._api_valid_methods:
self._api_msg = None
self._api_kwargs = kwargs
elif not self._api_authenticated and self._api_cmd in ('get_apikey', 'docs', 'docs_md'):
self._api_authenticated = True
# Remove the old error msg
self._api_msg = None
self._api_kwargs = kwargs
if self._api_msg:
logger.api_debug('Tautulli APIv2 :: %s.' % self._api_msg)
logger.api_debug('Tautulli APIv2 :: Cleaned kwargs: %s' % self._api_kwargs)
return self._api_kwargs
def get_logs(self, sort='', search='', order='desc', regex='', start=0, end=0, **kwargs):
"""
Get the Tautulli logs.
```
Required parameters:
None
Optional parameters:
sort (str): "time", "thread", "msg", "loglevel"
search (str): A string to search for
order (str): "desc" or "asc"
regex (str): A regex string to search for
start (int): Row number to start from
end (int): Row number to end at
Returns:
json:
[{"loglevel": "DEBUG",
"msg": "Latest version is 2d10b0748c7fa2ee4cf59960c3d3fffc6aa9512b",
"thread": "MainThread",
"time": "2016-05-08 09:36:51 "
},
{...},
{...}
]
```
"""
logfile = os.path.join(jellypy.CONFIG.LOG_DIR, logger.FILENAME)
templog = []
start = int(start)
end = int(end)
if regex:
logger.api_debug("Tautulli APIv2 :: Filtering log using regex '%s'" % regex)
reg = re.compile(regex, flags=re.I)
with open(logfile, 'r', encoding='utf-8') as f:
for line in f.readlines():
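# Expected log line format, inferred from the splits below:
#   '<time> - <LOGLEVEL> :: <ThreadName> : <message>'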
temp_loglevel_and_time = None
try:
temp_loglevel_and_time = line.split('- ')
loglvl = temp_loglevel_and_time[1].split(' :')[0].strip()
tl_tread = line.split(' :: ')
if loglvl is None:
msg = line.replace('\n', '')
else:
msg = line.split(' : ')[1].replace('\n', '')
thread = tl_tread[1].split(' : ')[0]
except IndexError:
# We assume this is a traceback
tl = (len(templog) - 1)
templog[tl]['msg'] += helpers.sanitize(line.replace('\n', ''))
continue
if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
d = {
'time': temp_loglevel_and_time[0],
'loglevel': loglvl,
'msg': helpers.sanitize(msg.replace('\n', '')),
'thread': thread
}
templog.append(d)
if order == 'desc':
templog = templog[::-1]
if end > 0 or start > 0:
logger.api_debug("Tautulli APIv2 :: Slicing the log from %s to %s" % (start, end))
templog = templog[start:end]
if sort:
logger.api_debug("Tautulli APIv2 :: Sorting log based on '%s'" % sort)
templog = sorted(templog, key=lambda k: k[sort])
if search:
logger.api_debug("Tautulli APIv2 :: Searching log values for '%s'" % search)
tt = [d for d in templog for k, v in d.items() if search.lower() in v.lower()]
if len(tt):
templog = tt
if regex:
tt = []
for l in templog:
stringdict = ' '.join('{}{}'.format(k, v) for k, v in l.items())
if reg.search(stringdict):
tt.append(l)
if len(tt):
templog = tt
return templog
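# Example request (hypothetical host, apikey, and filter), returning the newest
# 25 log entries matching a regex:
#   GET http://localhost:8181/api/v2?apikey=<apikey>&cmd=get_logs&regex=WebSocket&start=0&end=25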
def get_settings(self, key=''):
""" Gets all settings from the config file.
```
Required parameters:
None
Optional parameters:
key (str): Name of a config section to return
Returns:
json:
{"General": {"api_enabled": true, ...}
"Advanced": {"cache_sizemb": "32", ...},
...
}
```
"""
interface_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/')
interface_list = [name for name in os.listdir(interface_dir) if
os.path.isdir(os.path.join(interface_dir, name))]
conf = jellypy.CONFIG._config
config = {}
# Truthify the dict
for k, v in conf.items():
if isinstance(v, dict):
d = {}
for kk, vv in v.items():
if vv == '0' or vv == '1':
d[kk] = bool(vv)
else:
d[kk] = vv
config[k] = d
if k == 'General':
config[k]['interface'] = interface_dir
config[k]['interface_list'] = interface_list
if key:
return config.get(key)
return config
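# Example request (hypothetical host and apikey), returning only the General section:
#   GET http://localhost:8181/api/v2?apikey=<apikey>&cmd=get_settings&key=General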
def sql(self, query=''):
""" Query the Tautulli database with raw SQL. Automatically makes a backup of
the database if the latest backup is older than 24h. `api_sql` must be
manually enabled in the config file while Tautulli is shut down.
```
Required parameters:
query (str): The SQL query
Optional parameters:
None
Returns:
None
```
"""
if not jellypy.CONFIG.API_SQL:
self._api_msg = 'SQL not enabled for the API.'
return
if not query:
self._api_msg = 'No SQL query provided.'
return
# allow the user to shoot themselves
# in the foot, but not in the head.
if not len(os.listdir(jellypy.CONFIG.BACKUP_DIR)):
self.backup_db()
else:
# If there is no backup newer than 24 h, make one first
if not any(os.path.getctime(os.path.join(jellypy.CONFIG.BACKUP_DIR, file_)) > (time.time() - 86400)
and file_.endswith('.db') for file_ in os.listdir(jellypy.CONFIG.BACKUP_DIR)):
self.backup_db()
db = database.MonitorDatabase()
rows = db.select(query)
return rows
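# Example request (hypothetical host, apikey, and query; `api_sql` must be enabled
# in the config file as described in the docstring above):
#   GET http://localhost:8181/api/v2?apikey=<apikey>&cmd=sql&query=SELECT+*+FROM+sessions+LIMIT+10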
def backup_config(self):
""" Create a manual backup of the `config.ini` file."""
data = config.make_backup()
self._api_result_type = 'success' if data else 'error'
return data
def backup_db(self):
""" Create a manual backup of the `jellypy.db` file."""
data = database.make_backup()
self._api_result_type = 'success' if data else 'error'
return data
def restart(self, **kwargs):
""" Restart Tautulli."""
jellypy.SIGNAL = 'restart'
self._api_msg = 'Restarting Tautulli'
self._api_result_type = 'success'
def update(self, **kwargs):
""" Update Tautulli."""
jellypy.SIGNAL = 'update'
self._api_msg = 'Updating Tautulli'
self._api_result_type = 'success'
def refresh_libraries_list(self, **kwargs):
""" Refresh the Tautulli libraries list."""
data = libraries.refresh_libraries()
self._api_result_type = 'success' if data else 'error'
return data
def refresh_users_list(self, **kwargs):
""" Refresh the Tautulli users list."""
data = users.refresh_users()
self._api_result_type = 'success' if data else 'error'
return data
def register_device(self, device_id='', device_name='', friendly_name='', onesignal_id=None,
min_version='', **kwargs):
""" Registers the Tautulli Android App for notifications.
```
Required parameters:
device_id (str): The unique device identifier for the mobile device
device_name (str): The device name of the mobile device
Optional parameters:
friendly_name (str): A friendly name to identify the mobile device
onesignal_id (str): The OneSignal id for the mobile device
min_version (str): The minimum Tautulli version supported by the mobile device, e.g. v2.5.6
Returns:
json:
{"pms_identifier": "08u2phnlkdshf890bhdlksghnljsahgleikjfg9t",
"pms_ip": "10.10.10.1",
"pms_is_remote": 0,
"pms_name": "Winterfell-Server",
"pms_platform": "Windows",
"pms_plexpass": 1,
"pms_port": 32400,
"pms_ssl": 0,
"pms_url": "http://10.10.10.1:32400",
"pms_url_manual": 0,
"pms_version": "1.20.0.3133-fede5bdc7"
"server_id": "2ce060c87958445d8399a7a0c5663755",
"tautulli_install_type": "git",
"tautulli_branch": "master",
"tautulli_commit": "14b98a32e085d969f010f0249c3d2f660db50880",
"tautulli_platform": "Windows",
"tautulli_platform_device_name": "Winterfell-PC",
"tautulli_platform_linux_distro": "",
"tautulli_platform_release": "10",
"tautulli_platform_version": "10.0.18362",
"tautulli_python_version": "3.8.3"
"tautulli_version": "v2.5.6",
}
```
"""
if not device_id:
self._api_msg = 'Device registration failed: no device id provided.'
self._api_result_type = 'error'
return
elif not device_name:
self._api_msg = 'Device registration failed: no device name provided.'
self._api_result_type = 'error'
return
elif min_version and helpers.version_to_tuple(min_version) > helpers.version_to_tuple(common.RELEASE):
self._api_msg = 'Device registration failed: Tautulli version {} ' \
'does not meet the minimum requirement of {}.'.format(common.RELEASE, min_version)
self._api_result_type = 'error'
return
## TODO: Temporary for backwards compatibility, assume device_id is onesignal_id
if device_id and onesignal_id is None:
onesignal_id = device_id
result = mobile_app.add_mobile_device(device_id=device_id,
device_name=device_name,
device_token=self._api_apikey,
friendly_name=friendly_name,
onesignal_id=onesignal_id)
if result:
self._api_msg = 'Device registration successful.'
self._api_result_type = 'success'
mobile_app.set_temp_device_token(True)
plex_server = plextv.get_server_resources(return_info=True)
tautulli = jellypy.get_tautulli_info()
data = {"server_id": jellypy.CONFIG.PMS_UUID}
data.update(plex_server)
data.update(tautulli)
return data
else:
self._api_msg = 'Device registration failed: database error.'
self._api_result_type = 'error'
return
def notify(self, notifier_id='', subject='', body='', **kwargs):
""" Send a notification using Tautulli.
```
Required parameters:
notifier_id (int): The ID number of the notification agent
subject (str): The subject of the message
body (str): The body of the message
Optional parameters:
headers (str): The JSON headers for webhook notifications
script_args (str): The arguments for script notifications
Returns:
None
```
"""
if not notifier_id:
self._api_msg = 'Notification failed: no notifier id provided.'
self._api_result_type = 'error'
return
notifier = notifiers.get_notifier_config(notifier_id=notifier_id)
if not notifier:
self._api_msg = 'Notification failed: invalid notifier_id provided %s.' % notifier_id
self._api_result_type = 'error'
return
logger.api_debug('Tautulli APIv2 :: Sending notification.')
success = notification_handler.notify(notifier_id=notifier_id,
notify_action='api',
subject=subject,
body=body,
**kwargs)
if success:
self._api_msg = 'Notification sent.'
self._api_result_type = 'success'
else:
self._api_msg = 'Notification failed.'
self._api_result_type = 'error'
return
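# Example request (hypothetical host, apikey, and notifier_id):
#   GET http://localhost:8181/api/v2?apikey=<apikey>&cmd=notify&notifier_id=1&subject=Tautulli&body=Test%20notification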
def notify_newsletter(self, newsletter_id='', subject='', body='', message='', **kwargs):
""" Send a newsletter using Tautulli.
```
Required parameters:
newsletter_id (int): The ID number of the newsletter agent
Optional parameters:
subject (str): The subject of the newsletter
body (str): The body of the newsletter
message (str): The message of the newsletter
Returns:
None
```
"""
if not newsletter_id:
self._api_msg = 'Newsletter failed: no newsletter id provided.'
self._api_result_type = 'error'
return
newsletter = newsletters.get_newsletter_config(newsletter_id=newsletter_id)
if not newsletter:
self._api_msg = 'Newsletter failed: invalid newsletter_id provided %s.' % newsletter_id
self._api_result_type = 'error'
return
logger.api_debug('Tautulli APIv2 :: Sending newsletter.')
success = newsletter_handler.notify(newsletter_id=newsletter_id,
notify_action='api',
subject=subject,
body=body,
message=message,
**kwargs)
if success:
self._api_msg = 'Newsletter sent.'
self._api_result_type = 'success'
else:
self._api_msg = 'Newsletter failed.'
self._api_result_type = 'error'
return
def _api_make_md(self):
""" Tries to make a API.md to simplify the api docs. """
head = '''## General structure
The API endpoint is
```
http://IP_ADDRESS:PORT + [/HTTP_ROOT] + /api/v2?apikey=$apikey&cmd=$command
```
Example:
```
http://localhost:8181/api/v2?apikey=66198313a092496b8a725867d2223b5f&cmd=get_metadata&rating_key=153037
```
Response example (default `json`)
```
{
"response": {
"data": [
{
"loglevel": "INFO",
"msg": "Signal 2 caught, saving and exiting...",
"thread": "MainThread",
"time": "22-sep-2015 01:42:56 "
}
],
"message": null,
"result": "success"
}
}
```
```
General optional parameters:
out_type: "json" or "xml"
callback: "pong"
debug: 1
```
## API methods'''
body = ''
doc = self._api_docs(md=True)
for k in sorted(doc):
v = doc.get(k)
body += '### %s\n' % k
body += '' if not v else v + '\n'
body += '\n\n'
result = head + '\n\n' + body
return '<pre>' + result + '</pre>'
def get_apikey(self, username='', password=''):
""" Get the apikey. Username and password are required
if auth is enabled. Makes and saves the apikey if it does not exist.
```
Required parameters:
None
Optional parameters:
username (str): Your Tautulli username
password (str): Your Tautulli password
Returns:
string: "apikey"
```
"""
data = None
apikey = hashlib.sha224(str(random.getrandbits(256)).encode('utf-8')).hexdigest()[0:32]
if jellypy.CONFIG.HTTP_USERNAME and jellypy.CONFIG.HTTP_PASSWORD:
authenticated = False
if jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and check_hash(password, jellypy.CONFIG.HTTP_PASSWORD):
authenticated = True
elif not jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and password == jellypy.CONFIG.HTTP_PASSWORD:
authenticated = True
if authenticated:
if jellypy.CONFIG.API_KEY:
data = jellypy.CONFIG.API_KEY
else:
data = apikey
jellypy.CONFIG.API_KEY = apikey
jellypy.CONFIG.write()
else:
self._api_msg = 'Authentication is enabled, please provide the correct username and password parameters'
else:
if jellypy.CONFIG.API_KEY:
data = jellypy.CONFIG.API_KEY
else:
# Make an apikey if one doesn't exist
data = apikey
jellypy.CONFIG.API_KEY = apikey
jellypy.CONFIG.write()
return data
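# Example request (hypothetical host and credentials; username/password are only
# needed when HTTP authentication is enabled):
#   GET http://localhost:8181/api/v2?cmd=get_apikey&username=<username>&password=<password>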
def _api_responds(self, result_type='error', data=None, msg=''):
""" Formats the result to a predefined dict so we can change it the to
the desired output by _api_out_as """
if data is None:
data = {}
return {"response": {"result": result_type, "message": msg, "data": data}}
def _api_out_as(self, out):
""" Formats the response to the desired output """
if self._api_cmd == 'docs_md':
return out['response']['data']
elif self._api_cmd and self._api_cmd.startswith('download_'):
return out['response']['data']
elif self._api_cmd == 'pms_image_proxy':
if 'return_hash' not in self._api_kwargs:
cherrypy.response.headers['Content-Type'] = 'image/jpeg'
return out['response']['data']
elif self._api_cmd == 'get_geoip_lookup':
# Remove nested data and put error message inside data for backwards compatibility
out['response']['data'] = out['response']['data'].get('data')
if not out['response']['data']:
out['response']['data'] = {'error': out['response']['message']}
if self._api_out_type == 'json':
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
try:
if self._api_debug:
out = json.dumps(out, indent=4, sort_keys=True, ensure_ascii=False)
else:
out = json.dumps(out, ensure_ascii=False)
if self._api_callback is not None:
cherrypy.response.headers['Content-Type'] = 'application/javascript'
# wrap with JSONP call if requested
out = self._api_callback + '(' + out + ');'
# If we fail to generate the output, fake an error
except Exception as e:
logger.api_exception('Tautulli APIv2 :: ' + traceback.format_exc())
self._api_response_code = 500
out['message'] = traceback.format_exc()
out['result'] = 'error'
elif self._api_out_type == 'xml':
cherrypy.response.headers['Content-Type'] = 'application/xml;charset=UTF-8'
try:
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.api_error('Tautulli APIv2 :: Failed to parse xml result')
self._api_response_code = 500
try:
out['message'] = e
out['result'] = 'error'
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.api_error('Tautulli APIv2 :: Failed to parse xml result error message %s' % e)
out = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<message>%s</message>
<data></data>
<result>error</result>
</response>
''' % e
return out.encode('utf-8')
def _api_run(self, *args, **kwargs):
""" handles the stuff from the handler """
# Make sure the device ID is not shown in the logs
if kwargs.get('cmd') == 'register_device':
if kwargs.get('device_id'):
logger._BLACKLIST_WORDS.add(kwargs['device_id'])
if kwargs.get('onesignal_id'):
logger._BLACKLIST_WORDS.add(kwargs['onesignal_id'])
elif kwargs.get('cmd') == 'get_apikey':
if kwargs.get('password'):
logger._BLACKLIST_WORDS.add(kwargs['password'])
result = {}
logger.api_debug('Tautulli APIv2 :: API called with kwargs: %s' % kwargs)
self._api_validate(**kwargs)
if self._api_cmd and self._api_authenticated:
call = getattr(self, self._api_cmd)
# Profile is written to console.
if self._api_profileme:
from profilehooks import profile
call = profile(call, immediate=True)
# We allow this to fail so we get a
# traceback in the browser
try:
result = call(**self._api_kwargs)
except Exception as e:
logger.api_error('Tautulli APIv2 :: Failed to run %s with %s: %s' % (self._api_cmd, self._api_kwargs, e))
self._api_response_code = 500
if self._api_debug:
cherrypy.request.show_tracebacks = True
# Reraise the exception so the traceback hits the browser
raise
self._api_msg = 'Check the logs for errors'
ret = None
# The api decorated function can return different result types.
# Convert it to a list/dict before we change it to the user's
# requested output format.
try:
if isinstance(result, (dict, list)):
ret = result
else:
raise Exception
except Exception:
try:
ret = json.loads(result)
except (ValueError, TypeError):
try:
ret = xmltodict.parse(result, attr_prefix='')
except:
pass
# Fallback if we can't parse the response
if ret is None:
ret = result
if (ret is not None or self._api_result_type == 'success') and self._api_authenticated:
# To allow override for restart etc.
# If the call returns some data, assume it was a success.
self._api_result_type = 'success'
self._api_response_code = 200
# Since some of the methods use an API-like response for the UI,
# e.g. {result: error, message: 'Some shit happened'}
if isinstance(ret, dict):
if ret.get('message'):
self._api_msg = ret.pop('message', None)
if ret.get('result'):
self._api_result_type = ret.pop('result', None)
if self._api_result_type == 'success' and not self._api_response_code:
self._api_response_code = 200
elif self._api_result_type == 'error' and not self._api_response_code:
self._api_response_code = 400
if not self._api_response_code:
self._api_response_code = 500
cherrypy.response.status = self._api_response_code
return self._api_out_as(self._api_responds(result_type=self._api_result_type, msg=self._api_msg, data=ret))
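A minimal sketch of consuming this API from an external script, assuming the third-party requests package and a hypothetical host and apikey; the envelope it unwraps ({"response": {"result", "message", "data"}}) is the shape built by _api_responds() above.

import requests

BASE_URL = 'http://localhost:8181/api/v2'  # hypothetical host and port
API_KEY = 'your_api_key_here'              # hypothetical apikey

def api_call(cmd, **params):
    # Every command goes through the same endpoint; cmd selects the API2 method.
    params.update({'apikey': API_KEY, 'cmd': cmd})
    r = requests.get(BASE_URL, params=params, timeout=10)
    r.raise_for_status()
    envelope = r.json()['response']
    if envelope['result'] != 'success':
        raise RuntimeError(envelope['message'])
    return envelope['data']

if __name__ == '__main__':
    # e.g. print the 10 newest log lines
    for entry in api_call('get_logs', start=0, end=10):
        print(entry['time'], entry['loglevel'], entry['msg'])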

75
jellypy/classes.py Normal file
View File

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
#########################################
## Stolen from Sick-Beard's classes.py ##
#########################################
from __future__ import unicode_literals
from future.moves.urllib.request import FancyURLopener
import jellypy
if jellypy.PYTHON2:
from common import USER_AGENT
else:
from jellypy.common import USER_AGENT
class PlexPyURLopener(FancyURLopener):
version = USER_AGENT
class AuthURLOpener(PlexPyURLopener):
"""
URLOpener class that supports http auth without needing interactive password entry.
If the provided username/password don't work it simply fails.
user: username to use for HTTP auth
pw: password to use for HTTP auth
"""
def __init__(self, user, pw):
self.username = user
self.password = pw
# remember if we've tried the username/password before
self.numTries = 0
# call the base class
FancyURLopener.__init__(self)
def prompt_user_passwd(self, host, realm):
"""
Override this function and instead of prompting just give the
username/password that were provided when the class was instantiated.
"""
# if this is the first try then provide a username/password
if self.numTries == 0:
self.numTries = 1
return (self.username, self.password)
# if we've tried before then return blank which cancels the request
else:
return ('', '')
# this is pretty much just a hack for convenience
def openit(self, url):
self.numTries = 0
return PlexPyURLopener.open(self, url)
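A minimal usage sketch for the opener above, with a hypothetical URL and credentials; openit() resets the retry counter so the stored username/password are offered exactly once per request.

# Editor's illustration (hypothetical URL and credentials); guarded so it only runs
# when executed directly, not on import.
if __name__ == '__main__':
    opener = AuthURLOpener('myuser', 'mypassword')
    handle = opener.openit('http://example.com/protected/resource')
    print(handle.read())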

648
jellypy/common.py Normal file
View File

@@ -0,0 +1,648 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import distro
import platform
from collections import OrderedDict
import jellypy
if jellypy.PYTHON2:
import version
else:
from jellypy import version
# Identify Our Application
PRODUCT = 'Tautulli'
PLATFORM = platform.system()
PLATFORM_RELEASE = platform.release()
PLATFORM_VERSION = platform.version()
PLATFORM_LINUX_DISTRO = ' '.join(x for x in distro.linux_distribution() if x)
PLATFORM_DEVICE_NAME = platform.node()
PYTHON_VERSION = platform.python_version()
BRANCH = version.PLEXPY_BRANCH
RELEASE = version.PLEXPY_RELEASE_VERSION
USER_AGENT = '{}/{} ({} {})'.format(PRODUCT, RELEASE, PLATFORM, PLATFORM_RELEASE)
DEFAULT_USER_THUMB = "interfaces/default/images/gravatar-default-80x80.png"
DEFAULT_POSTER_THUMB = "interfaces/default/images/poster.png"
DEFAULT_COVER_THUMB = "interfaces/default/images/cover.png"
DEFAULT_ART = "interfaces/default/images/art.png"
DEFAULT_LIVE_TV_POSTER_THUMB = "interfaces/default/images/poster-live.png"
DEFAULT_LIVE_TV_ART = "interfaces/default/images/art-live.png"
DEFAULT_LIVE_TV_ART_FULL = "interfaces/default/images/art-live-full.png"
DEFAULT_LIVE_TV_THUMB = "interfaces/default/images/libraries/live.png"
ONLINE_POSTER_THUMB = "https://tautulli.com/images/poster.png"
ONLINE_COVER_THUMB = "https://tautulli.com/images/cover.png"
ONLINE_ART = "https://tautulli.com/images/art.png"
LIVE_TV_SECTION_ID = 999999 # Fake section_id for Live TV library
LIVE_TV_SECTION_NAME = "Live TV" # Fake section_name for Live TV library
DEFAULT_IMAGES = {
'poster': DEFAULT_POSTER_THUMB,
'cover': DEFAULT_COVER_THUMB,
'art': DEFAULT_ART,
'poster-live': DEFAULT_LIVE_TV_POSTER_THUMB,
'art-live': DEFAULT_LIVE_TV_ART,
'art-live-full': DEFAULT_LIVE_TV_ART_FULL
}
MEDIA_TYPE_HEADERS = {
'movie': 'Movies',
'show': 'TV Shows',
'season': 'Seasons',
'episode': 'Episodes',
'artist': 'Artists',
'album': 'Albums',
'track': 'Tracks',
'video': 'Videos',
'audio': 'Tracks',
'photo': 'Photos'
}
PLATFORM_NAME_OVERRIDES = {
'Konvergo': 'Plex Media Player',
'Mystery 3': 'Playstation 3',
'Mystery 4': 'Playstation 4',
'Mystery 5': 'Xbox 360',
'WebMAF': 'Playstation 4',
'windows': 'Windows',
'osx': 'macOS'
}
PMS_PLATFORM_NAME_OVERRIDES = {
'MacOSX': 'Mac'
}
PLATFORM_NAMES = {
'android': 'android',
'apple tv': 'atv',
'chrome': 'chrome',
'chromecast': 'chromecast',
'dlna': 'dlna',
'firefox': 'firefox',
'internet explorer': 'ie',
'ios': 'ios',
'ipad': 'ios',
'iphone': 'ios',
'kodi': 'kodi',
'linux': 'linux',
'nexus': 'android',
'macos': 'macos',
'microsoft edge': 'msedge',
'netcast': 'lg',
'opera': 'opera',
'osx': 'macos',
'playstation': 'playstation',
'plex home theater': 'plex',
'plex media player': 'plex',
'plexamp': 'plexamp',
'plextogether': 'synclounge',
'roku': 'roku',
'safari': 'safari',
'samsung': 'samsung',
'synclounge': 'synclounge',
'tivo': 'tivo',
'tizen': 'samsung',
'tvos': 'atv',
'vizio': 'opera',
'webos': 'lg',
'wiiu': 'wiiu',
'windows': 'windows',
'windows phone': 'wp',
'xbmc': 'xbmc',
'xbox': 'xbox'
}
PLATFORM_NAMES = OrderedDict(sorted(list(PLATFORM_NAMES.items()), key=lambda k: k[0], reverse=True))
MEDIA_FLAGS_AUDIO = {
'ac.?3': 'dolby_digital',
'truehd': 'dolby_truehd',
'(dca|dta)': 'dts',
'dts(hd_|-hd|-)?ma': 'dca-ma',
'vorbis': 'ogg'
}
MEDIA_FLAGS_VIDEO = {
'avc1': 'h264',
'wmv(1|2)': 'wmv',
'wmv3': 'wmvhd'
}
AUDIO_CODEC_OVERRIDES = {
'truehd': 'TrueHD'
}
VIDEO_RESOLUTION_OVERRIDES = {
'sd': 'SD',
'4k': '4k'
}
AUDIO_CHANNELS = {
'1': 'Mono',
'2': 'Stereo',
'3': '2.1',
'4': '3.1',
'6': '5.1',
'7': '6.1',
'8': '7.1'
}
VIDEO_QUALITY_PROFILES = {
20000: '20 Mbps 1080p',
12000: '12 Mbps 1080p',
10000: '10 Mbps 1080p',
8000: '8 Mbps 1080p',
4000: '4 Mbps 720p',
3000: '3 Mbps 720p',
2000: '2 Mbps 720p',
1500: '1.5 Mbps 480p',
720: '0.7 Mbps 328p',
320: '0.3 Mbps 240p',
208: '0.2 Mbps 160p',
96: '0.096 Mbps',
64: '0.064 Mbps'
}
VIDEO_QUALITY_PROFILES = OrderedDict(sorted(list(VIDEO_QUALITY_PROFILES.items()), key=lambda k: k[0], reverse=True))
AUDIO_QUALITY_PROFILES = {
512: '512 kbps',
320: '320 kbps',
256: '256 kbps',
192: '192 kbps',
128: '128 kbps',
96: '96 kbps'
}
AUDIO_QUALITY_PROFILES = OrderedDict(sorted(list(AUDIO_QUALITY_PROFILES.items()), key=lambda k: k[0], reverse=True))
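# Editor's illustration, not part of the original module: because the profile maps
# above are ordered from highest to lowest bitrate, a stream bitrate can be labelled
# by taking the first threshold it meets. Tautulli's own helpers may do this
# differently; this is only a sketch.
def _example_video_quality_profile(bitrate_kbps):
    for threshold, label in VIDEO_QUALITY_PROFILES.items():
        if bitrate_kbps >= threshold:
            return label
    return VIDEO_QUALITY_PROFILES[64]  # below the smallest threshold

# _example_video_quality_profile(4500) -> '4 Mbps 720p'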
HW_DECODERS = [
'dxva2',
'videotoolbox',
'mediacodecndk',
'vaapi',
'nvdec'
]
HW_ENCODERS = [
'qsv',
'mf',
'videotoolbox',
'mediacodecndk',
'vaapi',
'nvenc',
'x264'
]
EXTRA_TYPES = {
'1': 'Trailer',
'2': 'Deleted Scene',
'3': 'Interview',
'5': 'Behind the Scenes',
'6': 'Scene',
'10': 'Featurette',
'11': 'Short'
}
SCHEDULER_LIST = [
('Check GitHub for updates', 'websocket'),
('Check for server response', 'websocket'),
('Check for active sessions', 'websocket'),
('Check for recently added items', 'websocket'),
('Check for server remote access', 'websocket'),
('Check for Plex updates', 'scheduled'),
('Refresh users list', 'scheduled'),
('Refresh libraries list', 'scheduled'),
('Refresh Plex server URLs', 'scheduled'),
('Optimize Tautulli database', 'scheduled'),
('Backup Tautulli database', 'scheduled'),
('Backup Tautulli config', 'scheduled')
]
SCHEDULER_LIST = OrderedDict(SCHEDULER_LIST)
DATE_TIME_FORMATS = [
{
'category': 'Year',
'parameters': [
{'value': 'YYYY', 'description': 'Numeric, four digits', 'example': '1999, 2003'},
{'value': 'YY', 'description': 'Numeric, two digits', 'example': '99, 03'}
]
},
{
'category': 'Month',
'parameters': [
{'value': 'MMMM', 'description': 'Textual, full', 'example': 'January-December'},
{'value': 'MMM', 'description': 'Textual, three letters', 'example': 'Jan-Dec'},
{'value': 'MM', 'description': 'Numeric, with leading zeros', 'example': '01-12'},
{'value': 'M', 'description': 'Numeric, without leading zeros', 'example': '1-12'},
{'value': 'Mo', 'description': 'Numeric, with suffix', 'example': '1st, 2nd ... 12th'},
]
},
{
'category': 'Day of the Year',
'parameters': [
{'value': 'DDDD', 'description': 'Numeric, with leading zeros', 'example': '001-365'},
{'value': 'DDD', 'description': 'Numeric, without leading zeros', 'example': '1-365'},
{'value': 'DDDo', 'description': 'Numeric, with suffix', 'example': '1st, 2nd, ... 365th'},
]
},
{
'category': 'Day of the Month',
'parameters': [
{'value': 'DD', 'description': 'Numeric, with leading zeros', 'example': '01-31'},
{'value': 'D', 'description': 'Numeric, without leading zeros', 'example': '1-31'},
{'value': 'Do', 'description': 'Numeric, with suffix', 'example': '1st, 2nd ... 31st'},
]
},
{
'category': 'Day of the Week',
'parameters': [
{'value': 'dddd', 'description': 'Textual, full', 'example': 'Sunday-Saturday'},
{'value': 'ddd', 'description': 'Textual, three letters', 'example': 'Sun-Sat'},
{'value': 'dd', 'description': 'Textual, two letters', 'example': 'Su-Sa'},
{'value': 'd', 'description': 'Numeric', 'example': '0-6'},
{'value': 'do', 'description': 'Numeric, with suffix', 'example': '0th, 1st ... 6th'},
]
},
{
'category': 'Hour',
'parameters': [
{'value': 'HH', 'description': '24-hour, with leading zeros', 'example': '00-23'},
{'value': 'H', 'description': '24-hour, without leading zeros', 'example': '0-23'},
{'value': 'hh', 'description': '12-hour, with leading zeros', 'example': '01-12'},
{'value': 'h', 'description': '12-hour, without leading zeros', 'example': '1-12'},
]
},
{
'category': 'Minute',
'parameters': [
{'value': 'mm', 'description': 'Numeric, with leading zeros', 'example': '00-59'},
{'value': 'm', 'description': 'Numeric, without leading zeros', 'example': '0-59'},
]
},
{
'category': 'Second',
'parameters': [
{'value': 'ss', 'description': 'Numeric, with leading zeros', 'example': '00-59'},
{'value': 's', 'description': 'Numeric, without leading zeros', 'example': '0-59'},
]
},
{
'category': 'AM / PM',
'parameters': [
{'value': 'A', 'description': 'AM/PM uppercase', 'example': 'AM, PM'},
{'value': 'a', 'description': 'am/pm lowercase', 'example': 'am, pm'},
]
},
{
'category': 'Timezone',
'parameters': [
{'value': 'ZZ', 'description': 'UTC offset', 'example': '+0100, -0700'},
{'value': 'Z', 'description': 'UTC offset', 'example': '+01:00, -07:00'},
]
},
{
'category': 'Timestamp',
'parameters': [
{'value': 'X', 'description': 'Unix timestamp', 'example': 'E.g. 1456887825'},
]
},
]
NOTIFICATION_PARAMETERS = [
{
'category': 'Global',
'parameters': [
{'name': 'Tautulli Version', 'type': 'str', 'value': 'tautulli_version', 'description': 'The current version of Tautulli.'},
{'name': 'Tautulli Remote', 'type': 'str', 'value': 'tautulli_remote', 'description': 'The current git remote of Tautulli.'},
{'name': 'Tautulli Branch', 'type': 'str', 'value': 'tautulli_branch', 'description': 'The current git branch of Tautulli.'},
{'name': 'Tautulli Commit', 'type': 'str', 'value': 'tautulli_commit', 'description': 'The current git commit hash of Tautulli.'},
{'name': 'Server Name', 'type': 'str', 'value': 'server_name', 'description': 'The name of your Plex Server.'},
{'name': 'Server IP', 'type': 'str', 'value': 'server_ip', 'description': 'The connection IP address for your Plex Server.'},
{'name': 'Server Port', 'type': 'int', 'value': 'server_port', 'description': 'The connection port for your Plex Server.'},
{'name': 'Server URL', 'type': 'str', 'value': 'server_url', 'description': 'The connection URL for your Plex Server.'},
{'name': 'Server Platform', 'type': 'str', 'value': 'server_platform', 'description': 'The platform of your Plex Server.'},
{'name': 'Server Version', 'type': 'str', 'value': 'server_version', 'description': 'The current version of your Plex Server.'},
{'name': 'Server ID', 'type': 'str', 'value': 'server_machine_id', 'description': 'The unique identifier for your Plex Server.'},
{'name': 'Action', 'type': 'str', 'value': 'action', 'description': 'The action that triggered the notification.'},
{'name': 'Current Year', 'type': 'int', 'value': 'current_year', 'description': 'The year when the notification is triggered.'},
{'name': 'Current Month', 'type': 'int', 'value': 'current_month', 'description': 'The month when the notification is triggered.', 'example': '1 to 12'},
{'name': 'Current Day', 'type': 'int', 'value': 'current_day', 'description': 'The day when the notification is triggered.', 'example': '1 to 31'},
{'name': 'Current Hour', 'type': 'int', 'value': 'current_hour', 'description': 'The hour when the notification is triggered.', 'example': '0 to 23'},
{'name': 'Current Minute', 'type': 'int', 'value': 'current_minute', 'description': 'The minute when the notification is triggered.', 'example': '0 to 59'},
{'name': 'Current Second', 'type': 'int', 'value': 'current_second', 'description': 'The second when the notification is triggered.', 'example': '0 to 59'},
{'name': 'Current Weekday', 'type': 'int', 'value': 'current_weekday', 'description': 'The ISO weekday when the notification is triggered.', 'example': '1 (Mon) to 7 (Sun)'},
{'name': 'Current Week', 'type': 'int', 'value': 'current_week', 'description': 'The ISO week number when the notification is triggered.', 'example': '1 to 52'},
{'name': 'Datestamp', 'type': 'str', 'value': 'datestamp', 'description': 'The date (in date format) when the notification is triggered.'},
{'name': 'Timestamp', 'type': 'str', 'value': 'timestamp', 'description': 'The time (in time format) when the notification is triggered.'},
{'name': 'Unix Time', 'type': 'int', 'value': 'unixtime', 'description': 'The unix timestamp when the notification is triggered.'},
{'name': 'UTC Time', 'type': 'int', 'value': 'utctime', 'description': 'The UTC timestamp in ISO format when the notification is triggered.'},
]
},
{
'category': 'Stream Details',
'parameters': [
{'name': 'Streams', 'type': 'int', 'value': 'streams', 'description': 'The total number of concurrent streams.'},
{'name': 'Direct Plays', 'type': 'int', 'value': 'direct_plays', 'description': 'The total number of concurrent direct plays.'},
{'name': 'Direct Streams', 'type': 'int', 'value': 'direct_streams', 'description': 'The total number of concurrent direct streams.'},
{'name': 'Transcodes', 'type': 'int', 'value': 'transcodes', 'description': 'The total number of concurrent transcodes.'},
{'name': 'Total Bandwidth', 'type': 'int', 'value': 'total_bandwidth', 'description': 'The total Plex Streaming Brain reserved bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
{'name': 'LAN Bandwidth', 'type': 'int', 'value': 'lan_bandwidth', 'description': 'The total Plex Streaming Brain reserved LAN bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
{'name': 'WAN Bandwidth', 'type': 'int', 'value': 'wan_bandwidth', 'description': 'The total Plex Streaming Brain reserved WAN bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
{'name': 'User Streams', 'type': 'int', 'value': 'user_streams', 'description': 'The number of concurrent streams by the user streaming.'},
{'name': 'User Direct Plays', 'type': 'int', 'value': 'user_direct_plays', 'description': 'The number of concurrent direct plays by the user streaming.'},
{'name': 'User Direct Streams', 'type': 'int', 'value': 'user_direct_streams', 'description': 'The number of concurrent direct streams by the user streaming.'},
{'name': 'User Transcodes', 'type': 'int', 'value': 'user_transcodes', 'description': 'The number of concurrent transcodes by the user streaming.'},
{'name': 'User', 'type': 'str', 'value': 'user', 'description': 'The friendly name of the user streaming.'},
{'name': 'Username', 'type': 'str', 'value': 'username', 'description': 'The username of the user streaming.'},
{'name': 'User Email', 'type': 'str', 'value': 'user_email', 'description': 'The email address of the user streaming.'},
{'name': 'User Thumb', 'type': 'str', 'value': 'user_thumb', 'description': 'The profile picture URL of the user streaming.'},
{'name': 'Device', 'type': 'str', 'value': 'device', 'description': 'The type of client device being used for playback.'},
{'name': 'Platform', 'type': 'str', 'value': 'platform', 'description': 'The type of client platform being used for playback.'},
{'name': 'Product', 'type': 'str', 'value': 'product', 'description': 'The type of client product being used for playback.'},
{'name': 'Player', 'type': 'str', 'value': 'player', 'description': 'The name of the player being used for playback.'},
{'name': 'Initial Stream', 'type': 'int', 'value': 'initial_stream', 'description': 'If the stream is the initial stream of a continuous streaming session.', 'example': '0 or 1'},
{'name': 'IP Address', 'type': 'str', 'value': 'ip_address', 'description': 'The IP address of the device being used for playback.'},
{'name': 'Stream Duration', 'type': 'int', 'value': 'stream_duration', 'description': 'The duration (in minutes) for the stream.'},
{'name': 'Stream Time', 'type': 'str', 'value': 'stream_time', 'description': 'The duration (in time format) of the stream.'},
{'name': 'Remaining Duration', 'type': 'int', 'value': 'remaining_duration', 'description': 'The remaining duration (in minutes) of the stream.'},
{'name': 'Remaining Time', 'type': 'str', 'value': 'remaining_time', 'description': 'The remaining duration (in time format) of the stream.'},
{'name': 'Progress Duration', 'type': 'int', 'value': 'progress_duration', 'description': 'The last reported offset (in minutes) of the stream.'},
{'name': 'Progress Time', 'type': 'str', 'value': 'progress_time', 'description': 'The last reported offset (in time format) of the stream.'},
{'name': 'Progress Percent', 'type': 'int', 'value': 'progress_percent', 'description': 'The last reported progress percent of the stream.'},
{'name': 'Transcode Decision', 'type': 'str', 'value': 'transcode_decision', 'description': 'The transcode decision of the stream.'},
{'name': 'Container Decision', 'type': 'str', 'value': 'container_decision', 'description': 'The container transcode decision of the stream.'},
{'name': 'Video Decision', 'type': 'str', 'value': 'video_decision', 'description': 'The video transcode decision of the stream.'},
{'name': 'Audio Decision', 'type': 'str', 'value': 'audio_decision', 'description': 'The audio transcode decision of the stream.'},
{'name': 'Subtitle Decision', 'type': 'str', 'value': 'subtitle_decision', 'description': 'The subtitle transcode decision of the stream.'},
{'name': 'Quality Profile', 'type': 'str', 'value': 'quality_profile', 'description': 'The Plex quality profile of the stream.', 'example': 'e.g. Original, 4 Mbps 720p, etc.'},
{'name': 'Optimized Version', 'type': 'int', 'value': 'optimized_version', 'description': 'If the stream is an optimized version.', 'example': '0 or 1'},
{'name': 'Optimized Version Profile', 'type': 'str', 'value': 'optimized_version_profile', 'description': 'The optimized version profile of the stream.'},
{'name': 'Synced Version', 'type': 'int', 'value': 'synced_version', 'description': 'If the stream is a synced version.', 'example': '0 or 1'},
{'name': 'Live', 'type': 'int', 'value': 'live', 'description': 'If the stream is live TV.', 'example': '0 or 1'},
{'name': 'Channel Call Sign', 'type': 'str', 'value': 'channel_call_sign', 'description': 'The Live TV channel call sign.'},
{'name': 'Channel Identifier', 'type': 'str', 'value': 'channel_identifier', 'description': 'The Live TV channel number.'},
{'name': 'Channel Thumb', 'type': 'str', 'value': 'channel_thumb', 'description': 'The URL for the Live TV channel logo.'},
{'name': 'Secure', 'type': 'int', 'value': 'secure', 'description': 'If the stream is using a secure connection.', 'example': '0 or 1'},
{'name': 'Relayed', 'type': 'int', 'value': 'relayed', 'description': 'If the stream is using Plex Relay.', 'example': '0 or 1'},
{'name': 'Stream Local', 'type': 'int', 'value': 'stream_local', 'description': 'If the stream is local.', 'example': '0 or 1'},
{'name': 'Stream Location', 'type': 'str', 'value': 'stream_location', 'description': 'The network location of the stream.', 'example': 'lan or wan'},
{'name': 'Stream Bandwidth', 'type': 'int', 'value': 'stream_bandwidth', 'description': 'The Plex Streaming Brain reserved bandwidth (in kbps) of the stream.', 'help_text': 'not the used bandwidth'},
{'name': 'Stream Container', 'type': 'str', 'value': 'stream_container', 'description': 'The media container of the stream.'},
{'name': 'Stream Bitrate', 'type': 'int', 'value': 'stream_bitrate', 'description': 'The bitrate (in kbps) of the stream.'},
{'name': 'Stream Aspect Ratio', 'type': 'float', 'value': 'stream_aspect_ratio', 'description': 'The aspect ratio of the stream.'},
{'name': 'Stream Video Codec', 'type': 'str', 'value': 'stream_video_codec', 'description': 'The video codec of the stream.'},
{'name': 'Stream Video Codec Level', 'type': 'int', 'value': 'stream_video_codec_level', 'description': 'The video codec level of the stream.'},
{'name': 'Stream Video Bitrate', 'type': 'int', 'value': 'stream_video_bitrate', 'description': 'The video bitrate (in kbps) of the stream.'},
{'name': 'Stream Video Bit Depth', 'type': 'int', 'value': 'stream_video_bit_depth', 'description': 'The video bit depth of the stream.'},
{'name': 'Stream Video Chroma Subsampling', 'type': 'str', 'value': 'stream_video_chroma_subsampling', 'description': 'The video chroma subsampling of the stream.'},
{'name': 'Stream Video Color Primaries', 'type': 'str', 'value': 'stream_video_color_primaries', 'description': 'The video color primaries of the stream.'},
{'name': 'Stream Video Color Range', 'type': 'str', 'value': 'stream_video_color_range', 'description': 'The video color range of the stream.'},
{'name': 'Stream Video Color Space', 'type': 'str', 'value': 'stream_video_color_space', 'description': 'The video color space of the stream.'},
{'name': 'Stream Video Color Transfer Function', 'type': 'str', 'value': 'stream_video_color_trc', 'description': 'The video transfer function of the stream.'},
{'name': 'Stream Video Dynamic Range', 'type': 'str', 'value': 'stream_video_dynamic_range', 'description': 'The video dynamic range of the stream.', 'example': 'HDR or SDR'},
{'name': 'Stream Video Framerate', 'type': 'str', 'value': 'stream_video_framerate', 'description': 'The video framerate of the stream.'},
{'name': 'Stream Video Full Resolution', 'type': 'str', 'value': 'stream_video_full_resolution', 'description': 'The video resolution of the stream with scan type.'},
{'name': 'Stream Video Ref Frames', 'type': 'int', 'value': 'stream_video_ref_frames', 'description': 'The video reference frames of the stream.'},
{'name': 'Stream Video Resolution', 'type': 'str', 'value': 'stream_video_resolution', 'description': 'The video resolution of the stream.'},
{'name': 'Stream Video Scan Type', 'type': 'str', 'value': 'stream_video_scan_type', 'description': 'The video scan type of the stream.'},
{'name': 'Stream Video Height', 'type': 'int', 'value': 'stream_video_height', 'description': 'The video height of the stream.'},
{'name': 'Stream Video Width', 'type': 'int', 'value': 'stream_video_width', 'description': 'The video width of the stream.'},
{'name': 'Stream Video Language', 'type': 'str', 'value': 'stream_video_language', 'description': 'The video language of the stream.'},
{'name': 'Stream Video Language Code', 'type': 'str', 'value': 'stream_video_language_code', 'description': 'The video language code of the stream.'},
{'name': 'Stream Audio Bitrate', 'type': 'int', 'value': 'stream_audio_bitrate', 'description': 'The audio bitrate of the stream.'},
{'name': 'Stream Audio Bitrate Mode', 'type': 'str', 'value': 'stream_audio_bitrate_mode', 'description': 'The audio bitrate mode of the stream.', 'example': 'cbr or vbr'},
{'name': 'Stream Audio Codec', 'type': 'str', 'value': 'stream_audio_codec', 'description': 'The audio codec of the stream.'},
{'name': 'Stream Audio Channels', 'type': 'float', 'value': 'stream_audio_channels', 'description': 'The audio channels of the stream.'},
{'name': 'Stream Audio Channel Layout', 'type': 'str', 'value': 'stream_audio_channel_layout', 'description': 'The audio channel layout of the stream.'},
{'name': 'Stream Audio Sample Rate', 'type': 'int', 'value': 'stream_audio_sample_rate', 'description': 'The audio sample rate (in Hz) of the stream.'},
{'name': 'Stream Audio Language', 'type': 'str', 'value': 'stream_audio_language', 'description': 'The audio language of the stream.'},
{'name': 'Stream Audio Language Code', 'type': 'str', 'value': 'stream_audio_language_code', 'description': 'The audio language code of the stream.'},
{'name': 'Stream Subtitle Codec', 'type': 'str', 'value': 'stream_subtitle_codec', 'description': 'The subtitle codec of the stream.'},
{'name': 'Stream Subtitle Container', 'type': 'str', 'value': 'stream_subtitle_container', 'description': 'The subtitle container of the stream.'},
{'name': 'Stream Subtitle Format', 'type': 'str', 'value': 'stream_subtitle_format', 'description': 'The subtitle format of the stream.'},
{'name': 'Stream Subtitle Forced', 'type': 'int', 'value': 'stream_subtitle_forced', 'description': 'If the subtitles are forced.', 'example': '0 or 1'},
{'name': 'Stream Subtitle Language', 'type': 'str', 'value': 'stream_subtitle_language', 'description': 'The subtitle language of the stream.'},
{'name': 'Stream Subtitle Language Code', 'type': 'str', 'value': 'stream_subtitle_language_code', 'description': 'The subtitle language code of the stream.'},
{'name': 'Stream Subtitle Location', 'type': 'str', 'value': 'stream_subtitle_location', 'description': 'The subtitle location of the stream.'},
{'name': 'Transcode Container', 'type': 'str', 'value': 'transcode_container', 'description': 'The media container of the transcoded stream.'},
{'name': 'Transcode Video Codec', 'type': 'str', 'value': 'transcode_video_codec', 'description': 'The video codec of the transcoded stream.'},
{'name': 'Transcode Video Width', 'type': 'int', 'value': 'transcode_video_width', 'description': 'The video width of the transcoded stream.'},
{'name': 'Transcode Video Height', 'type': 'int', 'value': 'transcode_video_height', 'description': 'The video height of the transcoded stream.'},
{'name': 'Transcode Audio Codec', 'type': 'str', 'value': 'transcode_audio_codec', 'description': 'The audio codec of the transcoded stream.'},
{'name': 'Transcode Audio Channels', 'type': 'float', 'value': 'transcode_audio_channels', 'description': 'The audio channels of the transcoded stream.'},
{'name': 'Transcode HW Requested', 'type': 'int', 'value': 'transcode_hw_requested', 'description': 'If hardware decoding/encoding was requested.', 'example': '0 or 1'},
{'name': 'Transcode HW Decoding', 'type': 'int', 'value': 'transcode_hw_decoding', 'description': 'If hardware decoding is used.', 'example': '0 or 1'},
{'name': 'Transcode HW Decoding Codec', 'type': 'str', 'value': 'transcode_hw_decode', 'description': 'The hardware decoding codec.'},
{'name': 'Transcode HW Decoding Title', 'type': 'str', 'value': 'transcode_hw_decode_title', 'description': 'The hardware decoding codec title.'},
{'name': 'Transcode HW Encoding', 'type': 'int', 'value': 'transcode_hw_encoding', 'description': 'If hardware encoding is used.', 'example': '0 or 1'},
{'name': 'Transcode HW Encoding Codec', 'type': 'str', 'value': 'transcode_hw_encode', 'description': 'The hardware encoding codec.'},
{'name': 'Transcode HW Encoding Title', 'type': 'str', 'value': 'transcode_hw_encode_title', 'description': 'The hardware encoding codec title.'},
{'name': 'Session Key', 'type': 'str', 'value': 'session_key', 'description': 'The unique identifier for the session.'},
{'name': 'Transcode Key', 'type': 'str', 'value': 'transcode_key', 'description': 'The unique identifier for the transcode session.'},
{'name': 'Session ID', 'type': 'str', 'value': 'session_id', 'description': 'The unique identifier for the stream.'},
{'name': 'User ID', 'type': 'int', 'value': 'user_id', 'description': 'The unique identifier for the user.'},
{'name': 'Machine ID', 'type': 'str', 'value': 'machine_id', 'description': 'The unique identifier for the player.'},
]
},
{
'category': 'Source Metadata Details',
'parameters': [
{'name': 'Media Type', 'type': 'str', 'value': 'media_type', 'description': 'The type of media.', 'example': 'movie, show, season, episode, artist, album, track, clip'},
{'name': 'Title', 'type': 'str', 'value': 'title', 'description': 'The full title of the item.'},
{'name': 'Library Name', 'type': 'str', 'value': 'library_name', 'description': 'The library name of the item.'},
{'name': 'Show Name', 'type': 'str', 'value': 'show_name', 'description': 'The title of the TV series.'},
{'name': 'Episode Name', 'type': 'str', 'value': 'episode_name', 'description': 'The title of the episode.'},
{'name': 'Artist Name', 'type': 'str', 'value': 'artist_name', 'description': 'The name of the artist.'},
{'name': 'Album Name', 'type': 'str', 'value': 'album_name', 'description': 'The title of the album.'},
{'name': 'Track Name', 'type': 'str', 'value': 'track_name', 'description': 'The title of the track.'},
{'name': 'Track Artist', 'type': 'str', 'value': 'track_artist', 'description': 'The name of the artist of the track.'},
{'name': 'Season Number', 'type': 'int', 'value': 'season_num', 'description': 'The season number.', 'example': 'e.g. 1, or 1-3'},
{'name': 'Season Number 00', 'type': 'int', 'value': 'season_num00', 'description': 'The two digit season number.', 'example': 'e.g. 01, or 01-03'},
{'name': 'Episode Number', 'type': 'int', 'value': 'episode_num', 'description': 'The episode number.', 'example': 'e.g. 6, or 6-10'},
{'name': 'Episode Number 00', 'type': 'int', 'value': 'episode_num00', 'description': 'The two digit episode number.', 'example': 'e.g. 06, or 06-10'},
{'name': 'Track Number', 'type': 'int', 'value': 'track_num', 'description': 'The track number.', 'example': 'e.g. 4, or 4-10'},
{'name': 'Track Number 00', 'type': 'int', 'value': 'track_num00', 'description': 'The two digit track number.', 'example': 'e.g. 04, or 04-10'},
{'name': 'Season Count', 'type': 'int', 'value': 'season_count', 'description': 'The number of seasons.'},
{'name': 'Episode Count', 'type': 'int', 'value': 'episode_count', 'description': 'The number of episodes.'},
{'name': 'Album Count', 'type': 'int', 'value': 'album_count', 'description': 'The number of albums.'},
{'name': 'Track Count', 'type': 'int', 'value': 'track_count', 'description': 'The number of tracks.'},
{'name': 'Year', 'type': 'int', 'value': 'year', 'description': 'The release year for the item.'},
{'name': 'Release Date', 'type': 'str', 'value': 'release_date', 'description': 'The release date (in date format) for the item.'},
{'name': 'Air Date', 'type': 'str', 'value': 'air_date', 'description': 'The air date (in date format) for the item.'},
{'name': 'Added Date', 'type': 'str', 'value': 'added_date', 'description': 'The date (in date format) the item was added to Plex.'},
{'name': 'Updated Date', 'type': 'str', 'value': 'updated_date', 'description': 'The date (in date format) the item was updated on Plex.'},
{'name': 'Last Viewed Date', 'type': 'str', 'value': 'last_viewed_date', 'description': 'The date (in date format) the item was last viewed on Plex.'},
{'name': 'Studio', 'type': 'str', 'value': 'studio', 'description': 'The studio for the item.'},
{'name': 'Content Rating', 'type': 'str', 'value': 'content_rating', 'description': 'The content rating for the item.', 'example': 'e.g. TV-MA, TV-PG, etc.'},
{'name': 'Directors', 'type': 'str', 'value': 'directors', 'description': 'A list of directors for the item.'},
{'name': 'Writers', 'type': 'str', 'value': 'writers', 'description': 'A list of writers for the item.'},
{'name': 'Actors', 'type': 'str', 'value': 'actors', 'description': 'A list of actors for the item.'},
{'name': 'Genres', 'type': 'str', 'value': 'genres', 'description': 'A list of genres for the item.'},
{'name': 'Labels', 'type': 'str', 'value': 'labels', 'description': 'A list of labels for the item.'},
{'name': 'Collections', 'type': 'str', 'value': 'collections', 'description': 'A list of collections for the item.'},
{'name': 'Summary', 'type': 'str', 'value': 'summary', 'description': 'A short plot summary for the item.'},
{'name': 'Tagline', 'type': 'str', 'value': 'tagline', 'description': 'A tagline for the media item.'},
{'name': 'Rating', 'type': 'float', 'value': 'rating', 'description': 'The rating (out of 10) for the item.'},
{'name': 'Critic Rating', 'type': 'int', 'value': 'critic_rating', 'description': 'The critic rating (%) for the item.', 'help_text': 'Ratings source must be Rotten Tomatoes for the Plex Movie agent'},
{'name': 'Audience Rating', 'type': 'float', 'value': 'audience_rating', 'description': 'The audience rating for the item.', 'help_text': 'Rating out of 10 for IMDB, percentage (%) for Rotten Tomatoes and TMDB.'},
{'name': 'User Rating', 'type': 'float', 'value': 'user_rating', 'description': 'The user (star) rating (out of 10) for the item.'},
{'name': 'Duration', 'type': 'int', 'value': 'duration', 'description': 'The duration (in minutes) for the item.'},
{'name': 'Poster URL', 'type': 'str', 'value': 'poster_url', 'description': 'A URL for the movie, TV show, or album poster.'},
{'name': 'Plex ID', 'type': 'str', 'value': 'plex_id', 'description': 'The Plex ID for the item.', 'example': 'e.g. 5d7769a9594b2b001e6a6b7e'},
{'name': 'Plex URL', 'type': 'str', 'value': 'plex_url', 'description': 'The Plex URL to your server for the item.'},
{'name': 'IMDB ID', 'type': 'str', 'value': 'imdb_id', 'description': 'The IMDB ID for the movie.', 'example': 'e.g. tt2488496'},
{'name': 'IMDB URL', 'type': 'str', 'value': 'imdb_url', 'description': 'The IMDB URL for the movie.'},
{'name': 'TVDB ID', 'type': 'int', 'value': 'thetvdb_id', 'description': 'The TVDB ID for the TV show.', 'example': 'e.g. 121361'},
{'name': 'TVDB URL', 'type': 'str', 'value': 'thetvdb_url', 'description': 'The TVDB URL for the TV show.'},
{'name': 'TMDB ID', 'type': 'int', 'value': 'themoviedb_id', 'description': 'The TMDb ID for the movie or TV show.', 'example': 'e.g. 15260'},
{'name': 'TMDB URL', 'type': 'str', 'value': 'themoviedb_url', 'description': 'The TMDb URL for the movie or TV show.'},
{'name': 'TVmaze ID', 'type': 'int', 'value': 'tvmaze_id', 'description': 'The TVmaze ID for the TV show.', 'example': 'e.g. 290'},
{'name': 'TVmaze URL', 'type': 'str', 'value': 'tvmaze_url', 'description': 'The TVmaze URL for the TV show.'},
{'name': 'MusicBrainz ID', 'type': 'str', 'value': 'musicbrainz_id', 'description': 'The MusicBrainz ID for the artist, album, or track.', 'example': 'e.g. b670dfcf-9824-4309-a57e-03595aaba286'},
{'name': 'MusicBrainz URL', 'type': 'str', 'value': 'musicbrainz_url', 'description': 'The MusicBrainz URL for the artist, album, or track.'},
{'name': 'Last.fm URL', 'type': 'str', 'value': 'lastfm_url', 'description': 'The Last.fm URL for the album.', 'help_text': 'Music library agent must be Last.fm'},
{'name': 'Trakt.tv URL', 'type': 'str', 'value': 'trakt_url', 'description': 'The trakt.tv URL for the movie or TV show.'},
{'name': 'Container', 'type': 'str', 'value': 'container', 'description': 'The media container of the original media.'},
{'name': 'Bitrate', 'type': 'int', 'value': 'bitrate', 'description': 'The bitrate of the original media.'},
{'name': 'Aspect Ratio', 'type': 'float', 'value': 'aspect_ratio', 'description': 'The aspect ratio of the original media.'},
{'name': 'Video Codec', 'type': 'str', 'value': 'video_codec', 'description': 'The video codec of the original media.'},
{'name': 'Video Codec Level', 'type': 'int', 'value': 'video_codec_level', 'description': 'The video codec level of the original media.'},
{'name': 'Video Bitrate', 'type': 'int', 'value': 'video_bitrate', 'description': 'The video bitrate of the original media.'},
{'name': 'Video Bit Depth', 'type': 'int', 'value': 'video_bit_depth', 'description': 'The video bit depth of the original media.'},
{'name': 'Video Chroma Subsampling', 'type': 'str', 'value': 'video_chroma_subsampling', 'description': 'The video chroma subsampling of the original media.'},
{'name': 'Video Color Primaries', 'type': 'str', 'value': 'video_color_primaries', 'description': 'The video color primaries of the original media.'},
{'name': 'Video Color Range', 'type': 'str', 'value': 'video_color_range', 'description': 'The video color range of the original media.'},
{'name': 'Video Color Space', 'type': 'str', 'value': 'video_color_space', 'description': 'The video color space of the original media.'},
{'name': 'Video Color Transfer Function', 'type': 'str', 'value': 'video_color_trc', 'description': 'The video transfer function of the original media.'},
{'name': 'Video Dynamic Range', 'type': 'str', 'value': 'video_dynamic_range', 'description': 'The video dynamic range of the original media.', 'example': 'HDR or SDR'},
{'name': 'Video Framerate', 'type': 'str', 'value': 'video_framerate', 'description': 'The video framerate of the original media.'},
{'name': 'Video Full Resolution', 'type': 'str', 'value': 'video_full_resolution', 'description': 'The video resolution of the original media with scan type.'},
{'name': 'Video Ref Frames', 'type': 'int', 'value': 'video_ref_frames', 'description': 'The video reference frames of the original media.'},
{'name': 'Video Resolution', 'type': 'str', 'value': 'video_resolution', 'description': 'The video resolution of the original media.'},
{'name': 'Video Scan Type', 'type': 'str', 'value': 'video_scan_type', 'description': 'The video scan type of the original media.'},
{'name': 'Video Height', 'type': 'int', 'value': 'video_height', 'description': 'The video height of the original media.'},
{'name': 'Video Width', 'type': 'int', 'value': 'video_width', 'description': 'The video width of the original media.'},
{'name': 'Video Language', 'type': 'str', 'value': 'video_language', 'description': 'The video language of the original media.'},
{'name': 'Video Language Code', 'type': 'str', 'value': 'video_language_code', 'description': 'The video language code of the original media.'},
{'name': 'Audio Bitrate', 'type': 'int', 'value': 'audio_bitrate', 'description': 'The audio bitrate of the original media.'},
{'name': 'Audio Bitrate Mode', 'type': 'str', 'value': 'audio_bitrate_mode', 'description': 'The audio bitrate mode of the original media.', 'example': 'cbr or vbr'},
{'name': 'Audio Codec', 'type': 'str', 'value': 'audio_codec', 'description': 'The audio codec of the original media.'},
{'name': 'Audio Channels', 'type': 'float', 'value': 'audio_channels', 'description': 'The audio channels of the original media.'},
{'name': 'Audio Channel Layout', 'type': 'str', 'value': 'audio_channel_layout', 'description': 'The audio channel layout of the original media.'},
{'name': 'Audio Sample Rate', 'type': 'int', 'value': 'audio_sample_rate', 'description': 'The audio sample rate (in Hz) of the original media.'},
{'name': 'Audio Language', 'type': 'str', 'value': 'audio_language', 'description': 'The audio language of the original media.'},
{'name': 'Audio Language Code', 'type': 'str', 'value': 'audio_language_code', 'description': 'The audio language code of the original media.'},
{'name': 'Subtitle Codec', 'type': 'str', 'value': 'subtitle_codec', 'description': 'The subtitle codec of the original media.'},
{'name': 'Subtitle Container', 'type': 'str', 'value': 'subtitle_container', 'description': 'The subtitle container of the original media.'},
{'name': 'Subtitle Format', 'type': 'str', 'value': 'subtitle_format', 'description': 'The subtitle format of the original media.'},
{'name': 'Subtitle Forced', 'type': 'int', 'value': 'subtitle_forced', 'description': 'If the subtitles are forced.', 'example': '0 or 1'},
{'name': 'Subtitle Location', 'type': 'str', 'value': 'subtitle_location', 'description': 'The subtitle location of the original media.'},
{'name': 'Subtitle Language', 'type': 'str', 'value': 'subtitle_language', 'description': 'The subtitle language of the original media.'},
{'name': 'Subtitle Language Code', 'type': 'str', 'value': 'subtitle_language_code', 'description': 'The subtitle language code of the original media.'},
{'name': 'File', 'type': 'str', 'value': 'file', 'description': 'The file path to the item.'},
{'name': 'Filename', 'type': 'str', 'value': 'filename', 'description': 'The file name of the item.'},
{'name': 'File Size', 'type': 'int', 'value': 'file_size', 'description': 'The file size of the item.'},
{'name': 'Section ID', 'type': 'int', 'value': 'section_id', 'description': 'The unique identifier for the library.'},
{'name': 'Rating Key', 'type': 'int', 'value': 'rating_key', 'description': 'The unique identifier for the movie, episode, or track.'},
{'name': 'Parent Rating Key', 'type': 'int', 'value': 'parent_rating_key', 'description': 'The unique identifier for the season or album.'},
{'name': 'Grandparent Rating Key', 'type': 'int', 'value': 'grandparent_rating_key', 'description': 'The unique identifier for the TV show or artist.'},
{'name': 'Art', 'type': 'str', 'value': 'art', 'description': 'The Plex background art for the media.'},
{'name': 'Thumb', 'type': 'str', 'value': 'thumb', 'description': 'The Plex thumbnail for the movie or episode.'},
{'name': 'Parent Thumb', 'type': 'str', 'value': 'parent_thumb', 'description': 'The Plex thumbnail for the season or album.'},
{'name': 'Grandparent Thumb', 'type': 'str', 'value': 'grandparent_thumb', 'description': 'The Plex thumbnail for the TV show or artist.'},
{'name': 'Poster Thumb', 'type': 'str', 'value': 'poster_thumb', 'description': 'The Plex thumbnail for the poster image.'},
{'name': 'Poster Title', 'type': 'str', 'value': 'poster_title', 'description': 'The title for the poster image.'},
{'name': 'Indexes', 'type': 'int', 'value': 'indexes', 'description': 'If the media has video preview thumbnails.', 'example': '0 or 1'},
]
},
{
'category': 'Plex Remote Access',
'parameters': [
{'name': 'Remote Access Mapping State', 'type': 'str', 'value': 'remote_access_mapping_state', 'description': 'The mapping state of the Plex remote access port.'},
{'name': 'Remote Access Mapping Error', 'type': 'str', 'value': 'remote_access_mapping_error', 'description': 'The mapping error of the Plex remote access port.'},
{'name': 'Remote Access Public IP Address', 'type': 'str', 'value': 'remote_access_public_address', 'description': 'The Plex remote access public IP address.'},
{'name': 'Remote Access Public Port', 'type': 'str', 'value': 'remote_access_public_port', 'description': 'The Plex remote access public port.'},
{'name': 'Remote Access Private IP Address', 'type': 'str', 'value': 'remote_access_private_address', 'description': 'The Plex remote access private IP address.'},
{'name': 'Remote Access Private Port', 'type': 'str', 'value': 'remote_access_private_port', 'description': 'The Plex remote access private port.'},
{'name': 'Remote Access Failure Reason', 'type': 'str', 'value': 'remote_access_reason', 'description': 'The failure reason for Plex remote access going down.'},
]
},
{
'category': 'Plex Update Available',
'parameters': [
{'name': 'Update Version', 'type': 'str', 'value': 'update_version', 'description': 'The available update version for your Plex Server.'},
{'name': 'Update Url', 'type': 'str', 'value': 'update_url', 'description': 'The download URL for the available update.'},
{'name': 'Update Release Date', 'type': 'str', 'value': 'update_release_date', 'description': 'The release date of the available update.'},
{'name': 'Update Channel', 'type': 'str', 'value': 'update_channel', 'description': 'The update channel.', 'example': 'Public or Plex Pass'},
{'name': 'Update Platform', 'type': 'str', 'value': 'update_platform', 'description': 'The platform of your Plex Server.'},
{'name': 'Update Distro', 'type': 'str', 'value': 'update_distro', 'description': 'The distro of your Plex Server.'},
{'name': 'Update Distro Build', 'type': 'str', 'value': 'update_distro_build', 'description': 'The distro build of your Plex Server.'},
{'name': 'Update Requirements', 'type': 'str', 'value': 'update_requirements', 'description': 'The requirements for the available update.'},
{'name': 'Update Extra Info', 'type': 'str', 'value': 'update_extra_info', 'description': 'Any extra info for the available update.'},
{'name': 'Update Changelog Added', 'type': 'str', 'value': 'update_changelog_added', 'description': 'The added changelog for the available update.'},
{'name': 'Update Changelog Fixed', 'type': 'str', 'value': 'update_changelog_fixed', 'description': 'The fixed changelog for the available update.'},
]
},
{
'category': 'Tautulli Update Available',
'parameters': [
{'name': 'Tautulli Update Version', 'type': 'str', 'value': 'tautulli_update_version', 'description': 'The available update version for Tautulli.'},
{'name': 'Tautulli Update Release URL', 'type': 'str', 'value': 'tautulli_update_release_url', 'description': 'The release page URL on GitHub.'},
{'name': 'Tautulli Update Tar', 'type': 'str', 'value': 'tautulli_update_tar', 'description': 'The tar download URL for the available update.'},
{'name': 'Tautulli Update Zip', 'type': 'str', 'value': 'tautulli_update_zip', 'description': 'The zip download URL for the available update.'},
{'name': 'Tautulli Update Commit', 'type': 'str', 'value': 'tautulli_update_commit', 'description': 'The commit hash for the available update.'},
{'name': 'Tautulli Update Behind', 'type': 'int', 'value': 'tautulli_update_behind', 'description': 'The number of commits behind for the available update.'},
{'name': 'Tautulli Update Changelog', 'type': 'str', 'value': 'tautulli_update_changelog', 'description': 'The changelog for the available update.'},
]
},
]
NEWSLETTER_PARAMETERS = [
{
'category': 'Global',
'parameters': [
{'name': 'Server Name', 'type': 'str', 'value': 'server_name', 'description': 'The name of your Plex Server.'},
{'name': 'Start Date', 'type': 'str', 'value': 'start_date', 'description': 'The start date of the newsletter.'},
{'name': 'End Date', 'type': 'str', 'value': 'end_date', 'description': 'The end date of the newsletter.'},
{'name': 'Current Year', 'type': 'int', 'value': 'current_year', 'description': 'The year of the start date of the newsletter.'},
{'name': 'Current Month', 'type': 'int', 'value': 'current_month', 'description': 'The month of the start date of the newsletter.', 'example': '1 to 12'},
{'name': 'Current Day', 'type': 'int', 'value': 'current_day', 'description': 'The day of the start date of the newsletter.', 'example': '1 to 31'},
{'name': 'Current Hour', 'type': 'int', 'value': 'current_hour', 'description': 'The hour of the start date of the newsletter.', 'example': '0 to 23'},
{'name': 'Current Minute', 'type': 'int', 'value': 'current_minute', 'description': 'The minute of the start date of the newsletter.', 'example': '0 to 59'},
{'name': 'Current Second', 'type': 'int', 'value': 'current_second', 'description': 'The second of the start date of the newsletter.', 'example': '0 to 59'},
{'name': 'Current Weekday', 'type': 'int', 'value': 'current_weekday', 'description': 'The ISO weekday of the start date of the newsletter.', 'example': '1 (Mon) to 7 (Sun)'},
{'name': 'Current Week', 'type': 'int', 'value': 'current_week', 'description': 'The ISO week number of the start date of the newsletter.', 'example': '1 to 52'},
{'name': 'Newsletter Time Frame', 'type': 'int', 'value': 'newsletter_time_frame', 'description': 'The time frame included in the newsletter.'},
{'name': 'Newsletter Time Frame Units', 'type': 'str', 'value': 'newsletter_time_frame_units', 'description': 'The time frame units included in the newsletter.'},
{'name': 'Newsletter URL', 'type': 'str', 'value': 'newsletter_url', 'description': 'The self-hosted URL to the newsletter.'},
{'name': 'Newsletter Static URL', 'type': 'str', 'value': 'newsletter_static_url', 'description': 'The static self-hosted URL to the latest scheduled newsletter for the agent.'},
{'name': 'Newsletter UUID', 'type': 'str', 'value': 'newsletter_uuid', 'description': 'The unique identifier for the newsletter.'},
{'name': 'Newsletter ID', 'type': 'int', 'value': 'newsletter_id', 'description': 'The unique ID number for the newsletter agent.'},
{'name': 'Newsletter ID Name', 'type': 'str', 'value': 'newsletter_id_name', 'description': 'The unique ID name for the newsletter agent.'},
{'name': 'Newsletter Password', 'type': 'str', 'value': 'newsletter_password', 'description': 'The password required to view the newsletter if enabled.'},
]
},
{
'category': 'Recently Added',
'parameters': [
{'name': 'Included Libraries', 'type': 'str', 'value': 'newsletter_libraries', 'description': 'The list of libraries included in the newsletter.'},
]
}
]
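# Illustrative sketch (not part of the committed file): the parameter groups
# above are typically flattened into a lookup table so that {value} style
# placeholders in a notification or newsletter template can be substituted.
# build_parameter_index() and the sample subject below are hypothetical and
# only show one way these definitions might be consumed.
def build_parameter_index(parameter_groups):
    """Map each parameter 'value' key to its metadata dict across all categories."""
    index = {}
    for group in parameter_groups:
        for param in group['parameters']:
            index[param['value']] = param
    return index
# index = build_parameter_index(NEWSLETTER_PARAMETERS)
# subject = 'Recently Added on {server_name} ({start_date} - {end_date})'
# subject.format(**{key: computed_values.get(key, '') for key in index})  # computed_values is hypothetical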

548
jellypy/config.py Normal file
View File

@@ -0,0 +1,548 @@
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import object
from future.builtins import str
import os
import re
import shutil
import time
import threading
from configobj import ConfigObj, ParseError
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from jellypy import helpers
from jellypy import logger
def bool_int(value):
"""
Casts a config value into a 0 or 1
"""
if isinstance(value, str):
if value.lower() in ('', '0', 'false', 'f', 'no', 'n', 'off'):
value = 0
return int(bool(value))
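# Illustrative usage (not part of the committed file): bool_int() accepts the
# usual truthy/falsy config spellings and always returns 0 or 1.
#
#     bool_int('')      -> 0
#     bool_int('False') -> 0   (falsy strings are matched case-insensitively)
#     bool_int('no')    -> 0
#     bool_int('1')     -> 1
#     bool_int(True)    -> 1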
FILENAME = "config.ini"
_CONFIG_DEFINITIONS = {
'ALLOW_GUEST_ACCESS': (int, 'General', 0),
'DATE_FORMAT': (str, 'General', 'YYYY-MM-DD'),
'PMS_IDENTIFIER': (str, 'PMS', ''),
'PMS_IP': (str, 'PMS', '127.0.0.1'),
'PMS_IS_CLOUD': (int, 'PMS', 0),
'PMS_IS_REMOTE': (int, 'PMS', 0),
'PMS_LOGS_FOLDER': (str, 'PMS', ''),
'PMS_LOGS_LINE_CAP': (int, 'PMS', 1000),
'PMS_NAME': (str, 'PMS', ''),
'PMS_PORT': (int, 'PMS', 32400),
'PMS_TOKEN': (str, 'PMS', ''),
'PMS_SSL': (int, 'PMS', 0),
'PMS_URL': (str, 'PMS', ''),
'PMS_URL_OVERRIDE': (str, 'PMS', ''),
'PMS_URL_MANUAL': (int, 'PMS', 0),
'PMS_USE_BIF': (int, 'PMS', 0),
'PMS_UUID': (str, 'PMS', ''),
'PMS_TIMEOUT': (int, 'Advanced', 15),
'PMS_PLEXPASS': (int, 'PMS', 0),
'PMS_PLATFORM': (str, 'PMS', ''),
'PMS_VERSION': (str, 'PMS', ''),
'PMS_UPDATE_CHANNEL': (str, 'PMS', 'plex'),
'PMS_UPDATE_DISTRO': (str, 'PMS', ''),
'PMS_UPDATE_DISTRO_BUILD': (str, 'PMS', ''),
'PMS_UPDATE_CHECK_INTERVAL': (int, 'Advanced', 24),
'PMS_WEB_URL': (str, 'PMS', 'https://app.plex.tv/desktop'),
'TIME_FORMAT': (str, 'General', 'HH:mm'),
'ANON_REDIRECT': (str, 'General', 'https://www.nullrefer.com/?'),
'API_ENABLED': (int, 'General', 1),
'API_KEY': (str, 'General', ''),
'API_SQL': (int, 'General', 0),
'BUFFER_THRESHOLD': (int, 'Monitoring', 10),
'BUFFER_WAIT': (int, 'Monitoring', 900),
'BACKUP_DAYS': (int, 'General', 3),
'BACKUP_DIR': (str, 'General', ''),
'BACKUP_INTERVAL': (int, 'General', 6),
'CACHE_DIR': (str, 'General', ''),
'CACHE_IMAGES': (int, 'General', 1),
'CACHE_SIZEMB': (int, 'Advanced', 32),
'CHECK_GITHUB': (int, 'General', 1),
'CHECK_GITHUB_INTERVAL': (int, 'General', 360),
'CHECK_GITHUB_ON_STARTUP': (int, 'General', 1),
'CHECK_GITHUB_CACHE_SECONDS': (int, 'Advanced', 3600),
'CLEANUP_FILES': (int, 'General', 0),
'CLOUDINARY_CLOUD_NAME': (str, 'Cloudinary', ''),
'CLOUDINARY_API_KEY': (str, 'Cloudinary', ''),
'CLOUDINARY_API_SECRET': (str, 'Cloudinary', ''),
'CONFIG_VERSION': (int, 'Advanced', 0),
'DO_NOT_OVERRIDE_GIT_BRANCH': (int, 'General', 0),
'ENABLE_HTTPS': (int, 'General', 0),
'EXPORT_DIR': (str, 'General', ''),
'EXPORT_THREADS': (int, 'Advanced', 8),
'FIRST_RUN_COMPLETE': (int, 'General', 0),
'FREEZE_DB': (int, 'General', 0),
'GET_FILE_SIZES': (int, 'General', 0),
'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}),
'GIT_BRANCH': (str, 'General', 'master'),
'GIT_PATH': (str, 'General', ''),
'GIT_REMOTE': (str, 'General', 'origin'),
'GIT_TOKEN': (str, 'General', ''),
'GIT_USER': (str, 'General', 'Tautulli'),
'GIT_REPO': (str, 'General', 'Tautulli'),
'GROUP_HISTORY_TABLES': (int, 'General', 1),
'HISTORY_TABLE_ACTIVITY': (int, 'General', 1),
'HOME_SECTIONS': (list, 'General', ['current_activity', 'watch_stats', 'library_stats', 'recently_added']),
'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']),
'HOME_STATS_CARDS': (list, 'General', ['top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music',
'popular_music', 'last_watched', 'top_users', 'top_platforms', 'most_concurrent']),
'HOME_REFRESH_INTERVAL': (int, 'General', 10),
'HTTPS_CREATE_CERT': (int, 'General', 1),
'HTTPS_CERT': (str, 'General', ''),
'HTTPS_CERT_CHAIN': (str, 'General', ''),
'HTTPS_KEY': (str, 'General', ''),
'HTTPS_DOMAIN': (str, 'General', 'localhost'),
'HTTPS_IP': (str, 'General', '127.0.0.1'),
'HTTP_BASIC_AUTH': (int, 'General', 0),
'HTTP_ENVIRONMENT': (str, 'General', 'production'),
'HTTP_HASH_PASSWORD': (int, 'General', 0),
'HTTP_HASHED_PASSWORD': (int, 'General', 0),
'HTTP_HOST': (str, 'General', '0.0.0.0'),
'HTTP_PASSWORD': (str, 'General', ''),
'HTTP_PORT': (int, 'General', 8181),
'HTTP_PROXY': (int, 'General', 0),
'HTTP_ROOT': (str, 'General', ''),
'HTTP_USERNAME': (str, 'General', ''),
'HTTP_PLEX_ADMIN': (int, 'General', 0),
'HTTP_BASE_URL': (str, 'General', ''),
'HTTP_RATE_LIMIT_ATTEMPTS': (int, 'General', 10),
'HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL': (int, 'General', 300),
'HTTP_RATE_LIMIT_LOCKOUT_TIME': (int, 'General', 300),
'INTERFACE': (str, 'General', 'default'),
'IMGUR_CLIENT_ID': (str, 'Monitoring', ''),
'JOURNAL_MODE': (str, 'Advanced', 'WAL'),
'LAUNCH_BROWSER': (int, 'General', 1),
'LAUNCH_STARTUP': (int, 'General', 1),
'LOG_BLACKLIST': (int, 'General', 1),
'LOG_DIR': (str, 'General', ''),
'LOGGING_IGNORE_INTERVAL': (int, 'Monitoring', 120),
'METADATA_CACHE_SECONDS': (int, 'Advanced', 1800),
'MOVIE_WATCHED_PERCENT': (int, 'Monitoring', 85),
'MUSIC_WATCHED_PERCENT': (int, 'Monitoring', 85),
'MUSICBRAINZ_LOOKUP': (int, 'General', 0),
'MONITOR_PMS_UPDATES': (int, 'Monitoring', 0),
'MONITORING_INTERVAL': (int, 'Monitoring', 60),
'NEWSLETTER_AUTH': (int, 'Newsletter', 0),
'NEWSLETTER_PASSWORD': (str, 'Newsletter', ''),
'NEWSLETTER_CUSTOM_DIR': (str, 'Newsletter', ''),
'NEWSLETTER_INLINE_STYLES': (int, 'Newsletter', 1),
'NEWSLETTER_TEMPLATES': (str, 'Newsletter', 'newsletters'),
'NEWSLETTER_DIR': (str, 'Newsletter', ''),
'NEWSLETTER_SELF_HOSTED': (int, 'Newsletter', 0),
'NOTIFICATION_THREADS': (int, 'Advanced', 2),
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
'NOTIFY_CONTINUED_SESSION_THRESHOLD': (int, 'Monitoring', 15),
'NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 1),
'NOTIFY_GROUP_RECENTLY_ADDED_PARENT': (int, 'Monitoring', 1),
'NOTIFY_UPLOAD_POSTERS': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_DELAY': (int, 'Monitoring', 300),
'NOTIFY_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_UPGRADE': (int, 'Monitoring', 0),
'NOTIFY_REMOTE_ACCESS_THRESHOLD': (int, 'Monitoring', 60),
'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0),
'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
'NOTIFY_NEW_DEVICE_INITIAL_ONLY': (int, 'Monitoring', 1),
'PLEXPY_AUTO_UPDATE': (int, 'General', 0),
'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12),
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1),
'SESSION_DB_WRITE_ATTEMPTS': (int, 'Advanced', 5),
'SHOW_ADVANCED_SETTINGS': (int, 'General', 0),
'SYNCHRONOUS_MODE': (str, 'Advanced', 'NORMAL'),
'THEMOVIEDB_APIKEY': (str, 'General', 'e9a6655bae34bf694a0f3e33338dc28e'),
'THEMOVIEDB_LOOKUP': (int, 'General', 0),
'TVMAZE_LOOKUP': (int, 'General', 0),
'TV_WATCHED_PERCENT': (int, 'Monitoring', 85),
'UPDATE_DB_INTERVAL': (int, 'General', 24),
'UPDATE_SHOW_CHANGELOG': (int, 'General', 1),
'VERBOSE_LOGS': (int, 'Advanced', 1),
'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1),
'WEBSOCKET_MONITOR_PING_PONG': (int, 'Advanced', 0),
'WEBSOCKET_CONNECTION_ATTEMPTS': (int, 'Advanced', 5),
'WEBSOCKET_CONNECTION_TIMEOUT': (int, 'Advanced', 5),
'WEEK_START_MONDAY': (int, 'General', 0),
'JWT_SECRET': (str, 'Advanced', ''),
'JWT_UPDATE_SECRET': (bool_int, 'Advanced', 0),
'SYSTEM_ANALYTICS': (int, 'Advanced', 1),
'SYS_TRAY_ICON': (int, 'General', 1),
}
_BLACKLIST_KEYS = ['_APITOKEN', '_TOKEN', '_KEY', '_SECRET', '_PASSWORD', '_APIKEY', '_ID', '_HOOK']
_WHITELIST_KEYS = ['HTTPS_KEY']
_DO_NOT_IMPORT_KEYS = [
'FIRST_RUN_COMPLETE', 'GET_FILE_SIZES_HOLD', 'GIT_PATH', 'PMS_LOGS_FOLDER',
'BACKUP_DIR', 'CACHE_DIR', 'EXPORT_DIR', 'LOG_DIR', 'NEWSLETTER_DIR', 'NEWSLETTER_CUSTOM_DIR',
'HTTP_HOST', 'HTTP_PORT', 'HTTP_ROOT',
'HTTP_USERNAME', 'HTTP_PASSWORD', 'HTTP_HASH_PASSWORD', 'HTTP_HASHED_PASSWORD',
'ENABLE_HTTPS', 'HTTPS_CREATE_CERT', 'HTTPS_CERT', 'HTTPS_CERT_CHAIN', 'HTTPS_KEY'
]
_DO_NOT_IMPORT_KEYS_DOCKER = [
'PLEXPY_AUTO_UPDATE', 'GIT_REMOTE', 'GIT_BRANCH'
]
IS_IMPORTING = False
IMPORT_THREAD = None
def set_is_importing(value):
global IS_IMPORTING
IS_IMPORTING = value
def set_import_thread(config=None, backup=False):
global IMPORT_THREAD
if config:
if IMPORT_THREAD:
return
IMPORT_THREAD = threading.Thread(target=import_tautulli_config,
kwargs={'config': config, 'backup': backup})
else:
IMPORT_THREAD = None
def import_tautulli_config(config=None, backup=False):
if IS_IMPORTING:
logger.warn("Tautulli Config :: Another Tautulli config is currently being imported. "
"Please wait until it is complete before importing another config.")
return False
if backup:
# Make a backup of the current config first
logger.info("Tautulli Config :: Creating a config backup before importing.")
if not make_backup():
logger.error("Tautulli Config :: Failed to import Tautulli config: failed to create config backup")
return False
# Create a new Config object with the imported config file
try:
imported_config = Config(config, is_import=True)
except:
logger.error("Tautulli Config :: Failed to import Tautulli config: error reading imported config file")
return False
logger.info("Tautulli Config :: Importing Tautulli config '%s'...", config)
set_is_importing(True)
# Remove keys that should not be imported
for key in _DO_NOT_IMPORT_KEYS:
delattr(imported_config, key)
if jellypy.DOCKER or jellypy.SNAP:
for key in _DO_NOT_IMPORT_KEYS_DOCKER:
delattr(imported_config, key)
# Merge the imported config file into the current config file
jellypy.CONFIG._config.merge(imported_config._config)
jellypy.CONFIG.write()
logger.info("Tautulli Config :: Tautulli config import complete.")
set_import_thread(None)
set_is_importing(False)
# Restart to apply changes
jellypy.SIGNAL = 'restart'
def make_backup(cleanup=False, scheduler=False):
""" Makes a backup of config file, removes all but the last 5 backups """
if scheduler:
backup_file = 'config.backup-{}.sched.ini'.format(helpers.now())
else:
backup_file = 'config.backup-{}.ini'.format(helpers.now())
backup_folder = jellypy.CONFIG.BACKUP_DIR
backup_file_fp = os.path.join(backup_folder, backup_file)
# In case the user has deleted it manually
if not os.path.exists(backup_folder):
os.makedirs(backup_folder)
jellypy.CONFIG.write()
shutil.copyfile(jellypy.CONFIG_FILE, backup_file_fp)
if cleanup:
now = time.time()
# Delete all scheduled backups older than BACKUP_DAYS.
for root, dirs, files in os.walk(backup_folder):
ini_files = [os.path.join(root, f) for f in files if f.endswith('.sched.ini')]
for file_ in ini_files:
if os.stat(file_).st_mtime < now - jellypy.CONFIG.BACKUP_DAYS * 86400:
try:
os.remove(file_)
except OSError as e:
logger.error("Tautulli Config :: Failed to delete %s from the backup folder: %s" % (file_, e))
if backup_file in os.listdir(backup_folder):
logger.debug("Tautulli Config :: Successfully backed up %s to %s" % (jellypy.CONFIG_FILE, backup_file))
return True
else:
logger.error("Tautulli Config :: Failed to backup %s to %s" % (jellypy.CONFIG_FILE, backup_file))
return False
# pylint:disable=R0902
# it might be nice to refactor for fewer instance variables
class Config(object):
""" Wraps access to particular values in a config file """
def __init__(self, config_file, is_import=False):
""" Initialize the config with values from a file """
self._config_file = config_file
try:
self._config = ConfigObj(self._config_file, encoding='utf-8')
except ParseError as e:
logger.error("Tautulli Config :: Error reading configuration file: %s", e)
raise
for key in _CONFIG_DEFINITIONS:
self.check_setting(key)
if not is_import:
self._upgrade()
self._blacklist()
def _blacklist(self):
""" Add tokens and passwords to blacklisted words in logger """
blacklist = set()
for key, subkeys in self._config.items():
for subkey, value in subkeys.items():
if isinstance(value, str) and len(value.strip()) > 5 and \
subkey.upper() not in _WHITELIST_KEYS and any(bk in subkey.upper() for bk in _BLACKLIST_KEYS):
blacklist.add(value.strip())
logger._BLACKLIST_WORDS.update(blacklist)
def _define(self, name):
key = name.upper()
ini_key = name.lower()
definition = _CONFIG_DEFINITIONS[key]
if len(definition) == 3:
definition_type, section, default = definition
else:
definition_type, section, _, default = definition
return key, definition_type, section, ini_key, default
def check_section(self, section):
""" Check if INI section exists, if not create it """
if section not in self._config:
self._config[section] = {}
return True
else:
return False
def check_setting(self, key):
""" Cast any value in the config to the right type or use the default """
key, definition_type, section, ini_key, default = self._define(key)
self.check_section(section)
try:
my_val = definition_type(self._config[section][ini_key])
except Exception:
my_val = definition_type(default)
self._config[section][ini_key] = my_val
return my_val
def write(self):
""" Make a copy of the stored config and write it to the configured file """
new_config = ConfigObj(encoding="UTF-8")
new_config.filename = self._config_file
# First copy over everything from the old config, even if it is not
# correctly defined, to avoid losing data
for key, subkeys in self._config.items():
if key not in new_config:
new_config[key] = {}
for subkey, value in subkeys.items():
new_config[key][subkey] = value
# Next, make sure that everything we expect to be defined is present
for key in _CONFIG_DEFINITIONS:
key, definition_type, section, ini_key, default = self._define(key)
self.check_setting(key)
if section not in new_config:
new_config[section] = {}
new_config[section][ini_key] = self._config[section][ini_key]
# Write it to file
logger.info("Tautulli Config :: Writing configuration to file")
try:
new_config.write()
except IOError as e:
logger.error("Tautulli Config :: Error writing configuration file: %s", e)
self._blacklist()
def __getattr__(self, name):
"""
Returns something from the ini unless it is a real property
of the configuration object or is not all caps.
"""
if not re.match(r'[A-Z_]+$', name):
return super(Config, self).__getattr__(name)
else:
return self.check_setting(name)
def __setattr__(self, name, value):
"""
Maps all-caps properties to ini values unless they exist on the
configuration object.
"""
if not re.match(r'[A-Z_]+$', name):
super(Config, self).__setattr__(name, value)
return value
else:
key, definition_type, section, ini_key, default = self._define(name)
self._config[section][ini_key] = definition_type(value)
return self._config[section][ini_key]
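# Illustrative usage (not part of the committed file; the path below is
# hypothetical): all-caps attributes are mapped through _CONFIG_DEFINITIONS,
# so reads and writes round-trip through the ini sections with type coercion.
#
#     cfg = Config('/path/to/config.ini')
#     cfg.PMS_PORT            # -> 32400 ([PMS] pms_port, cast to int, default used if missing)
#     cfg.PMS_PORT = '32401'  # coerced to int(32401) before being stored
#     cfg.write()             # persists the merged config back to disk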
def __delattr__(self, name):
"""
Deletes a key from the configuration object.
"""
if not re.match(r'[A-Z_]+$', name):
return super(Config, self).__delattr__(name)
else:
key, definition_type, section, ini_key, default = self._define(name)
del self._config[section][ini_key]
def process_kwargs(self, kwargs):
"""
Given a big bunch of key value pairs, apply them to the ini.
"""
for name, value in kwargs.items():
key, definition_type, section, ini_key, default = self._define(name)
self._config[section][ini_key] = definition_type(value)
def _upgrade(self):
"""
Upgrades the config file from previous versions and bumps up the config version
"""
if self.CONFIG_VERSION == 0:
self.CONFIG_VERSION = 1
if self.CONFIG_VERSION == 1:
# Change home_stats_cards to list
if self.HOME_STATS_CARDS:
home_stats_cards = ''.join(self.HOME_STATS_CARDS).split(', ')
if 'watch_statistics' in home_stats_cards:
home_stats_cards.remove('watch_statistics')
self.HOME_STATS_CARDS = home_stats_cards
# Change home_library_cards to list
if self.HOME_LIBRARY_CARDS:
home_library_cards = ''.join(self.HOME_LIBRARY_CARDS).split(', ')
if 'library_statistics' in home_library_cards:
home_library_cards.remove('library_statistics')
self.HOME_LIBRARY_CARDS = home_library_cards
self.CONFIG_VERSION = 2
if self.CONFIG_VERSION == 2:
self.CONFIG_VERSION = 3
if self.CONFIG_VERSION == 3:
if self.HTTP_ROOT == '/':
self.HTTP_ROOT = ''
self.CONFIG_VERSION = 4
if self.CONFIG_VERSION == 4:
if not len(self.HOME_STATS_CARDS) and 'watch_stats' in self.HOME_SECTIONS:
home_sections = self.HOME_SECTIONS
home_sections.remove('watch_stats')
self.HOME_SECTIONS = home_sections
if not len(self.HOME_LIBRARY_CARDS) and 'library_stats' in self.HOME_SECTIONS:
home_sections = self.HOME_SECTIONS
home_sections.remove('library_stats')
self.HOME_SECTIONS = home_sections
self.CONFIG_VERSION = 5
if self.CONFIG_VERSION == 5:
self.MONITOR_PMS_UPDATES = 0
self.CONFIG_VERSION = 6
if self.CONFIG_VERSION == 6:
if self.GIT_USER.lower() == 'drzoidberg33':
self.GIT_USER = 'JonnyWong16'
self.CONFIG_VERSION = 7
if self.CONFIG_VERSION == 7:
self.CONFIG_VERSION = 8
if self.CONFIG_VERSION == 8:
self.CONFIG_VERSION = 9
if self.CONFIG_VERSION == 9:
if self.PMS_UPDATE_CHANNEL == 'plexpass':
self.PMS_UPDATE_CHANNEL = 'beta'
self.CONFIG_VERSION = 10
if self.CONFIG_VERSION == 10:
self.GIT_USER = 'Tautulli'
self.GIT_REPO = 'Tautulli'
self.CONFIG_VERSION = 11
if self.CONFIG_VERSION == 11:
self.ANON_REDIRECT = self.ANON_REDIRECT.replace('http://www.nullrefer.com/?',
'https://www.nullrefer.com/?')
self.CONFIG_VERSION = 12
if self.CONFIG_VERSION == 12:
self.BUFFER_THRESHOLD = max(self.BUFFER_THRESHOLD, 10)
self.CONFIG_VERSION = 13
if self.CONFIG_VERSION == 13:
self.CONFIG_VERSION = 14
if self.CONFIG_VERSION == 14:
if jellypy.DOCKER:
self.PLEXPY_AUTO_UPDATE = 0
self.CONFIG_VERSION = 15
if self.CONFIG_VERSION == 15:
if self.HTTP_ROOT and self.HTTP_ROOT != '/':
self.JWT_UPDATE_SECRET = True
self.CONFIG_VERSION = 16
if self.CONFIG_VERSION == 16:
if jellypy.SNAP:
self.PLEXPY_AUTO_UPDATE = 0
self.CONFIG_VERSION = 17
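# Illustrative sketch (not part of the committed file): the chained
# "if self.CONFIG_VERSION == n:" blocks above form a forward-only migration
# ladder; an old config walks each step once, in order, until it reaches the
# newest version. The standalone function below shows the same pattern with
# hypothetical keys.
def upgrade_settings(settings):
    if settings['version'] == 0:
        settings['version'] = 1
    if settings['version'] == 1:
        settings.setdefault('new_option', 'default')  # hypothetical key introduced in version 2
        settings['version'] = 2
    return settings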

473
jellypy/database.py Normal file
View File

@@ -0,0 +1,473 @@
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
from future.builtins import object
import os
import sqlite3
import shutil
import threading
import time
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from jellypy import helpers
from jellypy import logger
FILENAME = "tautulli.db"
db_lock = threading.Lock()
IS_IMPORTING = False
def set_is_importing(value):
global IS_IMPORTING
IS_IMPORTING = value
def validate_database(database=None):
try:
connection = sqlite3.connect(database, timeout=20)
except (sqlite3.OperationalError, sqlite3.DatabaseError, ValueError) as e:
logger.error("Tautulli Database :: Invalid database specified: %s", e)
return 'Invalid database specified'
except Exception as e:
logger.error("Tautulli Database :: Uncaught exception: %s", e)
return 'Uncaught exception'
try:
connection.execute('SELECT started from session_history')
connection.close()
except (sqlite3.OperationalError, sqlite3.DatabaseError, ValueError) as e:
logger.error("Tautulli Database :: Invalid database specified: %s", e)
return 'Invalid database specified'
except Exception as e:
logger.error("Tautulli Database :: Uncaught exception: %s", e)
return 'Uncaught exception'
return 'success'
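# Illustrative usage (not part of the committed file; the path is hypothetical):
# validate_database() returns the string 'success' or a short error message,
# so callers compare against 'success' before importing.
#
#     status = validate_database('/path/to/old-tautulli.db')
#     if status != 'success':
#         logger.error("Import aborted: %s", status)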
def import_tautulli_db(database=None, method=None, backup=False):
if IS_IMPORTING:
logger.warn("Tautulli Database :: Another Tautulli database is currently being imported. "
"Please wait until it is complete before importing another database.")
return False
db_validate = validate_database(database=database)
if not db_validate == 'success':
logger.error("Tautulli Database :: Failed to import Tautulli database: %s", db_validate)
return False
if method not in ('merge', 'overwrite'):
logger.error("Tautulli Database :: Failed to import Tautulli database: invalid import method '%s'", method)
return False
if backup:
# Make a backup of the current database first
logger.info("Tautulli Database :: Creating a database backup before importing.")
if not make_backup():
logger.error("Tautulli Database :: Failed to import Tautulli database: failed to create database backup")
return False
logger.info("Tautulli Database :: Importing Tautulli database '%s' with import method '%s'...", database, method)
set_is_importing(True)
db = MonitorDatabase()
db.connection.execute('BEGIN IMMEDIATE')
db.connection.execute('ATTACH ? AS import_db', [database])
# Get the current number of used ids in the session_history table
session_history_seq = db.select_single('SELECT seq FROM sqlite_sequence WHERE name = "session_history"')
session_history_rows = session_history_seq.get('seq', 0)
session_history_tables = ('session_history', 'session_history_metadata', 'session_history_media_info')
if method == 'merge':
logger.info("Tautulli Database :: Creating temporary database tables to re-index grouped session history.")
for table_name in session_history_tables:
db.action('CREATE TABLE {table}_copy AS SELECT * FROM import_db.{table}'.format(table=table_name))
db.action('UPDATE {table}_copy SET id = id + ?'.format(table=table_name),
[session_history_rows])
if table_name == 'session_history':
db.action('UPDATE {table}_copy SET reference_id = reference_id + ?'.format(table=table_name),
[session_history_rows])
# Keep track of all table columns so that duplicates can be removed after importing
table_columns = {}
tables = db.select('SELECT name FROM import_db.sqlite_master '
'WHERE type = "table" AND name NOT LIKE "sqlite_%"'
'ORDER BY name')
for table in tables:
table_name = table['name']
if table_name == 'sessions':
# Skip temporary sessions table
continue
current_table = db.select('PRAGMA main.table_info({table})'.format(table=table_name))
if not current_table:
# Skip tables that do not exist in the current database
continue
logger.info("Tautulli Database :: Importing database table '%s'.", table_name)
if method == 'overwrite':
# Clear the table and reset the autoincrement ids
db.action('DELETE FROM {table}'.format(table=table_name))
db.action('DELETE FROM sqlite_sequence WHERE name = ?', [table_name])
if method == 'merge' and table_name in session_history_tables:
from_db_name = 'main'
from_table_name = table_name + '_copy'
else:
from_db_name = 'import_db'
from_table_name = table_name
# Get the list of columns to import
current_columns = [c['name'] for c in current_table]
import_table = db.select('PRAGMA {from_db}.table_info({from_table})'.format(from_db=from_db_name,
from_table=from_table_name))
if method == 'merge' and table_name not in session_history_tables:
import_columns = [c['name'] for c in import_table if c['name'] in current_columns and not c['pk']]
else:
import_columns = [c['name'] for c in import_table if c['name'] in current_columns]
table_columns[table_name] = import_columns
insert_columns = ', '.join(import_columns)
# Insert the data with ignore instead of replace to be safe
db.action('INSERT OR IGNORE INTO {table} ({columns}) '
'SELECT {columns} FROM {from_db}.{from_table}'.format(table=table_name,
columns=insert_columns,
from_db=from_db_name,
from_table=from_table_name))
db.connection.execute('DETACH import_db')
if method == 'merge':
for table_name, columns in sorted(table_columns.items()):
duplicate_columns = ', '.join([c for c in columns if c not in ('id', 'reference_id')])
logger.info("Tautulli Database :: Removing duplicate rows from database table '%s'.", table_name)
if table_name in session_history_tables[1:]:
db.action('DELETE FROM {table} WHERE id NOT IN '
'(SELECT id FROM session_history)'.format(table=table_name))
else:
db.action('DELETE FROM {table} WHERE id NOT IN '
'(SELECT MIN(id) FROM {table} GROUP BY {columns})'.format(table=table_name,
columns=duplicate_columns))
logger.info("Tautulli Database :: Deleting temporary database tables.")
for table_name in session_history_tables:
db.action('DROP TABLE {table}_copy'.format(table=table_name))
vacuum()
logger.info("Tautulli Database :: Tautulli database import complete.")
set_is_importing(False)
def integrity_check():
monitor_db = MonitorDatabase()
result = monitor_db.select_single('PRAGMA integrity_check')
return result
def clear_table(table=None):
if table:
monitor_db = MonitorDatabase()
logger.debug("Tautulli Database :: Clearing database table '%s'." % table)
try:
monitor_db.action('DELETE FROM %s' % table)
vacuum()
return True
except Exception as e:
logger.error("Tautulli Database :: Failed to clear database table '%s': %s." % (table, e))
return False
def delete_sessions():
logger.info("Tautulli Database :: Clearing temporary sessions from database.")
return clear_table('sessions')
def delete_recently_added():
logger.info("Tautulli Database :: Clearing recently added items from database.")
return clear_table('recently_added')
def delete_exports():
logger.info("Tautulli Database :: Clearing exported items from database.")
return clear_table('exports')
def delete_rows_from_table(table, row_ids):
if row_ids and isinstance(row_ids, str):
row_ids = list(map(helpers.cast_to_int, row_ids.split(',')))
if row_ids:
logger.info("Tautulli Database :: Deleting row ids %s from %s database table", row_ids, table)
# SQLite versions prior to 3.32.0 (2020-05-22) have a maximum variable limit of 999
# https://sqlite.org/limits.html
sqlite_max_variable_number = 999
monitor_db = MonitorDatabase()
try:
for row_ids_group in helpers.chunk(row_ids, sqlite_max_variable_number):
query = "DELETE FROM " + table + " WHERE id IN (%s) " % ','.join(['?'] * len(row_ids_group))
monitor_db.action(query, row_ids_group)
vacuum()
except Exception as e:
logger.error("Tautulli Database :: Failed to delete rows from %s database table: %s" % (table, e))
return False
return True
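# Illustrative usage (not part of the committed file; the ids are hypothetical):
# both a comma-separated string and a list of ids are accepted, and deletes are
# issued in chunks of at most 999 bound parameters per statement.
#
#     delete_rows_from_table('session_history', '101,102,103')
#     delete_rows_from_table('session_history', [101, 102, 103])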
def delete_session_history_rows(row_ids=None):
success = []
for table in ('session_history', 'session_history_media_info', 'session_history_metadata'):
success.append(delete_rows_from_table(table=table, row_ids=row_ids))
return all(success)
def delete_user_history(user_id=None):
if str(user_id).isdigit():
monitor_db = MonitorDatabase()
# Get all history associated with the user_id
result = monitor_db.select('SELECT id FROM session_history WHERE user_id = ?',
[user_id])
row_ids = [row['id'] for row in result]
logger.info("Tautulli Database :: Deleting all history for user_id %s from database." % user_id)
return delete_session_history_rows(row_ids=row_ids)
def delete_library_history(section_id=None):
if str(section_id).isdigit():
monitor_db = MonitorDatabase()
# Get all history associated with the section_id
result = monitor_db.select('SELECT session_history.id FROM session_history '
'JOIN session_history_metadata ON session_history.id = session_history_metadata.id '
'WHERE session_history_metadata.section_id = ?',
[section_id])
row_ids = [row['id'] for row in result]
logger.info("Tautulli Database :: Deleting all history for library section_id %s from database." % section_id)
return delete_session_history_rows(row_ids=row_ids)
def vacuum():
monitor_db = MonitorDatabase()
logger.info("Tautulli Database :: Vacuuming database.")
try:
monitor_db.action('VACUUM')
except Exception as e:
logger.error("Tautulli Database :: Failed to vacuum database: %s" % e)
def optimize():
monitor_db = MonitorDatabase()
logger.info("Tautulli Database :: Optimizing database.")
try:
monitor_db.action('PRAGMA optimize')
except Exception as e:
logger.error("Tautulli Database :: Failed to optimize database: %s" % e)
def optimize_db():
vacuum()
optimize()
def db_filename(filename=FILENAME):
""" Returns the filepath to the db """
return os.path.join(jellypy.DATA_DIR, filename)
def make_backup(cleanup=False, scheduler=False):
""" Makes a backup of db, removes all but the last 5 backups """
# Check the integrity of the database first
integrity = (integrity_check()['integrity_check'] == 'ok')
corrupt = ''
if not integrity:
corrupt = '.corrupt'
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})
if scheduler:
backup_file = 'tautulli.backup-{}{}.sched.db'.format(helpers.now(), corrupt)
else:
backup_file = 'tautulli.backup-{}{}.db'.format(helpers.now(), corrupt)
backup_folder = jellypy.CONFIG.BACKUP_DIR
backup_file_fp = os.path.join(backup_folder, backup_file)
# In case the user has deleted it manually
if not os.path.exists(backup_folder):
os.makedirs(backup_folder)
db = MonitorDatabase()
db.connection.execute('BEGIN IMMEDIATE')
shutil.copyfile(db_filename(), backup_file_fp)
db.connection.rollback()
# Only cleanup if the database integrity is okay
if cleanup and integrity:
now = time.time()
# Delete all scheduled backups older than BACKUP_DAYS.
for root, dirs, files in os.walk(backup_folder):
db_files = [os.path.join(root, f) for f in files if f.endswith('.sched.db')]
for file_ in db_files:
if os.stat(file_).st_mtime < now - jellypy.CONFIG.BACKUP_DAYS * 86400:
try:
os.remove(file_)
except OSError as e:
logger.error("Tautulli Database :: Failed to delete %s from the backup folder: %s" % (file_, e))
if backup_file in os.listdir(backup_folder):
logger.debug("Tautulli Database :: Successfully backed up %s to %s" % (db_filename(), backup_file))
return True
else:
logger.error("Tautulli Database :: Failed to backup %s to %s" % (db_filename(), backup_file))
return False
def get_cache_size():
# This will protect against typecasting problems produced by empty string and None settings
if not jellypy.CONFIG.CACHE_SIZEMB:
# sqlite will work with this (very slowly)
return 0
return int(jellypy.CONFIG.CACHE_SIZEMB)
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
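# Illustrative usage (not part of the committed file): with dict_factory set as
# the row_factory, every fetched row is a plain dict keyed by column name.
#
#     import sqlite3
#     conn = sqlite3.connect(':memory:')
#     conn.row_factory = dict_factory
#     conn.execute('CREATE TABLE t (id INTEGER, name TEXT)')
#     conn.execute("INSERT INTO t VALUES (1, 'abc')")
#     conn.execute('SELECT * FROM t').fetchone()   # -> {'id': 1, 'name': 'abc'}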
class MonitorDatabase(object):
def __init__(self, filename=FILENAME):
self.filename = filename
self.connection = sqlite3.connect(db_filename(filename), timeout=20)
# Set database synchronous mode (default NORMAL)
self.connection.execute("PRAGMA synchronous = %s" % jellypy.CONFIG.SYNCHRONOUS_MODE)
# Set database journal mode (default WAL)
self.connection.execute("PRAGMA journal_mode = %s" % jellypy.CONFIG.JOURNAL_MODE)
# Set database cache size (default 32MB)
self.connection.execute("PRAGMA cache_size = -%s" % (get_cache_size() * 1024))
self.connection.row_factory = dict_factory
def action(self, query, args=None, return_last_id=False):
if query is None:
return
with db_lock:
sql_result = None
attempts = 0
while attempts < 5:
try:
with self.connection as c:
if args is None:
sql_result = c.execute(query)
else:
sql_result = c.execute(query, args)
# Our transaction was successful, leave the loop
break
except sqlite3.OperationalError as e:
e = str(e)
if "unable to open database file" in e or "database is locked" in e:
logger.warn("Tautulli Database :: Database Error: %s", e)
attempts += 1
time.sleep(1)
else:
logger.error("Tautulli Database :: Database error: %s", e)
raise
except sqlite3.DatabaseError as e:
logger.error("Tautulli Database :: Fatal Error executing %s :: %s", query, e)
raise
return sql_result
def select(self, query, args=None):
sql_results = self.action(query, args).fetchall()
if sql_results is None or sql_results == [None]:
return []
return sql_results
def select_single(self, query, args=None):
sql_results = self.action(query, args).fetchone()
if sql_results is None or sql_results == "":
return {}
return sql_results
def upsert(self, table_name, value_dict, key_dict):
trans_type = 'update'
changes_before = self.connection.total_changes
gen_params = lambda my_dict: [x + " = ?" for x in my_dict]
update_query = "UPDATE " + table_name + " SET " + ", ".join(gen_params(value_dict)) + \
" WHERE " + " AND ".join(gen_params(key_dict))
self.action(update_query, list(value_dict.values()) + list(key_dict.values()))
if self.connection.total_changes == changes_before:
trans_type = 'insert'
insert_query = (
"INSERT INTO " + table_name + " (" + ", ".join(list(value_dict.keys()) + list(key_dict.keys())) + ")" +
" VALUES (" + ", ".join(["?"] * len(list(value_dict.keys()) + list(key_dict.keys()))) + ")"
)
try:
self.action(insert_query, list(value_dict.values()) + list(key_dict.values()))
except sqlite3.IntegrityError:
logger.info("Tautulli Database :: Queries failed: %s and %s", update_query, insert_query)
# We want to know if it was an update or insert
return trans_type
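# Illustrative usage (not part of the committed file; table and column names
# are only examples): upsert() tries an UPDATE first and falls back to an
# INSERT when no row changed, returning which path was taken.
#
#     db = MonitorDatabase()
#     result = db.upsert('users',
#                        value_dict={'friendly_name': 'Alice'},
#                        key_dict={'user_id': 12345})
#     result  # -> 'update' if the row existed, otherwise 'insert'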
def last_insert_id(self):
# Get the last insert row id
result = self.select_single(query='SELECT last_insert_rowid() AS last_id')
if result:
return result.get('last_id', None)

1906
jellypy/datafactory.py Normal file

File diff suppressed because it is too large Load Diff

288
jellypy/datatables.py Normal file
View File

@@ -0,0 +1,288 @@
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import object
import re
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import logger
else:
from jellypy import database
from jellypy import helpers
from jellypy import logger
class DataTables(object):
"""
Server side processing for Datatables
"""
def __init__(self):
self.ssp_db = database.MonitorDatabase()
def ssp_query(self,
table_name=None,
table_name_union=None,
columns=[],
columns_union=[],
custom_where=[],
custom_where_union=[],
group_by=[],
group_by_union=[],
join_types=[],
join_tables=[],
join_evals=[],
kwargs=None):
if not table_name:
logger.error('Tautulli DataTables :: No table name received.')
return None
# Fetch all our parameters
if kwargs.get('json_data'):
parameters = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data'))
else:
logger.error('Tautulli DataTables :: Parameters for Datatables must be sent as a serialised json object '
'named json_data.')
return None
extracted_columns = self.extract_columns(columns=columns)
join = self.build_join(join_types, join_tables, join_evals)
group = self.build_grouping(group_by)
c_where, cw_args = self.build_custom_where(custom_where)
order = self.build_order(parameters['order'],
extracted_columns['column_named'],
parameters['columns'])
where, w_args = self.build_where(parameters['search']['value'],
extracted_columns['column_named'],
parameters['columns'])
# Build union parameters
if table_name_union:
extracted_columns_union = self.extract_columns(columns=columns_union)
group_u = self.build_grouping(group_by_union)
c_where_u, cwu_args = self.build_custom_where(custom_where_union)
union = 'UNION SELECT %s FROM %s %s %s' % (extracted_columns_union['column_string'],
table_name_union,
c_where_u,
group_u)
else:
union = ''
cwu_args = []
args = cw_args + cwu_args + w_args
# Build the query
query = 'SELECT * FROM (SELECT %s FROM %s %s %s %s %s) %s %s' \
% (extracted_columns['column_string'], table_name, join, c_where, group, union, where, order)
# logger.debug("Query: %s" % query)
# Execute the query
filtered = self.ssp_db.select(query, args=args)
# Remove NULL rows
filtered = [row for row in filtered if not all(v is None for v in row.values())]
# Build grand totals
totalcount = self.ssp_db.select('SELECT COUNT(id) as total_count from %s' % table_name)[0]['total_count']
# Get draw counter
draw_counter = int(parameters['draw'])
# Paginate results
result = filtered[parameters['start']:(parameters['start'] + parameters['length'])]
output = {'result': result,
'draw': draw_counter,
'filteredCount': len(filtered),
'totalCount': totalcount}
return output
def build_grouping(self, group_by=[]):
# Build grouping
group = ''
for g in group_by:
group += g + ', '
if group:
group = 'GROUP BY ' + group.rstrip(', ')
return group
def build_join(self, join_types=[], join_tables=[], join_evals=[]):
# Build join parameters
join = ''
for i, join_type in enumerate(join_types):
if join_type.upper() == 'LEFT OUTER JOIN':
join += 'LEFT OUTER JOIN %s ON %s = %s ' % (join_tables[i], join_evals[i][0], join_evals[i][1])
elif join_type.upper() == 'JOIN' or join_type.upper() == 'INNER JOIN':
join += 'JOIN %s ON %s = %s ' % (join_tables[i], join_evals[i][0], join_evals[i][1])
return join
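# Illustrative usage (not part of the committed file; table/column names are
# examples, and DataTables() assumes an initialized jellypy environment): the
# three parallel lists describe one join each.
#
#     dt = DataTables()
#     dt.build_join(join_types=['LEFT OUTER JOIN'],
#                   join_tables=['users'],
#                   join_evals=[['session_history.user_id', 'users.user_id']])
#     # -> 'LEFT OUTER JOIN users ON session_history.user_id = users.user_id '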
def build_custom_where(self, custom_where=[]):
# Build custom where parameters
c_where = ''
args = []
for w in custom_where:
if isinstance(w[1], (list, tuple)) and len(w[1]):
c_where += '('
for w_ in w[1]:
if w_ is None:
c_where += w[0] + ' IS NULL OR '
elif str(w_).startswith('LIKE '):
c_where += w[0] + ' LIKE ? OR '
args.append(w_[5:])
else:
c_where += w[0] + ' = ? OR '
args.append(w_)
c_where = c_where.rstrip(' OR ') + ') AND '
else:
if w[1] is None:
c_where += w[0] + ' IS NULL AND '
elif str(w[1]).startswith('LIKE '):
c_where += w[0] + ' LIKE ? AND '
args.append(w[1][5:])
else:
c_where += w[0] + ' = ? AND '
args.append(w[1])
if c_where:
c_where = 'WHERE ' + c_where.rstrip(' AND ')
return c_where, args
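# A hedged illustration (column names and values are made up): a custom_where of
# [['user_id', 123], ['media_type', ['movie', 'episode']]] produces
# "WHERE user_id = ? AND (media_type = ? OR media_type = ?)"
# with args [123, 'movie', 'episode'].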
def build_order(self, order_param=[], columns=[], dt_columns=[]):
# Build ordering
order = ''
for o in order_param:
sort_order = ' COLLATE NOCASE'
if o['dir'] == 'desc':
sort_order += ' DESC'
# We first see if a name was sent through for the column sort.
if dt_columns[int(o['column'])]['data']:
# We have a name, now check if it's a valid column name for our query
# so we don't just inject a random value
if any(d.lower() == dt_columns[int(o['column'])]['data'].lower()
for d in columns):
order += dt_columns[int(o['column'])]['data'] + '%s, ' % sort_order
else:
# If we receive a bogus name, don't sort at all.
pass
# If no name exists for the column, just use the column index to sort
else:
order += columns[int(o['column'])] + ', '
if order:
order = 'ORDER BY ' + order.rstrip(', ')
return order
def build_where(self, search_param='', columns=[], dt_columns=[]):
# Build where parameters
where = ''
args = []
if search_param:
for i, s in enumerate(dt_columns):
if s['searchable']:
# We first see if a name was sent through for the column search.
if s['data']:
# We have a name, now check if it's a valid column name for our query
# so we don't just inject a random value
if any(d.lower() == s['data'].lower() for d in columns):
where += s['data'] + ' LIKE ? OR '
args.append('%' + search_param + '%')
else:
# If we receive a bogus name, don't search at all.
pass
# If no name exists for the column, just use the column index to search
else:
where += columns[i] + ' LIKE ? OR '
args.append('%' + search_param + '%')
if where:
where = 'WHERE ' + where.rstrip(' OR ')
return where, args
# This method extracts column data from our column list
# The first parameter is required, the match_columns parameter is optional and will cause the function to
# only return results if the value also exists in the match_columns 'data' field
@staticmethod
def extract_columns(columns=None, match_columns=None):
columns_string = ''
columns_literal = []
columns_named = []
columns_order = []
for column in columns:
# We allow using "as" in column names for more complex sql functions.
# This function breaks up the column to get all its parts.
as_search = re.compile(' as ', re.IGNORECASE)
if re.search(as_search, column):
column_named = re.split(as_search, column)[1].rpartition('.')[-1]
column_literal = re.split(as_search, column)[0]
column_order = re.split(as_search, column)[1]
if match_columns:
if any(d['data'].lower() == column_named.lower() for d in match_columns):
columns_string += column + ', '
columns_literal.append(column_literal)
columns_named.append(column_named)
columns_order.append(column_order)
else:
columns_string += column + ', '
columns_literal.append(column_literal)
columns_named.append(column_named)
columns_order.append(column_order)
else:
column_named = column.rpartition('.')[-1]
if match_columns:
if any(d['data'].lower() == column_named.lower() for d in match_columns):
columns_string += column + ', '
columns_literal.append(column)
columns_named.append(column_named)
columns_order.append(column)
else:
columns_string += column + ', '
columns_literal.append(column)
columns_named.append(column_named)
columns_order.append(column)
columns_string = columns_string.rstrip(', ')
# We return a dict of the column params
# column_string is a comma-separated list of the exact column variables received.
# column_literal is the text before the "as" if we have an "as". Usually a function.
# column_named is the text after the "as", if we have an "as". Any table prefix is also stripped off.
# We use this to match with columns received from the Datatables request.
# column_order is the text after the "as", if we have an "as". Any table prefix is left intact.
column_data = {'column_string': columns_string,
'column_literal': columns_literal,
'column_named': columns_named,
'column_order': columns_order
}
return column_data
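# A rough usage sketch of the class above; the table, columns, and json_data contents
# are illustrative assumptions, not taken from the rest of the codebase.
# data_tables = DataTables()
# history = data_tables.ssp_query(table_name='session_history',
#                                 columns=['session_history.id',
#                                          'session_history.started AS start_time'],
#                                 custom_where=[['session_history.user_id', 123]],
#                                 join_types=['LEFT OUTER JOIN'],
#                                 join_tables=['users'],
#                                 join_evals=[['session_history.user_id', 'users.user_id']],
#                                 kwargs={'json_data': json_data})
# where json_data is the serialised Datatables request (draw, start, length, order,
# columns, search) posted by the web interface.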

23
jellypy/exceptions.py Normal file
View File

@@ -0,0 +1,23 @@
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
class PlexPyException(Exception):
"""
Generic Tautulli Exception - should never be thrown, only subclassed
"""

2418
jellypy/exporter.py Normal file

File diff suppressed because it is too large Load Diff

1222
jellypy/graphs.py Normal file

File diff suppressed because it is too large Load Diff

1662
jellypy/helpers.py Normal file

File diff suppressed because it is too large Load Diff

217
jellypy/http_handler.py Normal file
View File

@@ -0,0 +1,217 @@
# -*- coding: utf-8 -*-
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import object
from future.builtins import str
from multiprocessing.dummy import Pool as ThreadPool
from future.moves.urllib.parse import urljoin
import certifi
import requests
import urllib3
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from jellypy import helpers
from jellypy import logger
class HTTPHandler(object):
"""
Retrieve data from Plex Server
"""
def __init__(self, urls, headers=None, token=None, timeout=10, ssl_verify=True, silent=False):
self._valid_request_types = {'GET', 'POST', 'PUT', 'DELETE'}
self._silent = silent
if isinstance(urls, str):
# Accept either a comma- or whitespace-separated list of urls
self.urls = urls.split(',') if ',' in urls else urls.split()
else:
self.urls = urls
if headers:
self.headers = headers
else:
self.headers = {
'X-Plex-Product': jellypy.common.PRODUCT,
'X-Plex-Version': jellypy.common.RELEASE,
'X-Plex-Client-Identifier': jellypy.CONFIG.PMS_UUID,
'X-Plex-Platform': jellypy.common.PLATFORM,
'X-Plex-Platform-Version': jellypy.common.PLATFORM_RELEASE,
'X-Plex-Device': '{} {}'.format(jellypy.common.PLATFORM,
jellypy.common.PLATFORM_RELEASE),
'X-Plex-Device-Name': jellypy.common.PLATFORM_DEVICE_NAME
}
self.token = token
if self.token:
self.headers['X-Plex-Token'] = self.token
self._session = requests.Session()
self.timeout = timeout
self.ssl_verify = certifi.where() if ssl_verify else False
if not self.ssl_verify:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
self.uri = None
self.data = None
self.request_type = 'GET'
self.output_format = 'raw'
self.return_type = False
self.callback = None
self.request_kwargs = {}
def make_request(self,
uri=None,
headers=None,
data=None,
request_type='GET',
output_format='raw',
return_type=False,
no_token=False,
timeout=None,
callback=None,
**request_kwargs):
"""
Handle the HTTP requests.
Output: list
"""
self.uri = str(uri)
self.data = data
self.request_type = request_type.upper()
self.output_format = output_format.lower()
self.return_type = return_type
self.callback = callback
self.timeout = timeout or self.timeout
self.request_kwargs = request_kwargs
if self.request_type not in self._valid_request_types:
logger.debug("HTTP request made but unsupported request type given.")
return None
if uri:
request_urls = [urljoin(str(url), self.uri) for url in self.urls]
if no_token:
self.headers.pop('X-Plex-Token', None)
if headers:
self.headers.update(headers)
responses = []
for r in self._http_requests_pool(request_urls):
responses.append(r)
return responses[0]
else:
logger.debug("HTTP request made but no uri endpoint provided.")
return None
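# A hedged usage sketch; the server URL, token, and endpoint below are placeholders.
# handler = HTTPHandler(urls='http://localhost:32400', token='xxxxxxxxxxxxxxxx')
# sessions_xml = handler.make_request(uri='/status/sessions',
#                                     request_type='GET',
#                                     output_format='xml')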
def _http_requests_pool(self, urls, workers=10, chunk=None):
"""Generator function to request urls in chunks"""
# From cpython
if chunk is None:
chunk, extra = divmod(len(urls), workers * 4)
if extra:
chunk += 1
if len(urls) == 0:
chunk = 0
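# Illustrative: 25 urls with 10 workers gives divmod(25, 40) = (0, 25), so chunk = 1.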
if len(urls) == 1:
yield self._http_requests_single(urls[0])
else:
pool = ThreadPool(workers)
try:
for work in pool.imap_unordered(self._http_requests_single, urls, chunk):
yield work
except Exception as e:
if not self._silent:
logger.error("Failed to yield request: %s" % e)
finally:
pool.close()
pool.join()
def _http_requests_single(self, url):
"""Request the data from the url"""
error_msg = "Failed to access uri endpoint %s. " % self.uri
try:
r = self._session.request(self.request_type, url, headers=self.headers, data=self.data,
timeout=self.timeout, verify=self.ssl_verify, **self.request_kwargs)
r.raise_for_status()
except requests.exceptions.Timeout as e:
if not self._silent:
logger.error(error_msg + "Request timed out: %s", e)
return None
except requests.exceptions.SSLError as e:
if not self._silent:
logger.error(error_msg + "Is your server maybe accepting SSL connections only? %s", e)
return None
except requests.exceptions.HTTPError as e:
if not self._silent:
logger.error(error_msg + "Status code %s", e)
return None
except requests.exceptions.ConnectionError as e:
if not self._silent:
logger.error(error_msg + "Connection error: %s", e)
return None
except requests.exceptions.RequestException as e:
if not self._silent:
logger.error(error_msg + "Uncaught exception: %s", e)
return None
response_status = r.status_code
response_content = r.content
response_headers = r.headers
if response_status in (200, 201):
return self._http_format_output(response_content, response_headers)
def _http_format_output(self, response_content, response_headers):
"""Formats the request response to the desired type"""
try:
if self.output_format == 'text':
output = response_content.decode('utf-8', 'ignore')
elif self.output_format == 'dict':
output = helpers.convert_xml_to_dict(response_content)
elif self.output_format == 'json':
output = helpers.convert_xml_to_json(response_content)
elif self.output_format == 'xml':
output = helpers.parse_xml(response_content)
else:
output = response_content
if self.callback:
return self.callback(output)
if self.return_type:
return output, response_headers['Content-Type']
return output
except Exception as e:
if not self._silent:
logger.warn("Failed format response from uri %s to %s error %s" % (self.uri, self.output_format, e))
return None

1178
jellypy/libraries.py Normal file

File diff suppressed because it is too large Load Diff

108
jellypy/lock.py Normal file
View File

@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import object
import future.moves.queue as queue
import time
import threading
import jellypy
if jellypy.PYTHON2:
import logger
else:
from jellypy import logger
class TimedLock(object):
"""
Enforce request rate limit if applicable. This uses the lock so there
is synchronized access to the API. When N threads enter this method, the
first will pass through, since there was no last request recorded.
The last request time will be set. Then, the second thread will unlock,
and see that the last request was X seconds ago. It will sleep
(request_limit - X) seconds, and then continue. Then the third one will
unblock, and so on. After all threads finish, the total time will at
least be (N * request_limit) seconds. If some request takes longer than
request_limit seconds, the next unblocked thread will wait less.
"""
def __init__(self, minimum_delta=0):
"""
Set up the lock
"""
self.lock = threading.Lock()
self.last_used = 0
self.minimum_delta = minimum_delta
self.queue = queue.Queue()
def __enter__(self):
"""
Called when with lock: is invoked
"""
self.lock.acquire()
delta = time.time() - self.last_used
sleep_amount = self.minimum_delta - delta
if sleep_amount >= 0:
# zero sleeps give the cpu a chance to task-switch
logger.debug('Sleeping %s (interval)', sleep_amount)
time.sleep(sleep_amount)
while not self.queue.empty():
try:
seconds = self.queue.get(False)
logger.debug('Sleeping %s (queued)', seconds)
time.sleep(seconds)
except queue.Empty:
continue
self.queue.task_done()
def __exit__(self, type, value, traceback):
"""
Called when exiting the with block.
"""
self.last_used = time.time()
self.lock.release()
def snooze(self, seconds):
"""
Asynchronously add time to the next request. Can be called outside
of the lock context, but it is possible for the next lock holder
to not check the queue until after something adds time to it.
"""
# We use a queue so that we don't have to synchronize
# across threads and with or without locks
logger.info('Adding %s to queue', seconds)
self.queue.put(seconds)
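# A minimal usage sketch (the 2-second interval and 30-second snooze are illustrative):
# api_lock = TimedLock(minimum_delta=2)
# with api_lock:
#     pass  # perform the rate-limited request here
# api_lock.snooze(30)  # e.g. back off further after a rate-limit response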
class FakeLock(object):
"""
If no locking or request throttling is needed, use this
"""
def __enter__(self):
"""
Do nothing on enter
"""
pass
def __exit__(self, type, value, traceback):
"""
Do nothing on exit
"""
pass

108
jellypy/log_reader.py Normal file
View File

@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from io import open
import os
import jellypy
if jellypy.PYTHON2:
import helpers
import logger
else:
from jellypy import helpers
from jellypy import logger
def get_log_tail(window=20, parsed=True, log_type="server"):
if jellypy.CONFIG.PMS_LOGS_FOLDER:
log_file = ""
if log_type == "server":
log_file = os.path.join(jellypy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Server.log')
elif log_type == "scanner":
log_file = os.path.join(jellypy.CONFIG.PMS_LOGS_FOLDER, 'Plex Media Scanner.log')
else:
return []
try:
logfile = open(log_file, "r", encoding="utf-8")
except IOError as e:
logger.error('Unable to open Plex Log file. %s' % e)
return []
log_lines = tail(logfile, window)
if parsed:
line_error = False
clean_lines = []
for i in log_lines:
try:
log_time = i.split(' [')[0]
log_level = i.split('] ', 1)[1].split(' - ', 1)[0]
log_msg = i.split('] ', 1)[1].split(' - ', 1)[1]
full_line = [log_time, log_level, log_msg]
clean_lines.append(full_line)
except:
line_error = True
full_line = ['', '', 'Unable to parse log line.']
clean_lines.append(full_line)
if line_error:
logger.error('Tautulli was unable to parse some lines of the Plex Media Server log.')
return clean_lines
else:
raw_lines = []
for i in log_lines:
raw_lines.append(helpers.latinToAscii(i))
return raw_lines
return log_lines
# http://stackoverflow.com/a/13790289/2405162
def tail(f, lines=1, _buffer=4098):
"""Tail a file and get X lines from the end"""
# place holder for the lines found
lines_found = []
# block counter will be multiplied by buffer
# to get the block size from the end
block_counter = -1
# loop until we find X lines
while len(lines_found) < lines:
try:
f.seek(block_counter * _buffer, os.SEEK_END)
except IOError: # either file is too small, or too many lines requested
f.seek(0)
lines_found = f.readlines()
break
lines_found = f.readlines()
# we found enough lines, get out
if len(lines_found) > lines:
break
# decrement the block counter to get the
# next X bytes
block_counter -= 1
return lines_found[-lines:]
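# Illustrative usage with an assumed file path:
# with open('/path/to/Plex Media Server.log', 'r', encoding='utf-8') as f:
#     last_lines = tail(f, lines=20)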

443
jellypy/logger.py Normal file
View File

@@ -0,0 +1,443 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
from logutils.queue import QueueHandler, QueueListener
from logging import handlers
import cherrypy
import contextlib
import errno
import logging
import multiprocessing
import os
import re
import sys
import threading
import traceback
import jellypy
if jellypy.PYTHON2:
import helpers
from config import _BLACKLIST_KEYS, _WHITELIST_KEYS
else:
from jellypy import helpers
from jellypy.config import _BLACKLIST_KEYS, _WHITELIST_KEYS
# These settings are for file logging only
FILENAME = "tautulli.log"
FILENAME_API = "tautulli_api.log"
FILENAME_PLEX_WEBSOCKET = "plex_websocket.log"
MAX_SIZE = 5000000 # 5 MB
MAX_FILES = 5
_BLACKLIST_WORDS = set()
# Tautulli logger
logger = logging.getLogger("tautulli")
# Tautulli API logger
logger_api = logging.getLogger("tautulli_api")
# Tautulli websocket logger
logger_plex_websocket = logging.getLogger("plex_websocket")
# Global queue for multiprocessing logging
queue = None
def blacklist_config(config):
blacklist = set()
blacklist_keys = ['HOOK', 'APIKEY', 'KEY', 'PASSWORD', 'TOKEN']
for key, value in config.items():
if isinstance(value, str) and len(value.strip()) > 5 and \
key.upper() not in _WHITELIST_KEYS and (key.upper() in blacklist_keys or
any(bk in key.upper() for bk in _BLACKLIST_KEYS)):
blacklist.add(value.strip())
_BLACKLIST_WORDS.update(blacklist)
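# Illustrative (assuming the key is not in _WHITELIST_KEYS): a config item such as
# {'PASSWORD': 'hunter22'} gets its value added to _BLACKLIST_WORDS, so any later log
# message containing that value is masked by BlacklistFilter below.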
class NoThreadFilter(logging.Filter):
"""
Log filter for the current thread
"""
def __init__(self, threadName):
super(NoThreadFilter, self).__init__()
self.threadName = threadName
def filter(self, record):
return not record.threadName == self.threadName
# Taken from Hellowlol/HTPC-Manager
class BlacklistFilter(logging.Filter):
"""
Log filter for blacklisted tokens and passwords
"""
def __init__(self):
super(BlacklistFilter, self).__init__()
def filter(self, record):
if not jellypy.CONFIG.LOG_BLACKLIST:
return True
for item in _BLACKLIST_WORDS:
try:
if item in record.msg:
record.msg = record.msg.replace(item, 16 * '*')
args = []
for arg in record.args:
try:
arg_str = str(arg)
if item in arg_str:
arg_str = arg_str.replace(item, 16 * '*')
arg = arg_str
except:
pass
args.append(arg)
record.args = tuple(args)
except:
pass
return True
class RegexFilter(logging.Filter):
"""
Base class for regex log filter
"""
def __init__(self):
super(RegexFilter, self).__init__()
self.regex = re.compile(r'')
def filter(self, record):
if not jellypy.CONFIG.LOG_BLACKLIST:
return True
try:
matches = self.regex.findall(record.msg)
for match in matches:
record.msg = self.replace(record.msg, match)
args = []
for arg in record.args:
try:
arg_str = str(arg)
matches = self.regex.findall(arg_str)
if matches:
for match in matches:
arg_str = self.replace(arg_str, match)
arg = arg_str
except:
pass
args.append(arg)
record.args = tuple(args)
except:
pass
return True
def replace(self, text, match):
return text
class PublicIPFilter(RegexFilter):
"""
Log filter for public IP addresses
"""
def __init__(self):
super(PublicIPFilter, self).__init__()
# Currently only checking for ipv4 addresses
self.regex = re.compile(r'[0-9]+(?:[.-][0-9]+){3}(?!\d*-[a-z0-9]{6})')
def replace(self, text, ip):
if helpers.is_public_ip(ip.replace('-', '.')):
partition = '-' if '-' in ip else '.'
return text.replace(ip, partition.join(['***'] * 4))
return text
class EmailFilter(RegexFilter):
"""
Log filter for email addresses
"""
def __init__(self):
super(EmailFilter, self).__init__()
self.regex = re.compile(r'([a-z0-9!#$%&\'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&\'*+/=?^_`{|}~-]+)*@'
r'(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)',
re.IGNORECASE)
def replace(self, text, email):
email_parts = email.partition('@')
return text.replace(email, 16 * '*' + email_parts[1] + 8 * '*')
class PlexTokenFilter(RegexFilter):
"""
Log filter for X-Plex-Token
"""
def __init__(self):
super(PlexTokenFilter, self).__init__()
self.regex = re.compile(r'X-Plex-Token(?:=|%3D)([a-zA-Z0-9]+)')
def replace(self, text, token):
return text.replace(token, 16 * '*')
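# Illustrative: with this filter attached, a log record containing
# "X-Plex-Token=abc123DEF456" would be written out as "X-Plex-Token=****************".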
@contextlib.contextmanager
def listener():
"""
Wrapper that create a QueueListener, starts it and automatically stops it.
To be used in a with statement in the main process, for multiprocessing.
"""
global queue
# Initialize queue if not already done
if queue is None:
try:
queue = multiprocessing.Queue()
except OSError as e:
queue = False
# Some machines don't have access to /dev/shm. See
# http://stackoverflow.com/questions/2009278 for more information.
if e.errno == errno.EACCES:
logger.warning('Multiprocess logging disabled, because '
'current user cannot map shared memory. You won\'t see any '
'logging generated by the worker processes.')
# Multiprocess logging may be disabled.
if not queue:
yield
else:
queue_listener = QueueListener(queue, *logger.handlers)
try:
queue_listener.start()
yield
finally:
queue_listener.stop()
def initMultiprocessing():
"""
Remove all handlers and add QueueHandler on top. This should only be called
inside a multiprocessing worker process, since it changes the logger
completely.
"""
# Multiprocess logging may be disabled.
if not queue:
return
# Remove all handlers and add the Queue handler as the only one.
for handler in logger.handlers[:]:
logger.removeHandler(handler)
queue_handler = QueueHandler(queue)
queue_handler.setLevel(logging.DEBUG)
logger.addHandler(queue_handler)
# Change current thread name for log record
threading.current_thread().name = multiprocessing.current_process().name
def initLogger(console=False, log_dir=False, verbose=False):
"""
Setup logging for Tautulli. It uses the logger instance with the name
'tautulli'. Three log handlers are added:
* RotatingFileHandler: for the file tautulli.log
* LogListHandler: for Web UI
* StreamHandler: for console (if console)
Console logging is only enabled if console is set to True. This method can
be invoked multiple times, during different stages of Tautulli.
"""
# Close and remove old handlers. This is required to reinit the loggers
# at runtime
log_handlers = logger.handlers[:] + \
logger_api.handlers[:] + \
logger_plex_websocket.handlers[:] + \
cherrypy.log.error_log.handlers[:]
for handler in log_handlers:
# Just make sure it is cleaned up.
if isinstance(handler, handlers.RotatingFileHandler):
handler.close()
elif isinstance(handler, logging.StreamHandler):
handler.flush()
logger.removeHandler(handler)
logger_api.removeHandler(handler)
logger_plex_websocket.removeHandler(handler)
cherrypy.log.error_log.removeHandler(handler)
# Configure the logger to accept all messages
logger.propagate = False
logger.setLevel(logging.DEBUG if verbose else logging.INFO)
logger_api.propagate = False
logger_api.setLevel(logging.DEBUG if verbose else logging.INFO)
logger_plex_websocket.propagate = False
logger_plex_websocket.setLevel(logging.DEBUG if verbose else logging.INFO)
cherrypy.log.error_log.propagate = False
# Setup file logger
if log_dir:
file_formatter = logging.Formatter('%(asctime)s - %(levelname)-7s :: %(threadName)s : %(message)s', '%Y-%m-%d %H:%M:%S')
# Main Tautulli logger
filename = os.path.join(log_dir, FILENAME)
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES, encoding='utf-8')
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
cherrypy.log.error_log.addHandler(file_handler)
# Tautulli API logger
filename = os.path.join(log_dir, FILENAME_API)
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES, encoding='utf-8')
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(file_formatter)
logger_api.addHandler(file_handler)
# Tautulli websocket logger
filename = os.path.join(log_dir, FILENAME_PLEX_WEBSOCKET)
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES, encoding='utf-8')
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(file_formatter)
logger_plex_websocket.addHandler(file_handler)
# Setup console logger
if console:
console_formatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(threadName)s : %(message)s', '%Y-%m-%d %H:%M:%S')
console_handler = logging.StreamHandler()
console_handler.setFormatter(console_formatter)
console_handler.setLevel(logging.DEBUG)
logger.addHandler(console_handler)
cherrypy.log.error_log.addHandler(console_handler)
# Add filters to log handlers
# Only add filters after the config file has been initialized
# Nothing prior to initialization should contain sensitive information
if not jellypy.DEV and jellypy.CONFIG:
log_handlers = logger.handlers + \
logger_api.handlers + \
logger_plex_websocket.handlers + \
cherrypy.log.error_log.handlers
for handler in log_handlers:
handler.addFilter(BlacklistFilter())
handler.addFilter(PublicIPFilter())
handler.addFilter(EmailFilter())
handler.addFilter(PlexTokenFilter())
# Install exception hooks
initHooks()
def initHooks(global_exceptions=True, thread_exceptions=True, pass_original=True):
"""
This method installs exception catching mechanisms. Any exception caught
will pass through the exception hook, and will be logged to the logger as
an error. Additionally, a traceback is provided.
This is very useful for crashing threads and any other bugs that may not
be exposed when running as a daemon.
The default exception hook is still considered, if pass_original is True.
"""
def excepthook(*exception_info):
# We should always catch this to prevent loops!
try:
message = "".join(traceback.format_exception(*exception_info))
logger.error("Uncaught exception: %s", message)
except:
pass
# Original excepthook
if pass_original:
sys.__excepthook__(*exception_info)
# Global exception hook
if global_exceptions:
sys.excepthook = excepthook
# Thread exception hook
if thread_exceptions:
old_init = threading.Thread.__init__
def new_init(self, *args, **kwargs):
old_init(self, *args, **kwargs)
old_run = self.run
def new_run(*args, **kwargs):
try:
old_run(*args, **kwargs)
except (KeyboardInterrupt, SystemExit):
raise
except:
excepthook(*sys.exc_info())
self.run = new_run
# Monkey patch the run() by monkey patching the __init__ method
threading.Thread.__init__ = new_init
def shutdown():
logging.shutdown()
# Expose logger methods
# Main Tautulli logger
info = logger.info
warn = logger.warning
error = logger.error
debug = logger.debug
warning = logger.warning
exception = logger.exception
# Tautulli API logger
api_info = logger_api.info
api_warn = logger_api.warning
api_error = logger_api.error
api_debug = logger_api.debug
api_warning = logger_api.warning
api_exception = logger_api.exception
# Tautulli websocket logger
websocket_info = logger_plex_websocket.info
websocket_warn = logger_plex_websocket.warning
websocket_error = logger_plex_websocket.error
websocket_debug = logger_plex_websocket.debug
websocket_warning = logger_plex_websocket.warning
websocket_exception = logger_plex_websocket.exception

204
jellypy/macos.py Normal file
View File

@@ -0,0 +1,204 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
import sys
import plistlib
try:
import AppKit
import Foundation
HAS_PYOBJC = True
except ImportError:
HAS_PYOBJC = False
if HAS_PYOBJC:
import rumps
import jellypy
if jellypy.PYTHON2:
import common
import logger
import versioncheck
else:
from jellypy import common
from jellypy import logger
from jellypy import versioncheck
class MacOSSystemTray(object):
def __init__(self):
self.image_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/', jellypy.CONFIG.INTERFACE, 'images')
self.icon = os.path.join(self.image_dir, 'logo-flat-white.ico')
if jellypy.UPDATE_AVAILABLE:
self.update_title = 'Check for Updates - Update Available!'
else:
self.update_title = 'Check for Updates'
self.menu = [
rumps.MenuItem('Open Tautulli', callback=self.tray_open),
None,
rumps.MenuItem('Start Tautulli at Login', callback=self.tray_startup),
rumps.MenuItem('Open Browser when Tautulli Starts', callback=self.tray_browser),
None,
rumps.MenuItem(self.update_title, callback=self.tray_check_update),
rumps.MenuItem('Restart', callback=self.tray_restart),
rumps.MenuItem('Quit', callback=self.tray_quit)
]
if not jellypy.FROZEN:
self.menu.insert(6, rumps.MenuItem('Update', callback=self.tray_update))
self.menu[2].state = jellypy.CONFIG.LAUNCH_STARTUP
self.menu[3].state = jellypy.CONFIG.LAUNCH_BROWSER
self.tray_icon = rumps.App(common.PRODUCT, icon=self.icon, template=True,
menu=self.menu, quit_button=None)
def start(self):
logger.info("Launching MacOS menu bar icon.")
try:
self.tray_icon.run()
except Exception as e:
logger.error("Unable to launch menu bar icon: %s." % e)
def shutdown(self):
rumps.quit_application()
def update(self, **kwargs):
if 'icon' in kwargs:
self.tray_icon.icon = kwargs['icon']
def tray_open(self, tray_icon):
jellypy.launch_browser(jellypy.CONFIG.HTTP_HOST, jellypy.HTTP_PORT, jellypy.HTTP_ROOT)
def tray_startup(self, tray_icon):
jellypy.CONFIG.LAUNCH_STARTUP = not jellypy.CONFIG.LAUNCH_STARTUP
set_startup()
def tray_browser(self, tray_icon):
jellypy.CONFIG.LAUNCH_BROWSER = not jellypy.CONFIG.LAUNCH_BROWSER
set_startup()
def tray_check_update(self, tray_icon):
versioncheck.check_update()
def tray_update(self, tray_icon):
if jellypy.UPDATE_AVAILABLE:
jellypy.SIGNAL = 'update'
else:
self.update_title = 'Check for Updates - No Update Available'
self.menu[5].title = self.update_title
def tray_restart(self, tray_icon):
jellypy.SIGNAL = 'restart'
def tray_quit(self, tray_icon):
jellypy.SIGNAL = 'shutdown'
def change_tray_update_icon(self):
if jellypy.UPDATE_AVAILABLE:
self.update_title = 'Check for Updates - Update Available!'
else:
self.update_title = 'Check for Updates'
self.menu[5].title = self.update_title
def change_tray_icons(self):
self.tray_icon.menu['Start Tautulli at Login'].state = jellypy.CONFIG.LAUNCH_STARTUP
self.tray_icon.menu['Open Browser when Tautulli Starts'].state = jellypy.CONFIG.LAUNCH_BROWSER
def set_startup():
if jellypy.MAC_SYS_TRAY_ICON:
jellypy.MAC_SYS_TRAY_ICON.change_tray_icons()
if jellypy.INSTALL_TYPE == 'macos':
if jellypy.CONFIG.LAUNCH_STARTUP:
try:
subprocess.Popen(['osascript',
'-e', 'tell application "System Events"',
'-e', 'get the name of every login item',
'-e', 'if not exists login item "Tautulli" then '
'make login item at end with properties '
'{path:"/Applications/Tautulli.app", hidden:false}',
'-e', 'end tell'])
logger.info("Added Tautulli to MacOS login items.")
return True
except OSError as e:
logger.error("Failed to add Tautulli to MacOS login items: %s", e)
return False
else:
try:
subprocess.Popen(['osascript',
'-e', 'tell application "System Events"',
'-e', 'get the name of every login item',
'-e', 'if exists login item "Tautulli" then '
'delete login item "Tautulli"',
'-e', 'end tell'])
logger.info("Removed Tautulli from MacOS login items.")
return True
except OSError as e:
logger.error("Failed to remove Tautulli from MacOS login items: %s", e)
return False
else:
launch_agents = os.path.join(os.path.expanduser('~'), 'Library/LaunchAgents')
plist_file = 'com.Tautulli.Tautulli.plist'
plist_file_path = os.path.join(launch_agents, plist_file)
exe = sys.executable
run_args = [arg for arg in jellypy.ARGS if arg != '--nolaunch']
if jellypy.FROZEN:
args = [exe] + run_args
else:
args = [exe, jellypy.FULL_PATH] + run_args
plist_dict = {
'Label': common.PRODUCT,
'ProgramArguments': args,
'RunAtLoad': True
}
if jellypy.CONFIG.LAUNCH_STARTUP:
if not os.path.exists(launch_agents):
try:
os.makedirs(launch_agents)
except OSError:
return False
with open(plist_file_path, 'wb') as f:
try:
plistlib.dump(plist_dict, f)
except AttributeError:
plistlib.writePlist(plist_dict, f)
except OSError as e:
logger.error("Failed to create MacOS system startup plist file: %s", e)
return False
logger.info("Added Tautulli to MacOS system startup launch agents.")
return True
else:
try:
if os.path.isfile(plist_file_path):
os.remove(plist_file_path)
logger.info("Removed Tautulli from MacOS system startup launch agents.")
return True
except OSError as e:
logger.error("Failed to delete MacOS system startup plist file: %s", e)
return False

194
jellypy/mobile_app.py Normal file
View File

@@ -0,0 +1,194 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
import requests
import threading
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import logger
else:
from jellypy import database
from jellypy import helpers
from jellypy import logger
TEMP_DEVICE_TOKEN = None
INVALIDATE_TIMER = None
_ONESIGNAL_APP_ID = '3b4b666a-d557-4b92-acdf-e2c8c4b95357'
def set_temp_device_token(token=None):
global TEMP_DEVICE_TOKEN
TEMP_DEVICE_TOKEN = token
if TEMP_DEVICE_TOKEN:
logger._BLACKLIST_WORDS.add(TEMP_DEVICE_TOKEN)
else:
logger._BLACKLIST_WORDS.discard(TEMP_DEVICE_TOKEN)
if TEMP_DEVICE_TOKEN is not None:
global INVALIDATE_TIMER
if INVALIDATE_TIMER:
INVALIDATE_TIMER.cancel()
invalidate_time = 5 * 60 # 5 minutes
INVALIDATE_TIMER = threading.Timer(invalidate_time, set_temp_device_token, args=[None])
INVALIDATE_TIMER.start()
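# Illustrative: set_temp_device_token('abcd1234') makes the token available via
# get_temp_device_token() for 5 minutes, after which the timer resets it to None.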
def get_temp_device_token():
return TEMP_DEVICE_TOKEN
def get_mobile_devices(device_id=None, device_token=None):
where = where_id = where_token = ''
args = []
if device_id or device_token:
where = 'WHERE '
if device_id:
where_id += 'device_id = ?'
args.append(device_id)
if device_token:
where_token = 'device_token = ?'
args.append(device_token)
where += ' AND '.join([w for w in [where_id, where_token] if w])
db = database.MonitorDatabase()
result = db.select('SELECT * FROM mobile_devices %s' % where, args=args)
return result
def get_mobile_device_by_token(device_token=None):
if not device_token:
return None
return get_mobile_devices(device_token=device_token)
def add_mobile_device(device_id=None, device_name=None, device_token=None, friendly_name=None, onesignal_id=None):
db = database.MonitorDatabase()
keys = {'device_id': device_id}
values = {'device_name': device_name,
'device_token': device_token,
'onesignal_id': onesignal_id,
'official': validate_onesignal_id(onesignal_id=onesignal_id)}
if friendly_name:
values['friendly_name'] = friendly_name
try:
result = db.upsert(table_name='mobile_devices', key_dict=keys, value_dict=values)
blacklist_logger()
except Exception as e:
logger.warn("Tautulli MobileApp :: Failed to register mobile device in the database: %s." % e)
return
if result == 'insert':
logger.info("Tautulli MobileApp :: Registered mobile device '%s' in the database." % device_name)
else:
logger.debug("Tautulli MobileApp :: Re-registered mobile device '%s' in the database." % device_name)
return True
def get_mobile_device_config(mobile_device_id=None):
if str(mobile_device_id).isdigit():
mobile_device_id = int(mobile_device_id)
else:
logger.error("Tautulli MobileApp :: Unable to retrieve mobile device config: invalid mobile_device_id %s." % mobile_device_id)
return None
db = database.MonitorDatabase()
result = db.select_single('SELECT * FROM mobile_devices WHERE id = ?',
args=[mobile_device_id])
return result
def set_mobile_device_config(mobile_device_id=None, **kwargs):
if str(mobile_device_id).isdigit():
mobile_device_id = int(mobile_device_id)
else:
logger.error("Tautulli MobileApp :: Unable to set exisiting mobile device: invalid mobile_device_id %s." % mobile_device_id)
return False
keys = {'id': mobile_device_id}
values = {'friendly_name': kwargs.get('friendly_name', '')}
db = database.MonitorDatabase()
try:
db.upsert(table_name='mobile_devices', key_dict=keys, value_dict=values)
logger.info("Tautulli MobileApp :: Updated mobile device agent: mobile_device_id %s." % mobile_device_id)
blacklist_logger()
return True
except Exception as e:
logger.warn("Tautulli MobileApp :: Unable to update mobile device: %s." % e)
return False
def delete_mobile_device(mobile_device_id=None, device_id=None):
db = database.MonitorDatabase()
if mobile_device_id:
logger.debug("Tautulli MobileApp :: Deleting mobile_device_id %s from the database." % mobile_device_id)
result = db.action('DELETE FROM mobile_devices WHERE id = ?', args=[mobile_device_id])
return True
elif device_id:
logger.debug("Tautulli MobileApp :: Deleting device_id %s from the database." % device_id)
result = db.action('DELETE FROM mobile_devices WHERE device_id = ?', args=[device_id])
return True
else:
return False
def set_last_seen(device_token=None):
db = database.MonitorDatabase()
last_seen = helpers.timestamp()
try:
result = db.action('UPDATE mobile_devices SET last_seen = ? WHERE device_token = ?',
args=[last_seen, device_token])
except Exception as e:
logger.warn("Tautulli MobileApp :: Failed to set last_seen time for device: %s." % e)
return
def validate_onesignal_id(onesignal_id):
if onesignal_id is None:
return False
headers = {'Content-Type': 'application/json'}
payload = {'app_id': _ONESIGNAL_APP_ID}
r = requests.get('https://onesignal.com/api/v1/players/{}'.format(onesignal_id), headers=headers, json=payload)
return r.status_code == 200
def blacklist_logger():
devices = get_mobile_devices()
for d in devices:
logger.blacklist_config(d)

225
jellypy/newsletter_handler.py Normal file
View File

@@ -0,0 +1,225 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from io import open
import os
from apscheduler.triggers.cron import CronTrigger
import email.utils
import jellypy
if jellypy.PYTHON2:
import database
import helpers
import logger
import newsletters
else:
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import newsletters
NEWSLETTER_SCHED = None
def add_newsletter_each(newsletter_id=None, notify_action=None, **kwargs):
if not notify_action:
logger.debug("Tautulli NewsletterHandler :: Notify called but no action received.")
return
data = {'newsletter': True,
'newsletter_id': newsletter_id,
'notify_action': notify_action}
data.update(kwargs)
jellypy.NOTIFY_QUEUE.put(data)
def schedule_newsletters(newsletter_id=None):
newsletters_list = newsletters.get_newsletters(newsletter_id=newsletter_id)
for newsletter in newsletters_list:
newsletter_job_name = '{} (newsletter_id {})'.format(newsletter['agent_label'], newsletter['id'])
if newsletter['active']:
schedule_newsletter_job('newsletter-{}'.format(newsletter['id']), name=newsletter_job_name,
func=add_newsletter_each, args=[newsletter['id'], 'on_cron'], cron=newsletter['cron'])
else:
schedule_newsletter_job('newsletter-{}'.format(newsletter['id']), name=newsletter_job_name,
remove_job=True)
def schedule_newsletter_job(newsletter_job_id, name='', func=None, remove_job=False, args=None, cron=None):
# apscheduler day_of_week uses 0-6 = mon-sun
if cron:
cron = cron.split(' ')
cron[4] = str((int(cron[4]) - 1) % 7) if cron[4].isdigit() else cron[4]
cron = ' '.join(cron)
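# Illustrative: a standard crontab entry '0 8 * * 1' (08:00 every Monday) becomes
# '0 8 * * 0' before being handed to CronTrigger.from_crontab() below.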
if NEWSLETTER_SCHED.get_job(newsletter_job_id):
if remove_job:
NEWSLETTER_SCHED.remove_job(newsletter_job_id)
logger.info("Tautulli NewsletterHandler :: Removed scheduled newsletter: %s" % name)
else:
NEWSLETTER_SCHED.reschedule_job(
newsletter_job_id, args=args, trigger=CronTrigger.from_crontab(cron))
logger.info("Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
elif not remove_job:
NEWSLETTER_SCHED.add_job(
func, args=args, id=newsletter_job_id, trigger=CronTrigger.from_crontab(cron),
misfire_grace_time=None)
logger.info("Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)
def notify(newsletter_id=None, notify_action=None, **kwargs):
logger.info("Tautulli NewsletterHandler :: Preparing newsletter for newsletter_id %s." % newsletter_id)
newsletter_config = newsletters.get_newsletter_config(newsletter_id=newsletter_id)
if not newsletter_config:
return
if notify_action in ('test', 'api'):
subject = kwargs.pop('subject', None) or newsletter_config['subject']
body = kwargs.pop('body', None) or newsletter_config['body']
message = kwargs.pop('message', None) or newsletter_config['message']
else:
subject = newsletter_config['subject']
body = newsletter_config['body']
message = newsletter_config['message']
email_msg_id = email.utils.make_msgid()
email_reply_msg_id = get_last_newsletter_email_msg_id(newsletter_id=newsletter_id, notify_action=notify_action)
newsletter_agent = newsletters.get_agent_class(newsletter_id=newsletter_id,
newsletter_id_name=newsletter_config['id_name'],
agent_id=newsletter_config['agent_id'],
config=newsletter_config['config'],
email_config=newsletter_config['email_config'],
subject=subject,
body=body,
message=message,
email_msg_id=email_msg_id,
email_reply_msg_id=email_reply_msg_id
)
# Set the newsletter state in the db
newsletter_log_id = set_notify_state(newsletter=newsletter_config,
notify_action=notify_action,
subject=newsletter_agent.subject_formatted,
body=newsletter_agent.body_formatted,
message=newsletter_agent.message_formatted,
filename=newsletter_agent.filename_formatted,
start_date=newsletter_agent.start_date.format('YYYY-MM-DD'),
end_date=newsletter_agent.end_date.format('YYYY-MM-DD'),
start_time=newsletter_agent.start_time,
end_time=newsletter_agent.end_time,
newsletter_uuid=newsletter_agent.uuid,
email_msg_id=email_msg_id)
# Send the notification
success = newsletter_agent.send()
if success:
set_notify_success(newsletter_log_id)
return True
def set_notify_state(newsletter, notify_action, subject, body, message, filename,
start_date, end_date, start_time, end_time, newsletter_uuid, email_msg_id):
if newsletter and notify_action:
db = database.MonitorDatabase()
keys = {'timestamp': helpers.timestamp(),
'uuid': newsletter_uuid}
values = {'newsletter_id': newsletter['id'],
'agent_id': newsletter['agent_id'],
'agent_name': newsletter['agent_name'],
'notify_action': notify_action,
'subject_text': subject,
'body_text': body,
'message_text': message,
'start_date': start_date,
'end_date': end_date,
'start_time': start_time,
'end_time': end_time,
'email_msg_id': email_msg_id,
'filename': filename}
db.upsert(table_name='newsletter_log', key_dict=keys, value_dict=values)
return db.last_insert_id()
else:
logger.error("Tautulli NewsletterHandler :: Unable to set notify state.")
def set_notify_success(newsletter_log_id):
keys = {'id': newsletter_log_id}
values = {'success': 1}
db = database.MonitorDatabase()
db.upsert(table_name='newsletter_log', key_dict=keys, value_dict=values)
def get_last_newsletter_email_msg_id(newsletter_id, notify_action):
db = database.MonitorDatabase()
result = db.select_single('SELECT email_msg_id FROM newsletter_log '
'WHERE newsletter_id = ? AND notify_action = ? AND success = 1 '
'ORDER BY timestamp DESC LIMIT 1', [newsletter_id, notify_action])
if result:
return result['email_msg_id']
def get_newsletter(newsletter_uuid=None, newsletter_id_name=None):
db = database.MonitorDatabase()
if newsletter_uuid:
result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log '
'WHERE uuid = ?', [newsletter_uuid])
elif newsletter_id_name:
result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log '
'JOIN newsletters ON newsletters.id = newsletter_log.newsletter_id '
'WHERE id_name = ? AND notify_action != "test" '
'ORDER BY timestamp DESC LIMIT 1', [newsletter_id_name])
else:
result = None
if result:
newsletter_uuid = result['uuid']
start_date = result['start_date']
end_date = result['end_date']
newsletter_file = result['filename'] or 'newsletter_%s-%s_%s.html' % (start_date.replace('-', ''),
end_date.replace('-', ''),
newsletter_uuid)
newsletter_folder = jellypy.CONFIG.NEWSLETTER_DIR or os.path.join(jellypy.DATA_DIR, 'newsletters')
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
if newsletter_file in os.listdir(newsletter_folder):
try:
with open(newsletter_file_fp, 'r', encoding='utf-8') as n_file:
newsletter = n_file.read()
return newsletter
except OSError as e:
logger.error("Tautulli NewsletterHandler :: Failed to retrieve newsletter '%s': %s" % (newsletter_uuid, e))
else:
logger.warn("Tautulli NewsletterHandler :: Newsletter file '%s' is missing." % newsletter_file)

967
jellypy/newsletters.py Normal file
View File

@@ -0,0 +1,967 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import next
from future.builtins import str
from future.builtins import object
import arrow
from collections import OrderedDict
import json
from itertools import groupby
from mako.lookup import TemplateLookup
from mako import exceptions
import os
import re
import jellypy
if jellypy.PYTHON2:
import common
import database
import helpers
import libraries
import logger
import newsletter_handler
import pmsconnect
from notifiers import send_notification, EMAIL
else:
from jellypy import common
from jellypy import database
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import newsletter_handler
from jellypy import pmsconnect
from jellypy.notifiers import send_notification, EMAIL
AGENT_IDS = {
'recently_added': 0
}
def available_newsletter_agents():
agents = [
{
'label': 'Recently Added',
'name': 'recently_added',
'id': AGENT_IDS['recently_added']
}
]
return agents
def available_notification_actions():
actions = [{'label': 'Schedule',
'name': 'on_cron',
'description': 'Trigger a notification on a certain schedule.',
'subject': 'Tautulli Newsletter',
'body': 'Tautulli Newsletter',
'message': '',
'icon': 'fa-calendar',
'media_types': ('newsletter',)
}
]
return actions
def get_agent_class(newsletter_id=None, newsletter_id_name=None, agent_id=None, config=None, email_config=None,
start_date=None, end_date=None, subject=None, body=None, message=None,
email_msg_id=None, email_reply_msg_id=None):
if str(agent_id).isdigit():
agent_id = int(agent_id)
kwargs = {'newsletter_id': newsletter_id,
'newsletter_id_name': newsletter_id_name,
'config': config,
'email_config': email_config,
'start_date': start_date,
'end_date': end_date,
'subject': subject,
'body': body,
'message': message,
'email_msg_id': email_msg_id,
'email_reply_msg_id': email_reply_msg_id}
if agent_id == 0:
return RecentlyAdded(**kwargs)
else:
return Newsletter(**kwargs)
else:
return None
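# Illustrative: get_agent_class(agent_id=0, config={}, email_config={}) returns a
# RecentlyAdded instance; any other numeric agent_id falls back to the base Newsletter.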
def get_newsletter_agents():
return tuple(a['name'] for a in sorted(available_newsletter_agents(), key=lambda k: k['label']))
def get_newsletters(newsletter_id=None):
where = where_id = ''
args = []
if newsletter_id:
where = 'WHERE '
if newsletter_id:
where_id += 'id = ?'
args.append(newsletter_id)
where += ' AND '.join([w for w in [where_id] if w])
db = database.MonitorDatabase()
result = db.select('SELECT id, agent_id, agent_name, agent_label, '
'friendly_name, cron, active FROM newsletters %s' % where, args=args)
return result
def delete_newsletter(newsletter_id=None):
db = database.MonitorDatabase()
if str(newsletter_id).isdigit():
logger.debug("Tautulli Newsletters :: Deleting newsletter_id %s from the database."
% newsletter_id)
result = db.action('DELETE FROM newsletters WHERE id = ?', args=[newsletter_id])
return True
else:
return False
def get_newsletter_config(newsletter_id=None, mask_passwords=False):
if str(newsletter_id).isdigit():
newsletter_id = int(newsletter_id)
else:
logger.error("Tautulli Newsletters :: Unable to retrieve newsletter config: invalid newsletter_id %s."
% newsletter_id)
return None
db = database.MonitorDatabase()
result = db.select_single('SELECT * FROM newsletters WHERE id = ?', args=[newsletter_id])
if not result:
return None
try:
config = json.loads(result.pop('newsletter_config', '{}'))
email_config = json.loads(result.pop('email_config', '{}'))
subject = result.pop('subject')
body = result.pop('body')
message = result.pop('message')
newsletter_agent = get_agent_class(newsletter_id=newsletter_id, newsletter_id_name=result['id_name'],
agent_id=result['agent_id'],
config=config, email_config=email_config,
subject=subject, body=body, message=message)
except Exception as e:
logger.error("Tautulli Newsletters :: Failed to get newsletter config options: %s." % e)
return
if mask_passwords:
newsletter_agent.email_config = helpers.mask_config_passwords(newsletter_agent.email_config)
result['subject'] = newsletter_agent.subject
result['body'] = newsletter_agent.body
result['message'] = newsletter_agent.message
result['config'] = newsletter_agent.config
result['email_config'] = newsletter_agent.email_config
result['config_options'] = newsletter_agent.return_config_options(mask_passwords=mask_passwords)
result['email_config_options'] = newsletter_agent.return_email_config_options(mask_passwords=mask_passwords)
return result
def add_newsletter_config(agent_id=None, **kwargs):
if str(agent_id).isdigit():
agent_id = int(agent_id)
else:
logger.error("Tautulli Newsletters :: Unable to add new newsletter: invalid agent_id %s."
% agent_id)
return False
agent = next((a for a in available_newsletter_agents() if a['id'] == agent_id), None)
if not agent:
logger.error("Tautulli Newsletters :: Unable to retrieve new newsletter agent: invalid agent_id %s."
% agent_id)
return False
agent_class = get_agent_class(agent_id=agent['id'])
keys = {'id': None}
values = {'agent_id': agent['id'],
'agent_name': agent['name'],
'agent_label': agent['label'],
'id_name': '',
'friendly_name': '',
'newsletter_config': json.dumps(agent_class.config),
'email_config': json.dumps(agent_class.email_config),
'subject': agent_class.subject,
'body': agent_class.body,
'message': agent_class.message
}
db = database.MonitorDatabase()
try:
db.upsert(table_name='newsletters', key_dict=keys, value_dict=values)
newsletter_id = db.last_insert_id()
logger.info("Tautulli Newsletters :: Added new newsletter agent: %s (newsletter_id %s)."
% (agent['label'], newsletter_id))
blacklist_logger()
return newsletter_id
except Exception as e:
logger.warn("Tautulli Newsletters :: Unable to add newsletter agent: %s." % e)
return False
def set_newsletter_config(newsletter_id=None, agent_id=None, **kwargs):
if str(agent_id).isdigit():
agent_id = int(agent_id)
else:
logger.error("Tautulli Newsletters :: Unable to set existing newsletter: invalid agent_id %s."
% agent_id)
return False
agent = next((a for a in available_newsletter_agents() if a['id'] == agent_id), None)
if not agent:
logger.error("Tautulli Newsletters :: Unable to retrieve existing newsletter agent: invalid agent_id %s."
% agent_id)
return False
config_prefix = 'newsletter_config_'
email_config_prefix = 'newsletter_email_'
newsletter_config = {k[len(config_prefix):]: kwargs.pop(k)
for k in list(kwargs.keys()) if k.startswith(config_prefix)}
email_config = {k[len(email_config_prefix):]: kwargs.pop(k)
for k in list(kwargs.keys()) if k.startswith(email_config_prefix)}
for cfg, val in email_config.items():
# Check for password config keys and a blank password from the HTML form
if 'password' in cfg and val == ' ':
# Get the previous password so we don't overwrite it with a blank value
old_newsletter_config = get_newsletter_config(newsletter_id=newsletter_id)
email_config[cfg] = old_newsletter_config['email_config'][cfg]
subject = kwargs.pop('subject')
body = kwargs.pop('body')
message = kwargs.pop('message')
agent_class = get_agent_class(agent_id=agent['id'],
config=newsletter_config, email_config=email_config,
subject=subject, body=body, message=message)
keys = {'id': newsletter_id}
values = {'agent_id': agent['id'],
'agent_name': agent['name'],
'agent_label': agent['label'],
'id_name': kwargs.get('id_name', ''),
'friendly_name': kwargs.get('friendly_name', ''),
'newsletter_config': json.dumps(agent_class.config),
'email_config': json.dumps(agent_class.email_config),
'subject': agent_class.subject,
'body': agent_class.body,
'message': agent_class.message,
'cron': kwargs.get('cron'),
'active': kwargs.get('active')
}
db = database.MonitorDatabase()
try:
db.upsert(table_name='newsletters', key_dict=keys, value_dict=values)
logger.info("Tautulli Newsletters :: Updated newsletter agent: %s (newsletter_id %s)."
% (agent['label'], newsletter_id))
newsletter_handler.schedule_newsletters(newsletter_id=newsletter_id)
blacklist_logger()
return True
except Exception as e:
logger.warn("Tautulli Newsletters :: Unable to update newsletter agent: %s." % e)
return False
def send_newsletter(newsletter_id=None, subject=None, body=None, message=None, newsletter_log_id=None, **kwargs):
newsletter_config = get_newsletter_config(newsletter_id=newsletter_id)
if newsletter_config:
agent = get_agent_class(agent_id=newsletter_config['agent_id'],
config=newsletter_config['config'],
email_config=newsletter_config['email_config'],
subject=subject,
body=body,
message=message)
return agent.send()
else:
logger.debug("Tautulli Newsletters :: Notification requested but no newsletter_id received.")
def blacklist_logger():
db = database.MonitorDatabase()
notifiers = db.select('SELECT newsletter_config, email_config FROM newsletters')
for n in notifiers:
config = json.loads(n['newsletter_config'] or '{}')
logger.blacklist_config(config)
email_config = json.loads(n['email_config'] or '{}')
logger.blacklist_config(email_config)
def serve_template(templatename, **kwargs):
if jellypy.CONFIG.NEWSLETTER_CUSTOM_DIR:
logger.info("Tautulli Newsletters :: Using custom newsletter template directory.")
template_dir = jellypy.CONFIG.NEWSLETTER_CUSTOM_DIR
else:
interface_dir = os.path.join(str(jellypy.PROG_DIR), 'data/interfaces/')
template_dir = os.path.join(str(interface_dir), jellypy.CONFIG.NEWSLETTER_TEMPLATES)
if not jellypy.CONFIG.NEWSLETTER_INLINE_STYLES:
templatename = templatename.replace('.html', '.internal.html')
_hplookup = TemplateLookup(directories=[template_dir], default_filters=['unicode', 'h'])
try:
template = _hplookup.get_template(templatename)
return template.render(**kwargs), False
except:
return exceptions.html_error_template().render(), True
def generate_newsletter_uuid():
uuid = ''
uuid_exists = 0
db = database.MonitorDatabase()
while not uuid or uuid_exists:
uuid = jellypy.generate_uuid()[:8]
result = db.select_single(
'SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?) as uuid_exists', [uuid])
uuid_exists = result['uuid_exists']
return uuid
class Newsletter(object):
NAME = ''
_DEFAULT_CONFIG = {'custom_cron': 0,
'time_frame': 7,
'time_frame_units': 'days',
'formatted': 1,
'threaded': 0,
'notifier_id': 0,
'filename': '',
'save_only': 0}
_DEFAULT_EMAIL_CONFIG = EMAIL().return_default_config()
_DEFAULT_EMAIL_CONFIG['from_name'] = 'Tautulli Newsletter'
_DEFAULT_EMAIL_CONFIG['notifier_id'] = 0
_DEFAULT_SUBJECT = 'Tautulli Newsletter'
_DEFAULT_BODY = 'View the newsletter here: {newsletter_url}'
_DEFAULT_MESSAGE = ''
_DEFAULT_FILENAME = 'newsletter_{newsletter_uuid}.html'
_TEMPLATE = ''
def __init__(self, newsletter_id=None, newsletter_id_name=None, config=None, email_config=None,
start_date=None, end_date=None, subject=None, body=None, message=None,
email_msg_id=None, email_reply_msg_id=None):
self.config = self.set_config(config=config, default=self._DEFAULT_CONFIG)
self.email_config = self.set_config(config=email_config, default=self._DEFAULT_EMAIL_CONFIG)
self.uuid = generate_newsletter_uuid()
self.email_msg_id = email_msg_id
self.email_reply_msg_id = email_reply_msg_id
self.newsletter_id = newsletter_id
self.newsletter_id_name = newsletter_id_name or ''
self.start_date = None
self.end_date = None
if end_date:
try:
self.end_date = arrow.get(end_date, 'YYYY-MM-DD', tzinfo='local').ceil('day')
except ValueError:
pass
if self.end_date is None:
self.end_date = arrow.now()
if start_date:
try:
self.start_date = arrow.get(start_date, 'YYYY-MM-DD', tzinfo='local').floor('day')
except ValueError:
pass
if self.start_date is None:
if self.config['time_frame_units'] == 'days':
self.start_date = self.end_date.shift(days=-self.config['time_frame'])
else:
self.start_date = self.end_date.shift(hours=-self.config['time_frame'])
self.end_time = self.end_date.timestamp
self.start_time = self.start_date.timestamp
self.parameters = self.build_params()
self.subject = subject or self._DEFAULT_SUBJECT
self.body = body or self._DEFAULT_BODY
self.message = message or self._DEFAULT_MESSAGE
self.filename = self.config['filename'] or self._DEFAULT_FILENAME
if not self.filename.endswith('.html'):
self.filename += '.html'
self.subject_formatted, self.body_formatted, self.message_formatted = self.build_text()
self.filename_formatted = self.build_filename()
self.data = {}
self.newsletter = None
self.is_preview = False
self.template_error = None
def set_config(self, config=None, default=None):
return self._validate_config(config=config, default=default)
def _validate_config(self, config=None, default=None):
if config is None:
return default
new_config = {}
for k, v in default.items():
if isinstance(v, int):
new_config[k] = helpers.cast_to_int(config.get(k, v))
elif isinstance(v, list):
c = config.get(k, v)
if not isinstance(c, list):
new_config[k] = [c]
else:
new_config[k] = c
else:
new_config[k] = config.get(k, v)
return new_config
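# Illustrative sketch, not part of the original module, showing the coercion
# performed by _validate_config() for a subclass whose defaults include the
# list option 'incl_libraries':
#
#   default = {'time_frame': 7, 'incl_libraries': []}
#   config  = {'time_frame': '14', 'incl_libraries': '1'}
#   _validate_config(config=config, default=default)
#   # -> {'time_frame': 14, 'incl_libraries': ['1']}
#
# Integers are cast with helpers.cast_to_int(), a scalar is wrapped in a list
# when the default is a list, and keys missing from config fall back to the
# default value.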
def retrieve_data(self):
pass
def _has_data(self):
return False
def raw_data(self, preview=False):
if preview:
self.is_preview = True
self.retrieve_data()
return {'title': self.NAME,
'parameters': self.parameters,
'data': self.data}
def generate_newsletter(self, preview=False):
if preview:
self.is_preview = True
self.retrieve_data()
logger.info("Tautulli Newsletters :: Generating newsletter%s." % (' preview' if self.is_preview else ''))
newsletter_rendered, self.template_error = serve_template(
templatename=self._TEMPLATE,
uuid=self.uuid,
subject=self.subject_formatted,
body=self.body_formatted,
message=self.message_formatted,
parameters=self.parameters,
data=self.data,
preview=self.is_preview
)
if self.template_error:
return newsletter_rendered
# Force Tautulli footer
if '<!-- FOOTER MESSAGE - DO NOT REMOVE -->' in newsletter_rendered:
newsletter_rendered = newsletter_rendered.replace(
'<!-- FOOTER MESSAGE - DO NOT REMOVE -->',
'Newsletter generated by <a href="https://tautulli.com" target="_blank" '
'style="text-decoration: underline;color: inherit;font-size: inherit;">Tautulli</a>.'
)
else:
msg = ('<div style="text-align: center;padding-top: 100px;padding-bottom: 100px;">'
'<p style="font-family: \'Open Sans\', Helvetica, Arial, sans-serif;color: #282A2D;'
'font-size: 18px;line-height: 30px;">'
'The Tautulli newsletter footer was removed from the newsletter template.<br>'
'Please leave the footer in place as it is unobtrusive and supports '
'<a href="https://tautulli.com" target="_blank">Tautulli</a>.<br>Thank you.'
'</p></div>')
newsletter_rendered = re.sub(r'(<body.*?>)', r'\1' + msg, newsletter_rendered)
return newsletter_rendered
def send(self):
self.newsletter = self.generate_newsletter()
if self.template_error:
logger.error("Tautulli Newsletters :: %s newsletter failed to render template. Newsletter not sent." % self.NAME)
return False
if not self._has_data():
logger.warn("Tautulli Newsletters :: %s newsletter has no data. Newsletter not sent." % self.NAME)
return False
self._save()
if self.config['save_only']:
return True
return self._send()
def _save(self):
newsletter_file = self.filename_formatted
newsletter_folder = jellypy.CONFIG.NEWSLETTER_DIR or os.path.join(jellypy.DATA_DIR, 'newsletters')
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
# In case the user has deleted it manually
if not os.path.exists(newsletter_folder):
os.makedirs(newsletter_folder)
try:
with open(newsletter_file_fp, 'wb') as n_file:
for line in self.newsletter.splitlines():
if '<!-- IGNORE SAVE -->' not in line:
n_file.write((line + '\r\n').encode('utf-8'))
#n_file.write(line.strip())
logger.info("Tautulli Newsletters :: %s newsletter saved to '%s'" % (self.NAME, newsletter_file))
except OSError as e:
logger.error("Tautulli Newsletters :: Failed to save %s newsletter to '%s': %s"
% (self.NAME, newsletter_file, e))
def _send(self):
if self.config['formatted']:
newsletter_stripped = ''.join(l.strip() for l in self.newsletter.splitlines())
plaintext = 'HTML email support is required to view the newsletter.\n'
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and jellypy.CONFIG.HTTP_BASE_URL:
plaintext += self._DEFAULT_BODY.format(**self.parameters)
email_reply_msg_id = self.email_reply_msg_id if self.config['threaded'] else None
if self.email_config['notifier_id']:
return send_notification(
notifier_id=self.email_config['notifier_id'],
subject=self.subject_formatted,
body=newsletter_stripped,
plaintext=plaintext,
msg_id=self.email_msg_id,
reply_msg_id=email_reply_msg_id
)
else:
email = EMAIL(config=self.email_config)
return email.notify(
subject=self.subject_formatted,
body=newsletter_stripped,
plaintext=plaintext,
msg_id=self.email_msg_id,
reply_msg_id=email_reply_msg_id
)
elif self.config['notifier_id']:
return send_notification(
notifier_id=self.config['notifier_id'],
subject=self.subject_formatted,
body=self.body_formatted
)
def build_params(self):
parameters = self._build_params()
return parameters
def _build_params(self):
date_format = helpers.momentjs_to_arrow(jellypy.CONFIG.DATE_FORMAT)
if jellypy.CONFIG.NEWSLETTER_SELF_HOSTED and jellypy.CONFIG.HTTP_BASE_URL:
base_url = jellypy.CONFIG.HTTP_BASE_URL + jellypy.HTTP_ROOT + 'newsletter/'
else:
base_url = helpers.get_plexpy_url() + '/newsletter/'
parameters = {
'server_name': jellypy.CONFIG.PMS_NAME,
'start_date': self.start_date.format(date_format),
'end_date': self.end_date.format(date_format),
'current_year': self.start_date.year,
'current_month': self.start_date.month,
'current_day': self.start_date.day,
'current_hour': self.start_date.hour,
'current_minute': self.start_date.minute,
'current_second': self.start_date.second,
'current_weekday': self.start_date.isocalendar()[2],
'current_week': self.start_date.isocalendar()[1],
'week_number': self.start_date.isocalendar()[1],
'newsletter_time_frame': self.config['time_frame'],
'newsletter_time_frame_units': self.config['time_frame_units'],
'newsletter_url': base_url + self.uuid,
'newsletter_static_url': base_url + 'id/' + self.newsletter_id_name,
'newsletter_uuid': self.uuid,
'newsletter_id': self.newsletter_id,
'newsletter_id_name': self.newsletter_id_name,
'newsletter_password': jellypy.CONFIG.NEWSLETTER_PASSWORD
}
return parameters
def build_text(self):
from jellypy.notification_handler import CustomFormatter
custom_formatter = CustomFormatter()
try:
subject = custom_formatter.format(str(self.subject), **self.parameters)
except LookupError as e:
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter subject. Using fallback." % e)
subject = str(self._DEFAULT_SUBJECT).format(**self.parameters)
except Exception as e:
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
subject = str(self._DEFAULT_SUBJECT).format(**self.parameters)
try:
body = custom_formatter.format(str(self.body), **self.parameters)
except LookupError as e:
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter body. Using fallback." % e)
body = str(self._DEFAULT_BODY).format(**self.parameters)
except Exception as e:
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter body: %s. Using fallback." % e)
body = str(self._DEFAULT_BODY).format(**self.parameters)
try:
message = custom_formatter.format(str(self.message), **self.parameters)
except LookupError as e:
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter message. Using fallback." % e)
message = str(self._DEFAULT_MESSAGE).format(**self.parameters)
except Exception as e:
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter message: %s. Using fallback." % e)
message = str(self._DEFAULT_MESSAGE).format(**self.parameters)
return subject, body, message
def build_filename(self):
from jellypy.notification_handler import CustomFormatter
custom_formatter = CustomFormatter()
try:
filename = custom_formatter.format(str(self.filename), **self.parameters)
except LookupError as e:
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter filename. Using fallback." % e)
filename = str(self._DEFAULT_FILENAME).format(**self.parameters)
except Exception as e:
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
filename = str(self._DEFAULT_FILENAME).format(**self.parameters)
return filename
def return_config_options(self, mask_passwords=False):
config_options = self._return_config_options()
# Mask password config options
if mask_passwords:
helpers.mask_config_passwords(config_options)
return config_options
def _return_config_options(self):
config_options = []
return config_options
def return_email_config_options(self, mask_passwords=False):
config_options = EMAIL(self.email_config).return_config_options(mask_passwords=mask_passwords)
for c in config_options:
c['name'] = 'newsletter_' + c['name']
return config_options
class RecentlyAdded(Newsletter):
"""
Recently Added Newsletter
"""
NAME = 'Recently Added'
_DEFAULT_CONFIG = Newsletter._DEFAULT_CONFIG.copy()
_DEFAULT_CONFIG['incl_libraries'] = []
_DEFAULT_SUBJECT = 'Recently Added to {server_name}! ({end_date})'
_DEFAULT_BODY = 'View the newsletter here: {newsletter_url}'
_DEFAULT_MESSAGE = ''
_TEMPLATE = 'recently_added.html'
def _get_recently_added(self, media_type=None):
from jellypy.notification_handler import format_group_index
pms_connect = pmsconnect.PmsConnect()
recently_added = []
done = False
start = 0
while not done:
recent_items = pms_connect.get_recently_added_details(start=str(start), count='10', media_type=media_type)
filtered_items = [i for i in recent_items['recently_added']
if self.start_time < helpers.cast_to_int(i['added_at']) < self.end_time]
if len(filtered_items) < 10:
done = True
else:
start += 10
recently_added.extend(filtered_items)
if media_type in ('movie', 'other_video'):
movie_list = []
for item in recently_added:
# Filter included libraries
if item['section_id'] not in self.config['incl_libraries']:
continue
movie_list.append(item)
recently_added = movie_list
if media_type == 'show':
shows_list = []
show_rating_keys = []
for item in recently_added:
# Filter included libraries
if item['section_id'] not in self.config['incl_libraries']:
continue
if item['media_type'] == 'show':
show_rating_key = item['rating_key']
elif item['media_type'] == 'season':
show_rating_key = item['parent_rating_key']
elif item['media_type'] == 'episode':
show_rating_key = item['grandparent_rating_key']
if show_rating_key in show_rating_keys:
continue
show_metadata = pms_connect.get_metadata_details(show_rating_key, media_info=False)
children = pms_connect.get_item_children(show_rating_key, get_grandchildren=True)
filtered_children = [i for i in children['children_list']
if self.start_time < helpers.cast_to_int(i['added_at']) < self.end_time]
filtered_children.sort(key=lambda x: int(x['parent_media_index']))
seasons = []
for k, v in groupby(filtered_children, key=lambda x: x['parent_media_index']):
episodes = list(v)
num, num00 = format_group_index([helpers.cast_to_int(d['media_index']) for d in episodes])
seasons.append({'media_index': k,
'episode_range': num00,
'episode_count': len(episodes),
'episode': episodes})
num, num00 = format_group_index([helpers.cast_to_int(d['media_index']) for d in seasons])
show_metadata['season_range'] = num00
show_metadata['season_count'] = len(seasons)
show_metadata['season'] = seasons
shows_list.append(show_metadata)
show_rating_keys.append(show_rating_key)
recently_added = shows_list
if media_type == 'artist':
artists_list = []
artist_rating_keys = []
for item in recently_added:
# Filter included libraries
if item['section_id'] not in self.config['incl_libraries']:
continue
if item['media_type'] == 'artist':
artist_rating_key = item['rating_key']
elif item['media_type'] == 'album':
artist_rating_key = item['parent_rating_key']
elif item['media_type'] == 'track':
artist_rating_key = item['grandparent_rating_key']
if artist_rating_key in artist_rating_keys:
continue
artist_metadata = pms_connect.get_metadata_details(artist_rating_key, media_info=False)
children = pms_connect.get_item_children(artist_rating_key)
filtered_children = [i for i in children['children_list']
if self.start_time < helpers.cast_to_int(i['added_at']) < self.end_time]
filtered_children.sort(key=lambda x: x['added_at'])
albums = []
for a in filtered_children:
album_metadata = pms_connect.get_metadata_details(a['rating_key'], media_info=False)
album_metadata['track_count'] = helpers.cast_to_int(album_metadata['children_count'])
albums.append(album_metadata)
artist_metadata['album_count'] = len(albums)
artist_metadata['album'] = albums
artists_list.append(artist_metadata)
artist_rating_keys.append(artist_rating_key)
recently_added = artists_list
return recently_added
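# Illustrative sketch, not part of the original module: itertools.groupby()
# only merges adjacent items, which is why the episode list above is sorted
# on 'parent_media_index' before grouping. With hypothetical input:
#
#   episodes = [{'parent_media_index': '1', 'media_index': '1'},
#               {'parent_media_index': '1', 'media_index': '2'},
#               {'parent_media_index': '2', 'media_index': '5'}]
#   for season, eps in groupby(episodes, key=lambda x: x['parent_media_index']):
#       print(season, len(list(eps)))    # '1' 2, then '2' 1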
def retrieve_data(self):
from jellypy.notification_handler import get_img_info, set_hash_image_info
if not self.config['incl_libraries']:
logger.warn("Tautulli Newsletters :: Failed to retrieve %s newsletter data: no libraries selected." % self.NAME)
media_types = set()
for s in self._get_sections():
if str(s['section_id']) in self.config['incl_libraries']:
if s['section_type'] == 'movie' and s['agent'] == 'com.plexapp.agents.none':
media_types.add('other_video')
else:
media_types.add(s['section_type'])
recently_added = {}
for media_type in media_types:
if media_type not in recently_added:
recently_added[media_type] = self._get_recently_added(media_type)
movies = recently_added.get('movie', [])
shows = recently_added.get('show', [])
artists = recently_added.get('artist', [])
albums = [a for artist in artists for a in artist['album']]
other_video = recently_added.get('other_video', [])
if self.is_preview or helpers.get_img_service(include_self=True) == 'self-hosted':
for item in movies + shows + albums + other_video:
if item['media_type'] == 'album':
height = 150
fallback = 'cover'
else:
height = 225
fallback = 'poster'
item['thumb_hash'] = set_hash_image_info(
img=item['thumb'], width=150, height=height, fallback=fallback)
if item['art']:
item['art_hash'] = set_hash_image_info(
img=item['art'], width=500, height=280,
opacity=25, background='282828', blur=3, fallback='art')
else:
item['art_hash'] = ''
item['thumb_url'] = ''
item['art_url'] = ''
item['poster_url'] = item['thumb_url'] # Keep for backwards compatibility
elif helpers.get_img_service():
# Upload posters and art to image hosting service
for item in movies + shows + albums + other_video:
if item['media_type'] == 'album':
height = 150
fallback = 'cover'
else:
height = 225
fallback = 'poster'
img_info = get_img_info(
img=item['thumb'], rating_key=item['rating_key'], title=item['title'],
width=150, height=height, fallback=fallback)
item['thumb_url'] = img_info.get('img_url') or common.ONLINE_POSTER_THUMB
img_info = get_img_info(
img=item['art'], rating_key=item['rating_key'], title=item['title'],
width=500, height=280, opacity=25, background='282828', blur=3, fallback='art')
item['art_url'] = img_info.get('img_url')
item['thumb_hash'] = ''
item['art_hash'] = ''
item['poster_url'] = item['thumb_url'] # Keep for backwards compatibility
else:
for item in movies + shows + albums + other_video:
item['thumb_hash'] = ''
item['art_hash'] = ''
item['thumb_url'] = ''
item['art_url'] = ''
item['poster_url'] = item['thumb_url'] # Keep for backwards compatibility
self.data['recently_added'] = recently_added
return self.data
def _has_data(self):
recently_added = self.data.get('recently_added')
if recently_added and (
recently_added.get('movie') or
recently_added.get('show') or
recently_added.get('artist') or
recently_added.get('other_video')):
return True
return False
def _get_sections(self):
return libraries.Libraries().get_sections()
def _get_sections_options(self):
sections = {}
for s in self._get_sections():
if s['section_type'] != 'photo':
if s['section_type'] == 'movie' and s['agent'] == 'com.plexapp.agents.none':
library_type = 'other_video'
else:
library_type = s['section_type']
group = sections.get(library_type, [])
group.append({'value': s['section_id'],
'text': s['section_name']})
sections[library_type] = group
groups = OrderedDict([(k, v) for k, v in [
('Movie Libraries', sections.get('movie')),
('TV Show Libraries', sections.get('show')),
('Music Libraries', sections.get('artist')),
('Other Video Libraries', sections.get('other_video'))
] if v is not None])
return groups
def build_params(self):
parameters = self._build_params()
newsletter_libraries = []
for s in self._get_sections():
if str(s['section_id']) in self.config['incl_libraries']:
newsletter_libraries.append(s['section_name'])
parameters['newsletter_libraries'] = ', '.join(sorted(newsletter_libraries))
parameters['pms_identifier'] = jellypy.CONFIG.PMS_IDENTIFIER
parameters['pms_web_url'] = jellypy.CONFIG.PMS_WEB_URL
return parameters
def _return_config_options(self):
config_options = [
{'label': 'Included Libraries',
'value': self.config['incl_libraries'],
'description': 'Select the libraries to include in the newsletter.',
'name': 'newsletter_config_incl_libraries',
'input_type': 'selectize',
'select_options': self._get_sections_options()
}
]
return config_options
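# Illustrative usage sketch, not part of the original module; the id below is
# a placeholder for a saved row in the 'newsletters' table:
#
#   send_newsletter(newsletter_id=1)
#
# This looks up the stored config, instantiates the matching agent via
# get_agent_class(), renders the template, and then either emails the result
# or only writes it to the newsletter directory when 'save_only' is enabled.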

File diff suppressed because it is too large Load Diff

3874
jellypy/notifiers.py Normal file

File diff suppressed because it is too large Load Diff

42
jellypy/plex.py Normal file
View File

@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import object
from future.builtins import str
from plexapi.server import PlexServer
import jellypy
if jellypy.PYTHON2:
import logger
else:
from jellypy import logger
class Plex(object):
def __init__(self, url, token):
self.plex = PlexServer(url, token)
def get_library(self, section_id):
return self.plex.library.sectionByID(str(section_id))
def get_library_items(self, section_id):
return self.get_library(str(section_id)).all()
def get_item(self, rating_key):
return self.plex.fetchItem(rating_key)
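# Illustrative usage sketch, not part of the original module; the URL and
# token are placeholders:
#
#   plex = Plex('http://localhost:32400', 'xxxxxxxxxxxxxxxxxxxx')
#   items = plex.get_library_items(1)    # every item in library section id 1
#   movie = plex.get_item(12345)         # a single item by rating key
#
# Note: depending on the bundled plexapi version, library.sectionByID() may
# expect an integer key rather than the str() cast used above.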

467
jellypy/plexivity_import.py Normal file
View File

@@ -0,0 +1,467 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
import arrow
import sqlite3
from xml.dom import minidom
import jellypy
if jellypy.PYTHON2:
import activity_processor
import database
import helpers
import logger
import users
else:
from jellypy import activity_processor
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import users
def extract_plexivity_xml(xml=None):
output = {}
clean_xml = helpers.latinToAscii(xml)
try:
xml_parse = minidom.parseString(clean_xml)
except:
logger.warn("Tautulli Importer :: Error parsing XML for Plexivity database.")
return None
# I think Plexivity only tracked videos and not music?
xml_head = xml_parse.getElementsByTagName('Video')
if not xml_head:
logger.warn("Tautulli Importer :: Error parsing XML for Plexivity database.")
return None
for a in xml_head:
rating_key = helpers.get_xml_attr(a, 'ratingKey')
added_at = helpers.get_xml_attr(a, 'addedAt')
art = helpers.get_xml_attr(a, 'art')
duration = helpers.get_xml_attr(a, 'duration')
grandparent_rating_key = helpers.get_xml_attr(a, 'grandparentRatingKey')
grandparent_thumb = helpers.get_xml_attr(a, 'grandparentThumb')
grandparent_title = helpers.get_xml_attr(a, 'grandparentTitle')
original_title = helpers.get_xml_attr(a, 'originalTitle')
guid = helpers.get_xml_attr(a, 'guid')
section_id = helpers.get_xml_attr(a, 'librarySectionID')
media_index = helpers.get_xml_attr(a, 'index')
originally_available_at = helpers.get_xml_attr(a, 'originallyAvailableAt')
last_viewed_at = helpers.get_xml_attr(a, 'lastViewedAt')
parent_rating_key = helpers.get_xml_attr(a, 'parentRatingKey')
parent_media_index = helpers.get_xml_attr(a, 'parentIndex')
parent_thumb = helpers.get_xml_attr(a, 'parentThumb')
parent_title = helpers.get_xml_attr(a, 'parentTitle')
rating = helpers.get_xml_attr(a, 'rating')
thumb = helpers.get_xml_attr(a, 'thumb')
media_type = helpers.get_xml_attr(a, 'type')
updated_at = helpers.get_xml_attr(a, 'updatedAt')
view_offset = helpers.get_xml_attr(a, 'viewOffset')
year = helpers.get_xml_attr(a, 'year')
studio = helpers.get_xml_attr(a, 'studio')
title = helpers.get_xml_attr(a, 'title')
tagline = helpers.get_xml_attr(a, 'tagline')
directors = []
if a.getElementsByTagName('Director'):
director_elem = a.getElementsByTagName('Director')
for b in director_elem:
directors.append(helpers.get_xml_attr(b, 'tag'))
aspect_ratio = ''
audio_channels = None
audio_codec = ''
bitrate = None
container = ''
height = None
video_codec = ''
video_framerate = ''
video_resolution = ''
width = None
if a.getElementsByTagName('Media'):
media_elem = a.getElementsByTagName('Media')
for c in media_elem:
aspect_ratio = helpers.get_xml_attr(c, 'aspectRatio')
audio_channels = helpers.get_xml_attr(c, 'audioChannels')
audio_codec = helpers.get_xml_attr(c, 'audioCodec')
bitrate = helpers.get_xml_attr(c, 'bitrate')
container = helpers.get_xml_attr(c, 'container')
height = helpers.get_xml_attr(c, 'height')
video_codec = helpers.get_xml_attr(c, 'videoCodec')
video_framerate = helpers.get_xml_attr(c, 'videoFrameRate')
video_resolution = helpers.get_xml_attr(c, 'videoResolution')
width = helpers.get_xml_attr(c, 'width')
ip_address = ''
machine_id = ''
platform = ''
player = ''
if a.getElementsByTagName('Player'):
player_elem = a.getElementsByTagName('Player')
for d in player_elem:
ip_address = helpers.get_xml_attr(d, 'address').split('::ffff:')[-1]
machine_id = helpers.get_xml_attr(d, 'machineIdentifier')
platform = helpers.get_xml_attr(d, 'platform')
player = helpers.get_xml_attr(d, 'title')
transcode_audio_channels = None
transcode_audio_codec = ''
audio_decision = 'direct play'
transcode_container = ''
transcode_height = None
transcode_protocol = ''
transcode_video_codec = ''
video_decision = 'direct play'
transcode_width = None
if a.getElementsByTagName('TranscodeSession'):
transcode_elem = a.getElementsByTagName('TranscodeSession')
for e in transcode_elem:
transcode_audio_channels = helpers.get_xml_attr(e, 'audioChannels')
transcode_audio_codec = helpers.get_xml_attr(e, 'audioCodec')
audio_decision = helpers.get_xml_attr(e, 'audioDecision')
transcode_container = helpers.get_xml_attr(e, 'container')
transcode_height = helpers.get_xml_attr(e, 'height')
transcode_protocol = helpers.get_xml_attr(e, 'protocol')
transcode_video_codec = helpers.get_xml_attr(e, 'videoCodec')
video_decision = helpers.get_xml_attr(e, 'videoDecision')
transcode_width = helpers.get_xml_attr(e, 'width')
# Generate a combined transcode decision value
if video_decision == 'transcode' or audio_decision == 'transcode':
transcode_decision = 'transcode'
elif video_decision == 'copy' or audio_decision == 'copy':
transcode_decision = 'copy'
else:
transcode_decision = 'direct play'
user_id = None
if a.getElementsByTagName('User'):
user_elem = a.getElementsByTagName('User')
for f in user_elem:
user_id = helpers.get_xml_attr(f, 'id')
writers = []
if a.getElementsByTagName('Writer'):
writer_elem = a.getElementsByTagName('Writer')
for g in writer_elem:
writers.append(helpers.get_xml_attr(g, 'tag'))
actors = []
if a.getElementsByTagName('Role'):
actor_elem = a.getElementsByTagName('Role')
for h in actor_elem:
actors.append(helpers.get_xml_attr(h, 'tag'))
genres = []
if a.getElementsByTagName('Genre'):
genre_elem = a.getElementsByTagName('Genre')
for i in genre_elem:
genres.append(helpers.get_xml_attr(i, 'tag'))
labels = []
if a.getElementsByTagName('Lables'):
label_elem = a.getElementsByTagName('Lables')
for i in label_elem:
labels.append(helpers.get_xml_attr(i, 'tag'))
output = {'rating_key': rating_key,
'added_at': added_at,
'art': art,
'duration': duration,
'grandparent_rating_key': grandparent_rating_key,
'grandparent_thumb': grandparent_thumb,
'title': title,
'parent_title': parent_title,
'grandparent_title': grandparent_title,
'original_title': original_title,
'tagline': tagline,
'guid': guid,
'section_id': section_id,
'media_index': media_index,
'originally_available_at': originally_available_at,
'last_viewed_at': last_viewed_at,
'parent_rating_key': parent_rating_key,
'parent_media_index': parent_media_index,
'parent_thumb': parent_thumb,
'rating': rating,
'thumb': thumb,
'media_type': media_type,
'updated_at': updated_at,
'view_offset': view_offset,
'year': year,
'directors': directors,
'aspect_ratio': aspect_ratio,
'audio_channels': audio_channels,
'audio_codec': audio_codec,
'bitrate': bitrate,
'container': container,
'height': height,
'video_codec': video_codec,
'video_framerate': video_framerate,
'video_resolution': video_resolution,
'width': width,
'ip_address': ip_address,
'machine_id': machine_id,
'platform': platform,
'player': player,
'transcode_audio_channels': transcode_audio_channels,
'transcode_audio_codec': transcode_audio_codec,
'audio_decision': audio_decision,
'transcode_container': transcode_container,
'transcode_height': transcode_height,
'transcode_protocol': transcode_protocol,
'transcode_video_codec': transcode_video_codec,
'video_decision': video_decision,
'transcode_width': transcode_width,
'transcode_decision': transcode_decision,
'user_id': user_id,
'writers': writers,
'actors': actors,
'genres': genres,
'studio': studio,
'labels': labels
}
return output
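# Illustrative sketch, not part of the original module: the combined
# transcode_decision computed above resolves as follows for hypothetical
# session values:
#
#   video_decision='transcode',   audio_decision='copy'        -> 'transcode'
#   video_decision='copy',        audio_decision='direct play' -> 'copy'
#   video_decision='direct play', audio_decision='direct play' -> 'direct play'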
def validate_database(database_file=None, table_name=None):
try:
connection = sqlite3.connect(database_file, timeout=20)
except sqlite3.OperationalError:
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except ValueError:
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
try:
connection.execute('SELECT xml from %s' % table_name)
connection.close()
except sqlite3.OperationalError:
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
return 'success'
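# Illustrative usage sketch, not part of the original module; the database
# path and table name are placeholders:
#
#   if validate_database('/path/to/plexivity.db', 'processed') == 'success':
#       import_from_plexivity('/path/to/plexivity.db', 'processed',
#                             import_ignore_interval=120)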
def import_from_plexivity(database_file=None, table_name=None, import_ignore_interval=0):
try:
connection = sqlite3.connect(database_file, timeout=20)
connection.row_factory = sqlite3.Row
except sqlite3.OperationalError:
logger.error("Tautulli Importer :: Invalid filename.")
return None
except ValueError:
logger.error("Tautulli Importer :: Invalid filename.")
return None
try:
connection.execute('SELECT xml from %s' % table_name)
except sqlite3.OperationalError:
logger.error("Tautulli Importer :: Database specified does not contain the required fields.")
return None
logger.debug("Tautulli Importer :: Plexivity data import in progress...")
database.set_is_importing(True)
ap = activity_processor.ActivityProcessor()
user_data = users.Users()
# Get the latest friends list so we can pull user IDs
try:
users.refresh_users()
except:
logger.debug("Tautulli Importer :: Unable to refresh the users list. Aborting import.")
return None
query = 'SELECT id AS id, ' \
'time AS started, ' \
'stopped, ' \
'null AS user_id, ' \
'user, ' \
'ip_address, ' \
'paused_counter, ' \
'platform AS player, ' \
'null AS platform, ' \
'null as machine_id, ' \
'null AS media_type, ' \
'null AS view_offset, ' \
'xml, ' \
'rating as content_rating,' \
'summary,' \
'title AS full_title,' \
'(case when orig_title_ep = "n/a" then orig_title else ' \
'orig_title_ep end) as title,' \
'(case when orig_title_ep != "n/a" then orig_title else ' \
'null end) as grandparent_title ' \
'FROM ' + table_name + ' ORDER BY id'
result = connection.execute(query)
for row in result:
# Extract the xml from the Plexivity db xml field.
extracted_xml = extract_plexivity_xml(row['xml'])
# If we get back None from our xml extractor, skip the record and log an error.
if not extracted_xml:
logger.error("Tautulli Importer :: Skipping record with id %s due to malformed xml."
% str(row['id']))
continue
# Skip line if we don't have a ratingKey to work with
#if not row['rating_key']:
# logger.error("Tautulli Importer :: Skipping record due to null ratingKey.")
# continue
# If the user_id no longer exists in the friends list, pull it from the xml.
if user_data.get_user_id(user=row['user']):
user_id = user_data.get_user_id(user=row['user'])
else:
user_id = extracted_xml['user_id']
session_history = {'started': arrow.get(row['started']).timestamp,
'stopped': arrow.get(row['stopped']).timestamp,
'rating_key': extracted_xml['rating_key'],
'title': row['title'],
'parent_title': extracted_xml['parent_title'],
'grandparent_title': row['grandparent_title'],
'original_title': extracted_xml['original_title'],
'full_title': row['full_title'],
'user_id': user_id,
'user': row['user'],
'ip_address': row['ip_address'] if row['ip_address'] else extracted_xml['ip_address'],
'paused_counter': row['paused_counter'],
'player': row['player'],
'platform': extracted_xml['platform'],
'machine_id': extracted_xml['machine_id'],
'parent_rating_key': extracted_xml['parent_rating_key'],
'grandparent_rating_key': extracted_xml['grandparent_rating_key'],
'media_type': extracted_xml['media_type'],
'view_offset': extracted_xml['view_offset'],
'video_decision': extracted_xml['video_decision'],
'audio_decision': extracted_xml['audio_decision'],
'transcode_decision': extracted_xml['transcode_decision'],
'duration': extracted_xml['duration'],
'width': extracted_xml['width'],
'height': extracted_xml['height'],
'container': extracted_xml['container'],
'video_codec': extracted_xml['video_codec'],
'audio_codec': extracted_xml['audio_codec'],
'bitrate': extracted_xml['bitrate'],
'video_resolution': extracted_xml['video_resolution'],
'video_framerate': extracted_xml['video_framerate'],
'aspect_ratio': extracted_xml['aspect_ratio'],
'audio_channels': extracted_xml['audio_channels'],
'transcode_protocol': extracted_xml['transcode_protocol'],
'transcode_container': extracted_xml['transcode_container'],
'transcode_video_codec': extracted_xml['transcode_video_codec'],
'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
'transcode_width': extracted_xml['transcode_width'],
'transcode_height': extracted_xml['transcode_height']
}
session_history_metadata = {'rating_key': extracted_xml['rating_key'],
'parent_rating_key': extracted_xml['parent_rating_key'],
'grandparent_rating_key': extracted_xml['grandparent_rating_key'],
'title': row['title'],
'parent_title': extracted_xml['parent_title'],
'grandparent_title': row['grandparent_title'],
'original_title': extracted_xml['original_title'],
'media_index': extracted_xml['media_index'],
'parent_media_index': extracted_xml['parent_media_index'],
'thumb': extracted_xml['thumb'],
'parent_thumb': extracted_xml['parent_thumb'],
'grandparent_thumb': extracted_xml['grandparent_thumb'],
'art': extracted_xml['art'],
'media_type': extracted_xml['media_type'],
'year': extracted_xml['year'],
'originally_available_at': extracted_xml['originally_available_at'],
'added_at': extracted_xml['added_at'],
'updated_at': extracted_xml['updated_at'],
'last_viewed_at': extracted_xml['last_viewed_at'],
'content_rating': row['content_rating'],
'summary': row['summary'],
'tagline': extracted_xml['tagline'],
'rating': extracted_xml['rating'],
'duration': extracted_xml['duration'],
'guid': extracted_xml['guid'],
'section_id': extracted_xml['section_id'],
'directors': extracted_xml['directors'],
'writers': extracted_xml['writers'],
'actors': extracted_xml['actors'],
'genres': extracted_xml['genres'],
'studio': extracted_xml['studio'],
'labels': extracted_xml['labels'],
'full_title': row['full_title'],
'width': extracted_xml['width'],
'height': extracted_xml['height'],
'container': extracted_xml['container'],
'video_codec': extracted_xml['video_codec'],
'audio_codec': extracted_xml['audio_codec'],
'bitrate': extracted_xml['bitrate'],
'video_resolution': extracted_xml['video_resolution'],
'video_framerate': extracted_xml['video_framerate'],
'aspect_ratio': extracted_xml['aspect_ratio'],
'audio_channels': extracted_xml['audio_channels']
}
# On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
# Just make sure that the ratingKey is indeed an integer
if session_history_metadata['rating_key'].isdigit():
ap.write_session_history(session=session_history,
import_metadata=session_history_metadata,
is_import=True,
import_ignore_interval=import_ignore_interval)
else:
logger.debug("Tautulli Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])
import_users()
logger.debug("Tautulli Importer :: Plexivity data import complete.")
database.set_is_importing(False)
def import_users():
logger.debug("Tautulli Importer :: Importing Plexivity Users...")
monitor_db = database.MonitorDatabase()
query = 'INSERT OR IGNORE INTO users (user_id, username) ' \
'SELECT user_id, user ' \
'FROM session_history WHERE user_id != 1 GROUP BY user_id'
try:
monitor_db.action(query)
logger.debug("Tautulli Importer :: Users imported.")
except:
logger.debug("Tautulli Importer :: Failed to import users.")

989
jellypy/plextv.py Normal file
View File

@@ -0,0 +1,989 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import next
from future.builtins import str
from future.builtins import object
import base64
import json
import jellypy
if jellypy.PYTHON2:
import common
import helpers
import http_handler
import logger
import users
import pmsconnect
import session
else:
from jellypy import common
from jellypy import helpers
from jellypy import http_handler
from jellypy import logger
from jellypy import users
from jellypy import pmsconnect
from jellypy import session
def get_server_resources(return_presence=False, return_server=False, return_info=False, **kwargs):
if not return_presence and not return_info:
logger.info("Tautulli PlexTV :: Requesting resources for server...")
server = {'pms_name': jellypy.CONFIG.PMS_NAME,
'pms_version': jellypy.CONFIG.PMS_VERSION,
'pms_platform': jellypy.CONFIG.PMS_PLATFORM,
'pms_ip': jellypy.CONFIG.PMS_IP,
'pms_port': jellypy.CONFIG.PMS_PORT,
'pms_ssl': jellypy.CONFIG.PMS_SSL,
'pms_is_remote': jellypy.CONFIG.PMS_IS_REMOTE,
'pms_is_cloud': jellypy.CONFIG.PMS_IS_CLOUD,
'pms_url': jellypy.CONFIG.PMS_URL,
'pms_url_manual': jellypy.CONFIG.PMS_URL_MANUAL,
'pms_identifier': jellypy.CONFIG.PMS_IDENTIFIER,
'pms_plexpass': jellypy.CONFIG.PMS_PLEXPASS
}
if return_info:
return server
if kwargs:
server.update(kwargs)
for k in ['pms_ssl', 'pms_is_remote', 'pms_is_cloud', 'pms_url_manual']:
server[k] = int(server[k])
if (server['pms_url_manual'] and server['pms_ssl']) or server['pms_is_cloud']:
scheme = 'https'
else:
scheme = 'http'
fallback_url = '{scheme}://{hostname}:{port}'.format(scheme=scheme,
hostname=server['pms_ip'],
port=server['pms_port'])
plex_tv = PlexTV()
result = plex_tv.get_server_connections(pms_identifier=server['pms_identifier'],
pms_ip=server['pms_ip'],
pms_port=server['pms_port'],
include_https=server['pms_ssl'])
if result:
connections = result.pop('connections', [])
server.update(result)
presence = server.pop('pms_presence', 0)
else:
connections = []
presence = 0
if return_presence:
return presence
plexpass = plex_tv.get_plexpass_status()
server['pms_plexpass'] = int(plexpass)
# Only need to retrieve PMS_URL if using SSL
if not server['pms_url_manual'] and server['pms_ssl']:
if connections:
if server['pms_is_remote']:
# Get all remote connections
conns = [c for c in connections if
c['local'] == '0' and ('plex.direct' in c['uri'] or 'plex.service' in c['uri'])]
else:
# Get all local connections
conns = [c for c in connections if
c['local'] == '1' and ('plex.direct' in c['uri'] or 'plex.service' in c['uri'])]
if conns:
# Get connection with matching address, otherwise return first connection
conn = next((c for c in conns if c['address'] == server['pms_ip']
and c['port'] == str(server['pms_port'])), conns[0])
server['pms_url'] = conn['uri']
logger.info("Tautulli PlexTV :: Server URL retrieved.")
# get_server_connections() failed or PMS_URL not found; the fallback url doesn't use SSL
if not server['pms_url']:
server['pms_url'] = fallback_url
logger.warn("Tautulli PlexTV :: Unable to retrieve server URLs. Using user-defined value without SSL.")
# Not using SSL, remote has no effect
else:
server['pms_url'] = fallback_url
logger.info("Tautulli PlexTV :: Using user-defined URL.")
if return_server:
return server
logger.info("Tautulli PlexTV :: Selected server: %s (%s) (%s - Version %s)",
server['pms_name'], server['pms_url'], server['pms_platform'], server['pms_version'])
jellypy.CONFIG.process_kwargs(server)
jellypy.CONFIG.write()
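# Illustrative sketch, not part of the original module: with SSL enabled and
# no manual URL, the connection whose address matches PMS_IP is preferred,
# e.g. for hypothetical resource data
#
#   pms_ip = '192.168.1.10', pms_port = 32400
#   connections = [{'address': '192.168.1.10', 'port': '32400', 'local': '1',
#                   'uri': 'https://192-168-1-10.abc123.plex.direct:32400'}]
#
# get_server_resources() would set pms_url to the plex.direct URI; if no
# suitable connection is found it falls back to the plain http fallback_url.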
class PlexTV(object):
"""
Plex.tv authentication
"""
def __init__(self, username=None, password=None, token=None, headers=None):
self.username = username
self.password = password
self.token = token
self.urls = 'https://plex.tv'
self.timeout = jellypy.CONFIG.PMS_TIMEOUT
self.ssl_verify = jellypy.CONFIG.VERIFY_SSL_CERT
if self.username is None and self.password is None:
if not self.token:
# Check if we should use the admin token, or the guest server token
if session.get_session_user_id():
user_data = users.Users()
user_tokens = user_data.get_tokens(user_id=session.get_session_user_id())
self.token = user_tokens['server_token']
else:
self.token = jellypy.CONFIG.PMS_TOKEN
if not self.token:
logger.error("Tautulli PlexTV :: PlexTV called, but no token provided.")
return
self.request_handler = http_handler.HTTPHandler(urls=self.urls,
token=self.token,
timeout=self.timeout,
ssl_verify=self.ssl_verify,
headers=headers)
def get_plex_auth(self, output_format='raw'):
uri = '/api/v2/users/signin'
headers = {'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Accept': 'application/xml'}
data = {'login': self.username,
'password': self.password,
'rememberMe': True}
request = self.request_handler.make_request(uri=uri,
request_type='POST',
headers=headers,
data=data,
output_format=output_format,
no_token=True,
encode_multipart=False)
return request
def get_token(self):
plextv_response = self.get_plex_auth(output_format='xml')
if plextv_response:
try:
xml_head = plextv_response.getElementsByTagName('user')
if xml_head:
user = {'auth_token': xml_head[0].getAttribute('authToken'),
'user_id': xml_head[0].getAttribute('id')
}
else:
logger.warn("Tautulli PlexTV :: Could not get Plex authentication token.")
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_token: %s." % e)
return None
return user
else:
return None
def get_plexpy_pms_token(self, force=False):
if force:
logger.debug("Tautulli PlexTV :: Forcing refresh of Plex.tv token.")
devices_list = self.get_devices_list()
device_id = next((d for d in devices_list if d['device_identifier'] == jellypy.CONFIG.PMS_UUID), {}).get('device_id', None)
if device_id:
logger.debug("Tautulli PlexTV :: Removing Tautulli from Plex.tv devices.")
try:
self.delete_plextv_device(device_id=device_id)
except:
logger.error("Tautulli PlexTV :: Failed to remove Tautulli from Plex.tv devices.")
return None
else:
logger.warn("Tautulli PlexTV :: No existing Tautulli device found.")
logger.info("Tautulli PlexTV :: Fetching a new Plex.tv token for Tautulli.")
user = self.get_token()
if user:
token = user['auth_token']
jellypy.CONFIG.__setattr__('PMS_TOKEN', token)
jellypy.CONFIG.write()
logger.info("Tautulli PlexTV :: Updated Plex.tv token for Tautulli.")
return token
def get_server_token(self):
servers = self.get_plextv_resources(output_format='xml')
server_token = ''
try:
xml_head = servers.getElementsByTagName('Device')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_server_token: %s." % e)
return None
for a in xml_head:
if helpers.get_xml_attr(a, 'clientIdentifier') == jellypy.CONFIG.PMS_IDENTIFIER \
and 'server' in helpers.get_xml_attr(a, 'provides'):
server_token = helpers.get_xml_attr(a, 'accessToken')
break
return server_token
def get_plextv_pin(self, pin='', output_format=''):
if pin:
uri = '/api/v2/pins/' + pin
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format,
no_token=True)
else:
uri = '/api/v2/pins?strong=true'
request = self.request_handler.make_request(uri=uri,
request_type='POST',
output_format=output_format,
no_token=True)
return request
def get_pin(self, pin=''):
plextv_response = self.get_plextv_pin(pin=pin,
output_format='xml')
if plextv_response:
try:
xml_head = plextv_response.getElementsByTagName('pin')
if xml_head:
pin = {'id': xml_head[0].getAttribute('id'),
'code': xml_head[0].getAttribute('code'),
'token': xml_head[0].getAttribute('authToken')
}
return pin
else:
logger.warn("Tautulli PlexTV :: Could not get Plex authentication pin.")
return None
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_pin: %s." % e)
return None
else:
return None
def get_plextv_user_data(self):
plextv_response = self.get_plex_auth(output_format='dict')
if plextv_response:
return plextv_response
else:
return []
def get_plextv_friends(self, output_format=''):
uri = '/api/users'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_user_details(self, output_format=''):
uri = '/users/account'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_devices_list(self, output_format=''):
uri = '/devices.xml'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_server_list(self, output_format=''):
uri = '/pms/servers.xml'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_shared_servers(self, machine_id='', output_format=''):
uri = '/api/servers/%s/shared_servers' % machine_id
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_sync_lists(self, machine_id='', output_format=''):
uri = '/servers/%s/sync_lists' % machine_id
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_resources(self, include_https=False, output_format=''):
if include_https:
uri = '/api/resources?includeHttps=1'
else:
uri = '/api/resources'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_downloads(self, plexpass=False, output_format=''):
if plexpass:
uri = '/api/downloads/5.json?channel=plexpass'
else:
uri = '/api/downloads/1.json'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def delete_plextv_device(self, device_id='', output_format=''):
uri = '/devices/%s.xml' % device_id
request = self.request_handler.make_request(uri=uri,
request_type='DELETE',
output_format=output_format)
return request
def delete_plextv_device_sync_lists(self, client_id='', output_format=''):
uri = '/devices/%s/sync_items' % client_id
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def delete_plextv_sync(self, client_id='', sync_id='', output_format=''):
uri = '/devices/%s/sync_items/%s' % (client_id, sync_id)
request = self.request_handler.make_request(uri=uri,
request_type='DELETE',
output_format=output_format)
return request
def cloud_server_status(self, output_format=''):
uri = '/api/v2/cloud_server'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_geoip(self, ip_address='', output_format=''):
uri = '/api/v2/geoip?ip_address=%s' % ip_address
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_full_users_list(self):
own_account = self.get_plextv_user_details(output_format='xml')
friends_list = self.get_plextv_friends(output_format='xml')
shared_servers = self.get_plextv_shared_servers(machine_id=jellypy.CONFIG.PMS_IDENTIFIER,
output_format='xml')
users_list = []
try:
xml_head = own_account.getElementsByTagName('user')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse own account XML for get_full_users_list: %s." % e)
return []
for a in xml_head:
own_details = {"user_id": helpers.get_xml_attr(a, 'id'),
"username": helpers.get_xml_attr(a, 'username'),
"thumb": helpers.get_xml_attr(a, 'thumb'),
"email": helpers.get_xml_attr(a, 'email'),
"is_active": 1,
"is_admin": 1,
"is_home_user": helpers.get_xml_attr(a, 'home'),
"is_allow_sync": 1,
"is_restricted": helpers.get_xml_attr(a, 'restricted'),
"filter_all": helpers.get_xml_attr(a, 'filterAll'),
"filter_movies": helpers.get_xml_attr(a, 'filterMovies'),
"filter_tv": helpers.get_xml_attr(a, 'filterTelevision'),
"filter_music": helpers.get_xml_attr(a, 'filterMusic'),
"filter_photos": helpers.get_xml_attr(a, 'filterPhotos'),
"user_token": helpers.get_xml_attr(a, 'authToken'),
"server_token": helpers.get_xml_attr(a, 'authToken'),
"shared_libraries": None,
}
users_list.append(own_details)
try:
xml_head = friends_list.getElementsByTagName('User')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse friends list XML for get_full_users_list: %s." % e)
return []
for a in xml_head:
friend = {"user_id": helpers.get_xml_attr(a, 'id'),
"username": helpers.get_xml_attr(a, 'title'),
"thumb": helpers.get_xml_attr(a, 'thumb'),
"email": helpers.get_xml_attr(a, 'email'),
"is_active": 1,
"is_admin": 0,
"is_home_user": helpers.get_xml_attr(a, 'home'),
"is_allow_sync": helpers.get_xml_attr(a, 'allowSync'),
"is_restricted": helpers.get_xml_attr(a, 'restricted'),
"filter_all": helpers.get_xml_attr(a, 'filterAll'),
"filter_movies": helpers.get_xml_attr(a, 'filterMovies'),
"filter_tv": helpers.get_xml_attr(a, 'filterTelevision'),
"filter_music": helpers.get_xml_attr(a, 'filterMusic'),
"filter_photos": helpers.get_xml_attr(a, 'filterPhotos')
}
users_list.append(friend)
try:
xml_head = shared_servers.getElementsByTagName('SharedServer')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse shared server list XML for get_full_users_list: %s." % e)
return []
user_map = {}
for a in xml_head:
user_id = helpers.get_xml_attr(a, 'userID')
server_token = helpers.get_xml_attr(a, 'accessToken')
sections = a.getElementsByTagName('Section')
shared_libraries = [helpers.get_xml_attr(s, 'key')
for s in sections if helpers.get_xml_attr(s, 'shared') == '1']
user_map[user_id] = {'server_token': server_token,
'shared_libraries': shared_libraries}
for u in users_list:
d = user_map.get(u['user_id'], {})
u.update(d)
return users_list
def get_synced_items(self, machine_id=None, client_id_filter=None, user_id_filter=None,
rating_key_filter=None, sync_id_filter=None):
if not machine_id:
machine_id = jellypy.CONFIG.PMS_IDENTIFIER
if isinstance(rating_key_filter, list):
rating_key_filter = [str(k) for k in rating_key_filter]
elif rating_key_filter:
rating_key_filter = [str(rating_key_filter)]
if isinstance(user_id_filter, list):
user_id_filter = [str(k) for k in user_id_filter]
elif user_id_filter:
user_id_filter = [str(user_id_filter)]
sync_list = self.get_plextv_sync_lists(machine_id, output_format='xml')
user_data = users.Users()
synced_items = []
try:
xml_head = sync_list.getElementsByTagName('SyncList')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_synced_items: %s." % e)
return {}
for a in xml_head:
client_id = helpers.get_xml_attr(a, 'clientIdentifier')
# Filter by client_id
if client_id_filter and str(client_id_filter) != client_id:
continue
sync_list_id = helpers.get_xml_attr(a, 'id')
sync_device = a.getElementsByTagName('Device')
for device in sync_device:
device_user_id = helpers.get_xml_attr(device, 'userID')
try:
device_username = user_data.get_details(user_id=device_user_id)['username']
device_friendly_name = user_data.get_details(user_id=device_user_id)['friendly_name']
except:
device_username = ''
device_friendly_name = ''
device_name = helpers.get_xml_attr(device, 'name')
device_product = helpers.get_xml_attr(device, 'product')
device_product_version = helpers.get_xml_attr(device, 'productVersion')
device_platform = helpers.get_xml_attr(device, 'platform')
device_platform_version = helpers.get_xml_attr(device, 'platformVersion')
device_type = helpers.get_xml_attr(device, 'device')
device_model = helpers.get_xml_attr(device, 'model')
device_last_seen = helpers.get_xml_attr(device, 'lastSeenAt')
# Filter by user_id
if user_id_filter and device_user_id not in user_id_filter:
continue
for synced in a.getElementsByTagName('SyncItems'):
sync_item = synced.getElementsByTagName('SyncItem')
for item in sync_item:
for location in item.getElementsByTagName('Location'):
clean_uri = helpers.get_xml_attr(location, 'uri').split('%2F')
rating_key = next((clean_uri[(idx + 1) % len(clean_uri)]
for idx, item in enumerate(clean_uri) if item == 'metadata'), None)
# Filter by rating_key
if rating_key_filter and rating_key not in rating_key_filter:
continue
sync_id = helpers.get_xml_attr(item, 'id')
# Filter by sync_id
if sync_id_filter and str(sync_id_filter) != sync_id:
continue
sync_version = helpers.get_xml_attr(item, 'version')
sync_root_title = helpers.get_xml_attr(item, 'rootTitle')
sync_title = helpers.get_xml_attr(item, 'title')
sync_metadata_type = helpers.get_xml_attr(item, 'metadataType')
sync_content_type = helpers.get_xml_attr(item, 'contentType')
for status in item.getElementsByTagName('Status'):
status_failure_code = helpers.get_xml_attr(status, 'failureCode')
status_failure = helpers.get_xml_attr(status, 'failure')
status_state = helpers.get_xml_attr(status, 'state')
status_item_count = helpers.get_xml_attr(status, 'itemsCount')
status_item_complete_count = helpers.get_xml_attr(status, 'itemsCompleteCount')
status_item_downloaded_count = helpers.get_xml_attr(status, 'itemsDownloadedCount')
status_item_ready_count = helpers.get_xml_attr(status, 'itemsReadyCount')
status_item_successful_count = helpers.get_xml_attr(status, 'itemsSuccessfulCount')
status_total_size = helpers.get_xml_attr(status, 'totalSize')
status_item_download_percent_complete = helpers.get_percent(
status_item_downloaded_count, status_item_count)
for settings in item.getElementsByTagName('MediaSettings'):
settings_video_bitrate = helpers.get_xml_attr(settings, 'maxVideoBitrate')
settings_video_quality = helpers.get_xml_attr(settings, 'videoQuality')
settings_video_resolution = helpers.get_xml_attr(settings, 'videoResolution')
settings_audio_boost = helpers.get_xml_attr(settings, 'audioBoost')
settings_audio_bitrate = helpers.get_xml_attr(settings, 'musicBitrate')
settings_photo_quality = helpers.get_xml_attr(settings, 'photoQuality')
settings_photo_resolution = helpers.get_xml_attr(settings, 'photoResolution')
sync_details = {"device_name": device_name,
"platform": device_platform,
"user_id": device_user_id,
"user": device_friendly_name,
"username": device_username,
"root_title": sync_root_title,
"sync_title": sync_title,
"metadata_type": sync_metadata_type,
"content_type": sync_content_type,
"rating_key": rating_key,
"state": status_state,
"item_count": status_item_count,
"item_complete_count": status_item_complete_count,
"item_downloaded_count": status_item_downloaded_count,
"item_downloaded_percent_complete": status_item_download_percent_complete,
"video_bitrate": settings_video_bitrate,
"audio_bitrate": settings_audio_bitrate,
"photo_quality": settings_photo_quality,
"video_quality": settings_video_quality,
"total_size": status_total_size,
"failure": status_failure,
"client_id": client_id,
"sync_id": sync_id
}
synced_items.append(sync_details)
return session.filter_session_info(synced_items, filter_key='user_id')
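# Illustrative sketch, not part of the original module: the rating key of a
# synced item is pulled out of its percent-encoded Location uri, e.g. for a
# hypothetical uri
#
#   'server://abc123/com.plexapp.plugins.library/library%2Fmetadata%2F12345'
#
# splitting on '%2F' gives ['...library', 'metadata', '12345'], and the
# element following 'metadata' ('12345') is used as the rating key.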
def delete_sync(self, client_id, sync_id):
logger.info("Tautulli PlexTV :: Deleting sync item '%s'." % sync_id)
self.delete_plextv_sync(client_id=client_id, sync_id=sync_id)
def get_server_connections(self, pms_identifier='', pms_ip='', pms_port=32400, include_https=True):
if not pms_identifier:
logger.error("Tautulli PlexTV :: Unable to retrieve server connections: no pms_identifier provided.")
return {}
plextv_resources = self.get_plextv_resources(include_https=include_https,
output_format='xml')
try:
xml_head = plextv_resources.getElementsByTagName('Device')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
return {}
# Function to get all connections for a device
def get_connections(device):
conn = []
connections = device.getElementsByTagName('Connection')
server = {'pms_identifier': helpers.get_xml_attr(device, 'clientIdentifier'),
'pms_name': helpers.get_xml_attr(device, 'name'),
'pms_version': helpers.get_xml_attr(device, 'productVersion'),
'pms_platform': helpers.get_xml_attr(device, 'platform'),
'pms_presence': helpers.get_xml_attr(device, 'presence'),
'pms_is_cloud': 1 if helpers.get_xml_attr(device, 'platform') == 'Cloud' else 0
}
for c in connections:
server_details = {'protocol': helpers.get_xml_attr(c, 'protocol'),
'address': helpers.get_xml_attr(c, 'address'),
'port': helpers.get_xml_attr(c, 'port'),
'uri': helpers.get_xml_attr(c, 'uri'),
'local': helpers.get_xml_attr(c, 'local')
}
conn.append(server_details)
server['connections'] = conn
return server
server = {}
# Try to match the device
for a in xml_head:
if helpers.get_xml_attr(a, 'clientIdentifier') == pms_identifier:
server = get_connections(a)
break
# Else no device match found
if not server:
# Try to match the PMS_IP and PMS_PORT
for a in xml_head:
if helpers.get_xml_attr(a, 'provides') == 'server':
connections = a.getElementsByTagName('Connection')
for connection in connections:
if helpers.get_xml_attr(connection, 'address') == pms_ip and \
helpers.get_xml_attr(connection, 'port') == str(pms_port):
server = get_connections(a)
break
if server.get('connections'):
break
return server
def get_server_times(self):
servers = self.get_plextv_server_list(output_format='xml')
server_times = {}
try:
xml_head = servers.getElementsByTagName('Server')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_server_times: %s." % e)
return {}
for a in xml_head:
if helpers.get_xml_attr(a, 'machineIdentifier') == jellypy.CONFIG.PMS_IDENTIFIER:
server_times = {"created_at": helpers.get_xml_attr(a, 'createdAt'),
"updated_at": helpers.get_xml_attr(a, 'updatedAt'),
"version": helpers.get_xml_attr(a, 'version')
}
break
return server_times
def discover(self, include_cloud=True, all_servers=False):
""" Query plex for all servers online. Returns the ones you own in a selectize format """
# Try to discover localhost server
local_machine_identifier = None
request_handler = http_handler.HTTPHandler(urls='http://127.0.0.1:32400', timeout=1,
ssl_verify=False, silent=True)
request = request_handler.make_request(uri='/identity', request_type='GET', output_format='xml')
if request:
xml_head = request.getElementsByTagName('MediaContainer')[0]
local_machine_identifier = xml_head.getAttribute('machineIdentifier')
local_server = {'httpsRequired': '0',
'clientIdentifier': local_machine_identifier,
'label': 'Local',
'ip': '127.0.0.1',
'port': '32400',
'uri': 'http://127.0.0.1:32400',
'local': '1',
'value': '127.0.0.1:32400',
'is_cloud': False
}
servers = self.get_plextv_resources(include_https=True, output_format='xml')
clean_servers = []
try:
xml_head = servers.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn("Tautulli PlexTV :: Failed to get servers from plex: %s." % e)
return []
for a in xml_head:
if a.getAttribute('size'):
if a.getAttribute('size') == '0':
return []
if a.getElementsByTagName('Device'):
devices = a.getElementsByTagName('Device')
for d in devices:
if helpers.get_xml_attr(d, 'presence') == '1' and \
helpers.get_xml_attr(d, 'owned') == '1' and \
helpers.get_xml_attr(d, 'provides') == 'server':
is_cloud = (helpers.get_xml_attr(d, 'platform').lower() == 'cloud')
if not include_cloud and is_cloud:
continue
connections = d.getElementsByTagName('Connection')
for c in connections:
if not all_servers:
# If this is a remote server don't show any local IPs.
if helpers.get_xml_attr(d, 'publicAddressMatches') == '0' and \
helpers.get_xml_attr(c, 'local') == '1':
continue
# If this is a local server don't show any remote IPs.
if helpers.get_xml_attr(d, 'publicAddressMatches') == '1' and \
helpers.get_xml_attr(c, 'local') == '0':
continue
if helpers.get_xml_attr(d, 'clientIdentifier') == local_machine_identifier:
local_server['httpsRequired'] = helpers.get_xml_attr(d, 'httpsRequired')
local_server['label'] = helpers.get_xml_attr(d, 'name')
clean_servers.append(local_server)
local_machine_identifier = None
server = {'httpsRequired': '1' if is_cloud else helpers.get_xml_attr(d, 'httpsRequired'),
'clientIdentifier': helpers.get_xml_attr(d, 'clientIdentifier'),
'label': helpers.get_xml_attr(d, 'name'),
'ip': helpers.get_xml_attr(c, 'address'),
'port': helpers.get_xml_attr(c, 'port'),
'uri': helpers.get_xml_attr(c, 'uri'),
'local': helpers.get_xml_attr(c, 'local'),
'value': helpers.get_xml_attr(c, 'address') + ':' + helpers.get_xml_attr(c, 'port'),
'is_cloud': is_cloud
}
clean_servers.append(server)
if local_machine_identifier:
clean_servers.append(local_server)
clean_servers.sort(key=lambda s: (s['label'], -int(s['local']), s['ip']))
return clean_servers
def get_plex_downloads(self):
logger.debug("Tautulli PlexTV :: Retrieving current server version.")
pms_connect = pmsconnect.PmsConnect()
pms_connect.set_server_version()
update_channel = pms_connect.get_server_update_channel()
logger.debug("Tautulli PlexTV :: Plex update channel is %s." % update_channel)
plex_downloads = self.get_plextv_downloads(plexpass=(update_channel == 'beta'))
try:
available_downloads = json.loads(plex_downloads)
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to load JSON for get_plex_downloads: %s." % e)
return {}
# Get the updates for the platform
pms_platform = common.PMS_PLATFORM_NAME_OVERRIDES.get(jellypy.CONFIG.PMS_PLATFORM, jellypy.CONFIG.PMS_PLATFORM)
platform_downloads = available_downloads.get('computer').get(pms_platform) or \
available_downloads.get('nas').get(pms_platform)
if not platform_downloads:
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
% pms_platform)
return {}
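# The installed and the latest version strings are compared by zero-padding each
# of the first four dot-separated fields to 4 digits and concatenating them into
# a single integer. Illustrative values only:
#   '1.20.1.3252' -> '0001' '0020' '0001' '3252' -> 1002000013252
#   '1.19.5.3112' -> '0001' '0019' '0005' '3112' -> 1001900053112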
v_old = helpers.cast_to_int("".join(v.zfill(4) for v in jellypy.CONFIG.PMS_VERSION.split('-')[0].split('.')[:4]))
v_new = helpers.cast_to_int("".join(v.zfill(4) for v in platform_downloads.get('version', '').split('-')[0].split('.')[:4]))
if not v_old:
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
% jellypy.CONFIG.PMS_VERSION)
return {}
if not v_new:
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
% platform_downloads.get('version'))
return {}
# Get proper download
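# Prefer the release matching the configured distro and build; if nothing
# matches, fall back to the first release in the list.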
releases = platform_downloads.get('releases', [{}])
release = next((r for r in releases if r['distro'] == jellypy.CONFIG.PMS_UPDATE_DISTRO and
r['build'] == jellypy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])
download_info = {'update_available': v_new > v_old,
'platform': platform_downloads.get('name'),
'release_date': platform_downloads.get('release_date'),
'version': platform_downloads.get('version'),
'requirements': platform_downloads.get('requirements'),
'extra_info': platform_downloads.get('extra_info'),
'changelog_added': platform_downloads.get('items_added'),
'changelog_fixed': platform_downloads.get('items_fixed'),
'label': release.get('label'),
'distro': release.get('distro'),
'distro_build': release.get('build'),
'download_url': release.get('url'),
}
return download_info
def get_plexpass_status(self):
account_data = self.get_plextv_user_details(output_format='xml')
try:
subscription = account_data.getElementsByTagName('subscription')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_plexpass_status: %s." % e)
return False
if subscription and helpers.get_xml_attr(subscription[0], 'active') == '1':
jellypy.CONFIG.__setattr__('PMS_PLEXPASS', 1)
jellypy.CONFIG.write()
return True
else:
logger.debug("Tautulli PlexTV :: Plex Pass subscription not found.")
jellypy.CONFIG.__setattr__('PMS_PLEXPASS', 0)
jellypy.CONFIG.write()
return False
def get_devices_list(self):
devices = self.get_plextv_devices_list(output_format='xml')
try:
xml_head = devices.getElementsByTagName('Device')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_devices_list: %s." % e)
return []
devices_list = []
for a in xml_head:
device = {"device_name": helpers.get_xml_attr(a, 'name'),
"product": helpers.get_xml_attr(a, 'product'),
"product_version": helpers.get_xml_attr(a, 'productVersion'),
"platform": helpers.get_xml_attr(a, 'platform'),
"platform_version": helpers.get_xml_attr(a, 'platformVersion'),
"device": helpers.get_xml_attr(a, 'device'),
"model": helpers.get_xml_attr(a, 'model'),
"vendor": helpers.get_xml_attr(a, 'vendor'),
"provides": helpers.get_xml_attr(a, 'provides'),
"device_identifier": helpers.get_xml_attr(a, 'clientIdentifier'),
"device_id": helpers.get_xml_attr(a, 'id'),
"token": helpers.get_xml_attr(a, 'token')
}
devices_list.append(device)
return devices_list
def get_cloud_server_status(self):
cloud_status = self.cloud_server_status(output_format='xml')
try:
status_info = cloud_status.getElementsByTagName('info')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_cloud_server_status: %s." % e)
return False
for info in status_info:
servers = info.getElementsByTagName('server')
for s in servers:
if helpers.get_xml_attr(s, 'address') == jellypy.CONFIG.PMS_IP:
if helpers.get_xml_attr(info, 'running') == '1':
return True
else:
return False
def get_plex_account_details(self):
account_data = self.get_plextv_user_details(output_format='xml')
try:
xml_head = account_data.getElementsByTagName('user')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_plex_account_details: %s." % e)
return None
for a in xml_head:
account_details = {"user_id": helpers.get_xml_attr(a, 'id'),
"username": helpers.get_xml_attr(a, 'username'),
"thumb": helpers.get_xml_attr(a, 'thumb'),
"email": helpers.get_xml_attr(a, 'email'),
"is_home_user": helpers.get_xml_attr(a, 'home'),
"is_restricted": helpers.get_xml_attr(a, 'restricted'),
"filter_all": helpers.get_xml_attr(a, 'filterAll'),
"filter_movies": helpers.get_xml_attr(a, 'filterMovies'),
"filter_tv": helpers.get_xml_attr(a, 'filterTelevision'),
"filter_music": helpers.get_xml_attr(a, 'filterMusic'),
"filter_photos": helpers.get_xml_attr(a, 'filterPhotos'),
"user_token": helpers.get_xml_attr(a, 'authToken')
}
return account_details
def get_geoip_lookup(self, ip_address=''):
if not ip_address or not helpers.is_valid_ip(ip_address):
return
geoip_data = self.get_plextv_geoip(ip_address=ip_address, output_format='xml')
try:
xml_head = geoip_data.getElementsByTagName('location')
except Exception as e:
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_geoip_lookup: %s." % e)
return None
for a in xml_head:
coordinates = helpers.get_xml_attr(a, 'coordinates').split(',')
latitude = longitude = None
if len(coordinates) == 2:
latitude, longitude = [helpers.cast_to_float(c) for c in coordinates]
geo_info = {"code": helpers.get_xml_attr(a, 'code') or None,
"country": helpers.get_xml_attr(a, 'country') or None,
"region": helpers.get_xml_attr(a, 'subdivisions') or None,
"city": helpers.get_xml_attr(a, 'city') or None,
"postal_code": helpers.get_xml_attr(a, 'postal_code') or None,
"timezone": helpers.get_xml_attr(a, 'time_zone') or None,
"latitude": latitude,
"longitude": longitude,
"continent": None, # keep for backwards compatibility with GeoLite2
"accuracy": None # keep for backwards compatibility with GeoLite2
}
return geo_info

460
jellypy/plexwatch_import.py Normal file
View File

@@ -0,0 +1,460 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
import sqlite3
from xml.dom import minidom
import jellypy
if jellypy.PYTHON2:
import activity_processor
import database
import helpers
import logger
import users
else:
from jellypy import activity_processor
from jellypy import database
from jellypy import helpers
from jellypy import logger
from jellypy import users
def extract_plexwatch_xml(xml=None):
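# Parses the raw XML blob stored in a PlexWatch history row into a flat dict of
# metadata, media info, player, transcode and user details used by the importer.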
output = {}
clean_xml = helpers.latinToAscii(xml)
try:
xml_parse = minidom.parseString(clean_xml)
except:
logger.warn("Tautulli Importer :: Error parsing XML for PlexWatch database.")
return None
xml_head = xml_parse.getElementsByTagName('opt')
if not xml_head:
logger.warn("Tautulli Importer :: Error parsing XML for PlexWatch database.")
return None
for a in xml_head:
added_at = helpers.get_xml_attr(a, 'addedAt')
art = helpers.get_xml_attr(a, 'art')
duration = helpers.get_xml_attr(a, 'duration')
grandparent_thumb = helpers.get_xml_attr(a, 'grandparentThumb')
grandparent_title = helpers.get_xml_attr(a, 'grandparentTitle')
original_title = helpers.get_xml_attr(a, 'originalTitle')
guid = helpers.get_xml_attr(a, 'guid')
section_id = helpers.get_xml_attr(a, 'librarySectionID')
media_index = helpers.get_xml_attr(a, 'index')
originally_available_at = helpers.get_xml_attr(a, 'originallyAvailableAt')
last_viewed_at = helpers.get_xml_attr(a, 'lastViewedAt')
parent_media_index = helpers.get_xml_attr(a, 'parentIndex')
parent_thumb = helpers.get_xml_attr(a, 'parentThumb')
rating = helpers.get_xml_attr(a, 'rating')
thumb = helpers.get_xml_attr(a, 'thumb')
media_type = helpers.get_xml_attr(a, 'type')
updated_at = helpers.get_xml_attr(a, 'updatedAt')
view_offset = helpers.get_xml_attr(a, 'viewOffset')
year = helpers.get_xml_attr(a, 'year')
parent_title = helpers.get_xml_attr(a, 'parentTitle')
studio = helpers.get_xml_attr(a, 'studio')
title = helpers.get_xml_attr(a, 'title')
tagline = helpers.get_xml_attr(a, 'tagline')
directors = []
if a.getElementsByTagName('Director'):
director_elem = a.getElementsByTagName('Director')
for b in director_elem:
directors.append(helpers.get_xml_attr(b, 'tag'))
aspect_ratio = ''
audio_channels = None
audio_codec = ''
bitrate = None
container = ''
height = None
video_codec = ''
video_framerate = ''
video_resolution = ''
width = None
if a.getElementsByTagName('Media'):
media_elem = a.getElementsByTagName('Media')
for c in media_elem:
aspect_ratio = helpers.get_xml_attr(c, 'aspectRatio')
audio_channels = helpers.get_xml_attr(c, 'audioChannels')
audio_codec = helpers.get_xml_attr(c, 'audioCodec')
bitrate = helpers.get_xml_attr(c, 'bitrate')
container = helpers.get_xml_attr(c, 'container')
height = helpers.get_xml_attr(c, 'height')
video_codec = helpers.get_xml_attr(c, 'videoCodec')
video_framerate = helpers.get_xml_attr(c, 'videoFrameRate')
video_resolution = helpers.get_xml_attr(c, 'videoResolution')
width = helpers.get_xml_attr(c, 'width')
ip_address = ''
machine_id = ''
platform = ''
player = ''
if a.getElementsByTagName('Player'):
player_elem = a.getElementsByTagName('Player')
for d in player_elem:
ip_address = helpers.get_xml_attr(d, 'address')
machine_id = helpers.get_xml_attr(d, 'machineIdentifier')
platform = helpers.get_xml_attr(d, 'platform')
player = helpers.get_xml_attr(d, 'title')
transcode_audio_channels = None
transcode_audio_codec = ''
audio_decision = 'direct play'
transcode_container = ''
transcode_height = None
transcode_protocol = ''
transcode_video_codec = ''
video_decision = 'direct play'
transcode_width = None
if a.getElementsByTagName('TranscodeSession'):
transcode_elem = a.getElementsByTagName('TranscodeSession')
for e in transcode_elem:
transcode_audio_channels = helpers.get_xml_attr(e, 'audioChannels')
transcode_audio_codec = helpers.get_xml_attr(e, 'audioCodec')
audio_decision = helpers.get_xml_attr(e, 'audioDecision')
transcode_container = helpers.get_xml_attr(e, 'container')
transcode_height = helpers.get_xml_attr(e, 'height')
transcode_protocol = helpers.get_xml_attr(e, 'protocol')
transcode_video_codec = helpers.get_xml_attr(e, 'videoCodec')
video_decision = helpers.get_xml_attr(e, 'videoDecision')
transcode_width = helpers.get_xml_attr(e, 'width')
# Generate a combined transcode decision value
if video_decision == 'transcode' or audio_decision == 'transcode':
transcode_decision = 'transcode'
elif video_decision == 'copy' or audio_decision == 'copy':
transcode_decision = 'copy'
else:
transcode_decision = 'direct play'
user_id = None
if a.getElementsByTagName('User'):
user_elem = a.getElementsByTagName('User')
for f in user_elem:
user_id = helpers.get_xml_attr(f, 'id')
writers = []
if a.getElementsByTagName('Writer'):
writer_elem = a.getElementsByTagName('Writer')
for g in writer_elem:
writers.append(helpers.get_xml_attr(g, 'tag'))
actors = []
if a.getElementsByTagName('Role'):
actor_elem = a.getElementsByTagName('Role')
for h in actor_elem:
actors.append(helpers.get_xml_attr(h, 'tag'))
genres = []
if a.getElementsByTagName('Genre'):
genre_elem = a.getElementsByTagName('Genre')
for i in genre_elem:
genres.append(helpers.get_xml_attr(i, 'tag'))
labels = []
if a.getElementsByTagName('Label'):
label_elem = a.getElementsByTagName('Label')
for i in label_elem:
labels.append(helpers.get_xml_attr(i, 'tag'))
output = {'added_at': added_at,
'art': art,
'duration': duration,
'grandparent_thumb': grandparent_thumb,
'title': title,
'parent_title': parent_title,
'grandparent_title': grandparent_title,
'original_title': original_title,
'tagline': tagline,
'guid': guid,
'section_id': section_id,
'media_index': media_index,
'originally_available_at': originally_available_at,
'last_viewed_at': last_viewed_at,
'parent_media_index': parent_media_index,
'parent_thumb': parent_thumb,
'rating': rating,
'thumb': thumb,
'media_type': media_type,
'updated_at': updated_at,
'view_offset': view_offset,
'year': year,
'directors': directors,
'aspect_ratio': aspect_ratio,
'audio_channels': audio_channels,
'audio_codec': audio_codec,
'bitrate': bitrate,
'container': container,
'height': height,
'video_codec': video_codec,
'video_framerate': video_framerate,
'video_resolution': video_resolution,
'width': width,
'ip_address': ip_address,
'machine_id': machine_id,
'platform': platform,
'player': player,
'transcode_audio_channels': transcode_audio_channels,
'transcode_audio_codec': transcode_audio_codec,
'audio_decision': audio_decision,
'transcode_container': transcode_container,
'transcode_height': transcode_height,
'transcode_protocol': transcode_protocol,
'transcode_video_codec': transcode_video_codec,
'video_decision': video_decision,
'transcode_width': transcode_width,
'transcode_decision': transcode_decision,
'user_id': user_id,
'writers': writers,
'actors': actors,
'genres': genres,
'studio': studio,
'labels': labels
}
return output
def validate_database(database_file=None, table_name=None):
try:
connection = sqlite3.connect(database_file, timeout=20)
except (sqlite3.OperationalError, ValueError):
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
try:
connection.execute('SELECT ratingKey from %s' % table_name)
connection.close()
except sqlite3.OperationalError:
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
return 'success'
def import_from_plexwatch(database_file=None, table_name=None, import_ignore_interval=0):
try:
connection = sqlite3.connect(database_file, timeout=20)
connection.row_factory = sqlite3.Row
except (sqlite3.OperationalError, ValueError):
logger.error("Tautulli Importer :: Invalid filename.")
return None
try:
connection.execute('SELECT ratingKey from %s' % table_name)
except sqlite3.OperationalError:
logger.error("Tautulli Importer :: Database specified does not contain the required fields.")
return None
logger.debug("Tautulli Importer :: PlexWatch data import in progress...")
database.set_is_importing(True)
ap = activity_processor.ActivityProcessor()
user_data = users.Users()
# Get the latest friends list so we can pull user id's
try:
users.refresh_users()
except:
logger.debug("Tautulli Importer :: Unable to refresh the users list. Aborting import.")
return None
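# Map the PlexWatch columns onto Tautulli's session_history fields; values that
# PlexWatch does not store are selected as NULL here and filled in below from
# the XML parsed out of each row.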
query = 'SELECT time AS started, ' \
'stopped, ' \
'cast(ratingKey as text) AS rating_key, ' \
'null AS user_id, ' \
'user, ' \
'ip_address, ' \
'paused_counter, ' \
'platform AS player, ' \
'null AS platform, ' \
'null as machine_id, ' \
'parentRatingKey as parent_rating_key, ' \
'grandparentRatingKey as grandparent_rating_key, ' \
'null AS media_type, ' \
'null AS view_offset, ' \
'xml, ' \
'rating as content_rating,' \
'summary,' \
'title AS full_title,' \
'(case when orig_title_ep = "" then orig_title else ' \
'orig_title_ep end) as title,' \
'(case when orig_title_ep != "" then orig_title else ' \
'null end) as grandparent_title ' \
'FROM ' + table_name + ' ORDER BY id'
result = connection.execute(query)
for row in result:
# Extract the xml from the Plexwatch db xml field.
extracted_xml = extract_plexwatch_xml(row['xml'])
# If we get back None from our xml extractor skip over the record and log error.
if not extracted_xml:
logger.error("Tautulli Importer :: Skipping record with ratingKey %s due to malformed xml."
% str(row['rating_key']))
continue
# Skip line if we don't have a ratingKey to work with
if not row['rating_key']:
logger.error("Tautulli Importer :: Skipping record due to null ratingKey.")
continue
# If the user_id no longer exists in the friends list, pull it from the xml.
if user_data.get_user_id(user=row['user']):
user_id = user_data.get_user_id(user=row['user'])
else:
user_id = extracted_xml['user_id']
session_history = {'started': row['started'],
'stopped': row['stopped'],
'rating_key': row['rating_key'],
'title': row['title'],
'parent_title': extracted_xml['parent_title'],
'grandparent_title': row['grandparent_title'],
'original_title': extracted_xml['original_title'],
'full_title': row['full_title'],
'user_id': user_id,
'user': row['user'],
'ip_address': row['ip_address'] if row['ip_address'] else extracted_xml['ip_address'],
'paused_counter': row['paused_counter'],
'player': row['player'],
'platform': extracted_xml['platform'],
'machine_id': extracted_xml['machine_id'],
'parent_rating_key': row['parent_rating_key'],
'grandparent_rating_key': row['grandparent_rating_key'],
'media_type': extracted_xml['media_type'],
'view_offset': extracted_xml['view_offset'],
'video_decision': extracted_xml['video_decision'],
'audio_decision': extracted_xml['audio_decision'],
'transcode_decision': extracted_xml['transcode_decision'],
'duration': extracted_xml['duration'],
'width': extracted_xml['width'],
'height': extracted_xml['height'],
'container': extracted_xml['container'],
'video_codec': extracted_xml['video_codec'],
'audio_codec': extracted_xml['audio_codec'],
'bitrate': extracted_xml['bitrate'],
'video_resolution': extracted_xml['video_resolution'],
'video_framerate': extracted_xml['video_framerate'],
'aspect_ratio': extracted_xml['aspect_ratio'],
'audio_channels': extracted_xml['audio_channels'],
'transcode_protocol': extracted_xml['transcode_protocol'],
'transcode_container': extracted_xml['transcode_container'],
'transcode_video_codec': extracted_xml['transcode_video_codec'],
'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
'transcode_width': extracted_xml['transcode_width'],
'transcode_height': extracted_xml['transcode_height']
}
session_history_metadata = {'rating_key': helpers.latinToAscii(row['rating_key']),
'parent_rating_key': row['parent_rating_key'],
'grandparent_rating_key': row['grandparent_rating_key'],
'title': row['title'],
'parent_title': extracted_xml['parent_title'],
'grandparent_title': row['grandparent_title'],
'original_title': extracted_xml['original_title'],
'media_index': extracted_xml['media_index'],
'parent_media_index': extracted_xml['parent_media_index'],
'thumb': extracted_xml['thumb'],
'parent_thumb': extracted_xml['parent_thumb'],
'grandparent_thumb': extracted_xml['grandparent_thumb'],
'art': extracted_xml['art'],
'media_type': extracted_xml['media_type'],
'year': extracted_xml['year'],
'originally_available_at': extracted_xml['originally_available_at'],
'added_at': extracted_xml['added_at'],
'updated_at': extracted_xml['updated_at'],
'last_viewed_at': extracted_xml['last_viewed_at'],
'content_rating': row['content_rating'],
'summary': row['summary'],
'tagline': extracted_xml['tagline'],
'rating': extracted_xml['rating'],
'duration': extracted_xml['duration'],
'guid': extracted_xml['guid'],
'section_id': extracted_xml['section_id'],
'directors': extracted_xml['directors'],
'writers': extracted_xml['writers'],
'actors': extracted_xml['actors'],
'genres': extracted_xml['genres'],
'studio': extracted_xml['studio'],
'labels': extracted_xml['labels'],
'full_title': row['full_title'],
'width': extracted_xml['width'],
'height': extracted_xml['height'],
'container': extracted_xml['container'],
'video_codec': extracted_xml['video_codec'],
'audio_codec': extracted_xml['audio_codec'],
'bitrate': extracted_xml['bitrate'],
'video_resolution': extracted_xml['video_resolution'],
'video_framerate': extracted_xml['video_framerate'],
'aspect_ratio': extracted_xml['aspect_ratio'],
'audio_channels': extracted_xml['audio_channels']
}
# On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
# Just make sure that the ratingKey is indeed an integer
if session_history_metadata['rating_key'].isdigit():
ap.write_session_history(session=session_history,
import_metadata=session_history_metadata,
is_import=True,
import_ignore_interval=import_ignore_interval)
else:
logger.debug("Tautulli Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])
import_users()
logger.debug("Tautulli Importer :: PlexWatch data import complete.")
database.set_is_importing(False)
def import_users():
logger.debug("Tautulli Importer :: Importing PlexWatch Users...")
monitor_db = database.MonitorDatabase()
query = 'INSERT OR IGNORE INTO users (user_id, username) ' \
'SELECT user_id, user ' \
'FROM session_history WHERE user_id != 1 GROUP BY user_id'
try:
monitor_db.action(query)
logger.debug("Tautulli Importer :: Users imported.")
except:
logger.debug("Tautulli Importer :: Failed to import users.")

3140
jellypy/pmsconnect.py Normal file

File diff suppressed because it is too large Load Diff

327
jellypy/request.py Normal file
View File

@@ -0,0 +1,327 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
from bs4 import BeautifulSoup
from xml.dom import minidom
import collections
import requests
from requests.packages import urllib3
import jellypy
if jellypy.PYTHON2:
import lock
import logger
else:
from jellypy import lock
from jellypy import logger
# Dictionary with last request times, for rate limiting.
last_requests = collections.defaultdict(int)
fake_lock = lock.FakeLock()
def request_response(url, method="get", auto_raise=True,
whitelist_status_code=None, lock=fake_lock, **kwargs):
"""
Convenient wrapper for `requests.get', which will capture the exceptions
and log them. On success, the Response object is returned. In case of an
exception, None is returned.
Additionally, there is support for rate limiting. To use this feature,
supply a tuple of (lock, request_limit). The lock is used to make sure no
other request with the same lock is executed. The request limit is the
minimal time between two requests (and so 1/request_limit is the number of
requests per second).
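Illustrative usage (a sketch only; the URL and timeout are placeholder
values, not taken from any caller in this codebase):
    response = request_response('http://127.0.0.1:32400/identity', timeout=5)
    if response is not None:
        logger.debug("Got status code %d", response.status_code)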
"""
# Convert whitelist_status_code to a list if needed
if whitelist_status_code and type(whitelist_status_code) != list:
whitelist_status_code = [whitelist_status_code]
# Disable verification of SSL certificates if requested. Note: this could
# pose a security issue!
kwargs["verify"] = bool(jellypy.CONFIG.VERIFY_SSL_CERT)
if not kwargs['verify']:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Map method to the request.XXX method. This is a simple hack, but it
# allows requests to apply more magic per method. See lib/requests/api.py.
request_method = getattr(requests, method.lower())
try:
# Request URL and wait for response
with lock:
logger.debug(
"Requesting URL via %s method: %s", method.upper(), url)
response = request_method(url, **kwargs)
# If status code != OK, then raise exception, except if the status code
# is white listed.
if whitelist_status_code and auto_raise:
if response.status_code not in whitelist_status_code:
try:
response.raise_for_status()
except:
logger.debug(
"Response status code %d is not white "
"listed, raised exception", response.status_code)
raise
elif auto_raise:
response.raise_for_status()
return response
except requests.exceptions.SSLError as e:
if kwargs["verify"]:
logger.error(
"Unable to connect to remote host because of an SSL error. "
"It is likely that your system cannot verify the validity "
"of the certificate. The remote certificate is either "
"self-signed, or the remote server uses SNI. See the wiki for "
"more information on this topic.")
else:
logger.error(
"SSL error raised during connection, with certificate "
"verification turned off: %s", e)
except requests.ConnectionError:
logger.error(
"Unable to connect to remote host. Check if the remote "
"host is up and running.")
except requests.Timeout:
logger.error(
"Request timed out. The remote host did not respond timely.")
except requests.HTTPError as e:
if e.response is not None:
if e.response.status_code >= 500:
cause = "remote server error"
elif e.response.status_code >= 400:
cause = "local client error"
else:
# I don't think we will end up here, but for completeness
cause = "unknown"
logger.error(
"Request raised HTTP error with status code %d (%s).",
e.response.status_code, cause)
# Debug response
if jellypy.VERBOSE:
server_message(e.response)
else:
logger.error("Request raised HTTP error.")
except requests.RequestException as e:
logger.error("Request raised exception: %s", e)
def request_response2(url, method="get", auto_raise=True,
whitelist_status_code=None, lock=fake_lock, **kwargs):
"""
Variant of `request_response' which captures the same exceptions but, instead
of logging them, returns a tuple of (response, err_msg, req_msg); on failure
the response may be None and err_msg describes the error.
Additionally, there is support for rate limiting. To use this feature,
supply a tuple of (lock, request_limit). The lock is used to make sure no
other request with the same lock is executed. The request limit is the
minimal time between two requests (and so 1/request_limit is the number of
requests per second).
"""
# Convert whitelist_status_code to a list if needed
if whitelist_status_code and type(whitelist_status_code) != list:
whitelist_status_code = [whitelist_status_code]
# Disable verification of SSL certificates if requested. Note: this could
# pose a security issue!
kwargs['verify'] = bool(jellypy.CONFIG.VERIFY_SSL_CERT)
if not kwargs['verify']:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Map method to the request.XXX method. This is a simple hack, but it
# allows requests to apply more magic per method. See lib/requests/api.py.
request_method = getattr(requests, method.lower())
response = None
err_msg = http_err = req_msg = None
try:
with lock:
response = request_method(url, **kwargs)
# If status code != OK, then raise exception, except if the status code
# is white listed.
if whitelist_status_code and auto_raise:
if response.status_code not in whitelist_status_code:
try:
response.raise_for_status()
except:
raise
elif auto_raise:
response.raise_for_status()
except requests.exceptions.SSLError as e:
if kwargs["verify"]:
err_msg = "Unable to connect to remote host because of an SSL error."
else:
err_msg = "Unable to connect to remote host because of an SSL error, " \
"with certificate verification turned off: {}".format(e)
except requests.ConnectionError:
err_msg = "Unable to connect to remote host. Check if the remote host is up and running."
except requests.Timeout:
err_msg = "Request to the remote host timed out."
except requests.HTTPError as e:
if e.response is not None:
if e.response.status_code >= 500:
http_err = "[{e.response.status_code}] {e.response.reason} (remote server error).".format(e=e)
elif e.response.status_code >= 400:
http_err = "[{e.response.status_code}] {e.response.reason} (local client error).".format(e=e)
else:
http_err = "Unknown HTTP error."
err_msg = "Request raised an HTTP error: {}".format(http_err)
if jellypy.VERBOSE:
req_msg = server_message(e.response, return_msg=True)
else:
err_msg = "Request raised an HTTP error: Unknown response."
except requests.RequestException as e:
err_msg = "Request raised an exception: {}".format(e)
return response, err_msg, req_msg
def request_soup(url, **kwargs):
"""
Wrapper for `request_response', which will return a BeautifulSoup object if
no exceptions are raised.
"""
parser = kwargs.pop("parser", "html5lib")
response = request_response(url, **kwargs)
if response is not None:
return BeautifulSoup(response.content, parser)
def request_minidom(url, **kwargs):
"""
Wrapper for `request_response', which will return a Minidom object if no
exceptions are raised.
"""
response = request_response(url, **kwargs)
if response is not None:
return minidom.parseString(response.content)
def request_json(url, **kwargs):
"""
Wrapper for `request_response', which will decode the response as JSON
object and return the result, if no exceptions are raised.
As an option, a validator callback can be given, which should return True
if the result is valid.
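Illustrative use of the validator callback (a sketch; the URL is a placeholder):
    result = request_json('http://127.0.0.1:32400/status/sessions',
                          headers={'Accept': 'application/json'},
                          validator=lambda r: isinstance(r, dict))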
"""
validator = kwargs.pop("validator", None)
response = request_response(url, **kwargs)
if response is not None:
try:
result = response.json()
if validator and not validator(result):
logger.error("JSON validation result failed")
else:
return result
except ValueError:
logger.error("Response returned invalid JSON data")
# Debug response
if jellypy.VERBOSE:
server_message(response)
def request_content(url, **kwargs):
"""
Wrapper for `request_response', which will return the raw content.
"""
response = request_response(url, **kwargs)
if response is not None:
return response.content
def server_message(response, return_msg=False):
"""
Extract the server message from the response and log it at DEBUG level.
Some servers return extra information in the result. Try to parse it for
debugging purposes. Messages are limited to 150 characters, since a normal
web page URL may return the whole page.
"""
message = None
# First attempt is to 'read' the response as HTML
if "text/html" in response.headers.get("content-type", ""):
try:
soup = BeautifulSoup(response.content, "html5lib")
except Exception:
soup = None
if soup:
# Find body and cleanup common tags to grab content, which probably
# contains the message.
message = soup.find("body")
elements = ("header", "script", "footer", "nav", "input", "textarea")
for element in elements:
for tag in soup.find_all(element):
tag.replaceWith("")
message = message.text if message else soup.text
message = message.strip()
# Second attempt is to just take the response
if message is None:
message = response.content.strip()
if message:
message = str(message, 'utf-8', 'replace') if isinstance(message, bytes) else message
# Truncate message if it is too long.
if len(message) > 150:
message = message[:150] + "..."
if return_msg:
return message
logger.debug("Server responded with message: %s", message)

280
jellypy/session.py Normal file
View File

@@ -0,0 +1,280 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import str
import cherrypy
import jellypy
if jellypy.PYTHON2:
import common
import users
else:
from jellypy import common
from jellypy import users
def get_session_info():
"""
Returns the session info for the user session
"""
_session = {'user_id': None,
'user': None,
'user_group': 'admin',
'exp': None}
if isinstance(cherrypy.request.login, dict):
return cherrypy.request.login
return _session
def get_session_user():
"""
Returns the username for the current logged in session
"""
_session = get_session_info()
return _session['user'] if _session['user_group'] == 'guest' and _session['user'] else None
def get_session_user_id():
"""
Returns the user_id for the current logged in session
"""
_session = get_session_info()
return str(_session['user_id']) if _session['user_group'] == 'guest' and _session['user_id'] else None
def get_session_user_token():
"""
Returns the user's server_token for the current logged in session
"""
_session = get_session_info()
if _session['user_group'] == 'guest' and _session['user_id']:
session_user_tokens = users.Users().get_tokens(_session['user_id'])
user_token = session_user_tokens['server_token']
else:
user_token = jellypy.CONFIG.PMS_TOKEN
return user_token
def get_session_shared_libraries():
"""
Returns a tuple of section_ids for the current logged in session
"""
user_details = users.Users().get_details(user_id=get_session_user_id())
return tuple(str(s) for s in user_details['shared_libraries'])
def get_session_library_filters():
"""
Returns a dict of library filters for the current logged in session
{'content_rating': ('PG', 'R'),
'labels': ('label1', 'label2')}
"""
filters = users.Users().get_filters(user_id=get_session_user_id())
return filters
def get_session_library_filters_type(filters, media_type=None):
"""
Returns a tuple of (content_rating, labels) library filters for the given
media_type for the current logged in session, with the labels lowercased.
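Illustrative mapping (a sketch with made-up filter values):
    filters = {'filter_movies': {'content_rating': ('PG', 'R'), 'labels': ('Kids',)}}
    get_session_library_filters_type(filters, media_type='movie')
    # -> (('PG', 'R'), ('kids',))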
"""
if media_type == 'movie':
filters = filters.get('filter_movies', ())
elif media_type == 'show' or media_type == 'season' or media_type == 'episode':
filters = filters.get('filter_tv', ())
elif media_type == 'artist' or media_type == 'album' or media_type == 'track':
filters = filters.get('filter_music', ())
elif media_type == 'photo' or media_type == 'photo_album' or media_type == 'picture' or media_type == 'clip':
filters = filters.get('filter_photos', ())
else:
filters = filters.get('filter_all', ())
content_rating = filters.get('content_rating', ())
labels = filters.get('labels', ())
return content_rating, tuple(f.lower() for f in labels)
def allow_session_user(user_id):
"""
Returns True or False if the user_id is allowed for the current logged in session
"""
session_user_id = get_session_user_id()
if session_user_id and str(user_id) != session_user_id:
return False
return True
def allow_session_library(section_id):
"""
Returns True or False if the section_id is allowed for the current logged in session
"""
session_library_ids = get_session_shared_libraries()
if session_library_ids and str(section_id) not in session_library_ids:
return False
return True
def friendly_name_to_username(list_of_dicts):
"""
Reverts the friendly name back to the username of the current logged in session
"""
session_user = get_session_user()
session_user_id = get_session_user_id()
if session_user_id:
for d in list_of_dicts:
if 'friendly_name' in d and d['friendly_name'] != session_user:
d['friendly_name'] = session_user
return list_of_dicts
def filter_session_info(list_of_dicts, filter_key=None):
"""
Filters a list of dictionary items to only return the info for the current logged in session
"""
session_user_id = get_session_user_id()
if not session_user_id:
return list_of_dicts
session_library_ids = get_session_shared_libraries()
session_library_filters = get_session_library_filters()
list_of_dicts = friendly_name_to_username(list_of_dicts)
if filter_key == 'user_id' and session_user_id:
return [d for d in list_of_dicts if str(d.get('user_id','')) == session_user_id]
elif filter_key == 'section_id' and session_library_ids:
new_list_of_dicts = []
for d in list_of_dicts:
if str(d.get('section_id','')) not in session_library_ids:
continue
if d.get('media_type'):
f_content_rating, f_labels = get_session_library_filters_type(session_library_filters,
media_type=d['media_type'])
d_content_rating = d.get('content_rating', '')
d_labels = tuple(f.lower() for f in d.get('labels', ()))
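# Keep the item if it passes either the content rating filter or the label
# filter for this library section; a section with no filters keeps everything.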
keep = False
if not f_content_rating and not f_labels:
keep = True
elif not f_content_rating and f_labels:
if set(d_labels).intersection(set(f_labels)):
keep = True
elif f_content_rating and not f_labels:
if d_content_rating in f_content_rating:
keep = True
elif f_content_rating and f_labels:
if d_content_rating in f_content_rating or set(d_labels).intersection(set(f_labels)):
keep = True
if keep:
new_list_of_dicts.append(d)
return new_list_of_dicts
return list_of_dicts
def mask_session_info(list_of_dicts, mask_metadata=True):
"""
Masks user info in a list of dictionary items to only display info for the current logged in session
"""
session_user_id = get_session_user_id()
if not session_user_id:
return list_of_dicts
session_user = get_session_user()
session_library_ids = get_session_shared_libraries()
session_library_filters = get_session_library_filters()
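# Two levels of masking: user-identifying fields are masked for rows that do not
# belong to the session user, and (when mask_metadata is True) metadata fields
# are masked for rows in libraries the user cannot access or that fail the
# library filters.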
keys_to_mask = {'user_id': '',
'user': 'Plex User',
'username': 'Plex User',
'friendly_name': 'Plex User',
'user_thumb': common.DEFAULT_USER_THUMB,
'ip_address': 'N/A',
'machine_id': '',
'player': 'Player'
}
metadata_to_mask = {'media_index': '0',
'parent_media_index': '0',
'art': common.DEFAULT_ART,
'parent_thumb': common.DEFAULT_POSTER_THUMB,
'grandparent_thumb': common.DEFAULT_POSTER_THUMB,
'thumb': common.DEFAULT_POSTER_THUMB,
'bif_thumb': '',
'title': 'Plex Media',
'parent_title': 'Plex Media',
'grandparent_title': 'Plex Media',
'original_title': 'Plex Media',
'rating_key': '',
'parent_rating_key': '',
'grandparent_rating_key': '',
'year': '',
'last_played': 'Plex Media'
}
list_of_dicts = friendly_name_to_username(list_of_dicts)
for d in list_of_dicts:
if session_user_id and not (str(d.get('user_id')) == session_user_id or d.get('user') == session_user):
for k, v in keys_to_mask.items():
if k in d: d[k] = keys_to_mask[k]
if not mask_metadata:
continue
if str(d.get('section_id','')) not in session_library_ids:
for k, v in metadata_to_mask.items():
if k in d: d[k] = metadata_to_mask[k]
continue
media_type = d.get('media_type')
if media_type:
f_content_rating, f_labels = get_session_library_filters_type(session_library_filters,
media_type=d['media_type'])
d_content_rating = d.get('content_rating', '')
d_labels = tuple(f.lower() for f in d.get('labels', ()))
if not f_content_rating and not f_labels:
continue
elif not f_content_rating and f_labels:
if set(d_labels).intersection(set(f_labels)):
continue
elif f_content_rating and not f_labels:
if d_content_rating in f_content_rating:
continue
elif f_content_rating and f_labels:
if d_content_rating in f_content_rating or set(d_labels).intersection(set(f_labels)):
continue
for k, v in metadata_to_mask.items():
if k in d: d[k] = metadata_to_mask[k]
return list_of_dicts

950
jellypy/users.py Normal file
View File

@@ -0,0 +1,950 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from future.builtins import next
from future.builtins import str
from future.builtins import object
from future.moves.urllib.parse import parse_qsl
import httpagentparser
import jellypy
if jellypy.PYTHON2:
import common
import database
import datatables
import helpers
import libraries
import logger
import plextv
import session
else:
from jellypy import common
from jellypy import database
from jellypy import datatables
from jellypy import helpers
from jellypy import libraries
from jellypy import logger
from jellypy import plextv
from jellypy import session
def refresh_users():
logger.info("Tautulli Users :: Requesting users list refresh...")
result = plextv.PlexTV().get_full_users_list()
server_id = jellypy.CONFIG.PMS_IDENTIFIER
if not server_id:
logger.error("Tautulli Users :: No PMS identifier, cannot refresh users. Verify server in settings.")
return
if result:
monitor_db = database.MonitorDatabase()
# Keep track of user_id to update is_active status
user_ids = [0] # Local user always considered active
for item in result:
user_ids.append(helpers.cast_to_int(item['user_id']))
if item.get('shared_libraries'):
item['shared_libraries'] = ';'.join(item['shared_libraries'])
elif item.get('server_token'):
libs = libraries.Libraries().get_sections()
item['shared_libraries'] = ';'.join([str(l['section_id']) for l in libs])
keys_dict = {"user_id": item.pop('user_id')}
# Check if we've set a custom avatar if so don't overwrite it.
if keys_dict['user_id']:
avatar_urls = monitor_db.select('SELECT thumb, custom_avatar_url '
'FROM users WHERE user_id = ?',
[keys_dict['user_id']])
if avatar_urls:
if not avatar_urls[0]['custom_avatar_url'] or \
avatar_urls[0]['custom_avatar_url'] == avatar_urls[0]['thumb']:
item['custom_avatar_url'] = item['thumb']
else:
item['custom_avatar_url'] = item['thumb']
monitor_db.upsert('users', key_dict=keys_dict, value_dict=item)
query = 'UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})'.format(', '.join(['?'] * len(user_ids)))
monitor_db.action(query=query, args=user_ids)
logger.info("Tautulli Users :: Users list refreshed.")
return True
else:
logger.warn("Tautulli Users :: Unable to refresh users list.")
return False
class Users(object):
def __init__(self):
pass
def get_datatables_list(self, kwargs=None, grouping=None):
default_return = {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
'data': 'null',
'error': 'Unable to execute database query.'}
data_tables = datatables.DataTables()
custom_where = [['users.deleted_user', 0]]
if grouping is None:
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if session.get_session_user_id():
custom_where.append(['users.user_id', session.get_session_user_id()])
if kwargs.get('user_id'):
custom_where.append(['users.user_id', kwargs.get('user_id')])
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
columns = ['users.id AS row_id',
'users.user_id',
'users.username',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name',
'users.thumb AS user_thumb',
'users.custom_avatar_url AS custom_thumb',
'COUNT(DISTINCT %s) AS plays' % group_by,
'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \
ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \
session_history.paused_counter END) AS duration',
'MAX(session_history.started) AS last_seen',
'MAX(session_history.id) AS history_row_id',
'session_history_metadata.full_title AS last_played',
'session_history.ip_address',
'session_history.platform',
'session_history.player',
'session_history.rating_key',
'session_history_metadata.media_type',
'session_history_metadata.thumb',
'session_history_metadata.parent_thumb',
'session_history_metadata.grandparent_thumb',
'session_history_metadata.parent_title',
'session_history_metadata.year',
'session_history_metadata.media_index',
'session_history_metadata.parent_media_index',
'session_history_metadata.live',
'session_history_metadata.added_at',
'session_history_metadata.originally_available_at',
'session_history_metadata.guid',
'session_history_media_info.transcode_decision',
'users.do_notify AS do_notify',
'users.keep_history AS keep_history',
'users.allow_guest AS allow_guest',
'users.is_active AS is_active'
]
try:
query = data_tables.ssp_query(table_name='users',
columns=columns,
custom_where=custom_where,
group_by=['users.user_id'],
join_types=['LEFT OUTER JOIN',
'LEFT OUTER JOIN',
'LEFT OUTER JOIN'],
join_tables=['session_history',
'session_history_metadata',
'session_history_media_info'],
join_evals=[['session_history.user_id', 'users.user_id'],
['session_history.id', 'session_history_metadata.id'],
['session_history.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_list: %s." % e)
return default_return
users = query['result']
rows = []
for item in users:
if item['media_type'] == 'episode' and item['parent_thumb']:
thumb = item['parent_thumb']
elif item['media_type'] == 'episode':
thumb = item['grandparent_thumb']
else:
thumb = item['thumb']
if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']:
user_thumb = item['custom_thumb']
elif item['user_thumb']:
user_thumb = item['user_thumb']
else:
user_thumb = common.DEFAULT_USER_THUMB
# Rename Mystery platform names
platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
row = {'row_id': item['row_id'],
'user_id': item['user_id'],
'username': item['username'],
'friendly_name': item['friendly_name'],
'user_thumb': user_thumb,
'plays': item['plays'],
'duration': item['duration'],
'last_seen': item['last_seen'],
'last_played': item['last_played'],
'history_row_id': item['history_row_id'],
'ip_address': item['ip_address'],
'platform': platform,
'player': item['player'],
'rating_key': item['rating_key'],
'media_type': item['media_type'],
'thumb': thumb,
'parent_title': item['parent_title'],
'year': item['year'],
'media_index': item['media_index'],
'parent_media_index': item['parent_media_index'],
'live': item['live'],
'originally_available_at': item['originally_available_at'],
'guid': item['guid'],
'transcode_decision': item['transcode_decision'],
'do_notify': helpers.checked(item['do_notify']),
'keep_history': helpers.checked(item['keep_history']),
'allow_guest': helpers.checked(item['allow_guest']),
'is_active': item['is_active']
}
rows.append(row)
result = {'recordsFiltered': query['filteredCount'],
'recordsTotal': query['totalCount'],
'data': session.friendly_name_to_username(rows),
'draw': query['draw']
}
return result
def get_datatables_unique_ips(self, user_id=None, kwargs=None):
default_return = {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
'data': 'null',
'error': 'Unable to execute database query.'}
if not session.allow_session_user(user_id):
return default_return
data_tables = datatables.DataTables()
custom_where = ['users.user_id', user_id]
columns = ['session_history.id AS history_row_id',
'MIN(session_history.started) AS first_seen',
'MAX(session_history.started) AS last_seen',
'session_history.ip_address',
'COUNT(session_history.id) AS play_count',
'session_history.platform',
'session_history.player',
'session_history.rating_key',
'session_history_metadata.full_title AS last_played',
'session_history_metadata.thumb',
'session_history_metadata.parent_thumb',
'session_history_metadata.grandparent_thumb',
'session_history_metadata.media_type',
'session_history_metadata.parent_title',
'session_history_metadata.year',
'session_history_metadata.media_index',
'session_history_metadata.parent_media_index',
'session_history_metadata.live',
'session_history_metadata.added_at',
'session_history_metadata.originally_available_at',
'session_history_metadata.guid',
'session_history_media_info.transcode_decision',
'session_history.user',
'session_history.user_id as custom_user_id',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name'
]
try:
query = data_tables.ssp_query(table_name='session_history',
columns=columns,
custom_where=[custom_where],
group_by=['ip_address'],
join_types=['JOIN',
'JOIN',
'JOIN'],
join_tables=['users',
'session_history_metadata',
'session_history_media_info'],
join_evals=[['session_history.user_id', 'users.user_id'],
['session_history.id', 'session_history_metadata.id'],
['session_history.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_unique_ips: %s." % e)
return default_return
results = query['result']
rows = []
for item in results:
if item["media_type"] == 'episode' and item["parent_thumb"]:
thumb = item["parent_thumb"]
elif item["media_type"] == 'episode':
thumb = item["grandparent_thumb"]
else:
thumb = item["thumb"]
# Rename Mystery platform names
platform = common.PLATFORM_NAME_OVERRIDES.get(item["platform"], item["platform"])
row = {'history_row_id': item['history_row_id'],
'last_seen': item['last_seen'],
'first_seen': item['first_seen'],
'ip_address': item['ip_address'],
'play_count': item['play_count'],
'platform': platform,
'player': item['player'],
'last_played': item['last_played'],
'rating_key': item['rating_key'],
'thumb': thumb,
'media_type': item['media_type'],
'parent_title': item['parent_title'],
'year': item['year'],
'media_index': item['media_index'],
'parent_media_index': item['parent_media_index'],
'live': item['live'],
'originally_available_at': item['originally_available_at'],
'guid': item['guid'],
'transcode_decision': item['transcode_decision'],
'friendly_name': item['friendly_name'],
'user_id': item['custom_user_id']
}
rows.append(row)
result = {'recordsFiltered': query['filteredCount'],
'recordsTotal': query['totalCount'],
'data': session.friendly_name_to_username(rows),
'draw': query['draw']
}
return result
def set_config(self, user_id=None, friendly_name='', custom_thumb='', do_notify=1, keep_history=1, allow_guest=1):
if str(user_id).isdigit():
monitor_db = database.MonitorDatabase()
key_dict = {'user_id': user_id}
value_dict = {'friendly_name': friendly_name,
'custom_avatar_url': custom_thumb,
'do_notify': do_notify,
'keep_history': keep_history,
'allow_guest': allow_guest
}
try:
monitor_db.upsert('users', value_dict, key_dict)
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for set_config: %s." % e)
def get_details(self, user_id=None, user=None, email=None):
default_return = {'row_id': 0,
'user_id': 0,
'username': 'Local',
'friendly_name': 'Local',
'user_thumb': common.DEFAULT_USER_THUMB,
'email': '',
'is_active': 1,
'is_admin': '',
'is_home_user': 0,
'is_allow_sync': 0,
'is_restricted': 0,
'do_notify': 0,
'keep_history': 1,
'allow_guest': 0,
'deleted_user': 0,
'shared_libraries': ()
}
if user_id is None and not user and not email:
return default_return
def get_user_details(user_id=user_id, user=user, email=email):
monitor_db = database.MonitorDatabase()
try:
if str(user_id).isdigit():
query = 'SELECT id AS row_id, user_id, username, friendly_name, ' \
'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, deleted_user, ' \
'allow_guest, shared_libraries ' \
'FROM users ' \
'WHERE user_id = ? '
result = monitor_db.select(query, args=[user_id])
elif user:
query = 'SELECT id AS row_id, user_id, username, friendly_name, ' \
'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, deleted_user, ' \
'allow_guest, shared_libraries ' \
'FROM users ' \
'WHERE username = ? COLLATE NOCASE '
result = monitor_db.select(query, args=[user])
elif email:
query = 'SELECT id AS row_id, user_id, username, friendly_name, ' \
'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \
'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, deleted_user, ' \
'allow_guest, shared_libraries ' \
'FROM users ' \
'WHERE email = ? COLLATE NOCASE '
result = monitor_db.select(query, args=[email])
else:
result = []
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_details: %s." % e)
result = []
user_details = {}
if result:
for item in result:
if session.get_session_user_id():
friendly_name = session.get_session_user()
elif item['friendly_name']:
friendly_name = item['friendly_name']
else:
friendly_name = item['username']
if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']:
user_thumb = item['custom_thumb']
elif item['user_thumb']:
user_thumb = item['user_thumb']
else:
user_thumb = common.DEFAULT_USER_THUMB
shared_libraries = tuple(item['shared_libraries'].split(';')) if item['shared_libraries'] else ()
user_details = {'row_id': item['row_id'],
'user_id': item['user_id'],
'username': item['username'],
'friendly_name': friendly_name,
'user_thumb': user_thumb,
'email': item['email'],
'is_active': item['is_active'],
'is_admin': item['is_admin'],
'is_home_user': item['is_home_user'],
'is_allow_sync': item['is_allow_sync'],
'is_restricted': item['is_restricted'],
'do_notify': item['do_notify'],
'keep_history': item['keep_history'],
'deleted_user': item['deleted_user'],
'allow_guest': item['allow_guest'],
'shared_libraries': shared_libraries
}
return user_details
user_details = get_user_details(user_id=user_id, user=user)
if user_details:
return user_details
else:
logger.warn("Tautulli Users :: Unable to retrieve user %s from database. Requesting user list refresh."
% user_id if user_id else user)
# Let's first refresh the user list to make sure the user isn't newly added and not in the db yet
refresh_users()
user_details = get_user_details(user_id=user_id, user=user)
if user_details:
return user_details
else:
logger.warn("Tautulli Users :: Unable to retrieve user %s from database. Returning 'Local' user."
% user_id if user_id else user)
# If there is no user data we must return something
# Use "Local" user to retain compatibility with PlexWatch database value
return default_return
def get_watch_time_stats(self, user_id=None, grouping=None, query_days=None):
if not session.allow_session_user(user_id):
return []
if grouping is None:
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
if query_days and query_days is not None:
query_days = map(helpers.cast_to_int, query_days.split(','))
else:
query_days = [1, 7, 30, 0]
monitor_db = database.MonitorDatabase()
user_watch_time_stats = []
group_by = 'reference_id' if grouping else 'id'
for days in query_days:
try:
if days > 0:
if str(user_id).isdigit():
query = 'SELECT (SUM(stopped - started) - ' \
' SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \
'COUNT(DISTINCT %s) AS total_plays ' \
'FROM session_history ' \
'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \
'AND user_id = ? ' % (group_by, days)
result = monitor_db.select(query, args=[user_id])
else:
result = []
else:
if str(user_id).isdigit():
query = 'SELECT (SUM(stopped - started) - ' \
' SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \
'COUNT(DISTINCT %s) AS total_plays ' \
'FROM session_history ' \
'WHERE user_id = ? ' % group_by
result = monitor_db.select(query, args=[user_id])
else:
result = []
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_watch_time_stats: %s." % e)
result = []
for item in result:
if item['total_time']:
total_time = item['total_time']
total_plays = item['total_plays']
else:
total_time = 0
total_plays = 0
row = {'query_days': days,
'total_time': total_time,
'total_plays': total_plays
}
user_watch_time_stats.append(row)
return user_watch_time_stats
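# Illustrative sketch only (numbers are made up): with the default query_days of
# [1, 7, 30, 0], get_watch_time_stats() returns one row per period, where
# total_time is in seconds and query_days == 0 covers all recorded history, e.g.
#     [{'query_days': 1, 'total_time': 3600, 'total_plays': 2},
#      {'query_days': 7, 'total_time': 25200, 'total_plays': 14},
#      {'query_days': 30, 'total_time': 108000, 'total_plays': 60},
#      {'query_days': 0, 'total_time': 540000, 'total_plays': 300}]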
def get_player_stats(self, user_id=None, grouping=None):
if not session.allow_session_user(user_id):
return []
if grouping is None:
grouping = jellypy.CONFIG.GROUP_HISTORY_TABLES
monitor_db = database.MonitorDatabase()
player_stats = []
result_id = 0
group_by = 'reference_id' if grouping else 'id'
try:
if str(user_id).isdigit():
query = 'SELECT player, COUNT(DISTINCT %s) as player_count, platform ' \
'FROM session_history ' \
'WHERE user_id = ? ' \
'GROUP BY player ' \
'ORDER BY player_count DESC' % group_by
result = monitor_db.select(query, args=[user_id])
else:
result = []
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_player_stats: %s." % e)
result = []
for item in result:
# Rename Mystery platform names
platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
platform_name = next((v for k, v in common.PLATFORM_NAMES.items() if k in platform.lower()), 'default')
row = {'player_name': item['player'],
'platform': platform,
'platform_name': platform_name,
'total_plays': item['player_count'],
'result_id': result_id
}
player_stats.append(row)
result_id += 1
return player_stats
def get_recently_watched(self, user_id=None, limit='10'):
if not session.allow_session_user(user_id):
return []
monitor_db = database.MonitorDatabase()
recently_watched = []
if not limit.isdigit():
limit = '10'
try:
if str(user_id).isdigit():
query = 'SELECT session_history.id, session_history.media_type, guid, ' \
'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, ' \
'title, parent_title, grandparent_title, original_title, ' \
'thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \
'year, originally_available_at, added_at, live, started, user ' \
'FROM session_history_metadata ' \
'JOIN session_history ON session_history_metadata.id = session_history.id ' \
'WHERE user_id = ? ' \
'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \
' ELSE session_history.rating_key END) ' \
'ORDER BY MAX(started) DESC LIMIT ?'
result = monitor_db.select(query, args=[user_id, limit])
else:
result = []
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_recently_watched: %s." % e)
result = []
for row in result:
if row['media_type'] == 'episode' and row['parent_thumb']:
thumb = row['parent_thumb']
elif row['media_type'] == 'episode':
thumb = row['grandparent_thumb']
else:
thumb = row['thumb']
recent_output = {'row_id': row['id'],
'media_type': row['media_type'],
'rating_key': row['rating_key'],
'parent_rating_key': row['parent_rating_key'],
'grandparent_rating_key': row['grandparent_rating_key'],
'title': row['title'],
'parent_title': row['parent_title'],
'grandparent_title': row['grandparent_title'],
'original_title': row['original_title'],
'thumb': thumb,
'media_index': row['media_index'],
'parent_media_index': row['parent_media_index'],
'year': row['year'],
'originally_available_at': row['originally_available_at'],
'live': row['live'],
'guid': row['guid'],
'time': row['started'],
'user': row['user']
}
recently_watched.append(recent_output)
return recently_watched
def get_users(self):
monitor_db = database.MonitorDatabase()
try:
query = 'SELECT id AS row_id, user_id, username, friendly_name, thumb, custom_avatar_url, email, ' \
'is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \
'do_notify, keep_history, allow_guest, server_token, shared_libraries, ' \
'filter_all, filter_movies, filter_tv, filter_music, filter_photos ' \
'FROM users WHERE deleted_user = 0'
result = monitor_db.select(query=query)
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_users: %s." % e)
return None
users = []
for item in result:
user = {'row_id': item['row_id'],
'user_id': item['user_id'],
'username': item['username'],
'friendly_name': item['friendly_name'] or item['username'],
'thumb': item['custom_avatar_url'] or item['thumb'],
'email': item['email'],
'is_active': item['is_active'],
'is_admin': item['is_admin'],
'is_home_user': item['is_home_user'],
'is_allow_sync': item['is_allow_sync'],
'is_restricted': item['is_restricted'],
'do_notify': item['do_notify'],
'keep_history': item['keep_history'],
'allow_guest': item['allow_guest'],
'server_token': item['server_token'],
'shared_libraries': item['shared_libraries'],
'filter_all': item['filter_all'],
'filter_movies': item['filter_movies'],
'filter_tv': item['filter_tv'],
'filter_music': item['filter_music'],
'filter_photos': item['filter_photos'],
}
users.append(user)
return users
def delete(self, user_id=None, row_ids=None, purge_only=False):
monitor_db = database.MonitorDatabase()
if row_ids and row_ids is not None:
row_ids = list(map(helpers.cast_to_int, row_ids.split(',')))
# Get the user_ids corresponding to the row_ids
result = monitor_db.select('SELECT user_id FROM users '
'WHERE id IN ({})'.format(','.join(['?'] * len(row_ids))), row_ids)
success = []
for user in result:
success.append(self.delete(user_id=user['user_id'],
purge_only=purge_only))
return all(success)
elif str(user_id).isdigit():
delete_success = database.delete_user_history(user_id=user_id)
if purge_only:
return delete_success
else:
logger.info("Tautulli Users :: Deleting user with user_id %s from database."
% user_id)
try:
monitor_db.action('UPDATE users '
'SET deleted_user = 1, keep_history = 0, do_notify = 0 '
'WHERE user_id = ?', [user_id])
return delete_success
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for delete: %s." % e)
else:
return False
def undelete(self, user_id=None, username=None):
monitor_db = database.MonitorDatabase()
try:
if user_id and str(user_id).isdigit():
query = 'SELECT * FROM users WHERE user_id = ?'
result = monitor_db.select(query=query, args=[user_id])
if result:
logger.info("Tautulli Users :: Re-adding user with id %s to database." % user_id)
monitor_db.action('UPDATE users '
'SET deleted_user = 0, keep_history = 1, do_notify = 1 '
'WHERE user_id = ?', [user_id])
return True
else:
return False
elif username:
query = 'SELECT * FROM users WHERE username = ?'
result = monitor_db.select(query=query, args=[username])
if result:
logger.info("Tautulli Users :: Re-adding user with username %s to database." % username)
monitor_db.action('UPDATE users '
'SET deleted_user = 0, keep_history = 1, do_notify = 1 '
'WHERE username = ?', [username])
return True
else:
return False
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for undelete: %s." % e)
# Keep method for PlexWatch/Plexivity import
def get_user_id(self, user=None):
if user:
try:
monitor_db = database.MonitorDatabase()
query = 'SELECT user_id FROM users WHERE username = ?'
result = monitor_db.select_single(query, args=[user])
if result:
return result['user_id']
else:
return None
except:
return None
return None
def get_user_names(self, kwargs=None):
monitor_db = database.MonitorDatabase()
user_cond = ''
if session.get_session_user_id():
user_cond = 'AND user_id = %s ' % session.get_session_user_id()
try:
query = 'SELECT user_id, ' \
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name ' \
'FROM users ' \
'WHERE deleted_user = 0 %s' % user_cond
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_user_names: %s." % e)
return None
return session.friendly_name_to_username(result)
def get_tokens(self, user_id=None):
tokens = {
'allow_guest': 0,
'user_token': '',
'server_token': ''
}
if user_id:
try:
monitor_db = database.MonitorDatabase()
query = 'SELECT allow_guest, user_token, server_token FROM users ' \
'WHERE user_id = ? AND deleted_user = 0'
result = monitor_db.select_single(query, args=[user_id])
if result:
tokens = {'allow_guest': result['allow_guest'],
'user_token': result['user_token'],
'server_token': result['server_token']
}
return tokens
else:
return tokens
except:
return tokens
return tokens
def get_filters(self, user_id=None):
if not user_id:
return {}
try:
monitor_db = database.MonitorDatabase()
query = 'SELECT filter_all, filter_movies, filter_tv, filter_music, filter_photos FROM users ' \
'WHERE user_id = ?'
result = monitor_db.select_single(query, args=[user_id])
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_filters: %s." % e)
result = {}
filters_list = {}
for k, v in result.items():
filters = {}
for f in v.split('|'):
if 'contentRating=' in f or 'label=' in f:
filters.update(dict(parse_qsl(f)))
filters['content_rating'] = tuple(f for f in filters.pop('contentRating', '').split(',') if f)
filters['labels'] = tuple(f for f in filters.pop('label', '').split(',') if f)
filters_list[k] = filters
return filters_list
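# Illustrative sketch only (hypothetical filter string): a stored value such as
# 'contentRating=TV-14,TV-PG|label=Kids' would be parsed by get_filters() into
#     {'content_rating': ('TV-14', 'TV-PG'), 'labels': ('Kids',)}
# for the corresponding filter_* column; segments without contentRating= or
# label= are ignored.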
def set_user_login(self, user_id=None, user=None, user_group=None, ip_address=None, host=None, user_agent=None, success=0):
if user_id is None or str(user_id).isdigit():
monitor_db = database.MonitorDatabase()
keys = {'timestamp': helpers.timestamp(),
'user_id': user_id}
values = {'user': user,
'user_group': user_group,
'ip_address': ip_address,
'host': host,
'user_agent': user_agent,
'success': success}
try:
monitor_db.upsert(table_name='user_login', key_dict=keys, value_dict=values)
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for set_login_log: %s." % e)
def get_datatables_user_login(self, user_id=None, kwargs=None):
default_return = {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
'data': 'null',
'error': 'Unable to execute database query.'}
if not session.allow_session_user(user_id):
return default_return
data_tables = datatables.DataTables()
if session.get_session_user_id():
custom_where = [['user_login.user_id', session.get_session_user_id()]]
else:
custom_where = [['user_login.user_id', user_id]] if user_id else []
columns = ['user_login.timestamp',
'user_login.user_id',
'user_login.user',
'user_login.user_group',
'user_login.ip_address',
'user_login.host',
'user_login.user_agent',
'user_login.success',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name'
]
try:
query = data_tables.ssp_query(table_name='user_login',
columns=columns,
custom_where=custom_where,
group_by=[],
join_types=['LEFT OUTER JOIN'],
join_tables=['users'],
join_evals=[['user_login.user_id', 'users.user_id']],
kwargs=kwargs)
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for get_datatables_user_login: %s." % e)
return default_return
results = query['result']
rows = []
for item in results:
(os, browser) = httpagentparser.simple_detect(item['user_agent'])
row = {'timestamp': item['timestamp'],
'user_id': item['user_id'],
'user_group': item['user_group'],
'ip_address': item['ip_address'],
'host': item['host'],
'user_agent': item['user_agent'],
'os': os,
'browser': browser,
'success': item['success'],
'friendly_name': item['friendly_name'] or item['user']
}
rows.append(row)
dict = {'recordsFiltered': query['filteredCount'],
'recordsTotal': query['totalCount'],
'data': session.friendly_name_to_username(rows),
'draw': query['draw']
}
return dict
def delete_login_log(self):
monitor_db = database.MonitorDatabase()
try:
logger.info("Tautulli Users :: Clearing login logs from database.")
monitor_db.action('DELETE FROM user_login')
monitor_db.action('VACUUM')
return True
except Exception as e:
logger.warn("Tautulli Users :: Unable to execute database query for delete_login_log: %s." % e)
return False

21
jellypy/version.py Normal file
View File

@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
PLEXPY_BRANCH = "master"
PLEXPY_RELEASE_VERSION = "v2.6.5"

530
jellypy/versioncheck.py Normal file
View File

@@ -0,0 +1,530 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
from __future__ import unicode_literals
from future.builtins import next
from future.builtins import str
import json
import os
import platform
import re
import subprocess
import tarfile
import jellypy
if jellypy.PYTHON2:
import common
import helpers
import logger
import request
else:
from jellypy import common
from jellypy import helpers
from jellypy import logger
from jellypy import request
def runGit(args):
if jellypy.CONFIG.GIT_PATH:
git_locations = ['"' + jellypy.CONFIG.GIT_PATH + '"']
else:
git_locations = ['git']
if platform.system().lower() == 'darwin':
git_locations.append('/usr/local/git/bin/git')
output = err = None
for cur_git in git_locations:
cmd = cur_git + ' ' + args
try:
logger.debug('Trying to execute: "' + cmd + '" with shell in ' + jellypy.PROG_DIR)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=jellypy.PROG_DIR)
output, err = p.communicate()
output = output.strip().decode()
logger.debug('Git output: ' + output)
except OSError:
logger.debug('Command failed: %s', cmd)
continue
if 'not found' in output or "not recognized as an internal or external command" in output:
logger.debug('Unable to find git with command ' + cmd)
output = None
elif 'fatal:' in output or err:
logger.error('Git returned bad info. Are you sure this is a git installation?')
output = None
elif output:
break
return output, err
def get_version():
if jellypy.FROZEN and common.PLATFORM == 'Windows':
jellypy.INSTALL_TYPE = 'windows'
current_version, current_branch = get_version_from_file()
return current_version, 'origin', current_branch
elif jellypy.FROZEN and common.PLATFORM == 'Darwin':
jellypy.INSTALL_TYPE = 'macos'
current_version, current_branch = get_version_from_file()
return current_version, 'origin', current_branch
elif os.path.isdir(os.path.join(jellypy.PROG_DIR, '.git')):
jellypy.INSTALL_TYPE = 'git'
output, err = runGit('rev-parse HEAD')
if not output:
logger.error('Could not find latest installed version.')
cur_commit_hash = None
else:
cur_commit_hash = str(output)
if not re.match('^[a-z0-9]+$', cur_commit_hash):
logger.error('Output does not look like a hash, not using it.')
cur_commit_hash = None
if jellypy.CONFIG.DO_NOT_OVERRIDE_GIT_BRANCH and jellypy.CONFIG.GIT_BRANCH:
remote_name = None
branch_name = jellypy.CONFIG.GIT_BRANCH
else:
remote_branch, err = runGit('rev-parse --abbrev-ref --symbolic-full-name @{u}')
remote_branch = remote_branch.rsplit('/', 1) if remote_branch else []
if len(remote_branch) == 2:
remote_name, branch_name = remote_branch
else:
remote_name = branch_name = None
if not remote_name and jellypy.CONFIG.GIT_REMOTE:
logger.error('Could not retrieve remote name from git. Falling back to %s.' % jellypy.CONFIG.GIT_REMOTE)
remote_name = jellypy.CONFIG.GIT_REMOTE
if not remote_name:
logger.error('Could not retrieve remote name from git. Defaulting to origin.')
remote_name = 'origin'
if not branch_name and jellypy.CONFIG.GIT_BRANCH:
logger.error('Could not retrieve branch name from git. Falling back to %s.' % jellypy.CONFIG.GIT_BRANCH)
branch_name = jellypy.CONFIG.GIT_BRANCH
if not branch_name:
logger.error('Could not retrieve branch name from git. Defaulting to master.')
branch_name = 'master'
return cur_commit_hash, remote_name, branch_name
else:
if jellypy.DOCKER:
jellypy.INSTALL_TYPE = 'docker'
elif jellypy.SNAP:
jellypy.INSTALL_TYPE = 'snap'
else:
jellypy.INSTALL_TYPE = 'source'
current_version, current_branch = get_version_from_file()
return current_version, 'origin', current_branch
def get_version_from_file():
version_file = os.path.join(jellypy.PROG_DIR, 'version.txt')
branch_file = os.path.join(jellypy.PROG_DIR, 'branch.txt')
if os.path.isfile(version_file):
with open(version_file, 'r') as f:
current_version = f.read().strip(' \n\r')
else:
current_version = None
if os.path.isfile(branch_file):
with open(branch_file, 'r') as f:
current_branch = f.read().strip(' \n\r')
else:
current_branch = common.BRANCH
return current_version, current_branch
def check_update(scheduler=False, notify=False, use_cache=False):
check_github(scheduler=scheduler, notify=notify, use_cache=use_cache)
if not jellypy.CURRENT_VERSION:
jellypy.UPDATE_AVAILABLE = None
elif jellypy.COMMITS_BEHIND > 0 and \
(jellypy.common.BRANCH in ('master', 'beta') or jellypy.SNAP or jellypy.FROZEN) and \
jellypy.common.RELEASE != jellypy.LATEST_RELEASE:
jellypy.UPDATE_AVAILABLE = 'release'
elif jellypy.COMMITS_BEHIND > 0 and \
not jellypy.SNAP and not jellypy.FROZEN and \
jellypy.CURRENT_VERSION != jellypy.LATEST_VERSION:
jellypy.UPDATE_AVAILABLE = 'commit'
else:
jellypy.UPDATE_AVAILABLE = False
if jellypy.WIN_SYS_TRAY_ICON:
jellypy.WIN_SYS_TRAY_ICON.change_tray_update_icon()
elif jellypy.MAC_SYS_TRAY_ICON:
jellypy.MAC_SYS_TRAY_ICON.change_tray_update_icon()
def check_github(scheduler=False, notify=False, use_cache=False):
jellypy.COMMITS_BEHIND = 0
if jellypy.CONFIG.GIT_TOKEN:
headers = {'Authorization': 'token {}'.format(jellypy.CONFIG.GIT_TOKEN)}
else:
headers = {}
version = github_cache('version', use_cache=use_cache)
if not version:
# Get the latest version available from github
logger.info('Retrieving latest version information from GitHub')
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO,
jellypy.CONFIG.GIT_BRANCH)
version = request.request_json(url, headers=headers, timeout=20,
validator=lambda x: type(x) == dict)
github_cache('version', github_data=version)
if version is None:
logger.warn('Could not get the latest version from GitHub. Are you running a local development version?')
return jellypy.CURRENT_VERSION
jellypy.LATEST_VERSION = version['sha']
logger.debug("Latest version is %s", jellypy.LATEST_VERSION)
# See how many commits behind we are
if not jellypy.CURRENT_VERSION:
logger.info('You are running an unknown version of Tautulli. Run the updater to identify your version')
return jellypy.LATEST_VERSION
if jellypy.LATEST_VERSION == jellypy.CURRENT_VERSION:
logger.info('Tautulli is up to date')
return jellypy.LATEST_VERSION
commits = github_cache('commits', use_cache=use_cache)
if not commits:
logger.info('Comparing currently installed version with latest GitHub version')
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO,
jellypy.LATEST_VERSION,
jellypy.CURRENT_VERSION)
commits = request.request_json(url, headers=headers, timeout=20, whitelist_status_code=404,
validator=lambda x: type(x) == dict)
github_cache('commits', github_data=commits)
if commits is None:
logger.warn('Could not get commits behind from GitHub.')
return jellypy.LATEST_VERSION
try:
jellypy.COMMITS_BEHIND = int(commits['behind_by'])
logger.debug("In total, %d commits behind", jellypy.COMMITS_BEHIND)
except KeyError:
logger.info('Cannot compare versions. Are you running a local development version?')
jellypy.COMMITS_BEHIND = 0
if jellypy.COMMITS_BEHIND > 0:
logger.info('New version is available. You are %s commits behind' % jellypy.COMMITS_BEHIND)
releases = github_cache('releases', use_cache=use_cache)
if not releases:
url = 'https://api.github.com/repos/%s/%s/releases' % (jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO)
releases = request.request_json(url, timeout=20, whitelist_status_code=404,
validator=lambda x: type(x) == list)
github_cache('releases', github_data=releases)
if releases is None:
logger.warn('Could not get releases from GitHub.')
return jellypy.LATEST_VERSION
if jellypy.CONFIG.GIT_BRANCH == 'master':
release = next((r for r in releases if not r['prerelease']), releases[0])
elif jellypy.CONFIG.GIT_BRANCH == 'beta':
release = next((r for r in releases if not r['tag_name'].endswith('-nightly')), releases[0])
elif jellypy.CONFIG.GIT_BRANCH == 'nightly':
release = next((r for r in releases), releases[0])
else:
release = releases[0]
jellypy.LATEST_RELEASE = release['tag_name']
if notify:
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate',
'plexpy_download_info': release,
'plexpy_update_commit': jellypy.LATEST_VERSION,
'plexpy_update_behind': jellypy.COMMITS_BEHIND})
if jellypy.PYTHON2:
logger.warn('Tautulli is running using Python 2. Unable to run automatic update.')
elif scheduler and jellypy.CONFIG.PLEXPY_AUTO_UPDATE and \
not jellypy.DOCKER and not jellypy.SNAP and \
not (jellypy.FROZEN and common.PLATFORM == 'Darwin'):
logger.info('Running automatic update.')
jellypy.shutdown(restart=True, update=True)
elif jellypy.COMMITS_BEHIND == 0:
logger.info('Tautulli is up to date')
return jellypy.LATEST_VERSION
def update():
if jellypy.PYTHON2:
logger.warn('Tautulli is running using Python 2. Unable to update.')
return
if not jellypy.UPDATE_AVAILABLE:
return
if jellypy.INSTALL_TYPE in ('docker', 'snap', 'macos'):
return
elif jellypy.INSTALL_TYPE == 'windows':
logger.info('Calling Windows scheduled task to update Tautulli')
CREATE_NO_WINDOW = 0x08000000
subprocess.Popen(['SCHTASKS', '/Run', '/TN', 'TautulliUpdateTask'],
creationflags=CREATE_NO_WINDOW)
elif jellypy.INSTALL_TYPE == 'git':
output, err = runGit('pull --ff-only {} {}'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
if not output:
logger.error('Unable to download latest version')
return
for line in output.split('\n'):
if 'Already up-to-date.' in line or 'Already up to date.' in line:
logger.info('No update available, not updating')
elif line.endswith(('Aborting', 'Aborting.')):
logger.error('Unable to update from git: ' + line)
elif jellypy.INSTALL_TYPE == 'source':
tar_download_url = 'https://github.com/{}/{}/tarball/{}'.format(jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO,
jellypy.CONFIG.GIT_BRANCH)
update_dir = os.path.join(jellypy.DATA_DIR, 'update')
version_path = os.path.join(jellypy.PROG_DIR, 'version.txt')
logger.info('Downloading update from: ' + tar_download_url)
data = request.request_content(tar_download_url)
if not data:
logger.error("Unable to retrieve new version from '%s', can't update", tar_download_url)
return
download_name = jellypy.CONFIG.GIT_BRANCH + '-github'
tar_download_path = os.path.join(jellypy.DATA_DIR, download_name)
# Save tar to disk
with open(tar_download_path, 'wb') as f:
f.write(data)
# Extract the tar to update folder
logger.info('Extracting file: ' + tar_download_path)
tar = tarfile.open(tar_download_path)
tar.extractall(update_dir)
tar.close()
# Delete the tar.gz
logger.info('Deleting file: ' + tar_download_path)
os.remove(tar_download_path)
# Find update dir name
update_dir_contents = [x for x in os.listdir(update_dir) if os.path.isdir(os.path.join(update_dir, x))]
if len(update_dir_contents) != 1:
logger.error("Invalid update data, update failed: " + str(update_dir_contents))
return
content_dir = os.path.join(update_dir, update_dir_contents[0])
# walk temp folder and move files to main folder
for dirname, dirnames, filenames in os.walk(content_dir):
dirname = dirname[len(content_dir) + 1:]
for curfile in filenames:
old_path = os.path.join(content_dir, dirname, curfile)
new_path = os.path.join(jellypy.PROG_DIR, dirname, curfile)
if os.path.isfile(new_path):
os.remove(new_path)
os.renames(old_path, new_path)
# Update version.txt
try:
with open(version_path, 'w') as f:
f.write(str(jellypy.LATEST_VERSION))
except IOError as e:
logger.error(
"Unable to write current version to version.txt, update not complete: %s",
e
)
return
def reset_git_install():
if jellypy.INSTALL_TYPE == 'git':
logger.info('Attempting to reset git install to "{}/{}/{}"'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH,
common.RELEASE))
output, err = runGit('remote set-url {} https://github.com/{}/{}.git'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_USER,
jellypy.CONFIG.GIT_REPO))
output, err = runGit('fetch {}'.format(jellypy.CONFIG.GIT_REMOTE))
output, err = runGit('checkout {}'.format(jellypy.CONFIG.GIT_BRANCH))
output, err = runGit('branch -u {}/{}'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
output, err = runGit('reset --hard {}'.format(common.RELEASE))
if not output:
logger.error('Unable to reset Tautulli installation.')
return False
for line in output.split('\n'):
if 'Already up-to-date.' in line or 'Already up to date.' in line:
logger.info('Tautulli installation reset successfully.')
return True
elif line.endswith(('Aborting', 'Aborting.')):
logger.error('Unable to reset Tautulli installation: ' + line)
return False
def checkout_git_branch():
if jellypy.INSTALL_TYPE == 'git':
logger.info('Attempting to checkout git branch "{}/{}"'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
output, err = runGit('fetch {}'.format(jellypy.CONFIG.GIT_REMOTE))
output, err = runGit('checkout {}'.format(jellypy.CONFIG.GIT_BRANCH))
if not output:
logger.error('Unable to change git branch.')
return
for line in output.split('\n'):
if line.endswith(('Aborting', 'Aborting.')):
logger.error('Unable to checkout from git: ' + line)
return
output, err = runGit('pull {} {}'.format(jellypy.CONFIG.GIT_REMOTE,
jellypy.CONFIG.GIT_BRANCH))
def github_cache(cache, github_data=None, use_cache=True):
timestamp = helpers.timestamp()
cache_filepath = os.path.join(jellypy.CONFIG.CACHE_DIR, 'github_{}.json'.format(cache))
if github_data:
cache_data = {'github_data': github_data, '_cache_time': timestamp}
try:
with open(cache_filepath, 'w', encoding='utf-8') as cache_file:
json.dump(cache_data, cache_file)
except:
pass
else:
if not use_cache:
return
try:
with open(cache_filepath, 'r', encoding='utf-8') as cache_file:
cache_data = json.load(cache_file)
if timestamp - cache_data['_cache_time'] < jellypy.CONFIG.CHECK_GITHUB_CACHE_SECONDS:
logger.debug('Using cached GitHub %s data', cache)
return cache_data['github_data']
except:
pass
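# Illustrative usage sketch (not part of the module): check_github() above uses
# these helpers as a simple read-through cache, e.g.
#     version = github_cache('version', use_cache=use_cache)   # cached data or None
#     if not version:
#         version = request.request_json(url, ...)             # fetch from GitHub
#         github_cache('version', github_data=version)         # persist to CACHE_DIR
# Entries older than CHECK_GITHUB_CACHE_SECONDS are treated as misses.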
def read_changelog(latest_only=False, since_prev_release=False):
changelog_file = os.path.join(jellypy.PROG_DIR, 'CHANGELOG.md')
if not os.path.isfile(changelog_file):
return '<h4>Missing changelog file</h4>'
try:
output = ['']
prev_level = 0
latest_version_found = False
header_pattern = re.compile(r'(^#+)\s(.+)')
list_pattern = re.compile(r'(^[ \t]*\*\s)(.+)')
beta_release = False
prev_release = str(jellypy.PREV_RELEASE)
with open(changelog_file, "r") as logfile:
for line in logfile:
line_header_match = re.search(header_pattern, line)
line_list_match = re.search(list_pattern, line)
if line_header_match:
header_level = str(len(line_header_match.group(1)))
header_text = line_header_match.group(2)
if header_text.lower() == 'changelog':
continue
if latest_version_found:
break
elif latest_only:
latest_version_found = True
# Add a space to the end of the release to match tags
elif since_prev_release:
if prev_release.endswith('-beta') and not beta_release:
if prev_release + ' ' in header_text:
break
elif prev_release.replace('-beta', '') + ' ' in header_text:
beta_release = True
elif prev_release.endswith('-beta') and beta_release:
break
elif prev_release + ' ' in header_text:
break
output[-1] += '<h' + header_level + '>' + header_text + '</h' + header_level + '>'
elif line_list_match:
line_level = len(line_list_match.group(1)) // 2
line_text = line_list_match.group(2)
if line_level > prev_level:
output[-1] += '<ul>' * (line_level - prev_level) + '<li>' + line_text + '</li>'
elif line_level < prev_level:
output[-1] += '</ul>' * (prev_level - line_level) + '<li>' + line_text + '</li>'
else:
output[-1] += '<li>' + line_text + '</li>'
prev_level = line_level
elif line.strip() == '' and prev_level:
output[-1] += '</ul>' * (prev_level)
output.append('')
prev_level = 0
if since_prev_release:
output.reverse()
return ''.join(output)
except IOError as e:
logger.error('Tautulli Version Checker :: Unable to open changelog file. %s' % e)
return '<h4>Unable to open changelog file</h4>'
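# Illustrative sketch (hypothetical changelog lines): read_changelog() converts
# Markdown headers and bullets to HTML fragments, so a block such as
#     ## v2.6.5 (2021-01-01)
#     * Fix: example item
# followed by a blank line contributes roughly
#     <h2>v2.6.5 (2021-01-01)</h2><ul><li>Fix: example item</li></ul>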

309
jellypy/web_socket.py Normal file
View File

@@ -0,0 +1,309 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
# Mostly borrowed from https://github.com/trakt/Plex-Trakt-Scrobbler
from __future__ import unicode_literals
from future.builtins import str
import json
import ssl
import threading
import time
import certifi
import websocket
import jellypy
if jellypy.PYTHON2:
import activity_handler
import activity_pinger
import activity_processor
import database
import logger
else:
from jellypy import activity_handler
from jellypy import activity_pinger
from jellypy import activity_processor
from jellypy import database
from jellypy import logger
name = 'websocket'
opcode_data = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)
ws_shutdown = False
pong_timer = None
pong_count = 0
def start_thread():
try:
# Check for any existing sessions on start up
activity_pinger.check_active_sessions(ws_request=True)
except Exception as e:
logger.error("Tautulli WebSocket :: Failed to check for active sessions: %s." % e)
logger.warn("Tautulli WebSocket :: Attempt to fix by flushing temporary sessions...")
database.delete_sessions()
# Start the websocket listener on its own thread
thread = threading.Thread(target=run)
thread.daemon = True
thread.start()
def on_connect():
if jellypy.PLEX_SERVER_UP is None:
jellypy.PLEX_SERVER_UP = True
if not jellypy.PLEX_SERVER_UP:
logger.info("Tautulli WebSocket :: The Plex Media Server is back up.")
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_intup'})
jellypy.PLEX_SERVER_UP = True
jellypy.initialize_scheduler()
if jellypy.CONFIG.WEBSOCKET_MONITOR_PING_PONG:
send_ping()
def on_disconnect():
if jellypy.PLEX_SERVER_UP is None:
jellypy.PLEX_SERVER_UP = False
if jellypy.PLEX_SERVER_UP:
logger.info("Tautulli WebSocket :: Unable to get a response from the server, Plex server is down.")
jellypy.NOTIFY_QUEUE.put({'notify_action': 'on_intdown'})
jellypy.PLEX_SERVER_UP = False
activity_processor.ActivityProcessor().set_temp_stopped()
jellypy.initialize_scheduler()
def reconnect():
close()
logger.info("Tautulli WebSocket :: Reconnecting websocket...")
start_thread()
def shutdown():
global ws_shutdown
ws_shutdown = True
close()
def close():
logger.info("Tautulli WebSocket :: Disconnecting websocket...")
jellypy.WEBSOCKET.close()
jellypy.WS_CONNECTED = False
def send_ping():
if jellypy.WS_CONNECTED:
# logger.debug("Tautulli WebSocket :: Sending ping.")
jellypy.WEBSOCKET.ping("Hi?")
global pong_timer
pong_timer = threading.Timer(5.0, wait_pong)
pong_timer.daemon = True
pong_timer.start()
def wait_pong():
global pong_count
pong_count += 1
logger.warn("Tautulli WebSocket :: Failed to receive pong from websocket, ping attempt %s." % str(pong_count))
if pong_count >= jellypy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
pong_count = 0
close()
def receive_pong():
# logger.debug("Tautulli WebSocket :: Received pong.")
global pong_timer
global pong_count
if pong_timer:
pong_timer = pong_timer.cancel()
pong_count = 0
def run():
from websocket import create_connection
if jellypy.CONFIG.PMS_SSL and jellypy.CONFIG.PMS_URL[:5] == 'https':
uri = jellypy.CONFIG.PMS_URL.replace('https://', 'wss://') + '/:/websockets/notifications'
secure = 'secure '
if jellypy.CONFIG.VERIFY_SSL_CERT:
sslopt = {'ca_certs': certifi.where()}
else:
sslopt = {'cert_reqs': ssl.CERT_NONE}
else:
uri = 'ws://%s:%s/:/websockets/notifications' % (
jellypy.CONFIG.PMS_IP,
jellypy.CONFIG.PMS_PORT
)
secure = ''
sslopt = None
# Set authentication token (if one is available)
if jellypy.CONFIG.PMS_TOKEN:
header = ["X-Plex-Token: %s" % jellypy.CONFIG.PMS_TOKEN]
else:
header = []
global ws_shutdown
ws_shutdown = False
reconnects = 0
# Try to open the websocket connection
logger.info("Tautulli WebSocket :: Opening %swebsocket." % secure)
try:
jellypy.WEBSOCKET = create_connection(uri, header=header, sslopt=sslopt)
logger.info("Tautulli WebSocket :: Ready")
jellypy.WS_CONNECTED = True
except (websocket.WebSocketException, IOError, Exception) as e:
logger.error("Tautulli WebSocket :: %s.", e)
if jellypy.WS_CONNECTED:
on_connect()
while jellypy.WS_CONNECTED:
try:
process(*receive(jellypy.WEBSOCKET))
# successfully received data, reset reconnects counter
reconnects = 0
except websocket.WebSocketConnectionClosedException:
if ws_shutdown:
break
if reconnects == 0:
logger.warn("Tautulli WebSocket :: Connection has closed.")
if not jellypy.CONFIG.PMS_IS_CLOUD and reconnects < jellypy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
reconnects += 1
# Sleep for the configured timeout between connection attempts
if reconnects > 1:
time.sleep(jellypy.CONFIG.WEBSOCKET_CONNECTION_TIMEOUT)
logger.warn("Tautulli WebSocket :: Reconnection attempt %s." % str(reconnects))
try:
jellypy.WEBSOCKET = create_connection(uri, header=header)
logger.info("Tautulli WebSocket :: Ready")
jellypy.WS_CONNECTED = True
except (websocket.WebSocketException, IOError, Exception) as e:
logger.error("Tautulli WebSocket :: %s.", e)
else:
close()
break
except (websocket.WebSocketException, Exception) as e:
if ws_shutdown:
break
logger.error("Tautulli WebSocket :: %s.", e)
close()
break
if not jellypy.WS_CONNECTED and not ws_shutdown:
on_disconnect()
logger.debug("Tautulli WebSocket :: Leaving thread.")
def receive(ws):
frame = ws.recv_frame()
if not frame:
raise websocket.WebSocketException("Not a valid frame %s" % frame)
elif frame.opcode in opcode_data:
return frame.opcode, frame.data
elif frame.opcode == websocket.ABNF.OPCODE_CLOSE:
ws.send_close()
return frame.opcode, None
elif frame.opcode == websocket.ABNF.OPCODE_PING:
# logger.debug("Tautulli WebSocket :: Received ping, sending pong.")
ws.pong("Hi!")
elif frame.opcode == websocket.ABNF.OPCODE_PONG:
receive_pong()
return None, None
def process(opcode, data):
if opcode not in opcode_data:
return False
try:
data = data.decode('utf-8')
logger.websocket_debug(data)
event = json.loads(data)
except Exception as e:
logger.warn("Tautulli WebSocket :: Error decoding message from websocket: %s" % e)
logger.websocket_error(data)
return False
event = event.get('NotificationContainer', event)
event_type = event.get('type')
if not event_type:
return False
if event_type == 'playing':
event_data = event.get('PlaySessionStateNotification', event.get('_children', {}))
if not event_data:
logger.debug("Tautulli WebSocket :: Session event found but unable to get websocket data.")
return False
try:
activity = activity_handler.ActivityHandler(timeline=event_data[0])
activity.process()
except Exception as e:
logger.exception("Tautulli WebSocket :: Failed to process session data: %s." % e)
if event_type == 'timeline':
event_data = event.get('TimelineEntry', event.get('_children', {}))
if not event_data:
logger.debug("Tautulli WebSocket :: Timeline event found but unable to get websocket data.")
return False
try:
activity = activity_handler.TimelineHandler(timeline=event_data[0])
activity.process()
except Exception as e:
logger.exception("Tautulli WebSocket :: Failed to process timeline data: %s." % e)
if event_type == 'reachability':
event_data = event.get('ReachabilityNotification', event.get('_children', {}))
if not event_data:
logger.debug("Tautulli WebSocket :: Reachability event found but unable to get websocket data.")
return False
try:
activity = activity_handler.ReachabilityHandler(data=event_data[0])
activity.process()
except Exception as e:
logger.exception("Tautulli WebSocket :: Failed to process reachability data: %s." % e)
return True
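# Illustrative sketch (hypothetical payload): process() expects the decoded JSON
# to look roughly like
#     {"NotificationContainer": {"type": "playing",
#                                "PlaySessionStateNotification": [{...timeline data...}]}}
# and passes the first entry of the notification list to the matching
# ActivityHandler / TimelineHandler / ReachabilityHandler.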

421
jellypy/webauth.py Normal file
View File

@@ -0,0 +1,421 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
# https://github.com/cherrypy/tools/blob/master/AuthenticationAndAccessRestrictions
# Form based authentication for CherryPy. Requires the
# Session tool to be loaded.
from future.builtins import object
from datetime import datetime, timedelta
from future.moves.urllib.parse import quote, unquote
import cherrypy
from hashing_passwords import check_hash
import jwt
import jellypy
if jellypy.PYTHON2:
import logger
from database import MonitorDatabase
from helpers import timestamp
from users import Users, refresh_users
from plextv import PlexTV
else:
from jellypy import logger
from jellypy.database import MonitorDatabase
from jellypy.helpers import timestamp
from jellypy.users import Users, refresh_users
from jellypy.plextv import PlexTV
# Monkey patch SameSite support into cookies.
# https://stackoverflow.com/a/50813092
try:
from http.cookies import Morsel
except ImportError:
from Cookie import Morsel
Morsel._reserved[str('samesite')] = str('SameSite')
JWT_ALGORITHM = 'HS256'
JWT_COOKIE_NAME = 'tautulli_token_'
def plex_user_login(username=None, password=None, token=None, headers=None):
user_token = None
user_id = None
# Try to log in to Plex.tv to check if the user has a valid account
if username and password:
plex_tv = PlexTV(username=username, password=password, headers=headers)
plex_user = plex_tv.get_token()
if plex_user:
user_token = plex_user['auth_token']
user_id = plex_user['user_id']
elif token:
plex_tv = PlexTV(token=token, headers=headers)
plex_user = plex_tv.get_plex_account_details()
if plex_user:
user_token = token
user_id = plex_user['user_id']
else:
return None
if user_token and user_id:
# Try to retrieve the user from the database.
# Also make sure guest access is enabled for the user and the user is not deleted.
user_data = Users()
user_details = user_data.get_details(user_id=user_id)
if user_id != str(user_details['user_id']):
# The user is not in the database.
return None
elif jellypy.CONFIG.HTTP_PLEX_ADMIN and user_details['is_admin']:
# Plex admin login
return user_details, 'admin'
elif not user_details['allow_guest'] or user_details['deleted_user']:
# Guest access is disabled or the user is deleted.
return None
# Stop here if guest access is not enabled
if not jellypy.CONFIG.ALLOW_GUEST_ACCESS:
return None
# The user is in the database, and guest access is enabled, so try to retrieve a server token.
# If a server token is returned, then the user is a valid friend of the server.
plex_tv = PlexTV(token=user_token, headers=headers)
server_token = plex_tv.get_server_token()
if server_token:
# Register the new user / update the access tokens.
monitor_db = MonitorDatabase()
try:
logger.debug("Tautulli WebAuth :: Registering token for user '%s' in the database."
% user_details['username'])
result = monitor_db.action('UPDATE users SET server_token = ? WHERE user_id = ?',
[server_token, user_details['user_id']])
if result:
# Refresh the users list to make sure we have all the correct permissions.
refresh_users()
# Successful login
return user_details, 'guest'
else:
logger.warn("Tautulli WebAuth :: Unable to register user '%s' in database."
% user_details['username'])
return None
except Exception as e:
logger.warn("Tautulli WebAuth :: Unable to register user '%s' in database: %s."
% (user_details['username'], e))
return None
else:
logger.warn("Tautulli WebAuth :: Unable to retrieve Plex.tv server token for user '%s'."
% user_details['username'])
return None
elif username:
logger.warn("Tautulli WebAuth :: Unable to retrieve Plex.tv user token for user '%s'." % username)
return None
elif token:
logger.warn("Tautulli WebAuth :: Unable to retrieve Plex.tv user token for Plex OAuth.")
return None
def check_credentials(username=None, password=None, token=None, admin_login='0', headers=None):
"""Verifies credentials for username and password.
Returns True and the user group on success or False and no user group"""
if username and password:
if jellypy.CONFIG.HTTP_PASSWORD:
user_details = {'user_id': None, 'username': username}
if jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and check_hash(password, jellypy.CONFIG.HTTP_PASSWORD):
return True, user_details, 'admin'
elif not jellypy.CONFIG.HTTP_HASHED_PASSWORD and \
username == jellypy.CONFIG.HTTP_USERNAME and password == jellypy.CONFIG.HTTP_PASSWORD:
return True, user_details, 'admin'
if jellypy.CONFIG.HTTP_PLEX_ADMIN or (not admin_login == '1' and jellypy.CONFIG.ALLOW_GUEST_ACCESS):
plex_login = plex_user_login(token=token, headers=headers)
if plex_login is not None:
return True, plex_login[0], plex_login[1]
return False, None, None
def check_jwt_token():
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
jwt_token = cherrypy.request.cookie.get(jwt_cookie)
if jwt_token:
try:
payload = jwt.decode(
jwt_token.value, jellypy.CONFIG.JWT_SECRET, leeway=timedelta(seconds=10), algorithms=[JWT_ALGORITHM]
)
except (jwt.DecodeError, jwt.ExpiredSignatureError):
return None
return payload
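# Illustrative sketch (user values are made up): the cookie verified above holds a
# PyJWT token that AuthController.signin() below creates roughly as
#     payload = {'user_id': 1, 'user': 'example', 'user_group': 'admin', 'exp': expiry}
#     jwt_token = jwt.encode(payload, jellypy.CONFIG.JWT_SECRET, algorithm=JWT_ALGORITHM)
# (older PyJWT versions return bytes, hence the .decode('utf-8') used in signin),
# and check_jwt_token() validates it with the same secret and algorithm.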
def check_auth(*args, **kwargs):
"""A tool that looks in config for 'auth.require'. If found and it
is not None, a login is required and the entry is evaluated as a list of
conditions that the user must fulfill"""
conditions = cherrypy.request.config.get('auth.require', None)
if conditions is not None:
payload = check_jwt_token()
if payload:
cherrypy.request.login = payload
for condition in conditions:
# A condition is just a callable that returns true or false
if not condition():
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT)
else:
redirect_uri = cherrypy.request.wsgi_environ['REQUEST_URI']
if redirect_uri:
redirect_uri = '?redirect_uri=' + quote(redirect_uri)
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/logout" + redirect_uri)
def requireAuth(*conditions):
"""A decorator that appends conditions to the auth.require config
variable."""
def decorate(f):
if not hasattr(f, '_cp_config'):
f._cp_config = dict()
if 'auth.require' not in f._cp_config:
f._cp_config['auth.require'] = []
f._cp_config['auth.require'].extend(conditions)
return f
return decorate
# Conditions are callables that return True
# if the user fulfills the conditions they define, False otherwise
#
# They can access the current username as cherrypy.request.login
#
# Define those at will however suits the application.
def member_of(user_group):
return lambda: cherrypy.request.login and cherrypy.request.login['user_group'] == user_group
def name_is(user_name):
return lambda: cherrypy.request.login and cherrypy.request.login['user'] == user_name
# These might be handy
def any_of(*conditions):
"""Returns True if any of the conditions match"""
def check():
for c in conditions:
if c():
return True
return False
return check
# By default all conditions are required, but this might still be
# needed if you want to use it inside of an any_of(...) condition
def all_of(*conditions):
"""Returns True if all of the conditions match"""
def check():
for c in conditions:
if not c():
return False
return True
return check
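# Illustrative usage sketch (hypothetical handler): the conditions above are
# attached to exposed pages with the requireAuth decorator, e.g.
#     @cherrypy.expose
#     @requireAuth(member_of('admin'))
#     def some_admin_page(self, **kwargs):
#         ...
# check_auth() then redirects the request unless every condition returns True.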
def check_rate_limit(ip_address):
monitor_db = MonitorDatabase()
result = monitor_db.select('SELECT timestamp, success FROM user_login '
'WHERE ip_address = ? '
'AND timestamp >= ( '
'SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END '
'FROM user_login WHERE ip_address = ? AND success = 1) '
'ORDER BY timestamp DESC',
[ip_address, ip_address])
try:
last_timestamp = result[0]['timestamp']
except IndexError:
last_timestamp = 0
try:
last_success = max(login['timestamp'] for login in result if login['success'])
except ValueError:
last_success = 0
max_timestamp = max(last_success, last_timestamp - jellypy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL)
attempts = [login for login in result if login['timestamp'] >= max_timestamp and not login['success']]
if len(attempts) >= jellypy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS:
return max(last_timestamp - (timestamp() - jellypy.CONFIG.HTTP_RATE_LIMIT_LOCKOUT_TIME), 0)
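# Illustrative sketch (numbers are made up): with HTTP_RATE_LIMIT_ATTEMPTS = 3 and
# HTTP_RATE_LIMIT_LOCKOUT_TIME = 300, three failed logins whose most recent attempt
# was 60 seconds ago make check_rate_limit() return about 300 - 60 = 240, which
# signin() below sends back to the client as the Retry-After header.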
# Controller to provide login and logout actions
class AuthController(object):
def check_auth_enabled(self):
if not jellypy.CONFIG.HTTP_BASIC_AUTH and jellypy.CONFIG.HTTP_PASSWORD:
return
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT)
def on_login(self, username=None, user_id=None, user_group=None, success=False, oauth=False):
"""Called on successful login"""
# Save login to the database
ip_address = cherrypy.request.remote.ip
host = cherrypy.request.base
user_agent = cherrypy.request.headers.get('User-Agent')
Users().set_user_login(user_id=user_id,
user=username,
user_group=user_group,
ip_address=ip_address,
host=host,
user_agent=user_agent,
success=success)
if success:
use_oauth = 'Plex OAuth' if oauth else 'form'
logger.debug("Tautulli WebAuth :: %s user '%s' logged into Tautulli using %s login."
% (user_group.capitalize(), username, use_oauth))
def on_logout(self, username, user_group):
"""Called on logout"""
logger.debug("Tautulli WebAuth :: %s user '%s' logged out of Tautulli." % (user_group.capitalize(), username))
def get_loginform(self, redirect_uri=''):
from jellypy.webserve import serve_template
return serve_template(templatename="login.html", title="Login", redirect_uri=unquote(redirect_uri))
@cherrypy.expose
def index(self, *args, **kwargs):
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/login")
@cherrypy.expose
def login(self, redirect_uri='', *args, **kwargs):
self.check_auth_enabled()
return self.get_loginform(redirect_uri=redirect_uri)
@cherrypy.expose
def logout(self, redirect_uri='', *args, **kwargs):
self.check_auth_enabled()
payload = check_jwt_token()
if payload:
self.on_logout(payload['user'], payload['user_group'])
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
cherrypy.response.cookie[jwt_cookie] = ''
cherrypy.response.cookie[jwt_cookie]['expires'] = 0
cherrypy.response.cookie[jwt_cookie]['path'] = jellypy.HTTP_ROOT.rstrip('/') or '/'
if jellypy.HTTP_ROOT != '/':
# Also expire the JWT on the root path
cherrypy.response.headers['Set-Cookie'] = jwt_cookie + '=""; expires=Thu, 01 Jan 1970 12:00:00 GMT; path=/'
cherrypy.request.login = None
if redirect_uri:
redirect_uri = '?redirect_uri=' + redirect_uri
raise cherrypy.HTTPRedirect(jellypy.HTTP_ROOT + "auth/login" + redirect_uri)
@cherrypy.expose
@cherrypy.tools.json_out()
def signin(self, username=None, password=None, token=None, remember_me='0', admin_login='0', *args, **kwargs):
if cherrypy.request.method != 'POST':
cherrypy.response.status = 405
return {'status': 'error', 'message': 'Sign in using POST.'}
ip_address = cherrypy.request.remote.ip
rate_limit = check_rate_limit(ip_address)
if rate_limit:
logger.debug("Tautulli WebAuth :: Too many incorrect login attempts from '%s'." % ip_address)
error_message = {'status': 'error', 'message': 'Too many login attempts.'}
cherrypy.response.status = 429
cherrypy.response.headers['Retry-After'] = rate_limit
return error_message
error_message = {'status': 'error', 'message': 'Invalid credentials.'}
valid_login, user_details, user_group = check_credentials(username=username,
password=password,
token=token,
admin_login=admin_login,
headers=kwargs)
if valid_login:
time_delta = timedelta(days=30) if remember_me == '1' else timedelta(minutes=60)
expiry = datetime.utcnow() + time_delta
payload = {
'user_id': user_details['user_id'],
'user': user_details['username'],
'user_group': user_group,
'exp': expiry
}
jwt_token = jwt.encode(payload, jellypy.CONFIG.JWT_SECRET, algorithm=JWT_ALGORITHM).decode('utf-8')
self.on_login(username=user_details['username'],
user_id=user_details['user_id'],
user_group=user_group,
success=True,
oauth=bool(token))
jwt_cookie = str(JWT_COOKIE_NAME + jellypy.CONFIG.PMS_UUID)
cherrypy.response.cookie[jwt_cookie] = jwt_token
cherrypy.response.cookie[jwt_cookie]['expires'] = int(time_delta.total_seconds())
cherrypy.response.cookie[jwt_cookie]['path'] = jellypy.HTTP_ROOT.rstrip('/') or '/'
cherrypy.response.cookie[jwt_cookie]['httponly'] = True
cherrypy.response.cookie[jwt_cookie]['samesite'] = 'lax'
cherrypy.request.login = payload
cherrypy.response.status = 200
return {'status': 'success', 'token': jwt_token, 'uuid': jellypy.CONFIG.PMS_UUID}
elif admin_login == '1' and username:
self.on_login(username=username)
logger.debug("Tautulli WebAuth :: Invalid admin login attempt from '%s'." % username)
cherrypy.response.status = 401
return error_message
elif username:
self.on_login(username=username)
logger.debug("Tautulli WebAuth :: Invalid user login attempt from '%s'." % username)
cherrypy.response.status = 401
return error_message
elif token:
self.on_login(username='Plex OAuth', oauth=True)
logger.debug("Tautulli WebAuth :: Invalid Plex OAuth login attempt.")
cherrypy.response.status = 401
return error_message

6904
jellypy/webserve.py Normal file

File diff suppressed because it is too large Load Diff

293
jellypy/webstart.py Normal file
View File

@@ -0,0 +1,293 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import cherrypy
import jellypy
if jellypy.PYTHON2:
import logger
import webauth
from helpers import create_https_certificates
from webserve import WebInterface, BaseRedirect
else:
from jellypy import logger
from jellypy import webauth
from jellypy.helpers import create_https_certificates
from jellypy.webserve import WebInterface, BaseRedirect


def start():
    logger.info("Tautulli WebStart :: Initializing Tautulli web server...")
    web_config = {
        'http_port': jellypy.HTTP_PORT,
        'http_host': jellypy.CONFIG.HTTP_HOST,
        'http_root': jellypy.CONFIG.HTTP_ROOT,
        'http_environment': jellypy.CONFIG.HTTP_ENVIRONMENT,
        'http_proxy': jellypy.CONFIG.HTTP_PROXY,
        'enable_https': jellypy.CONFIG.ENABLE_HTTPS,
        'https_cert': jellypy.CONFIG.HTTPS_CERT,
        'https_cert_chain': jellypy.CONFIG.HTTPS_CERT_CHAIN,
        'https_key': jellypy.CONFIG.HTTPS_KEY,
        'http_username': jellypy.CONFIG.HTTP_USERNAME,
        'http_password': jellypy.CONFIG.HTTP_PASSWORD,
        'http_basic_auth': jellypy.CONFIG.HTTP_BASIC_AUTH
    }
    initialize(web_config)


def stop():
    logger.info("Tautulli WebStart :: Stopping Tautulli web server...")
    cherrypy.engine.exit()


def restart():
    logger.info("Tautulli WebStart :: Restarting Tautulli web server...")
    stop()
    start()


def initialize(options):
    # HTTPS stuff stolen from sickbeard
    enable_https = options['enable_https']
    https_cert = options['https_cert']
    https_cert_chain = options['https_cert_chain']
    https_key = options['https_key']

    if enable_https:
        # If either the HTTPS certificate or key do not exist, try to make self-signed ones.
        if jellypy.CONFIG.HTTPS_CREATE_CERT and \
                (not (https_cert and os.path.exists(https_cert)) or
                 not (https_key and os.path.exists(https_key))):
            if not create_https_certificates(https_cert, https_key):
                logger.warn("Tautulli WebStart :: Unable to create certificate and key. Disabling HTTPS")
                enable_https = False

        if not (os.path.exists(https_cert) and os.path.exists(https_key)):
            logger.warn("Tautulli WebStart :: Disabled HTTPS because of missing certificate and key.")
            enable_https = False

    options_dict = {
        'server.socket_port': options['http_port'],
        'server.socket_host': options['http_host'],
        'environment': options['http_environment'],
        'server.thread_pool': 10,
        'server.max_request_body_size': 1073741824,
        'server.socket_timeout': 60,
        'tools.encode.on': True,
        'tools.encode.encoding': 'utf-8',
        'tools.decode.on': True
    }

    if jellypy.DEV:
        options_dict['environment'] = "test_suite"
        options_dict['engine.autoreload.on'] = True

    if enable_https:
        options_dict['server.ssl_certificate'] = https_cert
        options_dict['server.ssl_certificate_chain'] = https_cert_chain
        options_dict['server.ssl_private_key'] = https_key
        protocol = "https"
    else:
        protocol = "http"

    if options['http_proxy']:
        # Overwrite cherrypy.tools.proxy with our own proxy handler
        cherrypy.tools.proxy = cherrypy.Tool('before_handler', proxy, priority=1)

    if options['http_password']:
        login_allowed = ["Tautulli admin (username is '%s')" % options['http_username']]
        if jellypy.CONFIG.HTTP_PLEX_ADMIN:
            login_allowed.append("Plex admin")

        logger.info("Tautulli WebStart :: Web server authentication is enabled: %s.", ' and '.join(login_allowed))

        if options['http_basic_auth']:
            jellypy.AUTH_ENABLED = False
            basic_auth_enabled = True
        else:
            jellypy.AUTH_ENABLED = True
            basic_auth_enabled = False
            cherrypy.tools.auth = cherrypy.Tool('before_handler', webauth.check_auth, priority=2)
    else:
        jellypy.AUTH_ENABLED = False
        basic_auth_enabled = False
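
    # e.g. an http_root of 'tautulli', '/tautulli' or 'tautulli/' all
    # normalize to '/tautulli/' below; an empty or '/'-only root becomes '/'.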
    if options['http_root'].strip('/'):
        jellypy.HTTP_ROOT = options['http_root'] = '/' + str(options['http_root'].strip('/')) + '/'
    else:
        jellypy.HTTP_ROOT = options['http_root'] = '/'

    cherrypy.config.update(options_dict)

    conf = {
        '/': {
            'engine.timeout_monitor.on': False,
            'tools.staticdir.root': os.path.join(jellypy.PROG_DIR, 'data'),
            'tools.proxy.on': bool(options['http_proxy']),
            'tools.gzip.on': True,
            'tools.gzip.mime_types': ['text/html', 'text/plain', 'text/css',
                                      'text/javascript', 'application/json',
                                      'application/javascript'],
            'tools.auth.on': jellypy.AUTH_ENABLED,
            'tools.auth_basic.on': basic_auth_enabled,
            'tools.auth_basic.realm': 'Tautulli web server',
            'tools.auth_basic.checkpassword': cherrypy.lib.auth_basic.checkpassword_dict({
                options['http_username']: options['http_password']})
        },
        '/api': {
            'tools.auth_basic.on': False
        },
        '/status': {
            'tools.auth_basic.on': False
        },
        '/interfaces': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': "interfaces",
            'tools.caching.on': True,
            'tools.caching.force': True,
            'tools.caching.delay': 0,
            'tools.expires.on': True,
            'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
            'tools.sessions.on': False,
            'tools.auth.on': False
        },
        '/images': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': "interfaces/default/images",
            'tools.staticdir.content_types': {'svg': 'image/svg+xml'},
            'tools.caching.on': True,
            'tools.caching.force': True,
            'tools.caching.delay': 0,
            'tools.expires.on': True,
            'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
            'tools.sessions.on': False,
            'tools.auth.on': False
        },
        '/css': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': "interfaces/default/css",
            'tools.caching.on': True,
            'tools.caching.force': True,
            'tools.caching.delay': 0,
            'tools.expires.on': True,
            'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
            'tools.sessions.on': False,
            'tools.auth.on': False
        },
        '/fonts': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': "interfaces/default/fonts",
            'tools.caching.on': True,
            'tools.caching.force': True,
            'tools.caching.delay': 0,
            'tools.expires.on': True,
            'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
            'tools.sessions.on': False,
            'tools.auth.on': False
        },
        '/js': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': "interfaces/default/js",
            'tools.caching.on': True,
            'tools.caching.force': True,
            'tools.caching.delay': 0,
            'tools.expires.on': True,
            'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
            'tools.sessions.on': False,
            'tools.auth.on': False
        },
        '/cache': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': jellypy.CONFIG.CACHE_DIR,
            'tools.caching.on': True,
            'tools.caching.force': True,
            'tools.caching.delay': 0,
            'tools.expires.on': True,
            'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
            'tools.sessions.on': False,
            'tools.auth.on': False
        },
        #'/pms_image_proxy': {
        #    'tools.staticdir.on': True,
        #    'tools.staticdir.dir': os.path.join(jellypy.CONFIG.CACHE_DIR, 'images'),
        #    'tools.caching.on': True,
        #    'tools.caching.force': True,
        #    'tools.caching.delay': 0,
        #    'tools.expires.on': True,
        #    'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
        #    'tools.auth.on': False,
        #    'tools.sessions.on': False
        #},
        '/favicon.ico': {
            'tools.staticfile.on': True,
            'tools.staticfile.filename': os.path.abspath(os.path.join(jellypy.PROG_DIR, 'data/interfaces/default/images/favicon/favicon.ico')),
            'tools.caching.on': True,
            'tools.caching.force': True,
            'tools.caching.delay': 0,
            'tools.expires.on': True,
            'tools.expires.secs': 60 * 60 * 24 * 30, # 30 days
            'tools.sessions.on': False,
            'tools.auth.on': False
        }
    }

    cherrypy.tree.mount(WebInterface(), options['http_root'], config=conf)
    if jellypy.HTTP_ROOT != '/':
        cherrypy.tree.mount(BaseRedirect(), '/')

    try:
        logger.info("Tautulli WebStart :: Starting Tautulli web server on %s://%s:%d%s", protocol,
                    options['http_host'], options['http_port'], options['http_root'])
        #cherrypy.process.servers.check_port(str(options['http_host']), options['http_port'])
        if not jellypy.DEV:
            cherrypy.server.start()
        else:
            cherrypy.engine.signals.subscribe()
            cherrypy.engine.start()
            cherrypy.engine.block()
    except IOError as e:
        logger.error("Tautulli WebStart :: Failed to start Tautulli: %s", e)
        sys.exit(1)

    cherrypy.server.wait()


def proxy():
    # logger.debug("REQUEST URI: %s, HEADER [X-Forwarded-Host]: %s, [X-Host]: %s, [Origin]: %s, [Host]: %s",
    #              cherrypy.request.wsgi_environ['REQUEST_URI'],
    #              cherrypy.request.headers.get('X-Forwarded-Host'),
    #              cherrypy.request.headers.get('X-Host'),
    #              cherrypy.request.headers.get('Origin'),
    #              cherrypy.request.headers.get('Host'))

    # Change cherrypy.tools.proxy.local header if X-Forwarded-Host header is not present
    local = 'X-Forwarded-Host'
    if not cherrypy.request.headers.get('X-Forwarded-Host'):
        if cherrypy.request.headers.get('X-Host'):  # lighttpd
            local = 'X-Host'
        elif cherrypy.request.headers.get('Origin'):  # Squid
            local = 'Origin'
        elif cherrypy.request.headers.get('Host'):  # nginx
            local = 'Host'
    # logger.debug("cherrypy.tools.proxy.local set to [%s]", local)

    # Call original cherrypy proxy tool with the new local
    cherrypy.lib.cptools.proxy(local=local)
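
# --- Illustrative sketch (not part of the original webstart module) ---------
# The same header-precedence rule as proxy() above, pulled out into a pure
# function so the fallback order is easy to see: X-Forwarded-Host first, then
# X-Host (lighttpd), Origin (Squid) and finally Host (nginx). The function
# name and the plain dict-style input are placeholders for this example only.
def _pick_proxy_header_sketch(headers):
    for header in ('X-Forwarded-Host', 'X-Host', 'Origin', 'Host'):
        if headers.get(header):
            return header
    # proxy() also falls back to X-Forwarded-Host when nothing is present
    return 'X-Forwarded-Host'

# e.g. _pick_proxy_header_sketch({'X-Host': 'tautulli.example.com'}) -> 'X-Host'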

205
jellypy/windows.py Normal file
View File

@@ -0,0 +1,205 @@
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
# Tautulli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tautulli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from systray import SysTrayIcon

try:
    from shlex import quote as cmd_quote
except ImportError:
    from pipes import quote as cmd_quote

try:
    import winreg
except ImportError:
    import _winreg as winreg

import jellypy
if jellypy.PYTHON2:
    import common
    import logger
    import versioncheck
else:
    from jellypy import common
    from jellypy import logger
    from jellypy import versioncheck


class WindowsSystemTray(object):
    def __init__(self):
        self.image_dir = os.path.join(jellypy.PROG_DIR, 'data/interfaces/', jellypy.CONFIG.INTERFACE, 'images')
        self.icon = os.path.join(self.image_dir, 'logo-circle.ico')

        if jellypy.UPDATE_AVAILABLE:
            self.hover_text = common.PRODUCT + ' - Update Available!'
            self.update_title = 'Check for Updates - Update Available!'
        else:
            self.hover_text = common.PRODUCT
            self.update_title = 'Check for Updates'

        if jellypy.CONFIG.LAUNCH_STARTUP:
            launch_start_icon = os.path.join(self.image_dir, 'check-solid.ico')
        else:
            launch_start_icon = None
        if jellypy.CONFIG.LAUNCH_BROWSER:
            launch_browser_icon = os.path.join(self.image_dir, 'check-solid.ico')
        else:
            launch_browser_icon = None
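
        # The menu entries below appear to follow the systray helper's
        # [title, icon_path, action, extra] layout: an action of 'separator'
        # draws a divider, 'default' in the last slot marks the double-click
        # action, and later methods rewrite the entry at index 5 in place,
        # so the ordering of the list matters.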
        self.menu = [
            ['Open Tautulli', None, self.tray_open, 'default'],
            ['', None, 'separator', None],
            ['Start Tautulli at Login', launch_start_icon, self.tray_startup, None],
            ['Open Browser when Tautulli Starts', launch_browser_icon, self.tray_browser, None],
            ['', None, 'separator', None],
            [self.update_title, None, self.tray_check_update, None],
            ['Restart', None, self.tray_restart, None]
        ]
        if not jellypy.FROZEN:
            self.menu.insert(6, ['Update', None, self.tray_update, None])

        self.tray_icon = SysTrayIcon(self.icon, self.hover_text, self.menu, on_quit=self.tray_quit)

    def start(self):
        logger.info("Launching Windows system tray icon.")
        try:
            self.tray_icon.start()
        except Exception as e:
            logger.error("Unable to launch system tray icon: %s." % e)

    def shutdown(self):
        self.tray_icon.shutdown()

    def update(self, **kwargs):
        self.tray_icon.update(**kwargs)

    def tray_open(self, tray_icon):
        jellypy.launch_browser(jellypy.CONFIG.HTTP_HOST, jellypy.HTTP_PORT, jellypy.HTTP_ROOT)

    def tray_startup(self, tray_icon):
        jellypy.CONFIG.LAUNCH_STARTUP = not jellypy.CONFIG.LAUNCH_STARTUP
        set_startup()

    def tray_browser(self, tray_icon):
        jellypy.CONFIG.LAUNCH_BROWSER = not jellypy.CONFIG.LAUNCH_BROWSER
        set_startup()

    def tray_check_update(self, tray_icon):
        versioncheck.check_update()

    def tray_update(self, tray_icon):
        if jellypy.UPDATE_AVAILABLE:
            jellypy.SIGNAL = 'update'
        else:
            self.hover_text = common.PRODUCT + ' - No Update Available'
            self.update_title = 'Check for Updates - No Update Available'
            self.menu[5][0] = self.update_title
            self.update(hover_text=self.hover_text, menu_options=self.menu)

    def tray_restart(self, tray_icon):
        jellypy.SIGNAL = 'restart'

    def tray_quit(self, tray_icon):
        jellypy.SIGNAL = 'shutdown'

    def change_tray_update_icon(self):
        if jellypy.UPDATE_AVAILABLE:
            self.hover_text = common.PRODUCT + ' - Update Available!'
            self.update_title = 'Check for Updates - Update Available!'
        else:
            self.hover_text = common.PRODUCT + ' - No Update Available'
            self.update_title = 'Check for Updates'
        self.menu[5][0] = self.update_title
        self.update(hover_text=self.hover_text, menu_options=self.menu)

    def change_tray_icons(self):
        if jellypy.CONFIG.LAUNCH_STARTUP:
            launch_start_icon = os.path.join(self.image_dir, 'check-solid.ico')
        else:
            launch_start_icon = None
        if jellypy.CONFIG.LAUNCH_BROWSER:
            launch_browser_icon = os.path.join(self.image_dir, 'check-solid.ico')
        else:
            launch_browser_icon = None
        self.menu[2][1] = launch_start_icon
        self.menu[3][1] = launch_browser_icon
        self.update(menu_options=self.menu)


def set_startup():
    if jellypy.WIN_SYS_TRAY_ICON:
        jellypy.WIN_SYS_TRAY_ICON.change_tray_icons()

    startup_reg_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Run"

    exe = sys.executable
    run_args = [arg for arg in jellypy.ARGS if arg != '--nolaunch']
    if jellypy.FROZEN:
        args = [exe] + run_args
    else:
        args = [exe, jellypy.FULL_PATH] + run_args

    registry_key_name = '{}_{}'.format(common.PRODUCT, jellypy.CONFIG.PMS_UUID)

    cmd = ' '.join(cmd_quote(arg) for arg in args).replace('python.exe', 'pythonw.exe').replace("'", '"')
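    # Purely illustrative (the paths and argument are made up): for a
    # non-frozen install the resulting value looks roughly like
    #   "C:\Python38\pythonw.exe" "C:\Tautulli\Tautulli.py" --port 8181
    # i.e. shell-quoted arguments with python.exe swapped for the windowless
    # pythonw.exe so no console window appears at login.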

    if jellypy.CONFIG.LAUNCH_STARTUP:
        # Rename old Tautulli registry key
        try:
            registry_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, startup_reg_path, 0, winreg.KEY_ALL_ACCESS)
            winreg.QueryValueEx(registry_key, common.PRODUCT)
            reg_value_exists = True
        except WindowsError:
            reg_value_exists = False

        if reg_value_exists:
            try:
                winreg.DeleteValue(registry_key, common.PRODUCT)
                winreg.CloseKey(registry_key)
            except WindowsError:
                pass

        try:
            winreg.CreateKey(winreg.HKEY_CURRENT_USER, startup_reg_path)
            registry_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, startup_reg_path, 0, winreg.KEY_WRITE)
            winreg.SetValueEx(registry_key, registry_key_name, 0, winreg.REG_SZ, cmd)
            winreg.CloseKey(registry_key)
            logger.info("Added Tautulli to Windows system startup registry key.")
            return True
        except WindowsError as e:
            logger.error("Failed to create Windows system startup registry key: %s", e)
            return False

    else:
        # Check if registry value exists
        try:
            registry_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, startup_reg_path, 0, winreg.KEY_ALL_ACCESS)
            winreg.QueryValueEx(registry_key, registry_key_name)
            reg_value_exists = True
        except WindowsError:
            reg_value_exists = False

        if reg_value_exists:
            try:
                winreg.DeleteValue(registry_key, registry_key_name)
                winreg.CloseKey(registry_key)
                logger.info("Removed Tautulli from Windows system startup registry key.")
                return True
            except WindowsError as e:
                logger.error("Failed to delete Windows system startup registry key: %s", e)
                return False
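
# --- Illustrative sketch (not part of the original windows module) ----------
# A quick, Windows-only way to confirm what set_startup() wrote, assuming the
# same winreg module imported above and the standard HKCU Run key; the
# function name is a placeholder for this example only.
def _read_startup_entry_sketch(value_name):
    run_key = "Software\\Microsoft\\Windows\\CurrentVersion\\Run"
    try:
        with winreg.OpenKey(winreg.HKEY_CURRENT_USER, run_key) as key:
            value, _type = winreg.QueryValueEx(key, value_name)
            return value  # the quoted pythonw.exe command line, if present
    except OSError:
        return None  # value not found -> startup entry is not set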