Enable video logging by default.
Move database functions to their own file. Incorporate datatables changes into the original file and remove the test file. Remove old plexWatch processing and db files and references.
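The new database module itself is not shown in this diff. As orientation only, here is a minimal sketch of what it presumably exposes, reconstructed from the functions and class removed below; the file path plexpy/database.py, the trimmed-down contents, and anything else not visible in this diff are assumptions:

# Sketch only -- assumed plexpy/database.py, inferred from the code removed in this commit.
import os
import sqlite3

import plexpy


def db_filename(filename="plexpy.db"):
    # Resolve the SQLite file inside the PlexPy data directory.
    return os.path.join(plexpy.DATA_DIR, filename)


class MonitorDatabase(object):

    def __init__(self, filename='plexpy.db'):
        self.filename = filename
        self.connection = sqlite3.connect(db_filename(filename), timeout=20)
        self.connection.row_factory = sqlite3.Row

    def action(self, query, args=None):
        # Run one statement; the with-block commits (or rolls back) the implicit transaction.
        with self.connection as c:
            if args is None:
                return c.execute(query)
            return c.execute(query, args)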
@@ -13,10 +13,8 @@
 # You should have received a copy of the GNU General Public License
 # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
 
-from plexpy import logger, helpers, pmsconnect, notification_handler, config, log_reader, common
+from plexpy import logger, pmsconnect, notification_handler, log_reader, common, database
 
-import os
-import sqlite3
 import threading
 import plexpy
 import re
@@ -29,7 +27,7 @@ def check_active_sessions():
     with monitor_lock:
         pms_connect = pmsconnect.PmsConnect()
         session_list = pms_connect.get_current_activity()
-        monitor_db = MonitorDatabase()
+        monitor_db = database.MonitorDatabase()
         monitor_process = MonitorProcessing()
         # logger.debug(u"PlexPy Monitor :: Checking for active streams.")
 
@@ -126,119 +124,11 @@ def check_active_sessions():
         else:
             logger.debug(u"PlexPy Monitor :: Unable to read session list.")
 
-def drop_session_db():
-    monitor_db = MonitorDatabase()
-    monitor_db.action('DROP TABLE sessions')
-
-def clear_history_tables():
-    logger.debug(u"PlexPy Monitor :: Deleting all session_history records... No turning back now bub.")
-    monitor_db = MonitorDatabase()
-    monitor_db.action('DELETE FROM session_history')
-    monitor_db.action('DELETE FROM session_history_media_info')
-    monitor_db.action('DELETE FROM session_history_metadata')
-    monitor_db.action('VACUUM;')
-
-def db_filename(filename="plexpy.db"):
-
-    return os.path.join(plexpy.DATA_DIR, filename)
-
-def get_cache_size():
-    # This will protect against typecasting problems produced by empty string and None settings
-    if not plexpy.CONFIG.CACHE_SIZEMB:
-        # sqlite will work with this (very slowly)
-        return 0
-    return int(plexpy.CONFIG.CACHE_SIZEMB)
-
-
-class MonitorDatabase(object):
-
-    def __init__(self, filename='plexpy.db'):
-        self.filename = filename
-        self.connection = sqlite3.connect(db_filename(filename), timeout=20)
-        # Don't wait for the disk to finish writing
-        self.connection.execute("PRAGMA synchronous = OFF")
-        # Journal disabled since we never do rollbacks
-        self.connection.execute("PRAGMA journal_mode = %s" % plexpy.CONFIG.JOURNAL_MODE)
-        # 64mb of cache memory, probably need to make it user configurable
-        self.connection.execute("PRAGMA cache_size=-%s" % (get_cache_size() * 1024))
-        self.connection.row_factory = sqlite3.Row
-
-    def action(self, query, args=None, return_last_id=False):
-
-        if query is None:
-            return
-
-        sql_result = None
-
-        try:
-            with self.connection as c:
-                if args is None:
-                    sql_result = c.execute(query)
-                else:
-                    sql_result = c.execute(query, args)
-
-        except sqlite3.OperationalError, e:
-            if "unable to open database file" in e.message or "database is locked" in e.message:
-                logger.warn('Database Error: %s', e)
-            else:
-                logger.error('Database error: %s', e)
-                raise
-
-        except sqlite3.DatabaseError, e:
-            logger.error('Fatal Error executing %s :: %s', query, e)
-            raise
-
-        return sql_result
-
-    def select(self, query, args=None):
-
-        sql_results = self.action(query, args).fetchall()
-
-        if sql_results is None or sql_results == [None]:
-            return []
-
-        return sql_results
-
-    def select_single(self, query, args=None):
-
-        sql_results = self.action(query, args).fetchone()[0]
-
-        if sql_results is None or sql_results == "":
-            return ""
-
-        return sql_results
-
-    def upsert(self, table_name, value_dict, key_dict):
-
-        trans_type = 'update'
-        changes_before = self.connection.total_changes
-
-        gen_params = lambda my_dict: [x + " = ?" for x in my_dict.keys()]
-
-        update_query = "UPDATE " + table_name + " SET " + ", ".join(gen_params(value_dict)) + \
-                       " WHERE " + " AND ".join(gen_params(key_dict))
-
-        self.action(update_query, value_dict.values() + key_dict.values())
-
-        if self.connection.total_changes == changes_before:
-            trans_type = 'insert'
-            insert_query = (
-                "INSERT INTO " + table_name + " (" + ", ".join(value_dict.keys() + key_dict.keys()) + ")" +
-                " VALUES (" + ", ".join(["?"] * len(value_dict.keys() + key_dict.keys())) + ")"
-            )
-            try:
-                self.action(insert_query, value_dict.values() + key_dict.values())
-            except sqlite3.IntegrityError:
-                logger.info('Queries failed: %s and %s', update_query, insert_query)
-
-        # We want to know if it was an update or insert
-        return trans_type
-
 
 class MonitorProcessing(object):
 
     def __init__(self):
-        self.db = MonitorDatabase()
+        self.db = database.MonitorDatabase()
 
     def write_session(self, session=None):
 
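The upsert helper removed above (and presumably carried over into the new database module) relies on SQLite's total_changes counter: it issues an UPDATE first and only falls back to an INSERT when no row was touched. A standalone illustration of that pattern against an in-memory database; the table and column names here are made up for the example and are not from the PlexPy schema:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE sessions (session_key INTEGER PRIMARY KEY, state TEXT)")


def upsert(conn, key, state):
    # Try the UPDATE first; total_changes tells us whether any row was affected.
    before = conn.total_changes
    with conn:
        conn.execute("UPDATE sessions SET state = ? WHERE session_key = ?", (state, key))
    if conn.total_changes == before:
        # Nothing was updated, so the row does not exist yet -- INSERT it instead.
        with conn:
            conn.execute("INSERT INTO sessions (session_key, state) VALUES (?, ?)", (key, state))
        return 'insert'
    return 'update'


print(upsert(conn, 1, 'playing'))   # insert
print(upsert(conn, 1, 'paused'))    # update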