diff --git a/config.py b/config.py index a801cfc..953ef1b 100644 --- a/config.py +++ b/config.py @@ -23,97 +23,103 @@ class Config(object): __metaclass__ = Singleton base_config = { - 'PLEX_USER': 'plex', - 'PLEX_SCANNER': '/usr/lib/plexmediaserver/Plex\\ Media\\ Scanner', - 'PLEX_SUPPORT_DIR': '/var/lib/plexmediaserver/Library/Application\ Support', - 'PLEX_LD_LIBRARY_PATH': '/usr/lib/plexmediaserver/lib', - 'PLEX_DATABASE_PATH': '/var/lib/plexmediaserver/Library/Application Support/Plex Media Server' - '/Plug-in Support/Databases/com.plexapp.plugins.library.db', - 'PLEX_LOCAL_URL': 'http://localhost:32400', - 'PLEX_EMPTY_TRASH': False, - 'PLEX_EMPTY_TRASH_MAX_FILES': 100, - 'PLEX_EMPTY_TRASH_CONTROL_FILES': [], - 'PLEX_EMPTY_TRASH_ZERO_DELETED': False, - 'PLEX_WAIT_FOR_EXTERNAL_SCANNERS': True, - 'PLEX_ANALYZE_TYPE': 'basic', - 'PLEX_ANALYZE_DIRECTORY': True, - 'PLEX_FIX_MISMATCHED': False, - 'PLEX_FIX_MISMATCHED_LANG': 'en', - 'PLEX_TOKEN': '', - 'PLEX_CHECK_BEFORE_SCAN': True, - 'SERVER_IP': '0.0.0.0', - 'SERVER_PORT': 3467, - 'SERVER_PASS': uuid.uuid4().hex, - 'SERVER_PATH_MAPPINGS': {}, - 'SERVER_SCAN_DELAY': 180, - 'SERVER_MAX_FILE_CHECKS': 10, - 'SERVER_FILE_CHECK_DELAY': 60, - 'SERVER_FILE_EXIST_PATH_MAPPINGS': {}, - 'SERVER_ALLOW_MANUAL_SCAN': False, - 'SERVER_IGNORE_LIST': [], - 'SERVER_USE_SQLITE': False, - 'SERVER_SCAN_PRIORITIES': {}, - 'SERVER_SCAN_FOLDER_ON_FILE_EXISTS_EXHAUSTION': False, - 'RCLONE': { - 'RC_CACHE_REFRESH': { - 'ENABLED': False, - 'FILE_EXISTS_TO_REMOTE_MAPPINGS': {}, - 'RC_URL': 'http://localhost:5572', + "PLEX_USER": "plex", + "PLEX_SCANNER": "/usr/lib/plexmediaserver/Plex\\ Media\\ Scanner", + "PLEX_SUPPORT_DIR": "/var/lib/plexmediaserver/Library/Application\ Support", + "PLEX_LD_LIBRARY_PATH": "/usr/lib/plexmediaserver/lib", + "PLEX_DATABASE_PATH": "/var/lib/plexmediaserver/Library/Application Support/Plex Media Server" + "/Plug-in Support/Databases/com.plexapp.plugins.library.db", + "PLEX_LOCAL_URL": "http://localhost:32400", + "PLEX_EMPTY_TRASH": False, + "PLEX_EMPTY_TRASH_MAX_FILES": 100, + "PLEX_EMPTY_TRASH_CONTROL_FILES": [], + "PLEX_EMPTY_TRASH_ZERO_DELETED": False, + "PLEX_WAIT_FOR_EXTERNAL_SCANNERS": True, + "PLEX_ANALYZE_TYPE": "basic", + "PLEX_ANALYZE_DIRECTORY": True, + "PLEX_FIX_MISMATCHED": False, + "PLEX_FIX_MISMATCHED_LANG": "en", + "PLEX_TOKEN": "", + "PLEX_CHECK_BEFORE_SCAN": True, + "SERVER_IP": "0.0.0.0", + "SERVER_PORT": 3467, + "SERVER_PASS": uuid.uuid4().hex, + "SERVER_PATH_MAPPINGS": {}, + "SERVER_SCAN_DELAY": 180, + "SERVER_MAX_FILE_CHECKS": 10, + "SERVER_FILE_CHECK_DELAY": 60, + "SERVER_FILE_EXIST_PATH_MAPPINGS": {}, + "SERVER_ALLOW_MANUAL_SCAN": False, + "SERVER_IGNORE_LIST": [], + "SERVER_USE_SQLITE": False, + "SERVER_SCAN_PRIORITIES": {}, + "SERVER_SCAN_FOLDER_ON_FILE_EXISTS_EXHAUSTION": False, + "RCLONE": { + "RC_CACHE_REFRESH": { + "ENABLED": False, + "FILE_EXISTS_TO_REMOTE_MAPPINGS": {}, + "RC_URL": "http://localhost:5572", }, - 'BINARY': '/usr/bin/rclone', - 'CRYPT_MAPPINGS': {}, - 'CONFIG': '', + "BINARY": "/usr/bin/rclone", + "CRYPT_MAPPINGS": {}, + "CONFIG": "", }, - 'DOCKER_NAME': 'plex', - 'RUN_COMMAND_BEFORE_SCAN': '', - 'RUN_COMMAND_AFTER_SCAN': '', - 'USE_DOCKER': False, - 'USE_SUDO': True, - 'ENABLE_JOE': False, - 'ENABLE_PLEX': False, - "JELLYFIN_EMBY": 'jellyfin', - "JOE_API_KEY": '', - "JOE_HOST": 'http://localhost:8096', - 'GOOGLE': { - 'ENABLED': False, - 'CLIENT_ID': '', - 'CLIENT_SECRET': '', - 'ALLOWED': { - 'FILE_PATHS': [], - 'FILE_EXTENSIONS': False, - 'FILE_EXTENSIONS_LIST': [], - 'MIME_TYPES': False, 
- 'MIME_TYPES_LIST': [], + "DOCKER_NAME": "plex", + "RUN_COMMAND_BEFORE_SCAN": "", + "RUN_COMMAND_AFTER_SCAN": "", + "USE_DOCKER": False, + "USE_SUDO": True, + "ENABLE_JOE": False, + "ENABLE_PLEX": False, + "JELLYFIN_EMBY": "jellyfin", + "JOE_API_KEY": "", + "JOE_HOST": "http://localhost:8096", + "GOOGLE": { + "ENABLED": False, + "CLIENT_ID": "", + "CLIENT_SECRET": "", + "ALLOWED": { + "FILE_PATHS": [], + "FILE_EXTENSIONS": False, + "FILE_EXTENSIONS_LIST": [], + "MIME_TYPES": False, + "MIME_TYPES_LIST": [], }, - 'POLL_INTERVAL': 120, - 'DISABLE_DISK_FILE_SIZE_CHECK': False, - 'TEAMDRIVE': False, - 'TEAMDRIVES': [], - 'SHOW_CACHE_LOGS': True, + "POLL_INTERVAL": 120, + "DISABLE_DISK_FILE_SIZE_CHECK": False, + "TEAMDRIVE": False, + "TEAMDRIVES": [], + "SHOW_CACHE_LOGS": True, }, } base_settings = { - 'config': { - 'argv': '--config', - 'env': 'AUTOSCAN_CONFIG', - 'default': os.path.join(os.path.dirname(sys.argv[0]), 'config', 'config.json'), + "config": { + "argv": "--config", + "env": "AUTOSCAN_CONFIG", + "default": os.path.join( + os.path.dirname(sys.argv[0]), "config", "config.json" + ), + }, + "logfile": { + "argv": "--logfile", + "env": "AUTOSCAN_LOGFILE", + "default": os.path.join(os.path.dirname(sys.argv[0]), "autoscan.log"), }, - 'logfile': { - 'argv': '--logfile', - 'env': 'AUTOSCAN_LOGFILE', - 'default': os.path.join(os.path.dirname(sys.argv[0]), 'autoscan.log'), + "loglevel": { + "argv": "--loglevel", + "env": "AUTOSCAN_LOGLEVEL", + "default": "INFO", }, - 'loglevel': {'argv': '--loglevel', 'env': 'AUTOSCAN_LOGLEVEL', 'default': 'INFO'}, - 'queuefile': { - 'argv': '--queuefile', - 'env': 'AUTOSCAN_QUEUEFILE', - 'default': os.path.join(os.path.dirname(sys.argv[0]), 'queue.db'), + "queuefile": { + "argv": "--queuefile", + "env": "AUTOSCAN_QUEUEFILE", + "default": os.path.join(os.path.dirname(sys.argv[0]), "queue.db"), }, - 'cachefile': { - 'argv': '--cachefile', - 'env': 'AUTOSCAN_CACHEFILE', - 'default': os.path.join(os.path.dirname(sys.argv[0]), 'cache.db'), + "cachefile": { + "argv": "--cachefile", + "env": "AUTOSCAN_CACHEFILE", + "default": os.path.join(os.path.dirname(sys.argv[0]), "cache.db"), }, } @@ -129,108 +135,122 @@ def __init__(self): def default_config(self): cfg = copy(self.base_config) - if os.name == 'nt': - cfg['PLEX_SCANNER'] = '%PROGRAMFILES(X86)%\\Plex\\Plex Media Server\\Plex Media Scanner.exe' + if os.name == "nt": cfg[ - 'PLEX_DATABASE_PATH' - ] = '%LOCALAPPDATA%\\Plex Media Server\\Plug-in Support\\Databases\\com.plexapp.plugins.library.db' - cfg['RCLONE']['BINARY'] = '%ChocolateyInstall%\\bin\\rclone.exe' - cfg['RCLONE']['CONFIG'] = '%HOMEDRIVE%%HOMEPATH%\\.config\\rclone\\rclone.conf' + "PLEX_SCANNER" + ] = "%PROGRAMFILES(X86)%\\Plex\\Plex Media Server\\Plex Media Scanner.exe" + cfg[ + "PLEX_DATABASE_PATH" + ] = "%LOCALAPPDATA%\\Plex Media Server\\Plug-in Support\\Databases\\com.plexapp.plugins.library.db" + cfg["RCLONE"]["BINARY"] = "%ChocolateyInstall%\\bin\\rclone.exe" + cfg["RCLONE"][ + "CONFIG" + ] = "%HOMEDRIVE%%HOMEPATH%\\.config\\rclone\\rclone.conf" # add example scan priorities - cfg['SERVER_SCAN_PRIORITIES'] = {"0": ['/Movies/'], "1": ['/TV/'], "2": ['/Music/']} + cfg["SERVER_SCAN_PRIORITIES"] = { + "0": ["/Movies/"], + "1": ["/TV/"], + "2": ["/Music/"], + } # add example file trash control files - if os.name == 'nt': - cfg['PLEX_EMPTY_TRASH_CONTROL_FILES'] = ["G:\\mounted.bin"] + if os.name == "nt": + cfg["PLEX_EMPTY_TRASH_CONTROL_FILES"] = ["G:\\mounted.bin"] else: - cfg['PLEX_EMPTY_TRASH_CONTROL_FILES'] = ['/mnt/unionfs/mounted.bin'] + 
cfg["PLEX_EMPTY_TRASH_CONTROL_FILES"] = ["/mnt/unionfs/mounted.bin"] # add example server path mappings - if os.name == 'nt': - cfg['SERVER_PATH_MAPPINGS'] = {'G:\\media': ["/data/media", "DRIVENAME\\media"]} + if os.name == "nt": + cfg["SERVER_PATH_MAPPINGS"] = { + "G:\\media": ["/data/media", "DRIVENAME\\media"] + } else: - cfg['SERVER_PATH_MAPPINGS'] = {'/mnt/unionfs/': ['/home/user/media/fused/']} + cfg["SERVER_PATH_MAPPINGS"] = {"/mnt/unionfs/": ["/home/user/media/fused/"]} # add example file exist path mappings - if os.name == 'nt': - cfg['SERVER_FILE_EXIST_PATH_MAPPINGS'] = {"G:\\": ["/data/"]} + if os.name == "nt": + cfg["SERVER_FILE_EXIST_PATH_MAPPINGS"] = {"G:\\": ["/data/"]} else: - cfg['SERVER_FILE_EXIST_PATH_MAPPINGS'] = {'/home/user/rclone/': ['/data/']} + cfg["SERVER_FILE_EXIST_PATH_MAPPINGS"] = {"/home/user/rclone/": ["/data/"]} # add example server ignore list - cfg['SERVER_IGNORE_LIST'] = ['/.grab/', '.DS_Store', 'Thumbs.db'] + cfg["SERVER_IGNORE_LIST"] = ["/.grab/", ".DS_Store", "Thumbs.db"] # add example allowed scan paths to google - if os.name == 'nt': - cfg['GOOGLE']['ALLOWED']['FILE_PATHS'] = [ + if os.name == "nt": + cfg["GOOGLE"]["ALLOWED"]["FILE_PATHS"] = [ "My Drive\\Media\\Movies\\", "My Drive\\Media\\TV\\", "My Drive\\Media\\4K\\", ] else: - cfg['GOOGLE']['ALLOWED']['FILE_PATHS'] = [ + cfg["GOOGLE"]["ALLOWED"]["FILE_PATHS"] = [ "My Drive/Media/Movies/", "My Drive/Media/TV/", "My Drive/Media/4K/", ] # add example scan extensions to google - cfg['GOOGLE']['ALLOWED']['FILE_EXTENSIONS'] = True - cfg['GOOGLE']['ALLOWED']['FILE_EXTENSIONS_LIST'] = [ - 'webm', - 'mkv', - 'flv', - 'vob', - 'ogv', - 'ogg', - 'drc', - 'gif', - 'gifv', - 'mng', - 'avi', - 'mov', - 'qt', - 'wmv', - 'yuv', - 'rm', - 'rmvb', - 'asf', - 'amv', - 'mp4', - 'm4p', - 'm4v', - 'mpg', - 'mp2', - 'mpeg', - 'mpe', - 'mpv', - 'm2v', - 'm4v', - 'svi', - '3gp', - '3g2', - 'mxf', - 'roq', - 'nsv', - 'f4v', - 'f4p', - 'f4a', - 'f4b', - 'mp3', - 'flac', - 'ts', + cfg["GOOGLE"]["ALLOWED"]["FILE_EXTENSIONS"] = True + cfg["GOOGLE"]["ALLOWED"]["FILE_EXTENSIONS_LIST"] = [ + "webm", + "mkv", + "flv", + "vob", + "ogv", + "ogg", + "drc", + "gif", + "gifv", + "mng", + "avi", + "mov", + "qt", + "wmv", + "yuv", + "rm", + "rmvb", + "asf", + "amv", + "mp4", + "m4p", + "m4v", + "mpg", + "mp2", + "mpeg", + "mpe", + "mpv", + "m2v", + "m4v", + "svi", + "3gp", + "3g2", + "mxf", + "roq", + "nsv", + "f4v", + "f4p", + "f4a", + "f4b", + "mp3", + "flac", + "ts", ] # add example scan mimes for Google - cfg['GOOGLE']['ALLOWED']['MIME_TYPES'] = True - cfg['GOOGLE']['ALLOWED']['MIME_TYPES_LIST'] = ['video'] + cfg["GOOGLE"]["ALLOWED"]["MIME_TYPES"] = True + cfg["GOOGLE"]["ALLOWED"]["MIME_TYPES_LIST"] = ["video"] # add example Rclone file exists to remote mappings - if os.name == 'nt': - cfg['RCLONE']['RC_CACHE_REFRESH']['FILE_EXISTS_TO_REMOTE_MAPPINGS'] = {'Media/': ["G:\\Media"]} + if os.name == "nt": + cfg["RCLONE"]["RC_CACHE_REFRESH"]["FILE_EXISTS_TO_REMOTE_MAPPINGS"] = { + "Media/": ["G:\\Media"] + } else: - cfg['RCLONE']['RC_CACHE_REFRESH']['FILE_EXISTS_TO_REMOTE_MAPPINGS'] = {'Media/': ['/mnt/rclone/Media/']} + cfg["RCLONE"]["RC_CACHE_REFRESH"]["FILE_EXISTS_TO_REMOTE_MAPPINGS"] = { + "Media/": ["/mnt/rclone/Media/"] + } return cfg @@ -247,7 +267,9 @@ def __inner_upgrade(self, settings1, settings2, key=None, overwrite=False): if not key: logger.info("Added %r config option: %s", str(k), str(v)) else: - logger.info("Added %r to config option %r: %s", str(k), str(key), str(v)) + logger.info( + "Added %r to config option %r: %s", 
str(k), str(key), str(v) + ) continue # iterate children @@ -288,12 +310,12 @@ def upgrade_settings(self, currents): def load(self): logger.debug("Upgrading config...") - if not os.path.exists(self.settings['config']): + if not os.path.exists(self.settings["config"]): logger.info("No config file found. Creating a default config...") self.save(self.default_config) cfg = {} - with open(self.settings['config'], 'r') as fp: + with open(self.settings["config"], "r") as fp: cfg, upgraded = self.upgrade_settings(json.load(fp)) # Save config if upgraded @@ -306,10 +328,13 @@ def load(self): self.configs = cfg def save(self, cfg, exitOnSave=True): - with open(self.settings['config'], 'w') as fp: + with open(self.settings["config"], "w") as fp: json.dump(cfg, fp, indent=2, sort_keys=True) if exitOnSave: - logger.info("Your config was upgraded. You may check the changes here: %r", self.settings['config']) + logger.info( + "Your config was upgraded. You may check the changes here: %r", + self.settings["config"], + ) if exitOnSave: exit(0) @@ -326,14 +351,14 @@ def get_settings(self): logger.info("Using ARG setting %s=%s", name, value) # Envirnoment variable - elif data['env'] in os.environ: - value = os.environ[data['env']] - logger.info("Using ENV setting %s=%s" % (data['env'], value)) + elif data["env"] in os.environ: + value = os.environ[data["env"]] + logger.info("Using ENV setting %s=%s" % (data["env"], value)) # Default else: - value = data['default'] - logger.info("Using default setting %s=%s" % (data['argv'], value)) + value = data["default"] + logger.info("Using default setting %s=%s" % (data["argv"], value)) setts[name] = os.path.expandvars(value) @@ -346,15 +371,22 @@ def get_settings(self): def parse_args(self): parser = argparse.ArgumentParser( description=( - 'Script to assist Sonarr/Radarr/Lidarr with Plex/Jellyfin/Emby imports so that it will only scan the folder that has been imported, instead of the entire library section.' + "Script to assist Sonarr/Radarr/Lidarr with Plex/Jellyfin/Emby imports so that it will only scan the folder that has been imported, instead of the entire library section." 
), formatter_class=argparse.RawTextHelpFormatter, ) # Mode parser.add_argument( - 'cmd', - choices=('sections', 'server', 'authorize', 'build_caches', 'update_config', 'jesections'), + "cmd", + choices=( + "sections", + "server", + "authorize", + "build_caches", + "update_config", + "jesections", + ), help=( '"sections": Prints Plex Sections with more details.\n' '"jesections": Prints Jellyfin/Emby library paths.\n' @@ -367,41 +399,45 @@ def parse_args(self): # Config file parser.add_argument( - self.base_settings['config']['argv'], - nargs='?', + self.base_settings["config"]["argv"], + nargs="?", const=None, - help='Config file location (default: %s)' % self.base_settings['config']['default'], + help="Config file location (default: %s)" + % self.base_settings["config"]["default"], ) # Log file parser.add_argument( - self.base_settings['logfile']['argv'], - nargs='?', + self.base_settings["logfile"]["argv"], + nargs="?", const=None, - help='Log file location (default: %s)' % self.base_settings['logfile']['default'], + help="Log file location (default: %s)" + % self.base_settings["logfile"]["default"], ) # Queue file parser.add_argument( - self.base_settings['queuefile']['argv'], - nargs='?', + self.base_settings["queuefile"]["argv"], + nargs="?", const=None, - help='Queue file location (default: %s)' % self.base_settings['queuefile']['default'], + help="Queue file location (default: %s)" + % self.base_settings["queuefile"]["default"], ) # Cache file parser.add_argument( - self.base_settings['cachefile']['argv'], - nargs='?', + self.base_settings["cachefile"]["argv"], + nargs="?", const=None, - help='Google cache file location (default: %s)' % self.base_settings['cachefile']['default'], + help="Google cache file location (default: %s)" + % self.base_settings["cachefile"]["default"], ) # Logging level parser.add_argument( - self.base_settings['loglevel']['argv'], - choices=('WARN', 'INFO', 'DEBUG'), - help='Log level (default: %s)' % self.base_settings['loglevel']['default'], + self.base_settings["loglevel"]["argv"], + choices=("WARN", "INFO", "DEBUG"), + help="Log level (default: %s)" % self.base_settings["loglevel"]["default"], ) # Print help by default if no arguments diff --git a/db.py b/db.py index c0d6cc9..50437d7 100644 --- a/db.py +++ b/db.py @@ -9,7 +9,7 @@ # Config conf = config.Config() -db_path = conf.settings['queuefile'] +db_path = conf.settings["queuefile"] database = SqliteDatabase(db_path) @@ -55,14 +55,14 @@ def get_next_item(): def exists_file_root_path(file_path): items = get_all_items() - if '.' in file_path: + if "." 
in file_path: dir_path = os.path.dirname(file_path) else: dir_path = file_path for item in items: - if dir_path.lower() in item['scan_path'].lower(): - return True, item['scan_path'] + if dir_path.lower() in item["scan_path"].lower(): + return True, item["scan_path"] return False, None @@ -72,10 +72,10 @@ def get_all_items(): for item in QueueItemModel.select(): items.append( { - 'scan_path': item.scan_path, - 'scan_for': item.scan_for, - 'scan_type': item.scan_type, - 'scan_section': item.scan_section, + "scan_path": item.scan_path, + "scan_for": item.scan_for, + "scan_type": item.scan_type, + "scan_section": item.scan_section, } ) except Exception: @@ -89,15 +89,23 @@ def get_queue_count(): try: count = QueueItemModel.select().count() except Exception: - logger.exception("Exception getting queued item count from Plex Autoscan database: ") + logger.exception( + "Exception getting queued item count from Plex Autoscan database: " + ) return count def remove_item(scan_path): try: - return (QueueItemModel.delete()).where(QueueItemModel.scan_path == scan_path).execute() + return ( + (QueueItemModel.delete()) + .where(QueueItemModel.scan_path == scan_path) + .execute() + ) except Exception: - logger.exception("Exception deleting %r from Plex Autoscan database: ", scan_path) + logger.exception( + "Exception deleting %r from Plex Autoscan database: ", scan_path + ) return False @@ -105,7 +113,10 @@ def add_item(scan_path, scan_for, scan_section, scan_type): item = None try: return QueueItemModel.create( - scan_path=scan_path, scan_for=scan_for, scan_section=scan_section, scan_type=scan_type + scan_path=scan_path, + scan_for=scan_for, + scan_section=scan_section, + scan_type=scan_type, ) except AttributeError as ex: return item diff --git a/google/cache.py b/google/cache.py index 3af6c03..e09c458 100644 --- a/google/cache.py +++ b/google/cache.py @@ -11,6 +11,10 @@ def __init__(self, cache_file_path): def get_cache(self, cache_name, autocommit=False): if cache_name not in self.caches: self.caches[cache_name] = SqliteDict( - self.cache_file_path, tablename=cache_name, encode=json.dumps, decode=json.loads, autocommit=autocommit + self.cache_file_path, + tablename=cache_name, + encode=json.dumps, + decode=json.loads, + autocommit=autocommit, ) return self.caches[cache_name] diff --git a/google/drive.py b/google/drive.py index 42740e5..55bee35 100644 --- a/google/drive.py +++ b/google/drive.py @@ -33,7 +33,7 @@ def __init__( self.allowed_teamdrives = [] if not allowed_teamdrives else allowed_teamdrives self.drives = OrderedDict( { - 'drive_root': GoogleDrive( + "drive_root": GoogleDrive( client_id, client_secret, cache_path, @@ -46,18 +46,21 @@ def __init__( def load_teamdrives(self): loaded_teamdrives = 0 - teamdrives = self.drives['drive_root'].get_teamdrives() + teamdrives = self.drives["drive_root"].get_teamdrives() - if not teamdrives or 'teamDrives' not in teamdrives: + if not teamdrives or "teamDrives" not in teamdrives: logger.error("Failed to retrieve teamdrive list...") return False - teamdrives = teamdrives['teamDrives'] + teamdrives = teamdrives["teamDrives"] for teamdrive in teamdrives: - teamdrive_name = None if 'name' not in teamdrive else teamdrive['name'] - teamdrive_id = None if 'id' not in teamdrive else teamdrive['id'] + teamdrive_name = None if "name" not in teamdrive else teamdrive["name"] + teamdrive_id = None if "id" not in teamdrive else teamdrive["id"] if not teamdrive_id or not teamdrive_name: - logger.error("TeamDrive had insufficient data associated with it, 
skipping:\n%s", teamdrive) + logger.error( + "TeamDrive had insufficient data associated with it, skipping:\n%s", + teamdrive, + ) continue if teamdrive_name not in self.allowed_teamdrives: continue @@ -72,7 +75,11 @@ def load_teamdrives(self): crypt_decoder=self.crypt_decoder, teamdrive_id=teamdrive_id, ) - logger.debug("Loaded TeamDrive GoogleDrive instance for: %s (id = %s)", teamdrive_name, teamdrive_id) + logger.debug( + "Loaded TeamDrive GoogleDrive instance for: %s (id = %s)", + teamdrive_name, + teamdrive_id, + ) loaded_teamdrives += 1 logger.info("Loaded %d TeamDrive GoogleDrive instances", loaded_teamdrives) @@ -88,7 +95,7 @@ def get_changes(self): def is_authorized(self): try: - return self.drives['drive_root'].validate_access_token() + return self.drives["drive_root"].validate_access_token() except Exception: logger.exception("Exception validating authentication token: ") return False @@ -108,18 +115,18 @@ def build_caches(self): class GoogleDrive: - auth_url = 'https://accounts.google.com/o/oauth2/v2/auth' - token_url = 'https://www.googleapis.com/oauth2/v4/token' - api_url = 'https://www.googleapis.com/drive/' - redirect_url = 'urn:ietf:wg:oauth:2.0:oob' - scopes = ['https://www.googleapis.com/auth/drive'] + auth_url = "https://accounts.google.com/o/oauth2/v2/auth" + token_url = "https://www.googleapis.com/oauth2/v4/token" + api_url = "https://www.googleapis.com/drive/" + redirect_url = "urn:ietf:wg:oauth:2.0:oob" + scopes = ["https://www.googleapis.com/auth/drive"] def __init__( self, client_id, client_secret, cache_path, - allowed_config=None, + allowed_config=None, show_cache_logs=True, crypt_decoder=None, teamdrive_id=None, @@ -130,8 +137,10 @@ def __init__( self.client_secret = client_secret self.cache_path = cache_path self.cache_manager = Cache(cache_path) - self.cache = self.cache_manager.get_cache('drive_root' if not teamdrive_id else 'teamdrive_%s' % teamdrive_id) - self.settings_cache = self.cache_manager.get_cache('settings', autocommit=True) + self.cache = self.cache_manager.get_cache( + "drive_root" if not teamdrive_id else "teamdrive_%s" % teamdrive_id + ) + self.settings_cache = self.cache_manager.get_cache("settings", autocommit=True) self.support_team_drives = True if teamdrive_id is not None else False self.token = self._load_token() self.token_refresh_lock = Lock() @@ -147,7 +156,7 @@ def __init__( ############################################################ def set_page_token(self, page_token): - self.cache['page_token'] = page_token + self.cache["page_token"] = page_token return def set_callbacks(self, callbacks=None): @@ -158,35 +167,52 @@ def set_callbacks(self, callbacks=None): return def get_auth_link(self): - auth_url, state = self.http.authorization_url(self.auth_url, access_type='offline', prompt='select_account') + auth_url, state = self.http.authorization_url( + self.auth_url, access_type="offline", prompt="select_account" + ) return auth_url def exchange_code(self, code): - token = self.http.fetch_token(self.token_url, code=code, client_secret=self.client_secret) - if 'access_token' in token: + token = self.http.fetch_token( + self.token_url, code=code, client_secret=self.client_secret + ) + if "access_token" in token: self._token_saver(token) # pull in existing team drives and create cache for them return self.token - def query(self, path, method='GET', page_type='changes', fetch_all_pages=False, callbacks=None, **kwargs): + def query( + self, + path, + method="GET", + page_type="changes", + fetch_all_pages=False, + callbacks=None, + **kwargs 
+ ): if callbacks is None: callbacks = {} resp = None pages = 1 resp_json = {} - request_url = self.api_url + path.lstrip('/') if not path.startswith('http') else path + request_url = ( + self.api_url + path.lstrip("/") if not path.startswith("http") else path + ) try: while True: resp = self._do_query(request_url, method, **kwargs) logger.debug("Request URL: %s", resp.url) logger.debug("Request ARG: %s", kwargs) - logger.debug('Response Status: %d %s', resp.status_code, resp.reason) - logger.debug('Response Content:\n%s\n', resp.text) + logger.debug("Response Status: %d %s", resp.status_code, resp.reason) + logger.debug("Response Content:\n%s\n", resp.text) - if 'Content-Type' in resp.headers and 'json' in resp.headers['Content-Type']: + if ( + "Content-Type" in resp.headers + and "json" in resp.headers["Content-Type"] + ): if fetch_all_pages: - resp_json.pop('nextPageToken', None) + resp_json.pop("nextPageToken", None) new_json = resp.json() # does this page have changes extended_pages = False @@ -204,27 +230,37 @@ def query(self, path, method='GET', page_type='changes', fetch_all_pages=False, return False if resp.status_code != 200 else True, resp, resp.text # call page_token_callback to update cached page_token, if specified - if page_type == 'changes' and 'page_token_callback' in callbacks: - if 'nextPageToken' in resp_json: - callbacks['page_token_callback'](resp_json['nextPageToken']) - elif 'newStartPageToken' in resp_json: - callbacks['page_token_callback'](resp_json['newStartPageToken']) + if page_type == "changes" and "page_token_callback" in callbacks: + if "nextPageToken" in resp_json: + callbacks["page_token_callback"](resp_json["nextPageToken"]) + elif "newStartPageToken" in resp_json: + callbacks["page_token_callback"](resp_json["newStartPageToken"]) # call data_callback, fetch_all_pages is true - if page_type == 'changes' and fetch_all_pages and 'data_callback' in callbacks: - callbacks['data_callback'](resp.json()) + if ( + page_type == "changes" + and fetch_all_pages + and "data_callback" in callbacks + ): + callbacks["data_callback"](resp.json()) # handle nextPageToken - if fetch_all_pages and 'nextPageToken' in resp_json and resp_json['nextPageToken']: + if ( + fetch_all_pages + and "nextPageToken" in resp_json + and resp_json["nextPageToken"] + ): # there are more pages pages += 1 logger.info("Fetching extra results from page %d", pages) - if 'params' in kwargs: - kwargs['params'].update({'pageToken': resp_json['nextPageToken']}) - elif 'json' in kwargs: - kwargs['json'].update({'pageToken': resp_json['nextPageToken']}) - elif 'data' in kwargs: - kwargs['data'].update({'pageToken': resp_json['nextPageToken']}) + if "params" in kwargs: + kwargs["params"].update( + {"pageToken": resp_json["nextPageToken"]} + ) + elif "json" in kwargs: + kwargs["json"].update({"pageToken": resp_json["nextPageToken"]}) + elif "data" in kwargs: + kwargs["data"].update({"pageToken": resp_json["nextPageToken"]}) continue break @@ -236,7 +272,9 @@ def query(self, path, method='GET', page_type='changes', fetch_all_pages=False, ) except Exception: - logger.exception("Exception sending request to %s with kwargs=%s: ", request_url, kwargs) + logger.exception( + "Exception sending request to %s with kwargs=%s: ", request_url, kwargs + ) return False, resp, None ############################################################ @@ -245,13 +283,13 @@ def query(self, path, method='GET', page_type='changes', fetch_all_pages=False, def validate_access_token(self): success, resp, data = self.query( - 
'/v3/changes/startPageToken', - params={'supportsTeamDrives': self.support_team_drives}, + "/v3/changes/startPageToken", + params={"supportsTeamDrives": self.support_team_drives}, fetch_all_pages=True, - page_type='auth', + page_type="auth", ) if success and resp.status_code == 200: - if 'startPageToken' not in data: + if "startPageToken" not in data: logger.error("Failed validate up to date access_token:\n\n%s\n", data) return False return True @@ -264,17 +302,19 @@ def validate_access_token(self): return False def get_changes_start_page_token(self): - params = {'supportsTeamDrives': self.support_team_drives} + params = {"supportsTeamDrives": self.support_team_drives} if self.teamdrive_id is not None and self.support_team_drives: - params['teamDriveId'] = self.teamdrive_id + params["teamDriveId"] = self.teamdrive_id - success, resp, data = self.query('/v3/changes/startPageToken', params=params, fetch_all_pages=True) + success, resp, data = self.query( + "/v3/changes/startPageToken", params=params, fetch_all_pages=True + ) if success and resp.status_code == 200: - if 'startPageToken' not in data: + if "startPageToken" not in data: logger.error("Failed to retrieve changes startPageToken:\n\n%s\n", data) return None - return data['startPageToken'] + return data["startPageToken"] else: logger.error( "Error retrieving changes startPageToken, status_code = %d, data =\n\n%s\n", @@ -285,21 +325,31 @@ def get_changes_start_page_token(self): def get_teamdrives(self): success, resp, data = self.query( - '/v3/teamdrives', params={'pageSize': 100}, fetch_all_pages=True, page_type='teamDrives' + "/v3/teamdrives", + params={"pageSize": 100}, + fetch_all_pages=True, + page_type="teamDrives", ) if success and resp.status_code == 200: return data else: - logger.error('Failed to retrieve teamdrives, status_code = %d, content =\n', resp.status_code, resp.text) + logger.error( + "Failed to retrieve teamdrives, status_code = %d, content =\n", + resp.status_code, + resp.text, + ) return None def get_changes(self): - callbacks = {'page_token_callback': self._page_token_saver, 'data_callback': self._process_changes} + callbacks = { + "page_token_callback": self._page_token_saver, + "data_callback": self._process_changes, + } # get page token page_token = None - if 'page_token' in self.cache: - page_token = self.cache['page_token'] + if "page_token" in self.cache: + page_token = self.cache["page_token"] else: page_token = self.get_changes_start_page_token() @@ -309,22 +359,24 @@ def get_changes(self): # build params params = { - 'pageToken': page_token, - 'pageSize': 1000, - 'includeRemoved': True, - 'includeTeamDriveItems': self.support_team_drives, - 'supportsTeamDrives': self.support_team_drives, - 'fields': 'changes(file(md5Checksum,mimeType,modifiedTime,' - 'name,parents,teamDriveId,trashed),' - 'fileId,removed,teamDrive(id,name),' - 'teamDriveId),newStartPageToken,nextPageToken', + "pageToken": page_token, + "pageSize": 1000, + "includeRemoved": True, + "includeTeamDriveItems": self.support_team_drives, + "supportsTeamDrives": self.support_team_drives, + "fields": "changes(file(md5Checksum,mimeType,modifiedTime," + "name,parents,teamDriveId,trashed)," + "fileId,removed,teamDrive(id,name)," + "teamDriveId),newStartPageToken,nextPageToken", } if self.teamdrive_id is not None and self.support_team_drives: - params['teamDriveId'] = self.teamdrive_id + params["teamDriveId"] = self.teamdrive_id # make call(s) - success, resp, data = self.query('/v3/changes', params=params, fetch_all_pages=True, 
callbacks=callbacks) + success, resp, data = self.query( + "/v3/changes", params=params, fetch_all_pages=True, callbacks=callbacks + ) return ############################################################ @@ -339,27 +391,30 @@ def get_id_metadata(self, item_id, teamdrive_id=None): # does item_id match teamdrive_id? if teamdrive_id is not None and item_id == teamdrive_id: - success, resp, data = self.query('v3/teamdrives/%s' % str(item_id)) - if success and resp.status_code == 200 and 'name' in data: + success, resp, data = self.query("v3/teamdrives/%s" % str(item_id)) + if success and resp.status_code == 200 and "name" in data: # we successfully retrieved this teamdrive info, lets place a mimeType key in the result # so we know it needs to be cached - data['mimeType'] = 'application/vnd.google-apps.folder' + data["mimeType"] = "application/vnd.google-apps.folder" # lets create a cache for this teamdrive aswell self.cache_manager.get_cache("teamdrive_%s" % teamdrive_id) - self._do_callback('teamdrive_added', data) + self._do_callback("teamdrive_added", data) else: # retrieve file metadata success, resp, data = self.query( - 'v3/files/%s' % str(item_id), + "v3/files/%s" % str(item_id), params={ - 'supportsTeamDrives': self.support_team_drives, - 'fields': 'id,md5Checksum,mimeType,modifiedTime,name,parents,' 'trashed,teamDriveId', + "supportsTeamDrives": self.support_team_drives, + "fields": "id,md5Checksum,mimeType,modifiedTime,name,parents," + "trashed,teamDriveId", }, ) if success and resp.status_code == 200: return True, data else: - logger.error("Error retrieving metadata for item %r:\n\n%s\n", item_id, data) + logger.error( + "Error retrieving metadata for item %r:\n\n%s\n", item_id, data + ) return False, data def get_id_file_paths(self, item_id, teamdrive_id=None): @@ -368,39 +423,49 @@ def get_id_file_paths(self, item_id, teamdrive_id=None): try: - def get_item_paths(obj_id, path, paths, new_cache_entries, teamdrive_id=None): + def get_item_paths( + obj_id, path, paths, new_cache_entries, teamdrive_id=None + ): success, obj = self.get_id_metadata(obj_id, teamdrive_id) if not success: return new_cache_entries - teamdrive_id = teamdrive_id if 'teamDriveId' not in obj else obj['teamDriveId'] + teamdrive_id = ( + teamdrive_id if "teamDriveId" not in obj else obj["teamDriveId"] + ) # add item object to cache if we know its not from cache - if 'mimeType' in obj: + if "mimeType" in obj: # we know this is a new item fetched from the api, because the cache does not store this field self.add_item_to_cache( - obj['id'], - obj['name'], - [] if 'parents' not in obj else obj['parents'], - obj['md5Checksum'] if 'md5Checksum' in obj else None, + obj["id"], + obj["name"], + [] if "parents" not in obj else obj["parents"], + obj["md5Checksum"] if "md5Checksum" in obj else None, ) new_cache_entries += 1 - if path.strip() == '': - path = obj['name'] + if path.strip() == "": + path = obj["name"] else: - path = os.path.join(obj['name'], path) + path = os.path.join(obj["name"], path) - if 'parents' in obj and obj['parents']: - for parent in obj['parents']: - new_cache_entries += get_item_paths(parent, path, paths, new_cache_entries, teamdrive_id) + if "parents" in obj and obj["parents"]: + for parent in obj["parents"]: + new_cache_entries += get_item_paths( + parent, path, paths, new_cache_entries, teamdrive_id + ) - if (not obj or 'parents' not in obj or not obj['parents']) and len(path): + if (not obj or "parents" not in obj or not obj["parents"]) and len( + path + ): paths.append(path) return 
new_cache_entries return new_cache_entries - added_to_cache += get_item_paths(item_id, '', file_paths, added_to_cache, teamdrive_id) + added_to_cache += get_item_paths( + item_id, "", file_paths, added_to_cache, teamdrive_id + ) if added_to_cache: logger.debug("Dumping cache due to new entries!") self._dump_cache() @@ -415,7 +480,9 @@ def get_item_paths(obj_id, path, paths, new_cache_entries, teamdrive_id=None): return False, [] - def add_item_to_cache(self, item_id, item_name, item_parents, md5_checksum, file_paths=None): + def add_item_to_cache( + self, item_id, item_name, item_parents, md5_checksum, file_paths=None + ): if file_paths is None: file_paths = [] if self.show_cache_logs and item_id not in self.cache: @@ -423,13 +490,13 @@ def add_item_to_cache(self, item_id, item_name, item_parents, md5_checksum, file if not file_paths: existing_item = self.cache[item_id] if item_id in self.cache else None - if existing_item is not None and 'paths' in existing_item: - file_paths = existing_item['paths'] + if existing_item is not None and "paths" in existing_item: + file_paths = existing_item["paths"] self.cache[item_id] = { - 'name': item_name, - 'parents': item_parents, - 'md5Checksum': md5_checksum, - 'paths': file_paths, + "name": item_name, + "parents": item_parents, + "md5Checksum": md5_checksum, + "paths": file_paths, } return @@ -441,10 +508,10 @@ def remove_item_from_cache(self, item_id): def get_item_name_from_cache(self, item_id): try: item = self.cache.get(item_id) - return item['name'] if isinstance(item, dict) else 'Unknown' + return item["name"] if isinstance(item, dict) else "Unknown" except Exception: pass - return 'Unknown' + return "Unknown" def get_item_from_cache(self, item_id): try: @@ -466,26 +533,28 @@ def _do_query(self, request_url, method, **kwargs): use_timeout = 30 # override default timeout - if 'timeout' in kwargs and isinstance(kwargs['timeout'], int): - use_timeout = kwargs['timeout'] - kwargs.pop('timeout', None) + if "timeout" in kwargs and isinstance(kwargs["timeout"], int): + use_timeout = kwargs["timeout"] + kwargs.pop("timeout", None) # remove un-needed kwargs - kwargs.pop('fetch_all_pages', None) - kwargs.pop('page_token_callback', None) + kwargs.pop("fetch_all_pages", None) + kwargs.pop("page_token_callback", None) # do query while tries < max_tries: if self.token_refresh_lock.locked() and not lock_acquirer: - logger.debug("Token refresh lock is currently acquired. Trying again in 500ms...") + logger.debug( + "Token refresh lock is currently acquired. Trying again in 500ms..." 
+ ) time.sleep(0.5) continue - if method == 'POST': + if method == "POST": resp = self.http.post(request_url, timeout=use_timeout, **kwargs) - elif method == 'PATCH': + elif method == "PATCH": resp = self.http.patch(request_url, timeout=use_timeout, **kwargs) - elif method == 'DELETE': + elif method == "DELETE": resp = self.http.delete(request_url, timeout=use_timeout, **kwargs) else: resp = self.http.get(request_url, timeout=use_timeout, **kwargs) @@ -495,8 +564,10 @@ def _do_query(self, request_url, method, **kwargs): # unauthorized error, lets refresh token and retry self.token_refresh_lock.acquire(False) lock_acquirer = True - logger.warning("Unauthorized Response (Attempts %d/%d)", tries, max_tries) - self.token['expires_at'] = time() - 10 + logger.warning( + "Unauthorized Response (Attempts %d/%d)", tries, max_tries + ) + self.token["expires_at"] = time() - 10 self.http = self._new_http_object() else: break @@ -505,16 +576,16 @@ def _do_query(self, request_url, method, **kwargs): def _load_token(self): try: - if 'token' not in self.settings_cache: + if "token" not in self.settings_cache: return {} - return self.settings_cache['token'] + return self.settings_cache["token"] except Exception: logger.exception("Exception loading token from cache: ") return {} def _dump_token(self): try: - self.settings_cache['token'] = self.token + self.settings_cache["token"] = self.token return True except Exception: logger.exception("Exception dumping token to cache: ") @@ -534,7 +605,7 @@ def _token_saver(self, token): def _page_token_saver(self, page_token): # update internal token dict - self.cache['page_token'] = page_token + self.cache["page_token"] = page_token self._dump_cache() logger.debug("Updated page_token: %s", page_token) return @@ -545,7 +616,10 @@ def _new_http_object(self): redirect_uri=self.redirect_url, scope=self.scopes, auto_refresh_url=self.token_url, - auto_refresh_kwargs={'client_id': self.client_id, 'client_secret': self.client_secret}, + auto_refresh_kwargs={ + "client_id": self.client_id, + "client_secret": self.client_secret, + }, token_updater=self._token_saver, token=self.token, ) @@ -562,52 +636,57 @@ def _dump_cache(self, blocking=True): def _remove_unwanted_paths(self, paths_list, mime_type): removed_file_paths = [] # remove paths that were not allowed - this is always enabled - if 'FILE_PATHS' in self.allowed_config: + if "FILE_PATHS" in self.allowed_config: for item_path in copy(paths_list): allowed_path = False - for allowed_file_path in self.allowed_config['FILE_PATHS']: + for allowed_file_path in self.allowed_config["FILE_PATHS"]: if item_path.lower().startswith(allowed_file_path.lower()): allowed_path = True break if not allowed_path: - logger.debug("Ignoring %r because its not an allowed path.", item_path) + logger.debug( + "Ignoring %r because its not an allowed path.", item_path + ) removed_file_paths.append(item_path) paths_list.remove(item_path) continue # remove unallowed extensions if ( - 'FILE_EXTENSIONS' in self.allowed_config - and 'FILE_EXTENSIONS_LIST' in self.allowed_config - and self.allowed_config['FILE_EXTENSIONS'] + "FILE_EXTENSIONS" in self.allowed_config + and "FILE_EXTENSIONS_LIST" in self.allowed_config + and self.allowed_config["FILE_EXTENSIONS"] and len(paths_list) ): for item_path in copy(paths_list): allowed_file = False - for allowed_extension in self.allowed_config['FILE_EXTENSIONS_LIST']: + for allowed_extension in self.allowed_config["FILE_EXTENSIONS_LIST"]: if item_path.lower().endswith(allowed_extension.lower()): allowed_file = True 
break if not allowed_file: - logger.debug("Ignoring %r because it was not an allowed extension.", item_path) + logger.debug( + "Ignoring %r because it was not an allowed extension.", + item_path, + ) removed_file_paths.append(item_path) paths_list.remove(item_path) # remove unallowed mimes if ( - 'MIME_TYPES' in self.allowed_config - and 'MIME_TYPES_LIST' in self.allowed_config - and self.allowed_config['MIME_TYPES'] + "MIME_TYPES" in self.allowed_config + and "MIME_TYPES_LIST" in self.allowed_config + and self.allowed_config["MIME_TYPES"] and len(paths_list) ): allowed_file = False - for allowed_mime in self.allowed_config['MIME_TYPES_LIST']: + for allowed_mime in self.allowed_config["MIME_TYPES_LIST"]: if allowed_mime.lower() in mime_type.lower(): - if 'video' in mime_type.lower(): + if "video" in mime_type.lower(): # we want to validate this is not a .sub file, which for some reason, google shows as video/MP2G double_checked_allowed = True for item_path in paths_list: - if item_path.lower().endswith('.sub'): + if item_path.lower().endswith(".sub"): double_checked_allowed = False if double_checked_allowed: allowed_file = True @@ -617,7 +696,11 @@ def _remove_unwanted_paths(self, paths_list, mime_type): break if not allowed_file: - logger.debug("Ignoring %r because it was not an allowed mime: %s", paths_list, mime_type) + logger.debug( + "Ignoring %r because it was not an allowed mime: %s", + paths_list, + mime_type, + ) for item_path in copy(paths_list): removed_file_paths.append(item_path) paths_list.remove(item_path) @@ -631,45 +714,63 @@ def _process_changes(self, data): moved_file_paths = {} removes = 0 - if not data or 'changes' not in data: + if not data or "changes" not in data: logger.error("There were no changes to process") return - logger.info("Processing %d changes", len(data['changes'])) + logger.info("Processing %d changes", len(data["changes"])) # process changes - for change in data['changes']: - if 'file' in change and 'fileId' in change: + for change in data["changes"]: + if "file" in change and "fileId" in change: # dont consider trashed/removed events for processing - if ('trashed' in change['file'] and change['file']['trashed']) or ( - 'removed' in change and change['removed'] + if ("trashed" in change["file"] and change["file"]["trashed"]) or ( + "removed" in change and change["removed"] ): - if self.remove_item_from_cache(change['fileId']) and self.show_cache_logs: - logger.info("Removed '%s' from cache: %s", change['fileId'], change['file']['name']) + if ( + self.remove_item_from_cache(change["fileId"]) + and self.show_cache_logs + ): + logger.info( + "Removed '%s' from cache: %s", + change["fileId"], + change["file"]["name"], + ) removes += 1 continue # retrieve item from cache - existing_cache_item = self.get_item_from_cache(change['fileId']) + existing_cache_item = self.get_item_from_cache(change["fileId"]) # we always want to add changes to the cache so renames etc can be reflected inside the cache self.add_item_to_cache( - change['fileId'], - change['file']['name'], - [] if 'parents' not in change['file'] else change['file']['parents'], - change['file']['md5Checksum'] if 'md5Checksum' in change['file'] else None, + change["fileId"], + change["file"]["name"], + [] + if "parents" not in change["file"] + else change["file"]["parents"], + change["file"]["md5Checksum"] + if "md5Checksum" in change["file"] + else None, ) # get this files paths success, item_paths = self.get_id_file_paths( - change['fileId'], change['file']['teamDriveId'] if 'teamDriveId' in 
change['file'] else None + change["fileId"], + change["file"]["teamDriveId"] + if "teamDriveId" in change["file"] + else None, ) if success: # save item paths self.add_item_to_cache( - change['fileId'], - change['file']['name'], - [] if 'parents' not in change['file'] else change['file']['parents'], - change['file']['md5Checksum'] if 'md5Checksum' in change['file'] else None, + change["fileId"], + change["file"]["name"], + [] + if "parents" not in change["file"] + else change["file"]["parents"], + change["file"]["md5Checksum"] + if "md5Checksum" in change["file"] + else None, item_paths, ) @@ -680,19 +781,25 @@ def _process_changes(self, data): item_paths = decoded # dont process folder events - if 'mimeType' in change['file'] and 'vnd.google-apps.folder' in change['file']['mimeType']: + if ( + "mimeType" in change["file"] + and "vnd.google-apps.folder" in change["file"]["mimeType"] + ): # ignore this change as we dont want to scan folders logger.debug("Ignoring %r because it is a folder", item_paths) - if change['fileId'] in ignored_file_paths: - ignored_file_paths[change['fileId']].extend(item_paths) + if change["fileId"] in ignored_file_paths: + ignored_file_paths[change["fileId"]].extend(item_paths) else: - ignored_file_paths[change['fileId']] = item_paths + ignored_file_paths[change["fileId"]] = item_paths continue # remove unwanted paths if success and len(item_paths): unwanted_paths = self._remove_unwanted_paths( - item_paths, change['file']['mimeType'] if 'mimeType' in change['file'] else 'Unknown' + item_paths, + change["file"]["mimeType"] + if "mimeType" in change["file"] + else "Unknown", ) if isinstance(unwanted_paths, list) and len(unwanted_paths): unwanted_file_paths.extend(unwanted_paths) @@ -701,86 +808,130 @@ def _process_changes(self, data): if existing_cache_item is not None and (success and len(item_paths)): # this was an existing item, and we are re-processing it again # we need to determine if this file has changed (md5Checksum) - if 'md5Checksum' in change['file'] and 'md5Checksum' in existing_cache_item: + if ( + "md5Checksum" in change["file"] + and "md5Checksum" in existing_cache_item + ): # compare this changes md5Checksum and the existing cache item - if change['file']['md5Checksum'] != existing_cache_item['md5Checksum']: + if ( + change["file"]["md5Checksum"] + != existing_cache_item["md5Checksum"] + ): # the file was modified - if change['fileId'] in added_file_paths: - added_file_paths[change['fileId']].extend(item_paths) + if change["fileId"] in added_file_paths: + added_file_paths[change["fileId"]].extend(item_paths) else: - added_file_paths[change['fileId']] = item_paths + added_file_paths[change["fileId"]] = item_paths else: - if ('name' in change['file'] and 'name' in existing_cache_item) and change['file'][ - 'name' - ] != existing_cache_item['name']: - logger.debug("md5Checksum matches but file was server-side renamed: %s", item_paths) - if change['fileId'] in added_file_paths: - added_file_paths[change['fileId']].extend(item_paths) + if ( + "name" in change["file"] + and "name" in existing_cache_item + ) and change["file"]["name"] != existing_cache_item["name"]: + logger.debug( + "md5Checksum matches but file was server-side renamed: %s", + item_paths, + ) + if change["fileId"] in added_file_paths: + added_file_paths[change["fileId"]].extend( + item_paths + ) else: - added_file_paths[change['fileId']] = item_paths + added_file_paths[change["fileId"]] = item_paths - if change['fileId'] in renamed_file_paths: - 
renamed_file_paths[change['fileId']].extend(item_paths) + if change["fileId"] in renamed_file_paths: + renamed_file_paths[change["fileId"]].extend( + item_paths + ) else: - renamed_file_paths[change['fileId']] = item_paths - elif 'paths' in existing_cache_item and not self._list_matches( - item_paths, existing_cache_item['paths'] + renamed_file_paths[change["fileId"]] = item_paths + elif ( + "paths" in existing_cache_item + and not self._list_matches( + item_paths, existing_cache_item["paths"] + ) ): - logger.debug("md5Checksum matches but file was server-side moved: %s", item_paths) + logger.debug( + "md5Checksum matches but file was server-side moved: %s", + item_paths, + ) - if change['fileId'] in added_file_paths: - added_file_paths[change['fileId']].extend(item_paths) + if change["fileId"] in added_file_paths: + added_file_paths[change["fileId"]].extend( + item_paths + ) else: - added_file_paths[change['fileId']] = item_paths + added_file_paths[change["fileId"]] = item_paths - if change['fileId'] in moved_file_paths: - moved_file_paths[change['fileId']].extend(item_paths) + if change["fileId"] in moved_file_paths: + moved_file_paths[change["fileId"]].extend( + item_paths + ) else: - moved_file_paths[change['fileId']] = item_paths + moved_file_paths[change["fileId"]] = item_paths else: logger.debug( "Ignoring %r because the md5Checksum was the same as cache: %s", item_paths, - existing_cache_item['md5Checksum'], + existing_cache_item["md5Checksum"], ) - if change['fileId'] in ignored_file_paths: - ignored_file_paths[change['fileId']].extend(item_paths) + if change["fileId"] in ignored_file_paths: + ignored_file_paths[change["fileId"]].extend( + item_paths + ) else: - ignored_file_paths[change['fileId']] = item_paths + ignored_file_paths[change["fileId"]] = item_paths else: - logger.error("No md5Checksum for cache item:\n%s", existing_cache_item) + logger.error( + "No md5Checksum for cache item:\n%s", existing_cache_item + ) elif success and len(item_paths): # these are new paths/files that were not already in the cache - if change['fileId'] in added_file_paths: - added_file_paths[change['fileId']].extend(item_paths) + if change["fileId"] in added_file_paths: + added_file_paths[change["fileId"]].extend(item_paths) else: - added_file_paths[change['fileId']] = item_paths + added_file_paths[change["fileId"]] = item_paths - elif 'teamDriveId' in change: + elif "teamDriveId" in change: # this is a teamdrive change # dont consider trashed/removed events for processing - if 'removed' in change and change['removed']: + if "removed" in change and change["removed"]: # remove item from cache - if self.remove_item_from_cache(change['teamDriveId']): - if self.show_cache_logs and 'teamDrive' in change and 'name' in change['teamDrive']: - teamdrive_name = 'Unknown teamDrive' - teamdrive_name = change['teamDrive']['name'] - logger.info("Removed teamDrive '%s' from cache: %s", change['teamDriveId'], teamdrive_name) - - self._do_callback('teamdrive_removed', change) + if self.remove_item_from_cache(change["teamDriveId"]): + if ( + self.show_cache_logs + and "teamDrive" in change + and "name" in change["teamDrive"] + ): + teamdrive_name = "Unknown teamDrive" + teamdrive_name = change["teamDrive"]["name"] + logger.info( + "Removed teamDrive '%s' from cache: %s", + change["teamDriveId"], + teamdrive_name, + ) + + self._do_callback("teamdrive_removed", change) removes += 1 continue - if 'teamDrive' in change and 'id' in change['teamDrive'] and 'name' in change['teamDrive']: + if ( + "teamDrive" in change + 
and "id" in change["teamDrive"] + and "name" in change["teamDrive"] + ): # we always want to add changes to the cache so renames etc can be reflected inside the cache - if change['teamDrive']['id'] not in self.cache: - self.cache_manager.get_cache("teamdrive_%s" % change['teamDrive']['id']) - self._do_callback('teamdrive_added', change) + if change["teamDrive"]["id"] not in self.cache: + self.cache_manager.get_cache( + "teamdrive_%s" % change["teamDrive"]["id"] + ) + self._do_callback("teamdrive_added", change) - self.add_item_to_cache(change['teamDrive']['id'], change['teamDrive']['name'], [], None) + self.add_item_to_cache( + change["teamDrive"]["id"], change["teamDrive"]["name"], [], None + ) continue # always dump the cache after running changes @@ -794,7 +945,7 @@ def _process_changes(self, data): logger.debug("Moved: %s", moved_file_paths) logger.info( - '%d added / %d removed / %d unwanted / %d ignored / %d renamed / %d moved', + "%d added / %d removed / %d unwanted / %d ignored / %d renamed / %d moved", len(added_file_paths), removes, len(unwanted_file_paths), @@ -804,9 +955,9 @@ def _process_changes(self, data): ) # call further callbacks - self._do_callback('items_added', added_file_paths) - self._do_callback('items_unwanted', unwanted_file_paths) - self._do_callback('items_ignored', ignored_file_paths) + self._do_callback("items_added", added_file_paths) + self._do_callback("items_unwanted", unwanted_file_paths) + self._do_callback("items_ignored", ignored_file_paths) return @@ -823,5 +974,5 @@ def _list_matches(list_master, list_check): return False return True except Exception: - logger.exception('Exception checking if lists match: ') + logger.exception("Exception checking if lists match: ") return False diff --git a/jelly_emby.py b/jelly_emby.py index 9d75638..21d235d 100644 --- a/jelly_emby.py +++ b/jelly_emby.py @@ -10,22 +10,25 @@ def get_library_paths(conf): - if conf.configs['ENABLE_JOE']: - server_type = conf.configs['JELLYFIN_EMBY'] - host = conf.configs['JOE_HOST'] - headers = {'accept': 'application/json', 'Content-Type': 'application/json'} + if conf.configs["ENABLE_JOE"]: + server_type = conf.configs["JELLYFIN_EMBY"] + host = conf.configs["JOE_HOST"] + headers = {"accept": "application/json", "Content-Type": "application/json"} try: command = requests.get( - host + f'/Library/PhysicalPaths?api_key={conf.configs["JOE_API_KEY"]}', headers=headers + host + f'/Library/PhysicalPaths?api_key={conf.configs["JOE_API_KEY"]}', + headers=headers, ) if server_type == "jellyfin": if command.status_code == 200: - jellyfin_logger.info("Requesting of library sections info was successful.") + jellyfin_logger.info( + "Requesting of library sections info was successful." + ) jellyfin_logger.debug("Request response: %s", command.text) - print('') + print("") print("Jellyfin Sections:") print("==============") - print(*command.json(), sep='\n') + print(*command.json(), sep="\n") print("==============") else: jellyfin_logger.error( @@ -33,12 +36,14 @@ def get_library_paths(conf): ) elif server_type == "emby": if command.status_code == 200: - emby_logger.info("Requesting of library sections info was successful.") + emby_logger.info( + "Requesting of library sections info was successful." 
+ ) emby_logger.debug("Request response: %s", command.text) - print('') + print("") print("Emby Sections:") print("==============") - print(*command.json(), sep='\n') + print(*command.json(), sep="\n") print("==============") else: emby_logger.error( @@ -46,9 +51,13 @@ def get_library_paths(conf): ) except requests.exceptions.ConnectionError: if server_type == "jellyfin": - jellyfin_logger.error("Issue encountered when attempting to list library paths.") + jellyfin_logger.error( + "Issue encountered when attempting to list library paths." + ) elif server_type == "emby": - emby_logger.error("Issue encountered when attempting to list library paths.") + emby_logger.error( + "Issue encountered when attempting to list library paths." + ) else: logger.error( "You must enable the Jellyfin/Emby section in config. To enable it set 'ENABLE_JOE' to true in config.json." @@ -56,41 +65,53 @@ def get_library_paths(conf): def scan(config, path, scan_for): - if config['ENABLE_JOE']: - server_type = config['JELLYFIN_EMBY'] + if config["ENABLE_JOE"]: + server_type = config["JELLYFIN_EMBY"] # sleep for delay if server_type == "jellyfin": jellyfin_logger.info("Scan request from %s for '%s'.", scan_for, path) elif server_type == "emby": emby_logger.info("Scan request from %s for '%s'.", scan_for, path) - if config['SERVER_SCAN_DELAY']: + if config["SERVER_SCAN_DELAY"]: if server_type == "jellyfin": - jellyfin_logger.info("Sleeping for %d seconds...", config['SERVER_SCAN_DELAY']) + jellyfin_logger.info( + "Sleeping for %d seconds...", config["SERVER_SCAN_DELAY"] + ) elif server_type == "emby": - emby_logger.info("Sleeping for %d seconds...", config['SERVER_SCAN_DELAY']) - time.sleep(config['SERVER_SCAN_DELAY']) + emby_logger.info( + "Sleeping for %d seconds...", config["SERVER_SCAN_DELAY"] + ) + time.sleep(config["SERVER_SCAN_DELAY"]) try: data = {"Updates": [{"Path": f"{path}", "UpdateType": "Created"}]} - headers = {'accept': 'application/json', 'Content-Type': 'application/json'} - server_type = config['JELLYFIN_EMBY'] - host = config['JOE_HOST'] + headers = {"accept": "application/json", "Content-Type": "application/json"} + server_type = config["JELLYFIN_EMBY"] + host = config["JOE_HOST"] try: command = requests.post( - host + f'/Library/Media/Updated?api_key={config["JOE_API_KEY"]}', headers=headers, json=data + host + f'/Library/Media/Updated?api_key={config["JOE_API_KEY"]}', + headers=headers, + json=data, ) if server_type == "jellyfin": if command.status_code == 204: - jellyfin_logger.info("Successfully sent scan request to Jellyfin.") + jellyfin_logger.info( + "Successfully sent scan request to Jellyfin." + ) elif server_type == "emby": if command.status_code == 204: emby_logger.info("Successfully sent scan request to Emby.") except RequestException as e: if server_type == "jellyfin": - jellyfin_logger.error(f"Error occurred when trying to send scan request to Jellyfin. {e}") + jellyfin_logger.error( + f"Error occurred when trying to send scan request to Jellyfin. {e}" + ) elif server_type == "emby": - emby_logger.error(f"Error occurred when trying to send scan request to Emby. {e}") + emby_logger.error( + f"Error occurred when trying to send scan request to Emby. 
{e}" + ) pass except KeyError: pass diff --git a/plex.py b/plex.py index 84670f6..388d06f 100644 --- a/plex.py +++ b/plex.py @@ -18,30 +18,50 @@ def show_detailed_sections_info(conf): - if conf.configs['ENABLE_PLEX']: + if conf.configs["ENABLE_PLEX"]: from xml.etree import ElementTree try: - headers = {'X-Plex-Token': conf.configs['PLEX_TOKEN'], 'Accept': 'application/json'} + headers = { + "X-Plex-Token": conf.configs["PLEX_TOKEN"], + "Accept": "application/json", + } logger.info("Requesting section info from Plex...") - resp = requests.get(f"{conf.configs['PLEX_LOCAL_URL']}/library/sections/all", timeout=30, headers=headers) + resp = requests.get( + f"{conf.configs['PLEX_LOCAL_URL']}/library/sections/all", + timeout=30, + headers=headers, + ) if resp.status_code == 200: logger.info("Requesting of section info was successful.") logger.debug("Request response: %s", resp.text) root = ElementTree.fromstring(resp.text) - print('') + print("") print("Plex Sections:") print("==============") for document in root.findall("Directory"): - print('') - print(document.get('key') + ') ' + document.get('title')) - dashes_length = len(document.get('key') + ') ' + document.get('title')) - print('-' * dashes_length) - print("\n".join([os.path.join(k.get('path'), '') for k in document.findall("Location")])) + print("") + print(document.get("key") + ") " + document.get("title")) + dashes_length = len( + document.get("key") + ") " + document.get("title") + ) + print("-" * dashes_length) + print( + "\n".join( + [ + os.path.join(k.get("path"), "") + for k in document.findall("Location") + ] + ) + ) except Exception as e: - logger.exception("Issue encountered when attempting to list detailed sections info.") + logger.exception( + "Issue encountered when attempting to list detailed sections info." + ) else: - logger.error("You must enable Plex in config. To enable it set 'ENABLE_PLEX' to true in config.json.") + logger.error( + "You must enable Plex in config. To enable it set 'ENABLE_PLEX' to true in config.json." 
+ ) def scan( @@ -56,21 +76,23 @@ def scan( scan_lookup_type=None, scan_lookup_id=None, ): - if config['ENABLE_PLEX']: + if config["ENABLE_PLEX"]: scan_path = "" # sleep for delay while True: logger.info("Scan request from %s for '%s'.", scan_for, path) - if config['SERVER_SCAN_DELAY']: - logger.info("Sleeping for %d seconds...", config['SERVER_SCAN_DELAY']) - time.sleep(config['SERVER_SCAN_DELAY']) + if config["SERVER_SCAN_DELAY"]: + logger.info("Sleeping for %d seconds...", config["SERVER_SCAN_DELAY"]) + time.sleep(config["SERVER_SCAN_DELAY"]) # check if root scan folder for if path in resleep_paths: logger.info("Another scan request occurred for folder of '%s'.", path) - logger.info("Sleeping again for %d seconds...", config['SERVER_SCAN_DELAY']) + logger.info( + "Sleeping again for %d seconds...", config["SERVER_SCAN_DELAY"] + ) utils.remove_item_from_list(path, resleep_paths) else: break @@ -83,14 +105,23 @@ def scan( while True: checks += 1 if os.path.exists(check_path): - logger.info("File '%s' exists on check %d of %d.", check_path, checks, config['SERVER_MAX_FILE_CHECKS']) + logger.info( + "File '%s' exists on check %d of %d.", + check_path, + checks, + config["SERVER_MAX_FILE_CHECKS"], + ) if not scan_path or not len(scan_path): - scan_path = os.path.dirname(path).strip() if not scan_path_is_directory else path.strip() + scan_path = ( + os.path.dirname(path).strip() + if not scan_path_is_directory + else path.strip() + ) break elif ( not scan_path_is_directory - and config['SERVER_SCAN_FOLDER_ON_FILE_EXISTS_EXHAUSTION'] - and config['SERVER_MAX_FILE_CHECKS'] - checks == 1 + and config["SERVER_SCAN_FOLDER_ON_FILE_EXISTS_EXHAUSTION"] + and config["SERVER_MAX_FILE_CHECKS"] - checks == 1 ): # penultimate check but SERVER_SCAN_FOLDER_ON_FILE_EXISTS_EXHAUSTION was turned on # lets make scan path the folder instead for the final check @@ -99,25 +130,30 @@ def scan( "in %s seconds...", check_path, os.path.dirname(path), - config['SERVER_FILE_CHECK_DELAY'], + config["SERVER_FILE_CHECK_DELAY"], ) check_path = os.path.dirname(check_path).strip() scan_path = os.path.dirname(path).strip() scan_path_is_directory = os.path.isdir(check_path) - time.sleep(config['SERVER_FILE_CHECK_DELAY']) + time.sleep(config["SERVER_FILE_CHECK_DELAY"]) # send Rclone cache clear if enabled - if config['RCLONE']['RC_CACHE_REFRESH']['ENABLED']: + if config["RCLONE"]["RC_CACHE_REFRESH"]["ENABLED"]: utils.rclone_rc_clear_cache(config, check_path) - elif checks >= config['SERVER_MAX_FILE_CHECKS']: - logger.warning("File '%s' exhausted all available checks. Aborting scan request.", check_path) + elif checks >= config["SERVER_MAX_FILE_CHECKS"]: + logger.warning( + "File '%s' exhausted all available checks. Aborting scan request.", + check_path, + ) # remove item from database if sqlite is enabled - if config['SERVER_USE_SQLITE']: + if config["SERVER_USE_SQLITE"]: if db.remove_item(path): logger.info("Removed '%s' from Plex Autoscan database.", path) time.sleep(1) else: - logger.error("Failed removing '%s' from Plex Autoscan database.", path) + logger.error( + "Failed removing '%s' from Plex Autoscan database.", path + ) return else: @@ -125,17 +161,17 @@ def scan( "File '%s' did not exist on check %d of %d. 
Checking again in %s seconds...", check_path, checks, - config['SERVER_MAX_FILE_CHECKS'], - config['SERVER_FILE_CHECK_DELAY'], + config["SERVER_MAX_FILE_CHECKS"], + config["SERVER_FILE_CHECK_DELAY"], ) - time.sleep(config['SERVER_FILE_CHECK_DELAY']) + time.sleep(config["SERVER_FILE_CHECK_DELAY"]) # send Rclone cache clear if enabled - if config['RCLONE']['RC_CACHE_REFRESH']['ENABLED']: + if config["RCLONE"]["RC_CACHE_REFRESH"]["ENABLED"]: utils.rclone_rc_clear_cache(config, check_path) params = {"path": scan_path} - headers = {"X-Plex-Token": config['PLEX_TOKEN'], "Accept": "application/json"} + headers = {"X-Plex-Token": config["PLEX_TOKEN"], "Accept": "application/json"} # plex scanner final_cmd = requests.get( @@ -147,31 +183,41 @@ def scan( # invoke plex scanner priority = utils.get_priority(config, scan_path) - logger.debug(f"Waiting for turn in the scan request backlog with priority '{priority}'...") + logger.debug( + f"Waiting for turn in the scan request backlog with priority '{priority}'..." + ) lock.acquire(priority) try: logger.info("Scan request is now being processed...") # run external command before scan if supplied - if len(config['RUN_COMMAND_BEFORE_SCAN']) > 2: - logger.info("Running external command: %r", config['RUN_COMMAND_BEFORE_SCAN']) - utils.run_command(config['RUN_COMMAND_BEFORE_SCAN']) + if len(config["RUN_COMMAND_BEFORE_SCAN"]) > 2: + logger.info( + "Running external command: %r", config["RUN_COMMAND_BEFORE_SCAN"] + ) + utils.run_command(config["RUN_COMMAND_BEFORE_SCAN"]) logger.info("Finished running external command.") # wait for Plex to become responsive (if PLEX_CHECK_BEFORE_SCAN is enabled) - if 'PLEX_CHECK_BEFORE_SCAN' in config and config['PLEX_CHECK_BEFORE_SCAN']: + if "PLEX_CHECK_BEFORE_SCAN" in config and config["PLEX_CHECK_BEFORE_SCAN"]: plex_account_user = wait_plex_alive(config) if plex_account_user is not None: - logger.info(f"Plex is available for media scanning - (Server Account: '{plex_account_user}').") + logger.info( + f"Plex is available for media scanning - (Server Account: '{plex_account_user}')." + ) # begin scan logger.info(f"Running Plex Scanner for '{scan_path}'.") logger.debug(final_cmd) if final_cmd.status_code == 200: - logger.debug(f"Successfully sent scan request to Plex for '{scan_path}'.") + logger.debug( + f"Successfully sent scan request to Plex for '{scan_path}'." + ) logger.info("Finished scan!") else: - logger.error(f"Error occurred when trying to send scan request to Plex for '{scan_path}'.") + logger.error( + f"Error occurred when trying to send scan request to Plex for '{scan_path}'." 
+ ) logger.error("-" * 100) logger.error(f"Status code: {final_cmd.status_code}") logger.error(f"Content: {final_cmd.content}") @@ -179,58 +225,93 @@ def scan( logger.error("-" * 100) # remove item from Plex database if sqlite is enabled - if config['SERVER_USE_SQLITE']: + if config["SERVER_USE_SQLITE"]: if db.remove_item(path): logger.debug("Removed '%s' from Plex Autoscan database.", path) time.sleep(1) - logger.info("There are %d queued item(s) remaining.", db.queued_count()) + logger.info( + "There are %d queued item(s) remaining.", db.queued_count() + ) else: - logger.error("Failed removing '%s' from Plex Autoscan database.", path) + logger.error( + "Failed removing '%s' from Plex Autoscan database.", path + ) # empty trash if configured - if config['PLEX_EMPTY_TRASH'] and config['PLEX_TOKEN'] and config['PLEX_EMPTY_TRASH_MAX_FILES']: + if ( + config["PLEX_EMPTY_TRASH"] + and config["PLEX_TOKEN"] + and config["PLEX_EMPTY_TRASH_MAX_FILES"] + ): logger.debug("Checking deleted items count in 10 seconds...") time.sleep(10) # check deleted item count, don't proceed if more than this value deleted_items = get_deleted_count(config) - if deleted_items > config['PLEX_EMPTY_TRASH_MAX_FILES']: + if deleted_items > config["PLEX_EMPTY_TRASH_MAX_FILES"]: logger.warning( f"There were {deleted_items} deleted files. Skip emptying of trash for Section '{section}'." ) elif deleted_items == -1: - logger.error("Could not determine deleted item count. Abort emptying of trash.") - elif not config['PLEX_EMPTY_TRASH_ZERO_DELETED'] and not deleted_items and scan_type != 'Upgrade': - logger.debug("Skipping emptying trash as there were no deleted items.") + logger.error( + "Could not determine deleted item count. Abort emptying of trash." + ) + elif ( + not config["PLEX_EMPTY_TRASH_ZERO_DELETED"] + and not deleted_items + and scan_type != "Upgrade" + ): + logger.debug( + "Skipping emptying trash as there were no deleted items." + ) else: - logger.info(f"Emptying trash to clear {deleted_items} deleted items...") + logger.info( + f"Emptying trash to clear {deleted_items} deleted items..." + ) empty_trash(config, str(section)) # analyze movie/episode - if config['PLEX_ANALYZE_TYPE'].lower() != 'off' and not scan_path_is_directory: + if ( + config["PLEX_ANALYZE_TYPE"].lower() != "off" + and not scan_path_is_directory + ): logger.debug("Sleeping for 10 seconds...") time.sleep(10) logger.debug("Sending analysis request...") analyze_item(config, path) # match item - if config['PLEX_FIX_MISMATCHED'] and config['PLEX_TOKEN'] and not scan_path_is_directory: + if ( + config["PLEX_FIX_MISMATCHED"] + and config["PLEX_TOKEN"] + and not scan_path_is_directory + ): # were we initiated with the scan_title/scan_lookup_type/scan_lookup_id parameters? - if scan_title is not None and scan_lookup_type is not None and scan_lookup_id is not None: + if ( + scan_title is not None + and scan_lookup_type is not None + and scan_lookup_id is not None + ): logger.debug("Sleeping for 10 seconds...") time.sleep(10) logger.debug( f"Validating match for '{scan_title}' ({scan_lookup_type} ID: { str(scan_lookup_id)})..." 
                     )
-                    match_item_parent(config, path, scan_title, scan_lookup_type, scan_lookup_id)
+                    match_item_parent(
+                        config, path, scan_title, scan_lookup_type, scan_lookup_id
+                    )
             # run external command after scan if supplied
-            if len(config['RUN_COMMAND_AFTER_SCAN']) > 2:
-                logger.info(f"Running external command: '{config['RUN_COMMAND_AFTER_SCAN']}'.")
-                utils.run_command(config['RUN_COMMAND_AFTER_SCAN'])
+            if len(config["RUN_COMMAND_AFTER_SCAN"]) > 2:
+                logger.info(
+                    f"Running external command: '{config['RUN_COMMAND_AFTER_SCAN']}'."
+                )
+                utils.run_command(config["RUN_COMMAND_AFTER_SCAN"])
                 logger.info("Finished running external command.")
         except Exception:
-            logger.exception(f"Unexpected exception occurred while processing: '{scan_path}'.")
+            logger.exception(
+                f"Unexpected exception occurred while processing: '{scan_path}'."
+            )
         finally:
             lock.release()
@@ -238,24 +319,28 @@ def scan(
 def match_item_parent(config, scan_path, scan_title, scan_lookup_type, scan_lookup_id):
-    if not os.path.exists(config['PLEX_DATABASE_PATH']):
-        logger.info(f"Could not analyze '{scan_path}' because Plex database could not be found.")
+    if not os.path.exists(config["PLEX_DATABASE_PATH"]):
+        logger.info(
+            f"Could not analyze '{scan_path}' because Plex database could not be found."
+        )
         return
     # get files metadata_item_id
     metadata_item_id = get_file_metadata_item_id(config, scan_path)
     if metadata_item_id is None:
-        logger.error(f"Aborting match of '{scan_path}' as could not find 'metadata_item_id'.")
+        logger.error(
+            f"Aborting match of '{scan_path}' as could not find 'metadata_item_id'."
+        )
         return
     # find metadata_item_id parent info
     metadata_item_parent_info = get_metadata_parent_info(config, int(metadata_item_id))
     if (
         metadata_item_parent_info is None
-        or 'parent_id' not in metadata_item_parent_info
-        or metadata_item_parent_info['parent_id'] is not None
-        or 'id' not in metadata_item_parent_info
-        or 'title' not in metadata_item_parent_info
+        or "parent_id" not in metadata_item_parent_info
+        or metadata_item_parent_info["parent_id"] is not None
+        or "id" not in metadata_item_parent_info
+        or "title" not in metadata_item_parent_info
     ):
         # parent_id should always be null as we are looking for a series or movie metadata_item_id which has no parent!
         logger.error(
@@ -265,16 +350,21 @@ def match_item_parent(config, scan_path, scan_title, scan_lookup_type, scan_look
         )
         return
-    parent_metadata_item_id = metadata_item_parent_info['id']
-    parent_title = metadata_item_parent_info['title']
-    parent_guid = metadata_item_parent_info['guid']
+    parent_metadata_item_id = metadata_item_parent_info["id"]
+    parent_title = metadata_item_parent_info["title"]
+    parent_guid = metadata_item_parent_info["guid"]
     logger.debug(
-        "Found parent 'metadata_item' of '%s': %d = '%s'.", scan_path, int(parent_metadata_item_id), parent_title
+        "Found parent 'metadata_item' of '%s': %d = '%s'.",
+        scan_path,
+        int(parent_metadata_item_id),
+        parent_title,
     )
     # did the metadata_item_id have matches already (dupes)?
     scan_directory = os.path.dirname(scan_path)
-    metadata_item_id_has_dupes = get_metadata_item_id_has_duplicates(config, metadata_item_id, scan_directory)
+    metadata_item_id_has_dupes = get_metadata_item_id_has_duplicates(
+        config, metadata_item_id, scan_directory
+    )
     if metadata_item_id_has_dupes:
         # there are multiple media_items with this metadata_item_id who's folder does not match the scan directory
         # we must split the parent metadata_item, wait 10 seconds and then repeat the steps above
@@ -296,17 +386,22 @@ def match_item_parent(config, scan_path, scan_title, scan_lookup_type, scan_look
         time.sleep(10)
         metadata_item_id = get_file_metadata_item_id(config, scan_path)
         if metadata_item_id is None:
-            logger.error("Aborting match of '%s' as could not find post split 'metadata_item_id'.", scan_path)
+            logger.error(
+                "Aborting match of '%s' as could not find post split 'metadata_item_id'.",
+                scan_path,
+            )
             return
         # now lookup parent again
-        metadata_item_parent_info = get_metadata_parent_info(config, int(metadata_item_id))
+        metadata_item_parent_info = get_metadata_parent_info(
+            config, int(metadata_item_id)
+        )
         if (
             metadata_item_parent_info is None
-            or 'parent_id' not in metadata_item_parent_info
-            or metadata_item_parent_info['parent_id'] is not None
-            or 'id' not in metadata_item_parent_info
-            or 'title' not in metadata_item_parent_info
+            or "parent_id" not in metadata_item_parent_info
+            or metadata_item_parent_info["parent_id"] is not None
+            or "id" not in metadata_item_parent_info
+            or "title" not in metadata_item_parent_info
         ):
             # parent_id should always be null as we are looking for a series or movie metadata_item_id
             # which has no parent!
@@ -318,22 +413,28 @@ def match_item_parent(config, scan_path, scan_title, scan_lookup_type, scan_look
             )
             return
-        parent_metadata_item_id = metadata_item_parent_info['id']
-        parent_title = metadata_item_parent_info['title']
-        parent_guid = metadata_item_parent_info['guid']
+        parent_metadata_item_id = metadata_item_parent_info["id"]
+        parent_title = metadata_item_parent_info["title"]
+        parent_guid = metadata_item_parent_info["guid"]
         logger.debug(
-            "Found parent 'metadata_item' of '%s': %d = '%s'.", scan_path, int(parent_metadata_item_id), parent_title
+            "Found parent 'metadata_item' of '%s': %d = '%s'.",
+            scan_path,
+            int(parent_metadata_item_id),
+            parent_title,
         )
     else:
         # there were no duplicate media_items with this metadata_item_id
-        logger.info("No duplicate 'media_items' found with 'metadata_item_id': '%d'", int(parent_metadata_item_id))
+        logger.info(
+            "No duplicate 'media_items' found with 'metadata_item_id': '%d'",
+            int(parent_metadata_item_id),
+        )
     # generate new guid
-    new_guid = 'com.plexapp.agents.%s://%s?lang=%s' % (
+    new_guid = "com.plexapp.agents.%s://%s?lang=%s" % (
         scan_lookup_type.lower(),
         str(scan_lookup_id).lower(),
-        config['PLEX_FIX_MISMATCHED_LANG'].lower(),
+        config["PLEX_FIX_MISMATCHED_LANG"].lower(),
     )
     # does good match?
if parent_guid and (parent_guid.lower() != new_guid): @@ -344,7 +445,13 @@ def match_item_parent(config, scan_path, scan_title, scan_lookup_type, scan_look new_guid, scan_title, ) - logger.info("Fixing match of '%s' (%s) to '%s' (%s).", parent_title, parent_guid, scan_title, new_guid) + logger.info( + "Fixing match of '%s' (%s) to '%s' (%s).", + parent_title, + parent_guid, + scan_title, + new_guid, + ) # fix item match_plex_item(config, parent_metadata_item_id, new_guid, scan_title) refresh_plex_item(config, parent_metadata_item_id, scan_title) @@ -362,49 +469,74 @@ def match_item_parent(config, scan_path, scan_title, scan_lookup_type, scan_look def analyze_item(config, scan_path): - if not os.path.exists(config['PLEX_DATABASE_PATH']): - logger.warning("Could not analyze of '%s' because Plex database could not be found.", scan_path) + if not os.path.exists(config["PLEX_DATABASE_PATH"]): + logger.warning( + "Could not analyze of '%s' because Plex database could not be found.", + scan_path, + ) return # get files metadata_item_id metadata_item_ids = get_file_metadata_ids(config, scan_path) if metadata_item_ids is None or not len(metadata_item_ids): - logger.warning("Aborting analysis of '%s' because could not find any 'metadata_item_id' for it.", scan_path) + logger.warning( + "Aborting analysis of '%s' because could not find any 'metadata_item_id' for it.", + scan_path, + ) return - metadata_item_id = ','.join(str(x) for x in metadata_item_ids) + metadata_item_id = ",".join(str(x) for x in metadata_item_ids) # build Plex analyze command - analyze_type = 'analyze-deeply' if config['PLEX_ANALYZE_TYPE'].lower() == 'deep' else 'analyze' + analyze_type = ( + "analyze-deeply" if config["PLEX_ANALYZE_TYPE"].lower() == "deep" else "analyze" + ) # wait for existing scanners to exit - if config['PLEX_WAIT_FOR_EXTERNAL_SCANNERS']: - if os.name == 'nt': - scanner_name = os.path.basename(config['PLEX_SCANNER']) + if config["PLEX_WAIT_FOR_EXTERNAL_SCANNERS"]: + if os.name == "nt": + scanner_name = os.path.basename(config["PLEX_SCANNER"]) else: - scanner_name = os.path.basename(config['PLEX_SCANNER']).replace('\\', '') - if not utils.wait_running_process(scanner_name, config['USE_DOCKER'], cmd_quote(config['DOCKER_NAME'])): + scanner_name = os.path.basename(config["PLEX_SCANNER"]).replace("\\", "") + if not utils.wait_running_process( + scanner_name, config["USE_DOCKER"], cmd_quote(config["DOCKER_NAME"]) + ): logger.warning( - "There was a problem waiting for existing '%s' process(s) to finish. Aborting scan.", scanner_name + "There was a problem waiting for existing '%s' process(s) to finish. 
Aborting scan.", + scanner_name, ) return else: logger.info("No '%s' processes were found.", scanner_name) - if os.name == 'nt': - final_cmd = '"%s" --%s --item %s' % (config['PLEX_SCANNER'], analyze_type, metadata_item_id) + if os.name == "nt": + final_cmd = '"%s" --%s --item %s' % ( + config["PLEX_SCANNER"], + analyze_type, + metadata_item_id, + ) else: - cmd = 'export LD_LIBRARY_PATH=' + config['PLEX_LD_LIBRARY_PATH'] + ';' - if not config['USE_DOCKER']: - cmd += 'export PLEX_MEDIA_SERVER_APPLICATION_SUPPORT_DIR=' + config['PLEX_SUPPORT_DIR'] + ';' - cmd += config['PLEX_SCANNER'] + ' --' + analyze_type + ' --item ' + metadata_item_id - - if config['USE_DOCKER']: - final_cmd = 'docker exec -u %s -i %s bash -c %s' % ( - cmd_quote(config['PLEX_USER']), - cmd_quote(config['DOCKER_NAME']), + cmd = "export LD_LIBRARY_PATH=" + config["PLEX_LD_LIBRARY_PATH"] + ";" + if not config["USE_DOCKER"]: + cmd += ( + "export PLEX_MEDIA_SERVER_APPLICATION_SUPPORT_DIR=" + + config["PLEX_SUPPORT_DIR"] + + ";" + ) + cmd += ( + config["PLEX_SCANNER"] + + " --" + + analyze_type + + " --item " + + metadata_item_id + ) + + if config["USE_DOCKER"]: + final_cmd = "docker exec -u %s -i %s bash -c %s" % ( + cmd_quote(config["PLEX_USER"]), + cmd_quote(config["DOCKER_NAME"]), cmd_quote(cmd), ) - elif config['USE_SUDO']: - final_cmd = 'sudo -u %s bash -c %s' % (config['PLEX_USER'], cmd_quote(cmd)) + elif config["USE_SUDO"]: + final_cmd = "sudo -u %s bash -c %s" % (config["PLEX_USER"], cmd_quote(cmd)) else: final_cmd = cmd @@ -413,7 +545,7 @@ def analyze_item(config, scan_path): f"Starting {'deep' if config['PLEX_ANALYZE_TYPE'].lower() == 'deep' else 'basic'} analysis of metadata_item: '{metadata_item_id}'." ) logger.debug(final_cmd) - if os.name == 'nt': + if os.name == "nt": utils.run_command(final_cmd) else: utils.run_command(final_cmd.encode("utf-8")) @@ -424,15 +556,19 @@ def analyze_item(config, scan_path): def get_file_metadata_item_id(config, file_path): try: - with sqlite3.connect(config['PLEX_DATABASE_PATH']) as conn: + with sqlite3.connect(config["PLEX_DATABASE_PATH"]) as conn: conn.row_factory = sqlite3.Row with closing(conn.cursor()) as c: # query media_parts to retrieve media_item_row for this file for x in range(5): - media_item_row = c.execute("SELECT * FROM media_parts WHERE file=?", (file_path,)).fetchone() + media_item_row = c.execute( + "SELECT * FROM media_parts WHERE file=?", (file_path,) + ).fetchone() if media_item_row: logger.debug( - "Found row in 'media_parts' where 'file' = '%s' after %d of 5 tries.", file_path, x + 1 + "Found row in 'media_parts' where 'file' = '%s' after %d of 5 tries.", + file_path, + x + 1, ) break else: @@ -445,18 +581,23 @@ def get_file_metadata_item_id(config, file_path): if not media_item_row: logger.error( - "Could not locate record in 'media_parts' where 'file' = '%s' after 5 tries.", file_path + "Could not locate record in 'media_parts' where 'file' = '%s' after 5 tries.", + file_path, ) return None - media_item_id = media_item_row['media_item_id'] + media_item_id = media_item_row["media_item_id"] if media_item_id and int(media_item_id): # query db to find metadata_item_id metadata_item_id = c.execute( "SELECT * FROM media_items WHERE id=?", (int(media_item_id),) - ).fetchone()['metadata_item_id'] + ).fetchone()["metadata_item_id"] if metadata_item_id and int(metadata_item_id): - logger.debug("Found 'metadata_item_id' for '%s': %d", file_path, int(metadata_item_id)) + logger.debug( + "Found 'metadata_item_id' for '%s': %d", + file_path, + int(metadata_item_id), + ) 
return int(metadata_item_id) except Exception: @@ -466,27 +607,27 @@ def get_file_metadata_item_id(config, file_path): def get_metadata_item_id_has_duplicates(config, metadata_item_id, scan_directory): try: - with sqlite3.connect(config['PLEX_DATABASE_PATH']) as conn: + with sqlite3.connect(config["PLEX_DATABASE_PATH"]) as conn: conn.row_factory = sqlite3.Row with closing(conn.cursor()) as c: # retrieve matches for metadata_item_id metadata_item_id_matches = c.execute( - 'select ' - 'count(mi.id) as matches ' - 'from media_items mi ' - 'join media_parts mp on mp.media_item_id = mi.id ' - 'where mi.metadata_item_id=? and mp.file not like ?', + "select " + "count(mi.id) as matches " + "from media_items mi " + "join media_parts mp on mp.media_item_id = mi.id " + "where mi.metadata_item_id=? and mp.file not like ?", ( metadata_item_id, - scan_directory + '%', + scan_directory + "%", ), ).fetchone() if metadata_item_id_matches: row_dict = dict(metadata_item_id_matches) - if 'matches' in row_dict and row_dict['matches'] >= 1: + if "matches" in row_dict and row_dict["matches"] >= 1: logger.info( "Found %d 'media_items' with 'metadata_item_id' %d where folder does not match: '%s'", - int(row_dict['matches']), + int(row_dict["matches"]), int(metadata_item_id), scan_directory, ) @@ -495,45 +636,50 @@ def get_metadata_item_id_has_duplicates(config, metadata_item_id, scan_directory return False logger.error( - "Failed determining if 'metadata_item_id' '%d' has duplicate 'media_items'.", int(metadata_item_id) + "Failed determining if 'metadata_item_id' '%d' has duplicate 'media_items'.", + int(metadata_item_id), ) except Exception: logger.exception( - "Exception determining if 'metadata_item_id' '%d' has duplicate 'media_items': ", int(metadata_item_id) + "Exception determining if 'metadata_item_id' '%d' has duplicate 'media_items': ", + int(metadata_item_id), ) return False def get_metadata_parent_info(config, metadata_item_id): try: - with sqlite3.connect(config['PLEX_DATABASE_PATH']) as conn: + with sqlite3.connect(config["PLEX_DATABASE_PATH"]) as conn: conn.row_factory = sqlite3.Row with closing(conn.cursor()) as c: # retrieve parent info for metadata_item_id metadata_item_parent_info = c.execute( - 'WITH cte_MediaItems AS (' - 'SELECT ' - 'mi.* ' - 'FROM metadata_items mi ' - 'WHERE mi.id = ? ' - 'UNION ' - 'SELECT mi.* ' - 'FROM cte_MediaItems cte ' - 'JOIN metadata_items mi ON mi.id = cte.parent_id' - ') ' - 'SELECT ' - 'cte.id' - ', cte.parent_id' - ', cte.guid' - ', cte.title ' - 'FROM cte_MediaItems cte ' - 'WHERE cte.parent_id IS NULL ' - 'LIMIT 1', + "WITH cte_MediaItems AS (" + "SELECT " + "mi.* " + "FROM metadata_items mi " + "WHERE mi.id = ? 
" + "UNION " + "SELECT mi.* " + "FROM cte_MediaItems cte " + "JOIN metadata_items mi ON mi.id = cte.parent_id" + ") " + "SELECT " + "cte.id" + ", cte.parent_id" + ", cte.guid" + ", cte.title " + "FROM cte_MediaItems cte " + "WHERE cte.parent_id IS NULL " + "LIMIT 1", (metadata_item_id,), ).fetchone() if metadata_item_parent_info: metadata_item_row = dict(metadata_item_parent_info) - if 'parent_id' in metadata_item_row and not metadata_item_row['parent_id']: + if ( + "parent_id" in metadata_item_row + and not metadata_item_row["parent_id"] + ): logger.debug( "Found parent row in 'metadata_items' for 'metadata_item_id' '%d': %s", int(metadata_item_id), @@ -542,11 +688,15 @@ def get_metadata_parent_info(config, metadata_item_id): return metadata_item_row logger.error( - "Failed finding parent row in 'metadata_items' for 'metadata_item_id': %d", int(metadata_item_id) + "Failed finding parent row in 'metadata_items' for 'metadata_item_id': %d", + int(metadata_item_id), ) except Exception: - logger.exception("Exception finding parent info for 'metadata_item_id' '%d': ", int(metadata_item_id)) + logger.exception( + "Exception finding parent info for 'metadata_item_id' '%d': ", + int(metadata_item_id), + ) return None @@ -555,15 +705,19 @@ def get_file_metadata_ids(config, file_path): media_item_row = None try: - with sqlite3.connect(config['PLEX_DATABASE_PATH']) as conn: + with sqlite3.connect(config["PLEX_DATABASE_PATH"]) as conn: conn.row_factory = sqlite3.Row with closing(conn.cursor()) as c: # query media_parts to retrieve media_item_row for this file for x in range(5): - media_item_row = c.execute("SELECT * FROM media_parts WHERE file=?", (file_path,)).fetchone() + media_item_row = c.execute( + "SELECT * FROM media_parts WHERE file=?", (file_path,) + ).fetchone() if media_item_row: logger.debug( - "Found row in 'media_parts' where 'file' = '%s' after %d of 5 tries.", file_path, x + 1 + "Found row in 'media_parts' where 'file' = '%s' after %d of 5 tries.", + file_path, + x + 1, ) break else: @@ -576,48 +730,69 @@ def get_file_metadata_ids(config, file_path): if not media_item_row: logger.error( - "Could not locate record in 'media_parts' where 'file' = '%s' after 5 tries", file_path + "Could not locate record in 'media_parts' where 'file' = '%s' after 5 tries", + file_path, ) return None - media_item_id = media_item_row['media_item_id'] + media_item_id = media_item_row["media_item_id"] if media_item_id and int(media_item_id): # query db to find metadata_item_id metadata_item_id = c.execute( "SELECT * FROM media_items WHERE id=?", (int(media_item_id),) - ).fetchone()['metadata_item_id'] + ).fetchone()["metadata_item_id"] if metadata_item_id and int(metadata_item_id): - logger.debug("Found 'metadata_item_id' for '%s': %d", file_path, int(metadata_item_id)) + logger.debug( + "Found 'metadata_item_id' for '%s': %d", + file_path, + int(metadata_item_id), + ) # query db to find parent_id of metadata_item_id - if config['PLEX_ANALYZE_DIRECTORY']: + if config["PLEX_ANALYZE_DIRECTORY"]: parent_id = c.execute( - "SELECT * FROM metadata_items WHERE id=?", (int(metadata_item_id),) - ).fetchone()['parent_id'] + "SELECT * FROM metadata_items WHERE id=?", + (int(metadata_item_id),), + ).fetchone()["parent_id"] if not parent_id or not int(parent_id): # could not find parent_id of this item, likely its a movie... 
# lets just return the metadata_item_id return [int(metadata_item_id)] - logger.debug("Found 'parent_id' for '%s': %d", file_path, int(parent_id)) + logger.debug( + "Found 'parent_id' for '%s': %d", + file_path, + int(parent_id), + ) # if mode is basic, single parent_id is enough - if config['PLEX_ANALYZE_TYPE'].lower() == 'basic': + if config["PLEX_ANALYZE_TYPE"].lower() == "basic": return [int(parent_id)] # lets find all metadata_item_id's with this parent_id for use with deep analysis metadata_items = c.execute( - "SELECT * FROM metadata_items WHERE parent_id=?", (int(parent_id),) + "SELECT * FROM metadata_items WHERE parent_id=?", + (int(parent_id),), ).fetchall() if not metadata_items: # could not find any results, lets just return metadata_item_id return [int(metadata_item_id)] for row in metadata_items: - if row['id'] and int(row['id']) and int(row['id']) not in results: - results.append(int(row['id'])) - - logger.debug("Found 'media_item_id' for '%s': %s", file_path, results) - logger.info("Found %d 'media_item_id' to deep analyze for: '%s'", len(results), file_path) + if ( + row["id"] + and int(row["id"]) + and int(row["id"]) not in results + ): + results.append(int(row["id"])) + + logger.debug( + "Found 'media_item_id' for '%s': %s", file_path, results + ) + logger.info( + "Found %d 'media_item_id' to deep analyze for: '%s'", + len(results), + file_path, + ) else: # user had PLEX_ANALYZE_DIRECTORY as False - lets just scan the single metadata_item_id results.append(int(metadata_item_id)) @@ -628,19 +803,25 @@ def get_file_metadata_ids(config, file_path): def empty_trash(config, section): - if len(config['PLEX_EMPTY_TRASH_CONTROL_FILES']): + if len(config["PLEX_EMPTY_TRASH_CONTROL_FILES"]): logger.info("Control file(s) are specified.") - for control in config['PLEX_EMPTY_TRASH_CONTROL_FILES']: + for control in config["PLEX_EMPTY_TRASH_CONTROL_FILES"]: if not os.path.exists(control): - logger.info("Skip emptying of trash as control file is not present: '%s'", control) + logger.info( + "Skip emptying of trash as control file is not present: '%s'", + control, + ) return logger.info("Commence emptying of trash as control file(s) are present.") for x in range(5): try: - headers = {'X-Plex-Token': config['PLEX_TOKEN'], 'Accept': 'application/json'} + headers = { + "X-Plex-Token": config["PLEX_TOKEN"], + "Accept": "application/json", + } resp = requests.put( f"{config['PLEX_LOCAL_URL']}/library/sections/{section}/emptyTrash", data=None, @@ -648,7 +829,11 @@ def empty_trash(config, section): headers=headers, ) if resp.status_code == 200: - logger.info("Trash cleared for Section '%s' after %d of 5 tries.", section, x + 1) + logger.info( + "Trash cleared for Section '%s' after %d of 5 tries.", + section, + x + 1, + ) break else: logger.error( @@ -658,13 +843,17 @@ def empty_trash(config, section): ) time.sleep(10) except Exception as ex: - logger.exception("Exception sending empty trash for Section '%s' in %d of 5 attempts: ", section, x + 1) + logger.exception( + "Exception sending empty trash for Section '%s' in %d of 5 attempts: ", + section, + x + 1, + ) time.sleep(10) return def wait_plex_alive(config): - if not config['PLEX_LOCAL_URL'] or not config['PLEX_TOKEN']: + if not config["PLEX_LOCAL_URL"] or not config["PLEX_TOKEN"]: logger.error( "Unable to check if Plex was ready for scan requests because 'PLEX_LOCAL_URL' and/or 'PLEX_TOKEN' are missing in config." 
) @@ -675,17 +864,24 @@ def wait_plex_alive(config): while True: check_attempts += 1 try: - headers = {'X-Plex-Token': config['PLEX_TOKEN'], 'Accept': 'application/json'} + headers = { + "X-Plex-Token": config["PLEX_TOKEN"], + "Accept": "application/json", + } resp = requests.get( f"{config['PLEX_LOCAL_URL']}/myplex/account", headers=headers, timeout=30, verify=False, ) - if resp.status_code == 200 and 'json' in resp.headers['Content-Type']: + if resp.status_code == 200 and "json" in resp.headers["Content-Type"]: resp_json = resp.json() - if 'MyPlex' in resp_json: - plex_user = resp_json['MyPlex']['username'] if 'username' in resp_json['MyPlex'] else 'Unknown' + if "MyPlex" in resp_json: + plex_user = ( + resp_json["MyPlex"]["username"] + if "username" in resp_json["MyPlex"] + else "Unknown" + ) return plex_user logger.error( @@ -696,7 +892,10 @@ def wait_plex_alive(config): resp.text, ) except Exception: - logger.exception("Exception checking if Plex was available at %s: ", config['PLEX_LOCAL_URL']) + logger.exception( + "Exception checking if Plex was available at %s: ", + config["PLEX_LOCAL_URL"], + ) logger.warning("Checking again in 15 seconds (attempt %d)...", check_attempts) time.sleep(15) continue @@ -705,13 +904,13 @@ def wait_plex_alive(config): def get_deleted_count(config): try: - with sqlite3.connect(config['PLEX_DATABASE_PATH']) as conn: + with sqlite3.connect(config["PLEX_DATABASE_PATH"]) as conn: with closing(conn.cursor()) as c: deleted_metadata = c.execute( - 'SELECT count(*) FROM metadata_items WHERE deleted_at IS NOT NULL' + "SELECT count(*) FROM metadata_items WHERE deleted_at IS NOT NULL" ).fetchone()[0] deleted_media_parts = c.execute( - 'SELECT count(*) FROM media_parts WHERE deleted_at IS NOT NULL' + "SELECT count(*) FROM media_parts WHERE deleted_at IS NOT NULL" ).fetchone()[0] return int(deleted_metadata) + int(deleted_media_parts) @@ -724,39 +923,54 @@ def get_deleted_count(config): def split_plex_item(config, metadata_item_id): try: - headers = {'X-Plex-Token': config['PLEX_TOKEN']} - url_str = '%s/library/metadata/%d/split' % (config['PLEX_LOCAL_URL'], int(metadata_item_id)) + headers = {"X-Plex-Token": config["PLEX_TOKEN"]} + url_str = "%s/library/metadata/%d/split" % ( + config["PLEX_LOCAL_URL"], + int(metadata_item_id), + ) # send options request first (webui does this) requests.options(url_str, headers=headers, timeout=30) resp = requests.put(url_str, headers=headers, timeout=30) if resp.status_code == 200: - logger.info("Successfully split 'metadata_item_id': '%d'", int(metadata_item_id)) + logger.info( + "Successfully split 'metadata_item_id': '%d'", int(metadata_item_id) + ) return True else: logger.error( - "Failed splitting 'metadata_item_id': '%d'... Response =\n%s\n", int(metadata_item_id), resp.text + "Failed splitting 'metadata_item_id': '%d'... 
Response =\n%s\n", + int(metadata_item_id), + resp.text, ) except Exception: - logger.exception("Exception splitting 'metadata_item' %d: ", int(metadata_item_id)) + logger.exception( + "Exception splitting 'metadata_item' %d: ", int(metadata_item_id) + ) return False def match_plex_item(config, metadata_item_id, new_guid, new_name): try: params = { - 'guid': new_guid, - 'name': new_name, + "guid": new_guid, + "name": new_name, } - headers = {'X-Plex-Token': config['PLEX_TOKEN'], 'Accept': 'application/json'} - url_str = '%s/library/metadata/%d/match' % (config['PLEX_LOCAL_URL'], int(metadata_item_id)) + headers = {"X-Plex-Token": config["PLEX_TOKEN"], "Accept": "application/json"} + url_str = "%s/library/metadata/%d/match" % ( + config["PLEX_LOCAL_URL"], + int(metadata_item_id), + ) requests.options(url_str, params=params, timeout=30, headers=headers) resp = requests.put(url_str, params=params, timeout=30, headers=headers) if resp.status_code == 200: logger.info( - "Successfully matched 'metadata_item_id' '%d' to '%s' (%s).", int(metadata_item_id), new_name, new_guid + "Successfully matched 'metadata_item_id' '%d' to '%s' (%s).", + int(metadata_item_id), + new_name, + new_guid, ) return True else: @@ -776,14 +990,21 @@ def match_plex_item(config, metadata_item_id, new_guid, new_name): def refresh_plex_item(config, metadata_item_id, new_name): try: headers = { - 'X-Plex-Token': config['PLEX_TOKEN'], + "X-Plex-Token": config["PLEX_TOKEN"], } - url_str = '%s/library/metadata/%d/refresh' % (config['PLEX_LOCAL_URL'], int(metadata_item_id)) + url_str = "%s/library/metadata/%d/refresh" % ( + config["PLEX_LOCAL_URL"], + int(metadata_item_id), + ) requests.options(url_str, headers=headers, timeout=30) resp = requests.put(url_str, headers=headers, timeout=30) if resp.status_code == 200: - logger.info("Successfully refreshed 'metadata_item_id' '%d' of '%s'.", int(metadata_item_id), new_name) + logger.info( + "Successfully refreshed 'metadata_item_id' '%d' of '%s'.", + int(metadata_item_id), + new_name, + ) return True else: logger.error( @@ -794,5 +1015,7 @@ def refresh_plex_item(config, metadata_item_id, new_name): ) except Exception as e: logger.exception(e) - logger.exception(f"Exception refreshing metadata_item '{int(metadata_item_id)}'.") + logger.exception( + f"Exception refreshing metadata_item '{int(metadata_item_id)}'." 
+        )
     return False
diff --git a/rclone.py b/rclone.py
index a6ef81b..7fe42b0 100644
--- a/rclone.py
+++ b/rclone.py
@@ -9,7 +9,9 @@ class RcloneDecoder:
     def __init__(self, binary, crypt_mappings, config):
         self._binary = binary
         if self._binary == "" or not os.path.isfile(binary):
-            self._binary = os.path.normpath(subprocess.check_output(["which", "rclone"]).decode().rstrip('\n'))
+            self._binary = os.path.normpath(
+                subprocess.check_output(["which", "rclone"]).decode().rstrip("\n")
+            )
             logger.debug("Rclone binary path located as: '%s'", binary)
         self._config = config
@@ -18,7 +20,7 @@ def __init__(self, binary, crypt_mappings, config):
     def decode_path(self, path):
         for crypt_dir, mapped_remotes in self._crypt_mappings.items():
             # Isolate root/file path and attempt to locate entry in mappings
-            file_path = path.replace(crypt_dir, '')
+            file_path = path.replace(crypt_dir, "")
             logger.debug("Encoded file path identified as: '%s'", file_path)
             if path.lower().startswith(crypt_dir.lower()):
                 for mapped_remote in mapped_remotes:
@@ -31,27 +33,52 @@ def decode_path(self, path):
                     logger.info("Attempting to decode...")
                     logger.debug(
                         "Raw query is: '%s'",
-                        " ".join([self._binary, "--config", self._config, "cryptdecode", mapped_remote, file_path]),
+                        " ".join(
+                            [
+                                self._binary,
+                                "--config",
+                                self._config,
+                                "cryptdecode",
+                                mapped_remote,
+                                file_path,
+                            ]
+                        ),
                     )
                     try:
                         decoded = (
                             subprocess.check_output(
-                                [self._binary, "--config", self._config, "cryptdecode", mapped_remote, file_path],
+                                [
+                                    self._binary,
+                                    "--config",
+                                    self._config,
+                                    "cryptdecode",
+                                    mapped_remote,
+                                    file_path,
+                                ],
                                 stderr=subprocess.STDOUT,
                             )
-                            .decode('utf-8')
-                            .rstrip('\n')
+                            .decode("utf-8")
+                            .rstrip("\n")
                         )
                     except subprocess.CalledProcessError as e:
-                        logger.error("Command '%s' returned with error (code %s): %s", e.cmd, e.returncode, e.output)
+                        logger.error(
+                            "Command '%s' returned with error (code %s): %s",
+                            e.cmd,
+                            e.returncode,
+                            e.output,
+                        )
                         return None
-                    decoded = decoded.split(' ', 1)[1].lstrip()
+                    decoded = decoded.split(" ", 1)[1].lstrip()
                     if "failed" in decoded.lower():
                         logger.error("Failed to decode path: '%s'", file_path)
                     else:
-                        logger.debug("Decoded path of '%s' is: '%s'", file_path, os.path.join(crypt_dir, decoded))
+                        logger.debug(
+                            "Decoded path of '%s' is: '%s'",
+                            file_path,
+                            os.path.join(crypt_dir, decoded),
+                        )
                         logger.info("Decode successful.")
                         return [os.path.join(crypt_dir, decoded)]
             else:
diff --git a/scan.py b/scan.py
index 84d61d7..7f1f86f 100644
--- a/scan.py
+++ b/scan.py
@@ -22,16 +22,18 @@
 ############################################################
 # Logging
-logFormatter = logging.Formatter('%(asctime)24s - %(levelname)8s - %(name)9s [%(thread)5d]: %(message)s')
+logFormatter = logging.Formatter(
+    "%(asctime)24s - %(levelname)8s - %(name)9s [%(thread)5d]: %(message)s"
+)
 rootLogger = logging.getLogger()
 rootLogger.setLevel(logging.INFO)
 # Decrease modules logging
-logging.getLogger('requests').setLevel(logging.ERROR)
-logging.getLogger('werkzeug').setLevel(logging.ERROR)
-logging.getLogger('peewee').setLevel(logging.ERROR)
-logging.getLogger('urllib3.connectionpool').setLevel(logging.ERROR)
-logging.getLogger('sqlitedict').setLevel(logging.ERROR)
+logging.getLogger("requests").setLevel(logging.ERROR)
+logging.getLogger("werkzeug").setLevel(logging.ERROR)
+logging.getLogger("peewee").setLevel(logging.ERROR)
+logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)
+logging.getLogger("sqlitedict").setLevel(logging.ERROR)
 # Console logger, log to stdout instead of stderr
consoleHandler = logging.StreamHandler(sys.stdout) @@ -42,12 +44,14 @@ conf = config.Config() # File logger -fileHandler = RotatingFileHandler(conf.settings['logfile'], maxBytes=1024 * 1024 * 2, backupCount=5, encoding='utf-8') +fileHandler = RotatingFileHandler( + conf.settings["logfile"], maxBytes=1024 * 1024 * 2, backupCount=5, encoding="utf-8" +) fileHandler.setFormatter(logFormatter) rootLogger.addHandler(fileHandler) # Set configured log level -rootLogger.setLevel(conf.settings['loglevel']) +rootLogger.setLevel(conf.settings["loglevel"]) # Scan logger logger = rootLogger.getChild("AUTOSCAN") @@ -79,16 +83,16 @@ google_drive = None manager = None -if not conf.configs['ENABLE_PLEX']: - if not conf.configs['ENABLE_JOE']: +if not conf.configs["ENABLE_PLEX"]: + if not conf.configs["ENABLE_JOE"]: logger.error("None of the apps are enabled.. Exiting..") sys.exit(1) else: - if conf.configs['JOE_API_KEY'] == '': + if conf.configs["JOE_API_KEY"] == "": logger.error("JOE_API_KEY is not set.. Exiting..") sys.exit(1) else: - if conf.configs['PLEX_TOKEN'] == '': + if conf.configs["PLEX_TOKEN"] == "": logger.error("PLEX_TOKEN is not set.. Exiting..") sys.exit(1) @@ -111,10 +115,10 @@ def queue_processor(): args=[ conf.configs, scan_lock, - db_item['scan_path'], - db_item['scan_for'], - db_item['scan_section'], - db_item['scan_type'], + db_item["scan_path"], + db_item["scan_for"], + db_item["scan_section"], + db_item["scan_type"], resleep_paths, ], ) @@ -122,7 +126,9 @@ def queue_processor(): time.sleep(2) logger.info("Restored %d scan request(s) from Autoscan database.", items) except Exception: - logger.exception("Exception while processing scan requests from Autoscan database.") + logger.exception( + "Exception while processing scan requests from Autoscan database." + ) return @@ -131,8 +137,15 @@ def queue_processor(): ############################################################ -def start_scan(path, scan_for, scan_type, scan_title=None, scan_lookup_type=None, scan_lookup_id=None): - if conf.configs['ENABLE_PLEX']: +def start_scan( + path, + scan_for, + scan_type, + scan_title=None, + scan_lookup_type=None, + scan_lookup_id=None, +): + if conf.configs["ENABLE_PLEX"]: section = utils.get_plex_section(conf.configs, path) if section <= 0: return False @@ -141,14 +154,15 @@ def start_scan(path, scan_for, scan_type, scan_title=None, scan_lookup_type=None else: section = 0 - if conf.configs['SERVER_USE_SQLITE']: + if conf.configs["SERVER_USE_SQLITE"]: db_exists, db_file = db.exists_file_root_path(path) if not db_exists and db.add_item(path, scan_for, section, scan_type): logger.info("Added '%s' to Autoscan database.", path) logger.info("Proceeding with scan...") else: logger.info( - "Already processing '%s' from same folder. Skip adding extra scan request to the queue.", db_file + "Already processing '%s' from same folder. 
Skip adding extra scan request to the queue.", + db_file, ) resleep_paths.append(db_file) return False @@ -203,21 +217,28 @@ def process_google_changes(items_added): new_file_paths.append(file_path) # remove files that already exist in the plex database - removed_rejected_exists = utils.remove_files_exist_in_plex_database(conf.configs, new_file_paths) + removed_rejected_exists = utils.remove_files_exist_in_plex_database( + conf.configs, new_file_paths + ) if removed_rejected_exists: - logger.info("Rejected %d file(s) from Google Drive changes for already being in Plex.", removed_rejected_exists) + logger.info( + "Rejected %d file(s) from Google Drive changes for already being in Plex.", + removed_rejected_exists, + ) # process the file_paths list if len(new_file_paths): logger.info( - "Proceeding with scan of %d file(s) from Google Drive changes: %s", len(new_file_paths), new_file_paths + "Proceeding with scan of %d file(s) from Google Drive changes: %s", + len(new_file_paths), + new_file_paths, ) # loop each file, remapping and starting a scan thread for file_path in new_file_paths: final_path = utils.map_pushed_path(conf.configs, file_path) - start_scan(final_path, 'Google Drive', 'Download') + start_scan(final_path, "Google Drive", "Download") return True @@ -232,21 +253,25 @@ def thread_google_monitor(): crypt_decoder = None # load rclone client if crypt being used - if conf.configs['RCLONE']['CRYPT_MAPPINGS'] != {}: - logger.info("Crypt mappings have been defined. Initializing Rclone Crypt Decoder...") + if conf.configs["RCLONE"]["CRYPT_MAPPINGS"] != {}: + logger.info( + "Crypt mappings have been defined. Initializing Rclone Crypt Decoder..." + ) crypt_decoder = rclone.RcloneDecoder( - conf.configs['RCLONE']['BINARY'], conf.configs['RCLONE']['CRYPT_MAPPINGS'], conf.configs['RCLONE']['CONFIG'] + conf.configs["RCLONE"]["BINARY"], + conf.configs["RCLONE"]["CRYPT_MAPPINGS"], + conf.configs["RCLONE"]["CONFIG"], ) # load google drive manager manager = drive.GoogleDriveManager( - conf.configs['GOOGLE']['CLIENT_ID'], - conf.configs['GOOGLE']['CLIENT_SECRET'], - conf.settings['cachefile'], - allowed_config=conf.configs['GOOGLE']['ALLOWED'], - show_cache_logs=conf.configs['GOOGLE']['SHOW_CACHE_LOGS'], + conf.configs["GOOGLE"]["CLIENT_ID"], + conf.configs["GOOGLE"]["CLIENT_SECRET"], + conf.settings["cachefile"], + allowed_config=conf.configs["GOOGLE"]["ALLOWED"], + show_cache_logs=conf.configs["GOOGLE"]["SHOW_CACHE_LOGS"], crypt_decoder=crypt_decoder, - allowed_teamdrives=conf.configs['GOOGLE']['TEAMDRIVES'], + allowed_teamdrives=conf.configs["GOOGLE"]["TEAMDRIVES"], ) if not manager.is_authorized(): @@ -256,12 +281,12 @@ def thread_google_monitor(): logger.info("Google Drive access token was successfully validated.") # load teamdrives (if enabled) - if conf.configs['GOOGLE']['TEAMDRIVE'] and not manager.load_teamdrives(): + if conf.configs["GOOGLE"]["TEAMDRIVE"] and not manager.load_teamdrives(): logger.error("Failed to load Google Teamdrives.") exit(1) # set callbacks - manager.set_callbacks({'items_added': process_google_changes}) + manager.set_callbacks({"items_added": process_google_changes}) try: logger.info("Google Drive changes monitor started.") @@ -269,10 +294,12 @@ def thread_google_monitor(): # poll for changes manager.get_changes() # sleep before polling for changes again - time.sleep(conf.configs['GOOGLE']['POLL_INTERVAL']) + time.sleep(conf.configs["GOOGLE"]["POLL_INTERVAL"]) except Exception: - logger.exception("Fatal Exception occurred while monitoring Google Drive for changes: 
") + logger.exception( + "Fatal Exception occurred while monitoring Google Drive for changes: " + ) ############################################################ @@ -280,49 +307,60 @@ def thread_google_monitor(): ############################################################ app = Flask(__name__) -app.config['JSON_AS_ASCII'] = False +app.config["JSON_AS_ASCII"] = False -@app.route("/api/%s" % conf.configs['SERVER_PASS'], methods=['GET', 'POST']) +@app.route("/api/%s" % conf.configs["SERVER_PASS"], methods=["GET", "POST"]) def api_call(): data = {} try: - if request.content_type == 'application/json': + if request.content_type == "application/json": data = request.get_json(silent=True) - elif request.method == 'POST': + elif request.method == "POST": data = request.form.to_dict() else: data = request.args.to_dict() # verify cmd was supplied - if 'cmd' not in data: - logger.error("Unknown %s API call from %r", request.method, request.remote_addr) - return jsonify({'error': 'No cmd parameter was supplied'}) + if "cmd" not in data: + logger.error( + "Unknown %s API call from %r", request.method, request.remote_addr + ) + return jsonify({"error": "No cmd parameter was supplied"}) else: - logger.info("Client %s API call from %r, type: %s", request.method, request.remote_addr, data['cmd']) + logger.info( + "Client %s API call from %r, type: %s", + request.method, + request.remote_addr, + data["cmd"], + ) # process cmds - cmd = data['cmd'].lower() - if cmd == 'queue_count': + cmd = data["cmd"].lower() + if cmd == "queue_count": # queue count - if not conf.configs['SERVER_USE_SQLITE']: + if not conf.configs["SERVER_USE_SQLITE"]: # return error if SQLITE db is not enabled - return jsonify({'error': 'SERVER_USE_SQLITE must be enabled'}) - return jsonify({'queue_count': db.get_queue_count()}) + return jsonify({"error": "SERVER_USE_SQLITE must be enabled"}) + return jsonify({"queue_count": db.get_queue_count()}) else: # unknown cmd - return jsonify({'error': 'Unknown cmd: %s' % cmd}) + return jsonify({"error": "Unknown cmd: %s" % cmd}) except Exception: - logger.exception("Exception parsing %s API call from %r: ", request.method, request.remote_addr) + logger.exception( + "Exception parsing %s API call from %r: ", + request.method, + request.remote_addr, + ) - return jsonify({'error': 'Unexpected error occurred, check logs...'}) + return jsonify({"error": "Unexpected error occurred, check logs..."}) -@app.route("/%s" % conf.configs['SERVER_PASS'], methods=['GET']) +@app.route("/%s" % conf.configs["SERVER_PASS"], methods=["GET"]) def manual_scan(): - if not conf.configs['SERVER_ALLOW_MANUAL_SCAN']: + if not conf.configs["SERVER_ALLOW_MANUAL_SCAN"]: return abort(401) page = """ @@ -353,9 +391,9 @@ def manual_scan(): return page, 200 -@app.route("/%s" % conf.configs['SERVER_PASS'], methods=['POST']) +@app.route("/%s" % conf.configs["SERVER_PASS"], methods=["POST"]) def client_pushed(): - if request.content_type == 'application/json': + if request.content_type == "application/json": data = request.get_json(silent=True) else: data = request.form.to_dict() @@ -363,13 +401,25 @@ def client_pushed(): if not data: logger.error("Invalid scan request from: %r", request.remote_addr) abort(400) - logger.debug("Client %r request dump:\n%s", request.remote_addr, json.dumps(data, indent=4, sort_keys=True)) + logger.debug( + "Client %r request dump:\n%s", + request.remote_addr, + json.dumps(data, indent=4, sort_keys=True), + ) - if ('eventType' in data and data['eventType'] == 'Test') or ('EventType' in data and 
data['EventType'] == 'Test'): - logger.info("Client %r made a test request, event: '%s'", request.remote_addr, 'Test') - elif 'eventType' in data and data['eventType'] == 'Manual': - logger.info("Client %r made a manual scan request for: '%s'", request.remote_addr, data['filepath']) - final_path = utils.map_pushed_path(conf.configs, data['filepath']) + if ("eventType" in data and data["eventType"] == "Test") or ( + "EventType" in data and data["EventType"] == "Test" + ): + logger.info( + "Client %r made a test request, event: '%s'", request.remote_addr, "Test" + ) + elif "eventType" in data and data["eventType"] == "Manual": + logger.info( + "Client %r made a manual scan request for: '%s'", + request.remote_addr, + data["filepath"], + ) + final_path = utils.map_pushed_path(conf.configs, data["filepath"]) # ignore this request? ignore, ignore_match = utils.should_ignore(final_path, conf.configs) if ignore: @@ -378,8 +428,11 @@ def client_pushed(): final_path, ignore_match, ) - return "Ignoring scan request because %s was matched from your SERVER_IGNORE_LIST" % ignore_match - if start_scan(final_path, 'Manual', 'Manual'): + return ( + "Ignoring scan request because %s was matched from your SERVER_IGNORE_LIST" + % ignore_match + ) + if start_scan(final_path, "Manual", "Manual"): return """ @@ -425,49 +478,75 @@ def client_pushed(): """.format( - data['filepath'] + data["filepath"] ) - elif 'series' in data and 'eventType' in data and data['eventType'] == 'Rename' and 'path' in data['series']: + elif ( + "series" in data + and "eventType" in data + and data["eventType"] == "Rename" + and "path" in data["series"] + ): # sonarr Rename webhook logger.info( "Client %r scan request for series: '%s', event: '%s'", request.remote_addr, - data['series']['path'], - "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'], + data["series"]["path"], + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) - final_path = utils.map_pushed_path(conf.configs, data['series']['path']) + final_path = utils.map_pushed_path(conf.configs, data["series"]["path"]) start_scan( - final_path, 'Sonarr', "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'] + final_path, + "Sonarr", + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) - elif 'movie' in data and 'eventType' in data and data['eventType'] == 'Rename' and 'folderPath' in data['movie']: + elif ( + "movie" in data + and "eventType" in data + and data["eventType"] == "Rename" + and "folderPath" in data["movie"] + ): # radarr Rename webhook logger.info( "Client %r scan request for movie: '%s', event: '%s'", request.remote_addr, - data['movie']['folderPath'], - "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'], + data["movie"]["folderPath"], + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) - final_path = utils.map_pushed_path(conf.configs, data['movie']['folderPath']) + final_path = utils.map_pushed_path(conf.configs, data["movie"]["folderPath"]) start_scan( - final_path, 'Radarr', "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'] + final_path, + "Radarr", + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) elif ( - 'movie' in data - and 'movieFile' in data - and 'folderPath' in data['movie'] - and 'relativePath' in data['movieFile'] - and 'eventType' in data + "movie" in data + and "movieFile" in data + and 
"folderPath" in data["movie"] + and "relativePath" in data["movieFile"] + and "eventType" in data ): # radarr download/upgrade webhook - path = os.path.join(data['movie']['folderPath'], data['movieFile']['relativePath']) + path = os.path.join( + data["movie"]["folderPath"], data["movieFile"]["relativePath"] + ) logger.info( "Client %r scan request for movie: '%s', event: '%s'", request.remote_addr, path, - "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'], + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) final_path = utils.map_pushed_path(conf.configs, path) @@ -476,40 +555,44 @@ def client_pushed(): scan_lookup_type = None scan_lookup_id = None - if 'remoteMovie' in data: - if 'imdbId' in data['remoteMovie'] and data['remoteMovie']['imdbId']: + if "remoteMovie" in data: + if "imdbId" in data["remoteMovie"] and data["remoteMovie"]["imdbId"]: # prefer imdb - scan_lookup_id = data['remoteMovie']['imdbId'] - scan_lookup_type = 'IMDB' - elif 'tmdbId' in data['remoteMovie'] and data['remoteMovie']['tmdbId']: + scan_lookup_id = data["remoteMovie"]["imdbId"] + scan_lookup_type = "IMDB" + elif "tmdbId" in data["remoteMovie"] and data["remoteMovie"]["tmdbId"]: # fallback tmdb - scan_lookup_id = data['remoteMovie']['tmdbId'] - scan_lookup_type = 'TheMovieDB' + scan_lookup_id = data["remoteMovie"]["tmdbId"] + scan_lookup_type = "TheMovieDB" scan_title = ( - data['remoteMovie']['title'] - if 'title' in data['remoteMovie'] and data['remoteMovie']['title'] + data["remoteMovie"]["title"] + if "title" in data["remoteMovie"] and data["remoteMovie"]["title"] else None ) # start scan start_scan( final_path, - 'Radarr', - "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'], + "Radarr", + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], scan_title, scan_lookup_type, scan_lookup_id, ) - elif 'series' in data and 'episodeFile' in data and 'eventType' in data: + elif "series" in data and "episodeFile" in data and "eventType" in data: # sonarr download/upgrade webhook - path = os.path.join(data['series']['path'], data['episodeFile']['relativePath']) + path = os.path.join(data["series"]["path"], data["episodeFile"]["relativePath"]) logger.info( "Client %r scan request for series: '%s', event: '%s'", request.remote_addr, path, - "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'], + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) final_path = utils.map_pushed_path(conf.configs, path) @@ -517,39 +600,57 @@ def client_pushed(): scan_title = None scan_lookup_type = None scan_lookup_id = None - if 'series' in data: + if "series" in data: scan_lookup_id = ( - data['series']['tvdbId'] if 'tvdbId' in data['series'] and data['series']['tvdbId'] else None + data["series"]["tvdbId"] + if "tvdbId" in data["series"] and data["series"]["tvdbId"] + else None + ) + scan_lookup_type = "TheTVDB" if scan_lookup_id is not None else None + scan_title = ( + data["series"]["title"] + if "title" in data["series"] and data["series"]["title"] + else None ) - scan_lookup_type = 'TheTVDB' if scan_lookup_id is not None else None - scan_title = data['series']['title'] if 'title' in data['series'] and data['series']['title'] else None # start scan start_scan( final_path, - 'Sonarr', - "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'], + "Sonarr", + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else 
data["eventType"], scan_title, scan_lookup_type, scan_lookup_id, ) - elif 'artist' in data and 'trackFiles' in data and 'eventType' in data: + elif "artist" in data and "trackFiles" in data and "eventType" in data: # lidarr download/upgrade webhook - for track in data['trackFiles']: - if 'path' not in track and 'relativePath' not in track: + for track in data["trackFiles"]: + if "path" not in track and "relativePath" not in track: continue - path = track['path'] if 'path' in track else os.path.join(data['artist']['path'], track['relativePath']) + path = ( + track["path"] + if "path" in track + else os.path.join(data["artist"]["path"], track["relativePath"]) + ) logger.info( "Client %r scan request for album track: '%s', event: '%s'", request.remote_addr, path, - "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'], + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) final_path = utils.map_pushed_path(conf.configs, path) start_scan( - final_path, 'Lidarr', "Upgrade" if ('isUpgrade' in data and data['isUpgrade']) else data['eventType'] + final_path, + "Lidarr", + "Upgrade" + if ("isUpgrade" in data and data["isUpgrade"]) + else data["eventType"], ) else: @@ -566,8 +667,8 @@ def client_pushed(): if __name__ == "__main__": print("") - f = Figlet(font='slant', width=100) - print(f.renderText('Autoscan')) + f = Figlet(font="slant", width=100) + print(f.renderText("Autoscan")) logger.info( """ @@ -583,26 +684,26 @@ def client_pushed(): ######################################################################### """ ) - if conf.args['cmd'] == 'sections': + if conf.args["cmd"] == "sections": plex.show_detailed_sections_info(conf) exit(0) - elif conf.args['cmd'] == 'jesections': + elif conf.args["cmd"] == "jesections": jelly_emby.get_library_paths(conf) exit(0) - elif conf.args['cmd'] == 'update_config': + elif conf.args["cmd"] == "update_config": exit(0) - elif conf.args['cmd'] == 'authorize': - if not conf.configs['GOOGLE']['ENABLED']: + elif conf.args["cmd"] == "authorize": + if not conf.configs["GOOGLE"]["ENABLED"]: logger.error("You must enable the GOOGLE section in config.") exit(1) else: - logger.debug("client_id: %r", conf.configs['GOOGLE']['CLIENT_ID']) - logger.debug("client_secret: %r", conf.configs['GOOGLE']['CLIENT_SECRET']) + logger.debug("client_id: %r", conf.configs["GOOGLE"]["CLIENT_ID"]) + logger.debug("client_secret: %r", conf.configs["GOOGLE"]["CLIENT_SECRET"]) google_drive = drive.GoogleDrive( - conf.configs['GOOGLE']['CLIENT_ID'], - conf.configs['GOOGLE']['CLIENT_SECRET'], - conf.settings['cachefile'], - allowed_config=conf.configs['GOOGLE']['ALLOWED'], + conf.configs["GOOGLE"]["CLIENT_ID"], + conf.configs["GOOGLE"]["CLIENT_SECRET"], + conf.settings["cachefile"], + allowed_config=conf.configs["GOOGLE"]["ALLOWED"], ) # Provide authorization link @@ -614,38 +715,48 @@ def client_pushed(): # Exchange authorization code token = google_drive.exchange_code(auth_code) - if not token or 'access_token' not in token: - logger.error("Failed exchanging authorization code for an Access Token.") + if not token or "access_token" not in token: + logger.error( + "Failed exchanging authorization code for an Access Token." 
+ ) sys.exit(1) else: - logger.info("Exchanged authorization code for an Access Token:\n\n%s\n", json.dumps(token, indent=2)) + logger.info( + "Exchanged authorization code for an Access Token:\n\n%s\n", + json.dumps(token, indent=2), + ) sys.exit(0) - elif conf.args['cmd'] == 'server': - if conf.configs['SERVER_USE_SQLITE']: + elif conf.args["cmd"] == "server": + if conf.configs["SERVER_USE_SQLITE"]: start_queue_reloader() - if conf.configs['GOOGLE']['ENABLED']: + if conf.configs["GOOGLE"]["ENABLED"]: start_google_monitor() logger.info( "Starting server: http://%s:%d/%s", - conf.configs['SERVER_IP'], - conf.configs['SERVER_PORT'], - conf.configs['SERVER_PASS'], + conf.configs["SERVER_IP"], + conf.configs["SERVER_PORT"], + conf.configs["SERVER_PASS"], + ) + app.run( + host=conf.configs["SERVER_IP"], + port=conf.configs["SERVER_PORT"], + debug=False, + use_reloader=False, ) - app.run(host=conf.configs['SERVER_IP'], port=conf.configs['SERVER_PORT'], debug=False, use_reloader=False) logger.info("Server stopped") exit(0) - elif conf.args['cmd'] == 'build_caches': + elif conf.args["cmd"] == "build_caches": logger.info("Building caches") # load google drive manager manager = drive.GoogleDriveManager( - conf.configs['GOOGLE']['CLIENT_ID'], - conf.configs['GOOGLE']['CLIENT_SECRET'], - conf.settings['cachefile'], - allowed_config=conf.configs['GOOGLE']['ALLOWED'], - allowed_teamdrives=conf.configs['GOOGLE']['TEAMDRIVES'], + conf.configs["GOOGLE"]["CLIENT_ID"], + conf.configs["GOOGLE"]["CLIENT_SECRET"], + conf.settings["cachefile"], + allowed_config=conf.configs["GOOGLE"]["ALLOWED"], + allowed_teamdrives=conf.configs["GOOGLE"]["TEAMDRIVES"], ) if not manager.is_authorized(): @@ -655,7 +766,7 @@ def client_pushed(): logger.info("Google Drive Access Token was successfully validated.") # load teamdrives (if enabled) - if conf.configs['GOOGLE']['TEAMDRIVE'] and not manager.load_teamdrives(): + if conf.configs["GOOGLE"]["TEAMDRIVE"] and not manager.load_teamdrives(): logger.error("Failed to load Google Teamdrives.") exit(1) diff --git a/utils.py b/utils.py index 8dc906f..24e5e49 100644 --- a/utils.py +++ b/utils.py @@ -17,13 +17,18 @@ def get_plex_section(config, path): try: - with sqlite3.connect(config['PLEX_DATABASE_PATH']) as conn: + with sqlite3.connect(config["PLEX_DATABASE_PATH"]) as conn: conn.row_factory = sqlite3.Row conn.text_factory = str with closing(conn.cursor()) as c: # check if file exists in plex - logger.debug("Checking if root folder path '%s' matches Plex Library root path in the Plex DB.", path) - section_data = c.execute("SELECT library_section_id,root_path FROM section_locations").fetchall() + logger.debug( + "Checking if root folder path '%s' matches Plex Library root path in the Plex DB.", + path, + ) + section_data = c.execute( + "SELECT library_section_id,root_path FROM section_locations" + ).fetchall() for section_id, root_path in section_data: if path.startswith(root_path + os.sep): logger.debug( @@ -35,26 +40,30 @@ def get_plex_section(config, path): logger.error("Unable to map '%s' to a Section ID.", path) except Exception: - logger.exception("Exception while trying to map '%s' to a Section ID in the Plex DB: ", path) + logger.exception( + "Exception while trying to map '%s' to a Section ID in the Plex DB: ", path + ) return -1 def ensure_valid_os_path_sep(path): try: - if path.startswith('/'): + if path.startswith("/"): # replace \ with / - return path.replace('\\', '/') - elif '\\' in path: + return path.replace("\\", "/") + elif "\\" in path: # replace / with \ - return 
+            return path.replace("/", "\\")
     except Exception:
-        logger.exception("Exception while trying to ensure valid os path seperator for: '%s'", path)
+        logger.exception(
+            "Exception while trying to ensure valid os path separator for: '%s'", path
+        )
     return path


 def map_pushed_path(config, path):
-    for mapped_path, mappings in config['SERVER_PATH_MAPPINGS'].items():
+    for mapped_path, mappings in config["SERVER_PATH_MAPPINGS"].items():
         for mapping in mappings:
             if path.startswith(mapping):
                 logger.debug("Mapping server path '%s' to '%s'.", mapping, mapped_path)
@@ -63,20 +72,26 @@ def map_pushed_path(config, path):
 def map_pushed_path_file_exists(config, path):
-    for mapped_path, mappings in config['SERVER_FILE_EXIST_PATH_MAPPINGS'].items():
+    for mapped_path, mappings in config["SERVER_FILE_EXIST_PATH_MAPPINGS"].items():
         for mapping in mappings:
             if path.startswith(mapping):
-                logger.debug("Mapping file check path '%s' to '%s'.", mapping, mapped_path)
+                logger.debug(
+                    "Mapping file check path '%s' to '%s'.", mapping, mapped_path
+                )
                 return ensure_valid_os_path_sep(path.replace(mapping, mapped_path))
     return path


 # For Rclone dir cache clear request
 def map_file_exists_path_for_rclone(config, path):
-    for mapped_path, mappings in config['RCLONE']['RC_CACHE_REFRESH']['FILE_EXISTS_TO_REMOTE_MAPPINGS'].items():
+    for mapped_path, mappings in config["RCLONE"]["RC_CACHE_REFRESH"][
+        "FILE_EXISTS_TO_REMOTE_MAPPINGS"
+    ].items():
         for mapping in mappings:
             if path.startswith(mapping):
-                logger.debug("Mapping Rclone file check path '%s' to '%s'.", mapping, mapped_path)
+                logger.debug(
+                    "Mapping Rclone file check path '%s' to '%s'.", mapping, mapped_path
+                )
                 return path.replace(mapping, mapped_path)
     return path
@@ -90,15 +105,17 @@ def is_process_running(process_name, plex_container=None):
                 # plex_container was not None
                 # we need to check if this processes is from the container we are interested in
                 get_pid_container = (
-                    "docker inspect --format '{{.Name}}' \"$(cat /proc/%s/cgroup |head -n 1 "
-                    "|cut -d / -f 3)\" | sed 's/^\///'" % process.pid
+                    r"docker inspect --format '{{.Name}}' \"$(cat /proc/%s/cgroup |head -n 1 "
+                    r"|cut -d / -f 3)\" | sed 's/^\///'" % process.pid
                 )
                 process_container = run_command(get_pid_container, True)
                 logger.debug("Using: %s", get_pid_container)
                 logger.debug(
                     "Docker Container For PID %s: %r",
                     process.pid,
-                    process_container.strip() if process_container is not None else 'Unknown???',
+                    process_container.strip()
+                    if process_container is not None
+                    else "Unknown???",
                 )
                 if (
                     process_container is not None
@@ -118,19 +135,23 @@ def is_process_running(process_name, plex_container=None):
 def wait_running_process(process_name, use_docker=False, plex_container=None):
     try:
         running, process, container = is_process_running(
-            process_name, None if not use_docker or not plex_container else plex_container
+            process_name,
+            None if not use_docker or not plex_container else plex_container,
         )
         while running and process:
             logger.info(
                 "'%s' is running, pid: %d,%s cmdline: %r. Checking again in 60 seconds...",
                 process.name(),
                 process.pid,
-                ' container: %s,' % container.strip() if use_docker and isinstance(container, str) else '',
+                " container: %s," % container.strip()
+                if use_docker and isinstance(container, str)
+                else "",
                 process.cmdline(),
             )
             time.sleep(60)
             running, process, container = is_process_running(
-                process_name, None if not use_docker or not plex_container else plex_container
+                process_name,
+                None if not use_docker or not plex_container else plex_container,
             )
         return True
@@ -142,10 +163,12 @@ def wait_running_process(process_name, use_docker=False, plex_container=None):
 def run_command(command, get_output=False):
-    total_output = ''
-    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    total_output = ""
+    process = subprocess.Popen(
+        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+    )
     while True:
-        output = str(process.stdout.readline()).lstrip('b').replace('\\n', '').strip()
+        output = str(process.stdout.readline()).lstrip("b").replace("\\n", "").strip()
         if output and len(output) >= 3:
             if not get_output:
                 if len(output) >= 8:
@@ -161,7 +184,7 @@ def run_command(command, get_output=False):
 def should_ignore(file_path, config):
-    for item in config['SERVER_IGNORE_LIST']:
+    for item in config["SERVER_IGNORE_LIST"]:
         if item.lower() in file_path.lower():
             return True, item
@@ -176,10 +199,12 @@ def remove_item_from_list(item, from_list):
 def get_priority(config, scan_path):
     try:
-        for priority, paths in config['SERVER_SCAN_PRIORITIES'].items():
+        for priority, paths in config["SERVER_SCAN_PRIORITIES"].items():
             for path in paths:
                 if path.lower() in scan_path.lower():
-                    logger.debug("Using priority '%d' for path '%s'", int(priority), scan_path)
+                    logger.debug(
+                        "Using priority '%d' for path '%s'", int(priority), scan_path
+                    )
                     return int(priority)
         logger.debug("Using default priority '0' for path '%s'", scan_path)
     except Exception:
@@ -189,10 +214,16 @@ def get_priority(config, scan_path):
 def rclone_rc_clear_cache(config, scan_path):
     try:
-        rclone_rc_expire_url = urljoin(config['RCLONE']['RC_CACHE_REFRESH']['RC_URL'], 'cache/expire')
-        rclone_rc_refresh_url = urljoin(config['RCLONE']['RC_CACHE_REFRESH']['RC_URL'], 'vfs/refresh')
+        rclone_rc_expire_url = urljoin(
+            config["RCLONE"]["RC_CACHE_REFRESH"]["RC_URL"], "cache/expire"
+        )
+        rclone_rc_refresh_url = urljoin(
+            config["RCLONE"]["RC_CACHE_REFRESH"]["RC_URL"], "vfs/refresh"
+        )

-        cache_clear_path = map_file_exists_path_for_rclone(config, scan_path).lstrip(os.path.sep)
+        cache_clear_path = map_file_exists_path_for_rclone(config, scan_path).lstrip(
+            os.path.sep
+        )
         logger.debug("Top level cache_clear_path: '%s'", cache_clear_path)

         while True:
@@ -210,36 +241,51 @@ def rclone_rc_clear_cache(config, scan_path):
             last_clear_path = cache_clear_path

             # send Rclone mount dir cache clear request
-            logger.info("Sending Rclone mount dir cache clear request for: '%s'", cache_clear_path)
+            logger.info(
+                "Sending Rclone mount dir cache clear request for: '%s'",
+                cache_clear_path,
+            )
             try:
                 # try cache clear
-                resp = requests.post(rclone_rc_expire_url, json={'remote': cache_clear_path}, timeout=120)
-                if '{' in resp.text and '}' in resp.text:
+                resp = requests.post(
+                    rclone_rc_expire_url, json={"remote": cache_clear_path}, timeout=120
+                )
+                if "{" in resp.text and "}" in resp.text:
                     data = resp.json()
-                    if 'error' in data:
+                    if "error" in data:
                         # try to vfs/refresh as fallback
-                        resp = requests.post(rclone_rc_refresh_url, json={'dir': cache_clear_path}, timeout=120)
-                        if '{' in resp.text and '}' in resp.text:
+                        resp = requests.post(
+                            rclone_rc_refresh_url,
+                            json={"dir": cache_clear_path},
+                            timeout=120,
+                        )
+                        if "{" in resp.text and "}" in resp.text:
                             data = resp.json()
                             if (
-                                'result' in data
-                                and cache_clear_path in data['result']
-                                and data['result'][cache_clear_path] == 'OK'
+                                "result" in data
+                                and cache_clear_path in data["result"]
+                                and data["result"][cache_clear_path] == "OK"
                             ):
                                 # successfully vfs refreshed
                                 logger.info(
-                                    "Successfully refreshed Rclone VFS mount's dir cache for '%s'", cache_clear_path
+                                    "Successfully refreshed Rclone VFS mount's dir cache for '%s'",
+                                    cache_clear_path,
                                 )
                                 return True
                         logger.info(
                             "Failed to clear Rclone mount's dir cache for '%s': %s",
                             cache_clear_path,
-                            data['error'] if 'error' in data else data,
+                            data["error"] if "error" in data else data,
                         )
                         continue
-                    elif ('status' in data and 'message' in data) and data['status'] == 'ok':
-                        logger.info("Successfully cleared Rclone Cache mount's dir cache for '%s'", cache_clear_path)
+                    elif ("status" in data and "message" in data) and data[
+                        "status"
+                    ] == "ok":
+                        logger.info(
+                            "Successfully cleared Rclone Cache mount's dir cache for '%s'",
+                            cache_clear_path,
+                        )
                         return True

                 # abort on unexpected response (no json response, no error/status & message in returned json
@@ -260,7 +306,9 @@ def rclone_rc_clear_cache(config, scan_path):
             break

     except Exception:
-        logger.exception("Exception clearing Rclone mount dir cache for '%s': ", scan_path)
+        logger.exception(
+            "Exception clearing Rclone mount dir cache for '%s': ", scan_path
+        )
     return False
@@ -268,7 +316,7 @@ def load_json(file_path):
     if os.path.sep not in file_path:
         file_path = os.path.join(os.path.dirname(sys.argv[0]), file_path)

-    with open(file_path, 'r') as fp:
+    with open(file_path, "r") as fp:
         return json.load(fp)
@@ -276,7 +324,7 @@ def dump_json(file_path, obj, processing=True):
     if os.path.sep not in file_path:
         file_path = os.path.join(os.path.dirname(sys.argv[0]), file_path)

-    with open(file_path, 'w') as fp:
+    with open(file_path, "w") as fp:
         if processing:
             json.dump(obj, fp, indent=2, sort_keys=True)
         else:
@@ -286,7 +334,7 @@ def dump_json(file_path, obj, processing=True):
 def remove_files_exist_in_plex_database(config, file_paths):
     removed_items = 0
-    plex_db_path = config['PLEX_DATABASE_PATH']
+    plex_db_path = config["PLEX_DATABASE_PATH"]
     try:
         if plex_db_path and os.path.exists(plex_db_path):
             with sqlite3.connect(plex_db_path) as conn:
@@ -302,20 +350,26 @@ def remove_files_exist_in_plex_database(config, file_paths):
                             plex_db_path,
                         )
                         found_item = c.execute(
-                            "SELECT size FROM media_parts WHERE file LIKE ?", ('%' + file_path_plex,)
+                            "SELECT size FROM media_parts WHERE file LIKE ?",
+                            ("%" + file_path_plex,),
                         ).fetchone()

-                        file_path_actual = map_pushed_path_file_exists(config, file_path_plex)
+                        file_path_actual = map_pushed_path_file_exists(
+                            config, file_path_plex
+                        )

                         # should plex file size and file size on disk be checked?
                         disk_file_size_check = True
                         if (
-                            'DISABLE_DISK_FILE_SIZE_CHECK' in config['GOOGLE']
-                            and config['GOOGLE']['DISABLE_DISK_FILE_SIZE_CHECK']
+                            "DISABLE_DISK_FILE_SIZE_CHECK" in config["GOOGLE"]
+                            and config["GOOGLE"]["DISABLE_DISK_FILE_SIZE_CHECK"]
                         ):
                             disk_file_size_check = False

                         if found_item:
-                            logger.debug("'%s' was found in the Plex DB media_parts table.", file_name)
+                            logger.debug(
+                                "'%s' was found in the Plex DB media_parts table.",
+                                file_name,
+                            )
                             skip_file = False
                             if not disk_file_size_check:
                                 skip_file = True
@@ -328,11 +382,16 @@ def remove_files_exist_in_plex_database(config, file_paths):
                                     found_item[0],
                                 )
                                 if file_size == found_item[0]:
-                                    logger.debug("'%s' size matches size found in the Plex DB.", file_size)
+                                    logger.debug(
+                                        "'%s' size matches size found in the Plex DB.",
+                                        file_size,
+                                    )
                                     skip_file = True

                             if skip_file:
-                                logger.debug("Removing path from scan queue: '%s'", file_path)
+                                logger.debug(
+                                    "Removing path from scan queue: '%s'", file_path
+                                )
                                 file_paths.remove(file_path)
                                 removed_items += 1