diff --git a/SickBeard.py b/SickBeard.py
index fd02d430d7599f1ae8a15611e74f247404581661..608e4ec9a7a59498ddc235d8bcbc0314f8b33d5a 100755
--- a/SickBeard.py
+++ b/SickBeard.py
@@ -457,7 +457,7 @@ class SickRage(object):
         Populates the showList with shows from the database
         """
 
-        logger.log(u"Loading initial show list")
+        logger.log(u"Loading initial show list", logger.DEBUG)
 
         myDB = db.DBConnection()
         sqlResults = myDB.select("SELECT * FROM tv_shows")
diff --git a/gui/slick/interfaces/default/home.tmpl b/gui/slick/interfaces/default/home.tmpl
index ae4a05fc51a3fabd37db38f6da2e1ce204897a79..840b37cb23c48e4a1882d7bd5671df8ba907d32c 100644
--- a/gui/slick/interfaces/default/home.tmpl
+++ b/gui/slick/interfaces/default/home.tmpl
@@ -30,7 +30,7 @@
 #set $sql_statement += ' OR (status IN ' + status_quality + ') OR (status IN ' + status_download + '))) AS ep_total, '
 #set $sql_statement += ' (SELECT airdate FROM tv_episodes WHERE showid=tv_eps.showid AND airdate >= ' + $today + ' AND (status = ' + str($UNAIRED) + ' OR status = ' + str($WANTED) + ') ORDER BY airdate ASC LIMIT 1) AS ep_airs_next, '
-#set $sql_statement += ' (SELECT airdate FROM tv_episodes WHERE showid=tv_eps.showid AND airdate <> 1 AND status <> ' + str($UNAIRED) + ' ORDER BY airdate DESC LIMIT 1) AS ep_airs_prev '
+#set $sql_statement += ' (SELECT airdate FROM tv_episodes WHERE showid=tv_eps.showid AND airdate > 1 AND status <> ' + str($UNAIRED) + ' ORDER BY airdate DESC LIMIT 1) AS ep_airs_prev '
 #set $sql_statement += ' FROM tv_episodes tv_eps GROUP BY showid'
 
 #set $sql_result = $myDB.select($sql_statement)
diff --git a/gui/slick/interfaces/default/inc_top.tmpl b/gui/slick/interfaces/default/inc_top.tmpl
index f46ad2d63d7d2eb66fd1eacb81262034c3cb4cef..eaccca7a581eabdcbb1530a139772352d43f6f8e 100644
--- a/gui/slick/interfaces/default/inc_top.tmpl
+++ b/gui/slick/interfaces/default/inc_top.tmpl
@@ -155,13 +155,8 @@
 #if $sbLogin:
             <div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
                 <ul class="nav navbar-nav navbar-right">
-                    <li id="NAVhome" class="dropdown">
-                        <a href="$sbRoot/home/" class="dropdown-toggle" data-toggle="dropdown">Shows <b class="caret"></b></a>
-                        <ul class="dropdown-menu">
-                            <li><a href="$sbRoot/home/"><i class="menu-icon-home"></i> Show List</a></li>
-                            <li><a href="$sbRoot/home/addShows/"><i class="menu-icon-addshow"></i> Add Shows</a></li>
-                            <li><a href="$sbRoot/home/postprocess/"><i class="menu-icon-postprocess"></i> Manual Post-Processing</a></li>
-                        </ul>
+                    <li id="NAVhome">
+                        <a href="$sbRoot/home/">Shows</a>
                     </li>
 
                     <li id="NAVcomingEpisodes">
diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py
index e8a4b4415658b0bc4fd70e84e9faec2459030a1c..8cfe5a4167c00eea6387823db1a1188d28b0e3b9 100644
--- a/sickbeard/databases/cache_db.py
+++ b/sickbeard/databases/cache_db.py
@@ -21,15 +21,19 @@ from sickbeard import db
 
 # Add new migrations at the bottom of the list; subclass the previous migration.
 class InitialSchema(db.SchemaUpgrade):
     def test(self):
-        return self.hasTable("lastUpdate")
+        return self.hasTable("db_version")
 
     def execute(self):
         queries = [
+            ("CREATE TABLE db_version (db_version INTEGER);",),
             ("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",),
             ("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);",),
-            ("CREATE TABLE db_version (db_version INTEGER);",),
-            ("INSERT INTO db_version (db_version) VALUES (?)", 1),
+            ("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);",),
+            ("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);",),
+            ("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);",),
+            ("CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);",),
+            ("INSERT INTO db_version(db_version) VALUES (1);",),
         ]
         for query in queries:
             if len(query) == 1:
@@ -88,4 +92,4 @@ class AddSceneExceptionsRefresh(AddSceneExceptionsCustom):
 
     def execute(self):
         self.connection.action(
-            "CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)")
\ No newline at end of file
+            "CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)")
diff --git a/sickbeard/databases/failed_db.py b/sickbeard/databases/failed_db.py
index c0d07b98c8daf221dafcdeb51be621195532f9e3..94091116c1f3795ec990114569ea86573a6d3c96 100644
--- a/sickbeard/databases/failed_db.py
+++ b/sickbeard/databases/failed_db.py
@@ -23,13 +23,14 @@ from sickbeard.common import Quality
 # Add new migrations at the bottom of the list; subclass the previous migration.
 class InitialSchema(db.SchemaUpgrade):
     def test(self):
-        return self.hasTable('failed')
+        return self.hasTable('db_version')
 
     def execute(self):
         queries = [
-            ('CREATE TABLE failed (release TEXT);',),
+            ('CREATE TABLE failed (release TEXT, size NUMERIC, provider TEXT);',),
+            ('CREATE TABLE history (date NUMERIC, size NUMERIC, release TEXT, provider TEXT, old_status NUMERIC DEFAULT (?), showid NUMERIC DEFAULT -1, season NUMERIC DEFAULT -1, episode NUMERIC DEFAULT -1);', Quality.NONE),
             ('CREATE TABLE db_version (db_version INTEGER);',),
-            ('INSERT INTO db_version (db_version) VALUES (?)', 1),
+            ('INSERT INTO db_version (db_version) VALUES (1);',),
         ]
         for query in queries:
             if len(query) == 1:
@@ -43,7 +44,7 @@ class SizeAndProvider(InitialSchema):
         return self.hasColumn('failed', 'size') and self.hasColumn('failed', 'provider')
 
     def execute(self):
-        self.addColumn('failed', 'size')
+        self.addColumn('failed', 'size', 'NUMERIC')
         self.addColumn('failed', 'provider', 'TEXT', '')
 
diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py
index 604066eacf5595e08367c0a86695aff8d1187640..2c9aefbc7ae3a992cb5c410ce8ee803cac8fbccd 100644
--- a/sickbeard/databases/mainDB.py
+++ b/sickbeard/databases/mainDB.py
@@ -38,6 +38,7 @@ class MainSanityCheck(db.DBSanityCheck):
         self.fix_unaired_episodes()
         self.fix_tvrage_show_statues()
         self.fix_episode_statuses()
+        self.fix_invalid_airdates()
 
     def fix_duplicate_shows(self, column='indexer_id'):
 
@@ -61,7 +62,7 @@ class MainSanityCheck(db.DBSanityCheck):
                 self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
 
         else:
-            logger.log(u"No duplicate show, check passed")
+            logger.log(u"No duplicate show, check passed", logger.DEBUG)
 
     def fix_duplicate_episodes(self):
 
@@ -85,7 +86,7 @@ class MainSanityCheck(db.DBSanityCheck):
                 self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_dupe_id["episode_id"]])
 
         else:
-            logger.log(u"No duplicate episode, check passed")
+            logger.log(u"No duplicate episode, check passed", logger.DEBUG)
 
     def fix_orphan_episodes(self):
 
@@ -99,7 +100,7 @@ class MainSanityCheck(db.DBSanityCheck):
             self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
 
         else:
-            logger.log(u"No orphan episodes, check passed")
+            logger.log(u"No orphan episodes, check passed", logger.DEBUG)
 
     def fix_missing_table_indexes(self):
         if not self.connection.select("PRAGMA index_info('idx_indexer_id')"):
@@ -142,7 +143,7 @@ class MainSanityCheck(db.DBSanityCheck):
                                    [common.UNAIRED, cur_unaired["episode_id"]])
 
         else:
-            logger.log(u"No UNAIRED episodes, check passed")
+            logger.log(u"No UNAIRED episodes, check passed", logger.DEBUG)
 
     def fix_tvrage_show_statues(self):
         status_map = {
@@ -174,8 +175,22 @@ class MainSanityCheck(db.DBSanityCheck):
             self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
                                    [common.UNKNOWN, cur_ep["episode_id"]])
 
         else:
-            logger.log(u"No MALFORMED episode statuses, check passed")
+            logger.log(u"No MALFORMED episode statuses, check passed", logger.DEBUG)
 
+    def fix_invalid_airdates(self):
+
+        sqlResults = self.connection.select(
+            "SELECT episode_id, showid FROM tv_episodes WHERE airdate >= ? OR airdate < 1",
+            [datetime.date.max.toordinal()])
+
+        for bad_airdate in sqlResults:
+            logger.log(u"Bad episode airdate detected! episode_id: " + str(bad_airdate["episode_id"]) + " showid: " + str(
+                bad_airdate["showid"]), logger.DEBUG)
+            logger.log(u"Fixing bad episode airdate for episode_id: " + str(bad_airdate["episode_id"]))
+            self.connection.action("UPDATE tv_episodes SET airdate = '1' WHERE episode_id = ?", [bad_airdate["episode_id"]])
+
+        else:
+            logger.log(u"No bad episode airdates, check passed", logger.DEBUG)
 
 def backupDatabase(version):
     logger.log(u"Backing up database before upgrade")
diff --git a/sickbeard/db.py b/sickbeard/db.py
index 821b11d9e9e1ef6f5c5c8c6de656ab15de664270..79162cdfadfadf5b5ca6e970ec7014d575c635d8 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -266,7 +266,7 @@ class DBSanityCheck(object):
 # ===============
 
 def upgradeDatabase(connection, schema):
-    logger.log(u"Checking database structure...", logger.INFO)
+    logger.log(u"Checking database structure..." + connection.filename, logger.DEBUG)
     _processUpgrade(connection, schema)
 
@@ -287,7 +287,7 @@ def _processUpgrade(connection, upgradeClass):
     instance = upgradeClass(connection)
     logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
     if not instance.test():
-        logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.INFO)
+        logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.DEBUG)
         try:
             instance.execute()
         except sqlite3.DatabaseError, e:
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 72dbd68a6ff438998d335803bb1fa5dc80bbb5b1..ef313702db25493dacca2fea805e621f32a32c88 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -1388,7 +1388,7 @@ def clearCache(force=False):
 
     # clean out cache directory, remove everything > 12 hours old
     if sickbeard.CACHE_DIR:
-        logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR)
+        logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR, logger.DEBUG)
 
         # Does our cache_dir exists
         if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
diff --git a/sickbeard/image_cache.py b/sickbeard/image_cache.py
index 54494d3b2fa8afbb1195865d014ed751759193a9..bf664b3fc2ab3951d2ecb652835ccf7c3ab6ee33 100644
--- a/sickbeard/image_cache.py
+++ b/sickbeard/image_cache.py
@@ -282,7 +282,7 @@ class ImageCache:
 
         if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not \
                 need_images[self.BANNER_THUMB] and not need_images[self.FANART]:
-            logger.log(u"No new cache images needed, not retrieving new ones")
+            logger.log(u"No new cache images needed, not retrieving new ones", logger.DEBUG)
             return
 
         # check the show dir for poster or banner images and use them
diff --git a/sickbeard/notifiers/kodi.py b/sickbeard/notifiers/kodi.py
index 3557b7ba725e63bafaf3dcf83963dc961e1d9220..4f9e55c90e458b145ef997031564ea6563e4173d 100644
--- a/sickbeard/notifiers/kodi.py
+++ b/sickbeard/notifiers/kodi.py
@@ -127,7 +127,7 @@ class KODINotifier:
         result = ''
 
         for curHost in [x.strip() for x in host.split(",")]:
-            logger.log(u"Sending KODI notification to '" + curHost + "' - " + message, logger.INFO)
+            logger.log(u"Sending KODI notification to '" + curHost + "' - " + message, logger.DEBUG)
 
             kodiapi = self._get_kodi_version(curHost, username, password)
             if kodiapi:
@@ -169,27 +169,27 @@ class KODINotifier:
 
         """
 
-        logger.log(u"Sending request to update library for KODI host: '" + host + "'", logger.INFO)
+        logger.log(u"Sending request to update library for KODI host: '" + host + "'", logger.DEBUG)
 
         kodiapi = self._get_kodi_version(host, sickbeard.KODI_USERNAME, sickbeard.KODI_PASSWORD)
         if kodiapi:
             if (kodiapi <= 4):
                 # try to update for just the show, if it fails, do full update if enabled
                 if not self._update_library(host, showName) and sickbeard.KODI_UPDATE_FULL:
-                    logger.log(u"Single show update failed, falling back to full update", logger.WARNING)
+                    logger.log(u"Single show update failed, falling back to full update", logger.DEBUG)
                     return self._update_library(host)
                 else:
                     return True
             else:
                 # try to update for just the show, if it fails, do full update if enabled
                 if not self._update_library_json(host, showName) and sickbeard.KODI_UPDATE_FULL:
-                    logger.log(u"Single show update failed, falling back to full update", logger.WARNING)
+                    logger.log(u"Single show update failed, falling back to full update", logger.DEBUG)
                     return self._update_library_json(host)
                 else:
                     return True
         else:
             logger.log(u"Failed to detect KODI version for '" + host + "', check configuration and try again.",
-                       logger.DEBUG)
+                       logger.WARNING)
             return False
 
         return False
@@ -219,7 +219,7 @@ class KODINotifier:
             password = sickbeard.KODI_PASSWORD
 
         if not host:
-            logger.log(u'No KODI host passed, aborting update', logger.DEBUG)
+            logger.log(u'No KODI host passed, aborting update', logger.WARNING)
             return False
 
         for key in command:
@@ -269,7 +269,7 @@ class KODINotifier:
         """
 
         if not host:
-            logger.log(u'No KODI host passed, aborting update', logger.DEBUG)
+            logger.log(u'No KODI host passed, aborting update', logger.WARNING)
             return False
 
         logger.log(u"Updating KODI library via HTTP method for host: " + host, logger.DEBUG)
@@ -330,7 +330,7 @@ class KODINotifier:
                 time.sleep(5)
         # do a full update if requested
         else:
-            logger.log(u"Doing Full Library KODI update on host: " + host, logger.INFO)
+            logger.log(u"Doing Full Library KODI update on host: " + host, logger.DEBUG)
             updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'KODI.updatelibrary(video)'}
             request = self._send_to_kodi(updateCommand, host)
@@ -365,7 +365,7 @@ class KODINotifier:
             password = sickbeard.KODI_PASSWORD
 
         if not host:
-            logger.log(u'No KODI host passed, aborting update', logger.DEBUG)
+            logger.log(u'No KODI host passed, aborting update', logger.WARNING)
             return False
 
         command = command.encode('utf-8')
@@ -422,10 +422,10 @@ class KODINotifier:
         """
 
         if not host:
-            logger.log(u'No KODI host passed, aborting update', logger.DEBUG)
+            logger.log(u'No KODI host passed, aborting update', logger.WARNING)
             return False
 
-        logger.log(u"Updating KODI library via JSON method for host: " + host, logger.INFO)
+        logger.log(u"Updating KODI library via JSON method for host: " + host, logger.DEBUG)
 
         # if we're doing per-show
         if showName:
@@ -488,7 +488,7 @@ class KODINotifier:
 
         # do a full update if requested
         else:
-            logger.log(u"Doing Full Library KODI update on host: " + host, logger.INFO)
+            logger.log(u"Doing Full Library KODI update on host: " + host, logger.DEBUG)
             updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","id":1}'
             request = self._send_to_kodi_json(updateCommand, host, sickbeard.KODI_USERNAME, sickbeard.KODI_PASSWORD)
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index a45a9b7184ac0e284e766ae5aff9b857a6a8153a..83c6119b0dad905eb0cfdda171cc3bd1101e88a5 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -497,13 +497,13 @@ class PostProcessor(object):
         if none were found.
""" - logger.log(u"Analyzing name " + repr(name)) - to_return = (None, None, [], None, None) if not name: return to_return + logger.log(u"Analyzing name " + repr(name), logger.DEBUG) + name = helpers.remove_non_release_groups(helpers.remove_extension(name)) # parse the name to break it into show name, season, and episode diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py index 9712dc90cdbdb18736c2b7b7d8a42f462dc702df..5fb4192903abda28e527cd623699c93cc04cb008 100644 --- a/sickbeard/processTV.py +++ b/sickbeard/processTV.py @@ -164,7 +164,7 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior nzbNameOriginal = nzbName if not postpone: - result.output += logHelper(u"PostProcessing Path: " + path, logger.DEBUG) + result.output += logHelper(u"PostProcessing Path: " + path, logger.INFO) result.output += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG) rarFiles = filter(helpers.isRarFile, files) @@ -233,7 +233,7 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior videoInRar = filter(helpers.isMediaFile, rarContent) notwantedFiles = [x for x in fileList if x not in videoFiles] if notwantedFiles: - result.output += logHelper(u"Found unwanted files: " + str(notwantedFiles), logger.INFO) + result.output += logHelper(u"Found unwanted files: " + str(notwantedFiles), logger.DEBUG) #Don't Link media when the media is extracted from a rar in the same path if process_method in ('hardlink', 'symlink') and videoInRar: @@ -267,7 +267,7 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior result.missedfiles.append(processPath + " : Syncfiles found") if result.aggresult: - result.output += logHelper(u"Successfully processed") + result.output += logHelper(u"Processing completed") if result.missedfiles: result.output += logHelper(u"I did encounter some unprocessable items: ") for missedfile in result.missedfiles: @@ -316,9 +316,8 @@ def validateDir(path, dirName, nzbNameOriginal, failed, result): ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek( os.path.realpath, sqlShow["location"]).lower(): result.output += logHelper( - u"You're trying to post process an episode that's already been moved to its show dir, skipping", + u"Cannot process an episode that's already been moved to its show dir, skipping " + dirName, logger.ERROR) - result.missedfiles.append(dirName + " : Already processed") return False # Get the videofile list for the next checks @@ -357,7 +356,7 @@ def validateDir(path, dirName, nzbNameOriginal, failed, result): except (InvalidNameException, InvalidShowException): pass - result.missedfiles.append(dirName + " : No processable items found in folder") + result.output += logHelper(dirName + " : No processable items found in folder", logger.DEBUG) return False def unRAR(path, rarFiles, force, result): @@ -383,7 +382,6 @@ def unRAR(path, rarFiles, force, result): u"Archive file already post-processed, extraction skipped: " + file_in_archive, logger.DEBUG) skip_file = True - result.missedfiles.append(archive + " : RAR already processed") break if skip_file: @@ -492,13 +490,12 @@ def process_media(processPath, videoFiles, nzbName, process_method, force, is_pr processor = None for cur_video_file in videoFiles: + cur_video_file_path = ek.ek(os.path.join, processPath, cur_video_file) if already_postprocessed(processPath, cur_video_file, force, result): - result.missedfiles.append(ek.ek(os.path.join, processPath, cur_video_file) + " : Already processed") + 
result.output += logHelper(u"Already Processed " + cur_video_file_path + " : Skipping", logger.DEBUG) continue - cur_video_file_path = ek.ek(os.path.join, processPath, cur_video_file) - try: processor = postProcessor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority) result.result = processor.process() diff --git a/sickbeard/scene_exceptions.py b/sickbeard/scene_exceptions.py index 169a26f3c4894436f57b5e7223d03be53dabf3db..61be00e33fadd8190516e9d5f09325d7ffed44d3 100644 --- a/sickbeard/scene_exceptions.py +++ b/sickbeard/scene_exceptions.py @@ -239,9 +239,9 @@ def retrieve_exceptions(): # since this could invalidate the results of the cache we clear it out after updating if changed_exceptions: - logger.log(u"Updated scene exceptions") + logger.log(u"Updated scene exceptions", logger.DEBUG) else: - logger.log(u"No scene exceptions update needed") + logger.log(u"No scene exceptions update needed", logger.DEBUG) # cleanup exception_dict.clear() diff --git a/sickbeard/search.py b/sickbeard/search.py index ff63be776b7fc79dd4176773af69d44ab70da238..b3e572ebea1932268a6e336118fae35ec0cc24c7 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -329,7 +329,7 @@ def wantedEpisodes(show, fromDate): anyQualities, bestQualities = common.Quality.splitQuality(show.quality) # @UnusedVariable allQualities = list(set(anyQualities + bestQualities)) - logger.log(u"Seeing if we need anything from " + show.name) + logger.log(u"Seeing if we need anything from " + show.name, logger.DEBUG) myDB = db.DBConnection() if show.air_by_date: @@ -496,10 +496,10 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False): break if search_mode == 'sponly': - logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...") + logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...", logger.DEBUG) search_mode = 'eponly' else: - logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...") + logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...", logger.DEBUG) search_mode = 'sponly' # skip to next provider if we have no results to process diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py index 582709640e3e9b5508f25ed61d44074c6733920a..fc50b66bfc517327f096a2d5306ed3f99a1ba023 100644 --- a/sickbeard/searchBacklog.py +++ b/sickbeard/searchBacklog.py @@ -137,7 +137,7 @@ class BacklogSearcher: def _get_segments(self, show, fromDate): anyQualities, bestQualities = common.Quality.splitQuality(show.quality) # @UnusedVariable - logger.log(u"Seeing if we need anything from {show_name}".format(show_name=show.name)) + logger.log(u"Seeing if we need anything from {show_name}".format(show_name=show.name), logger.DEBUG) myDB = db.DBConnection() if show.air_by_date: diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py index 9f3a9ea8bfd85f6341abec4d4f8726e9ed1d0bb5..c37f37533ee98d80b8e29e34fa0b8800dd586cd6 100644 --- a/sickbeard/show_name_helpers.py +++ b/sickbeard/show_name_helpers.py @@ -359,7 +359,7 @@ def determineReleaseName(dir_name=None, nzb_name=None): # NOTE: Multiple failed downloads will change the folder name. # (e.g., appending #s) # Should we handle that? - logger.log(u"Folder name (" + folder + ") appears to be a valid release name. Using it.") + logger.log(u"Folder name (" + folder + ") appears to be a valid release name. 
Using it.", logger.DEBUG) return folder return None diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py index 0b9d85f7a22cff10374228cb65594ea1b766a3c6..0da740f5cf638ea5df96e06e86fa4b6c2f2b5569 100644 --- a/sickbeard/show_queue.py +++ b/sickbeard/show_queue.py @@ -519,7 +519,7 @@ class QueueItemUpdate(ShowQueueItem): ShowQueueItem.run(self) - logger.log(u"Beginning update of " + self.show.name) + logger.log(u"Beginning update of " + self.show.name, logger.DEBUG) logger.log(u"Retrieving show info from " + sickbeard.indexerApi(self.show.indexer).name + "", logger.DEBUG) try: diff --git a/sickbeard/tv.py b/sickbeard/tv.py index d06b1965f97d8de834ad10a37f8db6331e6a5118..4b8e0199d33e2d1fb4134b85f2b297e03e5ea400 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -339,7 +339,7 @@ class TVShow(object): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return False - logger.log(str(self.indexerid) + u": Writing NFOs for show") + logger.log(str(self.indexerid) + u": Writing NFOs for show", logger.DEBUG) for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.create_show_metadata(self) or result @@ -364,7 +364,7 @@ class TVShow(object): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return - logger.log(str(self.indexerid) + u": Writing NFOs for all episodes") + logger.log(str(self.indexerid) + u": Writing NFOs for all episodes", logger.DEBUG) myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid]) @@ -405,10 +405,10 @@ class TVShow(object): def loadEpisodesFromDir(self): if not ek.ek(os.path.isdir, self._location): - logger.log(str(self.indexerid) + u": Show dir doesn't exist, not loading episodes from disk") + logger.log(str(self.indexerid) + u": Show dir doesn't exist, not loading episodes from disk", logger.DEBUG) return - logger.log(str(self.indexerid) + u": Loading all episodes from the show directory " + self._location) + logger.log(str(self.indexerid) + u": Loading all episodes from the show directory " + self._location, logger.DEBUG) # get file list mediaFiles = helpers.listMediaFiles(self._location) @@ -466,7 +466,7 @@ class TVShow(object): def loadEpisodesFromDB(self): - logger.log(u"Loading all episodes from the DB") + logger.log(u"Loading all episodes from the DB", logger.DEBUG) myDB = db.DBConnection() sql = "SELECT * FROM tv_episodes WHERE showid = ?" 
@@ -549,7 +549,7 @@ class TVShow(object):
             return None
 
         logger.log(
-            str(self.indexerid) + u": Loading all episodes from " + sickbeard.indexerApi(self.indexer).name + "..")
+            str(self.indexerid) + u": Loading all episodes from " + sickbeard.indexerApi(self.indexer).name + "..", logger.DEBUG)
 
         scannedEps = {}
@@ -760,7 +760,7 @@ class TVShow(object):
 
     def loadFromDB(self, skipNFO=False):
 
-        logger.log(str(self.indexerid) + u": Loading show info from database")
+        logger.log(str(self.indexerid) + u": Loading show info from database", logger.DEBUG)
 
         myDB = db.DBConnection()
         sqlResults = myDB.select("SELECT * FROM tv_shows WHERE indexer_id = ?", [self.indexerid])
@@ -841,7 +841,7 @@ class TVShow(object):
 
     def loadFromIndexer(self, cache=True, tvapi=None, cachedSeason=None):
 
-        logger.log(str(self.indexerid) + u": Loading show info from " + sickbeard.indexerApi(self.indexer).name)
+        logger.log(str(self.indexerid) + u": Loading show info from " + sickbeard.indexerApi(self.indexer).name, logger.DEBUG)
 
         # There's gotta be a better way of doing this but we don't wanna
         # change the cache value elsewhere
@@ -909,7 +909,7 @@ class TVShow(object):
             self.imdbid = i.title2imdbID(self.name, kind='tv series')
 
         if self.imdbid:
-            logger.log(str(self.indexerid) + u": Loading show info from IMDb")
+            logger.log(str(self.indexerid) + u": Loading show info from IMDb", logger.DEBUG)
 
             imdbTv = i.get_movie(str(re.sub("[^0-9]", "", self.imdbid)))
@@ -1048,7 +1048,7 @@ class TVShow(object):
     def populateCache(self):
         cache_inst = image_cache.ImageCache()
 
-        logger.log(u"Checking & filling cache for show " + self.name)
+        logger.log(u"Checking & filling cache for show " + self.name, logger.DEBUG)
         cache_inst.fill_cache(self)
 
     def refreshDir(self):
@@ -1061,7 +1061,7 @@ class TVShow(object):
         self.loadEpisodesFromDir()
 
         # run through all locations from DB, check that they exist
-        logger.log(str(self.indexerid) + u": Loading all episodes with a location from the database")
+        logger.log(str(self.indexerid) + u": Loading all episodes with a location from the database", logger.DEBUG)
 
         myDB = db.DBConnection()
         sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index c1fff3b24b00db62dbcd30ff527e0f74f6f5d070..817fecc29f5cbafcefe6481a271c65441d47cf63 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -401,7 +401,7 @@ class WebRoot(WebHandler):
 
     def setHomeLayout(self, layout):
 
-        if layout not in ('poster', 'small', 'banner', 'simple'):
+        if layout not in ('poster', 'small', 'banner', 'simple', 'coverflow'):
            layout = 'poster'

         sickbeard.HOME_LAYOUT = layout