diff --git a/gui/slick/views/config.mako b/gui/slick/views/config.mako
index 48552c0e8ad5780b03cb2ed9825c314d82505c5a..3e964a45785b0b29ed25577cb691ba5fa46037c0 100644
--- a/gui/slick/views/config.mako
+++ b/gui/slick/views/config.mako
@@ -128,7 +128,7 @@
                     <i class="icon16-config-db"></i>&nbsp;&nbsp;${_('Database File')}:
                 </div>
                 <div class="col-lg-9 col-md-9 col-sm-9 col-xs-12">
-                    ${db.dbFilename()}
+                    ${db.db_full_path()}
                 </div>
             </div>
             <br/>
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 529af1f756bf2ccdfe7762ae06c082d784ffdd49..f05317a4d1385ff12fb3a50ce1df4343aee5e55e 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -56,7 +56,6 @@ from sickbeard.providers.rsstorrent import TorrentRssProvider
 from sickrage.helper import setup_github
 from sickrage.helper.encoding import ek
 from sickrage.helper.exceptions import ex
-from sickrage.providers.GenericProvider import GenericProvider
 from sickrage.system.Shutdown import Shutdown
 
 from sickbeard import scene_exceptions
@@ -1472,19 +1471,19 @@ def initialize(consoleLogging=True):  # pylint: disable=too-many-locals, too-man
 
         # initialize the main SB database
         main_db_con = db.DBConnection()
-        db.upgradeDatabase(main_db_con, mainDB.InitialSchema)
+        db.upgrade_database(main_db_con, mainDB.InitialSchema)
 
         # initialize the cache database
         cache_db_con = db.DBConnection('cache.db')
-        db.upgradeDatabase(cache_db_con, cache_db.InitialSchema)
+        db.upgrade_database(cache_db_con, cache_db.InitialSchema)
 
         # initialize the failed downloads database
         failed_db_con = db.DBConnection('failed.db')
-        db.upgradeDatabase(failed_db_con, failed_db.InitialSchema)
+        db.upgrade_database(failed_db_con, failed_db.InitialSchema)
 
         # fix up any db problems
         main_db_con = db.DBConnection()
-        db.sanityCheckDatabase(main_db_con, mainDB.MainSanityCheck)
+        db.sanity_check_database(main_db_con, mainDB.MainSanityCheck)
 
         # migrate the config if it needs it
         migrator = ConfigMigrator(CFG)
diff --git a/sickbeard/classes.py b/sickbeard/classes.py
index 426098a5163ec2ba06d6bf2e061ae59b1e56eed7..b177bb7ea98f89ecff9664590166c0844dc1cb80 100644
--- a/sickbeard/classes.py
+++ b/sickbeard/classes.py
@@ -98,9 +98,6 @@ class SearchResult(object):  # pylint: disable=too-few-public-methods, too-many-
 
         return my_string
 
-    def fileName(self):
-        return '{0}.{1}'.format(self.episodes[0].prettyName(), self.resultType)
-
 
 class NZBSearchResult(SearchResult):  # pylint: disable=too-few-public-methods
     """
diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py
index 34611a726afb9fa7b4d18e7c103c9bcf7d141491..a02f58ddf99b01dcd596ff30acd60fc3158e8e60 100644
--- a/sickbeard/dailysearcher.py
+++ b/sickbeard/dailysearcher.py
@@ -90,10 +90,10 @@ class DailySearcher(object):  # pylint:disable=too-few-public-methods
             ep = show.getEpisode(sqlEp[b"season"], sqlEp[b"episode"])
             with ep.lock:
                 if ep.season == 0:
-                    logger.log("New episode " + ep.prettyName() + " airs today, setting status to SKIPPED because is a special season")
+                    logger.log("New episode " + ep.pretty_name() + " airs today, setting status to SKIPPED because is a special season")
                     ep.status = common.SKIPPED
                 else:
-                    logger.log("New episode {0} airs today, setting to default episode status for this show: {1}".format(ep.prettyName(), common.statusStrings[ep.show.default_ep_status]))
+                    logger.log("New episode {0} airs today, setting to default episode status for this show: {1}".format(ep.pretty_name(), common.statusStrings[ep.show.default_ep_status]))
                     ep.status = ep.show.default_ep_status
 
                 sql_l.append(ep.get_sql())
diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py
index 468fb3a1daef39d811850c11ddf8b527e23b8746..32690262d0dab739b213c7f0a29b006c58e75bbc 100644
--- a/sickbeard/databases/cache_db.py
+++ b/sickbeard/databases/cache_db.py
@@ -26,7 +26,7 @@ from sickbeard import db
 # Add new migrations at the bottom of the list; subclass the previous migration.
 class InitialSchema(db.SchemaUpgrade):
     def test(self):
-        return self.hasTable("db_version")
+        return self.has_table("db_version")
 
     def execute(self):
         queries = [
@@ -48,7 +48,7 @@ class InitialSchema(db.SchemaUpgrade):
 
 class AddSceneExceptions(InitialSchema):
     def test(self):
-        return self.hasTable("scene_exceptions")
+        return self.has_table("scene_exceptions")
 
     def execute(self):
         self.connection.action(
@@ -57,7 +57,7 @@ class AddSceneExceptions(InitialSchema):
 
 class AddSceneNameCache(AddSceneExceptions):
     def test(self):
-        return self.hasTable("scene_names")
+        return self.has_table("scene_names")
 
     def execute(self):
         self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);")
@@ -65,7 +65,7 @@ class AddSceneNameCache(AddSceneExceptions):
 
 class AddNetworkTimezones(AddSceneNameCache):
     def test(self):
-        return self.hasTable("network_timezones")
+        return self.has_table("network_timezones")
 
     def execute(self):
         self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);")
@@ -73,7 +73,7 @@ class AddNetworkTimezones(AddSceneNameCache):
 
 class AddLastSearch(AddNetworkTimezones):
     def test(self):
-        return self.hasTable("lastSearch")
+        return self.has_table("lastSearch")
 
     def execute(self):
         self.connection.action("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);")
@@ -81,23 +81,23 @@ class AddLastSearch(AddNetworkTimezones):
 
 class AddSceneExceptionsSeasons(AddLastSearch):
     def test(self):
-        return self.hasColumn("scene_exceptions", "season")
+        return self.has_column("scene_exceptions", "season")
 
     def execute(self):
-        self.addColumn("scene_exceptions", "season", "NUMERIC", -1)
+        self.add_column("scene_exceptions", "season", "NUMERIC", -1)
 
 
 class AddSceneExceptionsCustom(AddSceneExceptionsSeasons):  # pylint:disable=too-many-ancestors
     def test(self):
-        return self.hasColumn("scene_exceptions", "custom")
+        return self.has_column("scene_exceptions", "custom")
 
     def execute(self):
-        self.addColumn("scene_exceptions", "custom", "NUMERIC", 0)
+        self.add_column("scene_exceptions", "custom", "NUMERIC", 0)
 
 
 class AddSceneExceptionsRefresh(AddSceneExceptionsCustom):  # pylint:disable=too-many-ancestors
     def test(self):
-        return self.hasTable("scene_exceptions_refresh")
+        return self.has_table("scene_exceptions_refresh")
 
     def execute(self):
         self.connection.action(
@@ -106,7 +106,7 @@ class AddSceneExceptionsRefresh(AddSceneExceptionsCustom):  # pylint:disable=too
 
 class ConvertSceneExeptionsToIndexerScheme(AddSceneExceptionsRefresh):  # pylint:disable=too-many-ancestors
     def test(self):
-        return self.hasColumn("scene_exceptions", "indexer_id")
+        return self.has_column("scene_exceptions", "indexer_id")
 
     def execute(self):
         self.connection.action("DROP TABLE IF EXISTS tmp_scene_exceptions;")
@@ -118,7 +118,7 @@ class ConvertSceneExeptionsToIndexerScheme(AddSceneExceptionsRefresh):  # pylint
 
 class ConvertSceneNamesToIndexerScheme(AddSceneExceptionsRefresh):  # pylint:disable=too-many-ancestors
     def test(self):
-        return self.hasColumn("scene_names", "indexer_id")
+        return self.has_column("scene_names", "indexer_id")
 
     def execute(self):
         self.connection.action("DROP TABLE IF EXISTS tmp_scene_names;")
diff --git a/sickbeard/databases/failed_db.py b/sickbeard/databases/failed_db.py
index 2e8a75d499a690bedb8ea6a4ef20e7c9df09ac53..3637e3052830a3fad598ccbf79cc6eeb6c9df4cc 100644
--- a/sickbeard/databases/failed_db.py
+++ b/sickbeard/databases/failed_db.py
@@ -27,7 +27,7 @@ from sickbeard.common import Quality
 # Add new migrations at the bottom of the list; subclass the previous migration.
 class InitialSchema(db.SchemaUpgrade):
     def test(self):
-        return self.hasTable('db_version')
+        return self.has_table('db_version')
 
     def execute(self):
         queries = [
@@ -45,18 +45,18 @@ class InitialSchema(db.SchemaUpgrade):
 
 class SizeAndProvider(InitialSchema):
     def test(self):
-        return self.hasColumn('failed', 'size') and self.hasColumn('failed', 'provider')
+        return self.has_column('failed', 'size') and self.has_column('failed', 'provider')
 
     def execute(self):
-        self.addColumn('failed', 'size', 'NUMERIC')
-        self.addColumn('failed', 'provider', 'TEXT', '')
+        self.add_column('failed', 'size', 'NUMERIC')
+        self.add_column('failed', 'provider', 'TEXT', '')
 
 
 class History(SizeAndProvider):
     """Snatch history that can't be modified by the user"""
 
     def test(self):
-        return self.hasTable('history')
+        return self.has_table('history')
 
     def execute(self):
         self.connection.action('CREATE TABLE history (date NUMERIC, ' +
@@ -67,10 +67,10 @@ class HistoryStatus(History):
     """Store episode status before snatch to revert to if necessary"""
 
     def test(self):
-        return self.hasColumn('history', 'old_status')
+        return self.has_column('history', 'old_status')
 
     def execute(self):
-        self.addColumn('history', 'old_status', 'NUMERIC', Quality.NONE)
-        self.addColumn('history', 'showid', 'NUMERIC', '-1')
-        self.addColumn('history', 'season', 'NUMERIC', '-1')
-        self.addColumn('history', 'episode', 'NUMERIC', '-1')
+        self.add_column('history', 'old_status', 'NUMERIC', Quality.NONE)
+        self.add_column('history', 'showid', 'NUMERIC', '-1')
+        self.add_column('history', 'season', 'NUMERIC', '-1')
+        self.add_column('history', 'episode', 'NUMERIC', '-1')
diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py
index bb588f73b2d5dfef2cfe4894894bb0bdfee07934..327626ae880dce6e111dafe757bb7d5ab759ccca 100644
--- a/sickbeard/databases/mainDB.py
+++ b/sickbeard/databases/mainDB.py
@@ -219,7 +219,7 @@ class MainSanityCheck(db.DBSanityCheck):
     def fix_unaired_episodes(self):
 
         curDate = datetime.date.today()
-        
+
         if curDate.year >= 2017:
 
             sql_results = self.connection.select(
@@ -301,7 +301,7 @@ class MainSanityCheck(db.DBSanityCheck):
 
 def backupDatabase(version):
     logger.log("Backing up database before upgrade")
-    if not helpers.backupVersionedFile(db.dbFilename(), version):
+    if not helpers.backupVersionedFile(db.db_full_path(), version):
         logger.log_error_and_exit("Database backup failed, abort upgrading database")
     else:
         logger.log("Proceeding with upgrade")
@@ -314,10 +314,10 @@ def backupDatabase(version):
 
 class InitialSchema(db.SchemaUpgrade):
     def test(self):
-        return self.hasTable("db_version")
+        return self.has_table("db_version")
 
     def execute(self):
-        if not self.hasTable("tv_shows") and not self.hasTable("db_version"):
+        if not self.has_table("tv_shows") and not self.has_table("db_version"):
             queries = [
                 "CREATE TABLE db_version(db_version INTEGER);",
                 "CREATE TABLE history(action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT, version NUMERIC DEFAULT -1);",
@@ -342,7 +342,7 @@ class InitialSchema(db.SchemaUpgrade):
                 self.connection.action(query)
 
         else:
-            cur_db_version = self.checkDBVersion()
+            cur_db_version = self.check_db_version()
 
             if cur_db_version < MIN_DB_VERSION:
                 logger.log_error_and_exit(
@@ -359,17 +359,17 @@ class InitialSchema(db.SchemaUpgrade):
 
 class AddSizeAndSceneNameFields(InitialSchema):
     def test(self):
-        return self.checkDBVersion() >= 10
+        return self.check_db_version() >= 10
 
     def execute(self):
 
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
-        if not self.hasColumn("tv_episodes", "file_size"):
-            self.addColumn("tv_episodes", "file_size")
+        if not self.has_column("tv_episodes", "file_size"):
+            self.add_column("tv_episodes", "file_size")
 
-        if not self.hasColumn("tv_episodes", "release_name"):
-            self.addColumn("tv_episodes", "release_name", "TEXT", "")
+        if not self.has_column("tv_episodes", "release_name"):
+            self.add_column("tv_episodes", "release_name", "TEXT", "")
 
         ep_results = self.connection.select("SELECT episode_id, location, file_size FROM tv_episodes")
 
@@ -466,15 +466,15 @@ class AddSizeAndSceneNameFields(InitialSchema):
             self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                                    [ep_file_name, cur_result[b"episode_id"]])
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class RenameSeasonFolders(AddSizeAndSceneNameFields):
     def test(self):
-        return self.checkDBVersion() >= 11
+        return self.check_db_version() >= 11
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         # rename the column
         self.connection.action("DROP TABLE IF EXISTS tmp_tv_shows")
@@ -489,7 +489,7 @@ class RenameSeasonFolders(AddSizeAndSceneNameFields):
         self.connection.action("UPDATE tv_shows SET flatten_folders = 0 WHERE flatten_folders = 2")
         self.connection.action("DROP TABLE tmp_tv_shows")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class Add1080pAndRawHDQualities(RenameSeasonFolders):
@@ -509,7 +509,7 @@ class Add1080pAndRawHDQualities(RenameSeasonFolders):
     """
 
     def test(self):
-        return self.checkDBVersion() >= 12
+        return self.check_db_version() >= 12
 
     def _update_status(self, old_status):
         (status, quality) = common.Quality.splitCompositeStatus(old_status)
@@ -556,7 +556,7 @@ class Add1080pAndRawHDQualities(RenameSeasonFolders):
         return result
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         # update the default quality so we dont grab the wrong qualities after migration
         sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT)
@@ -620,7 +620,7 @@ class Add1080pAndRawHDQualities(RenameSeasonFolders):
                        [self._update_quality(cur_entry[b"quality"]), cur_entry[b"showid"], cur_entry[b"date"]]])
         self.connection.mass_action(cl)
 
-        self.incDBVersion()
+        self.increment_db_version()
 
         # cleanup and reduce db if any previous data was removed
         logger.log("Performing a vacuum on the database.", logger.DEBUG)
@@ -631,130 +631,130 @@ class AddShowidTvdbidIndex(Add1080pAndRawHDQualities):
     """ Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries """
 
     def test(self):
-        return self.checkDBVersion() >= 13
+        return self.check_db_version() >= 13
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Check for duplicate shows before adding unique index.")
         MainSanityCheck(self.connection).fix_duplicate_shows(b'tvdb_id')
 
         logger.log("Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.")
-        if not self.hasTable("idx_showid"):
+        if not self.has_table("idx_showid"):
             self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")
-        if not self.hasTable("idx_tvdb_id"):
+        if not self.has_table("idx_tvdb_id"):
             self.connection.action("CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddLastUpdateTVDB(AddShowidTvdbidIndex):
     """ Adding column last_update_tvdb to tv_shows for controlling nightly updates """
 
     def test(self):
-        return self.checkDBVersion() >= 14
+        return self.check_db_version() >= 14
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column last_update_tvdb to tvshows")
-        if not self.hasColumn("tv_shows", "last_update_tvdb"):
-            self.addColumn("tv_shows", "last_update_tvdb", default=1)
+        if not self.has_column("tv_shows", "last_update_tvdb"):
+            self.add_column("tv_shows", "last_update_tvdb", default=1)
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddDBIncreaseTo15(AddLastUpdateTVDB):
     def test(self):
-        return self.checkDBVersion() >= 15
+        return self.check_db_version() >= 15
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
-        self.incDBVersion()
+        backupDatabase(self.check_db_version())
+        self.increment_db_version()
 
 
 class AddIMDbInfo(AddDBIncreaseTo15):
     def test(self):
-        return self.checkDBVersion() >= 16
+        return self.check_db_version() >= 16
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
         self.connection.action(
             "CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
 
-        if not self.hasColumn("tv_shows", "imdb_id"):
-            self.addColumn("tv_shows", "imdb_id")
+        if not self.has_column("tv_shows", "imdb_id"):
+            self.add_column("tv_shows", "imdb_id")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddProperNamingSupport(AddIMDbInfo):
     def test(self):
-        return self.checkDBVersion() >= 17
+        return self.check_db_version() >= 17
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
-        self.addColumn("tv_episodes", "is_proper")
-        self.incDBVersion()
+        backupDatabase(self.check_db_version())
+        self.add_column("tv_episodes", "is_proper")
+        self.increment_db_version()
 
 
 class AddEmailSubscriptionTable(AddProperNamingSupport):
     def test(self):
-        return self.checkDBVersion() >= 18
+        return self.check_db_version() >= 18
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
-        self.addColumn('tv_shows', 'notify_list', 'TEXT', None)
-        self.incDBVersion()
+        backupDatabase(self.check_db_version())
+        self.add_column('tv_shows', 'notify_list', 'TEXT', None)
+        self.increment_db_version()
 
 
 class AddProperSearch(AddEmailSubscriptionTable):
     def test(self):
-        return self.checkDBVersion() >= 19
+        return self.check_db_version() >= 19
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column last_proper_search to info")
-        if not self.hasColumn("info", "last_proper_search"):
-            self.addColumn("info", "last_proper_search", default=1)
+        if not self.has_column("info", "last_proper_search"):
+            self.add_column("info", "last_proper_search", default=1)
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddDvdOrderOption(AddProperSearch):
     def test(self):
-        return self.checkDBVersion() >= 20
+        return self.check_db_version() >= 20
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
         logger.log("Adding column dvdorder to tvshows")
-        if not self.hasColumn("tv_shows", "dvdorder"):
-            self.addColumn("tv_shows", "dvdorder", "NUMERIC", "0")
+        if not self.has_column("tv_shows", "dvdorder"):
+            self.add_column("tv_shows", "dvdorder", "NUMERIC", "0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddSubtitlesSupport(AddDvdOrderOption):
     def test(self):
-        return self.checkDBVersion() >= 21
+        return self.check_db_version() >= 21
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
-        if not self.hasColumn("tv_shows", "subtitles"):
-            self.addColumn("tv_shows", "subtitles")
-            self.addColumn("tv_episodes", "subtitles", "TEXT", "")
-            self.addColumn("tv_episodes", "subtitles_searchcount")
-            self.addColumn("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min))
-        self.incDBVersion()
+        backupDatabase(self.check_db_version())
+        if not self.has_column("tv_shows", "subtitles"):
+            self.add_column("tv_shows", "subtitles")
+            self.add_column("tv_episodes", "subtitles", "TEXT", "")
+            self.add_column("tv_episodes", "subtitles_searchcount")
+            self.add_column("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min))
+        self.increment_db_version()
 
 
 class ConvertTVShowsToIndexerScheme(AddSubtitlesSupport):
     def test(self):
-        return self.checkDBVersion() >= 22
+        return self.check_db_version() >= 22
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Converting TV Shows table to Indexer Scheme...")
 
@@ -773,15 +773,15 @@ class ConvertTVShowsToIndexerScheme(AddSubtitlesSupport):
         self.connection.action("UPDATE tv_shows SET classification = 'Scripted'")
         self.connection.action("UPDATE tv_shows SET indexer = 1")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
     def test(self):
-        return self.checkDBVersion() >= 23
+        return self.check_db_version() >= 23
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Converting TV Episodes table to Indexer Scheme...")
 
@@ -806,40 +806,40 @@ class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
 
         self.connection.action("UPDATE tv_episodes SET indexer = 1, is_proper = 0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):
     def test(self):
-        return self.checkDBVersion() >= 24
+        return self.check_db_version() >= 24
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Converting IMDB Info table to Indexer Scheme...")
 
         self.connection.action("DROP TABLE IF EXISTS tmp_imdb_info")
 
-        if self.hasTable("imdb_info"):
+        if self.has_table("imdb_info"):
             self.connection.action("ALTER TABLE imdb_info RENAME TO tmp_imdb_info")
 
         self.connection.action(
             "CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
 
-        if self.hasTable("tmp_imdb_info"):
+        if self.has_table("tmp_imdb_info"):
             self.connection.action("INSERT INTO imdb_info SELECT * FROM tmp_imdb_info")
 
         self.connection.action("DROP TABLE IF EXISTS tmp_imdb_info")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
     def test(self):
-        return self.checkDBVersion() >= 25
+        return self.check_db_version() >= 25
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Converting Info table to Indexer Scheme...")
 
@@ -852,45 +852,45 @@ class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
             "INSERT INTO info SELECT * FROM tmp_info")
         self.connection.action("DROP TABLE tmp_info")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
     def test(self):
-        return self.checkDBVersion() >= 26
+        return self.check_db_version() >= 26
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column archive_firstmatch to tvshows")
-        if not self.hasColumn("tv_shows", "archive_firstmatch"):
-            self.addColumn("tv_shows", "archive_firstmatch", "NUMERIC", "0")
+        if not self.has_column("tv_shows", "archive_firstmatch"):
+            self.add_column("tv_shows", "archive_firstmatch", "NUMERIC", "0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddSceneNumbering(AddArchiveFirstMatchOption):
     def test(self):
-        return self.checkDBVersion() >= 27
+        return self.check_db_version() >= 27
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
-        if self.hasTable("scene_numbering"):
+        if self.has_table("scene_numbering"):
             self.connection.action("DROP TABLE scene_numbering")
 
         self.connection.action(
             "CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode, scene_season, scene_episode))")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class ConvertIndexerToInteger(AddSceneNumbering):
     def test(self):
-        return self.checkDBVersion() >= 28
+        return self.check_db_version() >= 28
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         cl = []
         logger.log("Converting Indexer to Integer ...", logger.INFO)
@@ -903,41 +903,41 @@ class ConvertIndexerToInteger(AddSceneNumbering):
 
         self.connection.mass_action(cl)
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddRequireAndIgnoreWords(ConvertIndexerToInteger):
     """ Adding column rls_require_words and rls_ignore_words to tv_shows """
 
     def test(self):
-        return self.checkDBVersion() >= 29
+        return self.check_db_version() >= 29
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column rls_require_words to tvshows")
-        if not self.hasColumn("tv_shows", "rls_require_words"):
-            self.addColumn("tv_shows", "rls_require_words", "TEXT", "")
+        if not self.has_column("tv_shows", "rls_require_words"):
+            self.add_column("tv_shows", "rls_require_words", "TEXT", "")
 
         logger.log("Adding column rls_ignore_words to tvshows")
-        if not self.hasColumn("tv_shows", "rls_ignore_words"):
-            self.addColumn("tv_shows", "rls_ignore_words", "TEXT", "")
+        if not self.has_column("tv_shows", "rls_ignore_words"):
+            self.add_column("tv_shows", "rls_ignore_words", "TEXT", "")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddSportsOption(AddRequireAndIgnoreWords):
     def test(self):
-        return self.checkDBVersion() >= 30
+        return self.check_db_version() >= 30
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column sports to tvshows")
-        if not self.hasColumn("tv_shows", "sports"):
-            self.addColumn("tv_shows", "sports", "NUMERIC", "0")
+        if not self.has_column("tv_shows", "sports"):
+            self.add_column("tv_shows", "sports", "NUMERIC", "0")
 
-        if self.hasColumn("tv_shows", "air_by_date") and self.hasColumn("tv_shows", "sports"):
+        if self.has_column("tv_shows", "air_by_date") and self.has_column("tv_shows", "sports"):
             # update sports column
             logger.log("[4/4] Updating tv_shows to reflect the correct sports value...", logger.INFO)
             cl = []
@@ -949,70 +949,70 @@ class AddSportsOption(AddRequireAndIgnoreWords):
                 cl.append(["UPDATE tv_shows SET air_by_date = 0 WHERE show_id = ?", [cur_entry[b"show_id"]]])
             self.connection.mass_action(cl)
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddSceneNumberingToTvEpisodes(AddSportsOption):
     def test(self):
-        return self.checkDBVersion() >= 31
+        return self.check_db_version() >= 31
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column scene_season and scene_episode to tvepisodes")
-        self.addColumn("tv_episodes", "scene_season", "NUMERIC", "NULL")
-        self.addColumn("tv_episodes", "scene_episode", "NUMERIC", "NULL")
+        self.add_column("tv_episodes", "scene_season", "NUMERIC", "NULL")
+        self.add_column("tv_episodes", "scene_episode", "NUMERIC", "NULL")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddAnimeTVShow(AddSceneNumberingToTvEpisodes):
     def test(self):
-        return self.checkDBVersion() >= 32
+        return self.check_db_version() >= 32
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column anime to tv_episodes")
-        self.addColumn("tv_shows", "anime", "NUMERIC", "0")
+        self.add_column("tv_shows", "anime", "NUMERIC", "0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddAbsoluteNumbering(AddAnimeTVShow):
     def test(self):
-        return self.checkDBVersion() >= 33
+        return self.check_db_version() >= 33
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column absolute_number to tv_episodes")
-        self.addColumn("tv_episodes", "absolute_number", "NUMERIC", "0")
+        self.add_column("tv_episodes", "absolute_number", "NUMERIC", "0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddSceneAbsoluteNumbering(AddAbsoluteNumbering):
     def test(self):
-        return self.checkDBVersion() >= 34
+        return self.check_db_version() >= 34
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column absolute_number and scene_absolute_number to scene_numbering")
-        self.addColumn("scene_numbering", "absolute_number", "NUMERIC", "0")
-        self.addColumn("scene_numbering", "scene_absolute_number", "NUMERIC", "0")
+        self.add_column("scene_numbering", "absolute_number", "NUMERIC", "0")
+        self.add_column("scene_numbering", "scene_absolute_number", "NUMERIC", "0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddAnimeBlacklistWhitelist(AddSceneAbsoluteNumbering):
 
     def test(self):
-        return self.checkDBVersion() >= 35
+        return self.check_db_version() >= 35
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         cl = [
             ["CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)"],
@@ -1021,100 +1021,100 @@ class AddAnimeBlacklistWhitelist(AddSceneAbsoluteNumbering):
 
         self.connection.mass_action(cl)
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddSceneAbsoluteNumbering2(AddAnimeBlacklistWhitelist):
     def test(self):
-        return self.checkDBVersion() >= 36
+        return self.check_db_version() >= 36
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column scene_absolute_number to tv_episodes")
-        self.addColumn("tv_episodes", "scene_absolute_number", "NUMERIC", "0")
+        self.add_column("tv_episodes", "scene_absolute_number", "NUMERIC", "0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddXemRefresh(AddSceneAbsoluteNumbering2):
     def test(self):
-        return self.checkDBVersion() >= 37
+        return self.check_db_version() >= 37
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Creating table xem_refresh")
         self.connection.action(
             "CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddSceneToTvShows(AddXemRefresh):
     def test(self):
-        return self.checkDBVersion() >= 38
+        return self.check_db_version() >= 38
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column scene to tv_shows")
-        self.addColumn("tv_shows", "scene", "NUMERIC", "0")
+        self.add_column("tv_shows", "scene", "NUMERIC", "0")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddIndexerMapping(AddSceneToTvShows):
     def test(self):
-        return self.checkDBVersion() >= 39
+        return self.check_db_version() >= 39
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
-        if self.hasTable("indexer_mapping"):
+        if self.has_table("indexer_mapping"):
             self.connection.action("DROP TABLE indexer_mapping")
 
         logger.log("Adding table indexer_mapping")
         self.connection.action(
             "CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddVersionToTvEpisodes(AddIndexerMapping):
     def test(self):
-        return self.checkDBVersion() >= 40
+        return self.check_db_version() >= 40
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column version to tv_episodes and history")
-        self.addColumn("tv_episodes", "version", "NUMERIC", "-1")
-        self.addColumn("tv_episodes", "release_group", "TEXT", "")
-        self.addColumn("history", "version", "NUMERIC", "-1")
+        self.add_column("tv_episodes", "version", "NUMERIC", "-1")
+        self.add_column("tv_episodes", "release_group", "TEXT", "")
+        self.add_column("history", "version", "NUMERIC", "-1")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddDefaultEpStatusToTvShows(AddVersionToTvEpisodes):
     def test(self):
-        return self.checkDBVersion() >= 41
+        return self.check_db_version() >= 41
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Adding column default_ep_status to tv_shows")
-        self.addColumn("tv_shows", "default_ep_status", "NUMERIC", "-1")
+        self.add_column("tv_shows", "default_ep_status", "NUMERIC", "-1")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AlterTVShowsFieldTypes(AddDefaultEpStatusToTvShows):
     def test(self):
-        return self.checkDBVersion() >= 42
+        return self.check_db_version() >= 42
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Converting column indexer and default_ep_status field types to numeric")
         self.connection.action("DROP TABLE IF EXISTS tmp_tv_shows")
@@ -1123,34 +1123,21 @@ class AlterTVShowsFieldTypes(AddDefaultEpStatusToTvShows):
         self.connection.action("INSERT INTO tv_shows SELECT * FROM tmp_tv_shows")
         self.connection.action("DROP TABLE tmp_tv_shows")
 
-        self.incDBVersion()
+        self.increment_db_version()
 
 
 class AddMinorVersion(AlterTVShowsFieldTypes):
     def test(self):
-        return self.checkDBVersion() >= 42 and self.hasColumn(b'db_version', b'db_minor_version')
+        return self.has_column(b'db_version', b'db_minor_version')
 
-    def incDBVersion(self):
+    def increment_db_version(self):
         warnings.warn("Deprecated: Use inc_major_version or inc_minor_version instead", DeprecationWarning)
 
-    def inc_major_version(self):
-        major_version, minor_version = self.connection.version
-        major_version += 1
-        minor_version = 0
-        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version])
-        return self.connection.version
-
-    def inc_minor_version(self):
-        major_version, minor_version = self.connection.version
-        minor_version += 1
-        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version])
-        return self.connection.version
-
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backupDatabase(self.check_db_version())
 
         logger.log("Add minor version numbers to database")
-        self.addColumn(b'db_version', b'db_minor_version')
+        self.add_column(b'db_version', b'db_minor_version')
 
         self.inc_minor_version()
 
@@ -1162,11 +1149,11 @@ class UseSickRageMetadataForSubtitle(AlterTVShowsFieldTypes):
     Add a minor version for adding a show setting to use SR metadata for subtitles
     """
     def test(self):
-        return self.hasColumn('tv_shows', 'sub_use_sr_metadata')
+        return self.has_column('tv_shows', 'sub_use_sr_metadata')
 
     def execute(self):
-        backupDatabase(self.checkDBVersion())
-        self.addColumn('tv_shows', 'sub_use_sr_metadata', "NUMERIC", "0")
+        backupDatabase(self.check_db_version())
+        self.add_column('tv_shows', 'sub_use_sr_metadata', "NUMERIC", "0")
 
 
 class ResetDBVersion(UseSickRageMetadataForSubtitle):
diff --git a/sickbeard/db.py b/sickbeard/db.py
index 2bf6ce3aff7eaf61ea1f3329d4f960cd5c0f19f0..3ad661ab8cdb4a53324e5419b363a0b0efc20b37 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -39,12 +39,12 @@ db_cons = {}
 db_locks = {}
 
 
-def dbFilename(filename="sickbeard.db", suffix=None):
+def db_full_path(filename="sickbeard.db", suffix=None):
     """
     @param filename: The sqlite database filename to use. If not specified,
                      will be made to be sickbeard.db
-    @param suffix: The suffix to append to the filename. A '.' will be added
-                   automatically, i.e. suffix='v0' will make dbfile.db.v0
+    @param suffix: The suffix to append to the filename. A "." will be added
+                   automatically, i.e. suffix="v0" will make filename.db.v0
     @return: the correct location of the database file.
     """
     if suffix:
@@ -53,17 +53,19 @@ def dbFilename(filename="sickbeard.db", suffix=None):
 
 
 class DBConnection(object):
+    MAX_ATTEMPTS = 5
+
     def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):
 
         self.filename = filename
         self.suffix = suffix
         self.row_type = row_type
-
+        self.full_path = db_full_path(self.filename, self.suffix)
         try:
             if self.filename not in db_cons or not db_cons[self.filename]:
                 db_locks[self.filename] = threading.Lock()
 
-                self.connection = sqlite3.connect(dbFilename(self.filename, self.suffix), 20, check_same_thread=False)
+                self.connection = sqlite3.connect(self.full_path, 20, check_same_thread=False)
                 self.connection.text_factory = DBConnection._unicode_text_factory
 
                 db_cons[self.filename] = self.connection
@@ -75,19 +77,46 @@ class DBConnection(object):
             # of the shared connection are done using
             # it... technically not required as row factory is reset
             # in all the public methods after the lock has been
-            # aquired
+            # acquired
             with db_locks[self.filename]:
                 self._set_row_factory()
 
         except OperationalError:
-            logger.log('Please check your database owner/permissions: {0}'.format(dbFilename(self.filename, self.suffix)), logger.WARNING)
+            # noinspection PyUnresolvedReferences
+            logger.log(_("Please check your database owner/permissions: {db_filename}").format(db_filename=self.full_path), logger.WARNING)
         except Exception as e:
-            logger.log("DB error: " + ex(e), logger.ERROR)
+            self._error_log_helper(e, logger.ERROR, locals(), None, 'DBConnection.__init__')
             raise
 
+    def _error_log_helper(self, exception, severity, local_variables, attempts, called_method):
+        if attempts in (0, self.MAX_ATTEMPTS):  # Only log the first try and the final failure
+            prefix = ("Database", "Fatal")[severity == logger.ERROR]
+            # noinspection PyUnresolvedReferences
+            logger.log(
+                _("{exception_severity} error executing query with {method} in database {db_location}: ").format(
+                    db_location=self.full_path, method=called_method, exception_severity=prefix
+                ) + ex(exception), severity
+            )
+
+            # Let's print out all of the arguments so we can debug this better
+            # noinspection PyUnresolvedReferences
+            logger.log(_("If this happened in cache.db, you can safely stop SickRage, and delete the cache.db file without losing any data"))
+            # noinspection PyUnresolvedReferences
+            logger.log(
+                _("Here are the arguments that were passed to this function (This is what the developers need to know): {local_variables:s}").format(
+                    local_variables=local_variables
+                )
+            )
+
+    @staticmethod
+    def _is_locked_or_denied(exception):
+        # noinspection PyUnresolvedReferences
+        return _("unable to open database file") in exception.args[0] or _("database is locked") in exception.args[0] or \
+                "unable to open database file" in exception.args[0] or "database is locked" in exception.args[0]
+
     def _set_row_factory(self):
         """
-        once lock is aquired we can configure the connection for
+        once lock is acquired we can configure the connection for
         this particular instance of DBConnection
         """
         if self.row_type == "dict":
@@ -119,52 +148,41 @@ class DBConnection(object):
         except Exception:
             raise
 
-    def checkDBVersion(self):
+    def check_db_version(self):
         """
         Fetch major database version
 
         :return: Integer indicating current DB major version
         """
-        if self.hasColumn('db_version', 'db_minor_version'):
-            warnings.warn('Deprecated: Use the version property', DeprecationWarning)
-        return self.check_db_major_version()
+        if self.has_column("db_version", "db_minor_version"):
+            warnings.warn("Deprecated: Use the version property", DeprecationWarning)
+        return self.get_db_major_version()
 
-    def check_db_major_version(self):
+    def get_db_major_version(self):
         """
         Fetch database version
 
-        :return: Integer inidicating current DB version
+        :return: Integer indicating current DB version
         """
-        result = None
-
+        # noinspection PyBroadException
         try:
-            if self.hasTable('db_version'):
-                result = self.select("SELECT db_version FROM db_version")
+            result = int(self.select_one("SELECT db_version FROM db_version")[0])
+            return result
         except Exception:
             return 0
 
-        if result:
-            return int(result[0][b"db_version"])
-        else:
-            return 0
-
-    def check_db_minor_version(self):
+    def get_db_minor_version(self):
         """
         Fetch database version
 
-        :return: Integer inidicating current DB major version
+        :return: Integer indicating current DB major version
         """
-        result = None
-
+        # noinspection PyBroadException
         try:
-            if self.hasColumn('db_version', 'db_minor_version'):
-                result = self.select("SELECT db_minor_version FROM db_version")
+            result = int(self.select_one("SELECT db_minor_version FROM db_version")[0])
+            return result
         except Exception:
-            return 0
-
-        if result:
-            return int(result[0][b"db_minor_version"])
-        else:
+            logger.log("Error getting the database minor version, returning 0", logger.WARNING)
             return 0
 
     @property
@@ -173,64 +191,65 @@ class DBConnection(object):
 
         :return: A tuple containing the major and minor versions
         """
-        return self.check_db_major_version(), self.check_db_minor_version()
+        # return tuple(self.select_one("SELECT * FROM db_version"))
+        return self.get_db_major_version(), self.get_db_minor_version()
 
-    def mass_action(self, querylist=None, logTransaction=False, fetchall=False):
+    def mass_action(self, query_list=None, log_transaction=False, fetchall=False):
         """
         Execute multiple queries
 
-        :param querylist: list of queries
-        :param logTransaction: Boolean to wrap all in one transaction
+        :param query_list: list of queries
+        :param log_transaction: Boolean to wrap all in one transaction
         :param fetchall: Boolean, when using a select query force returning all results
         :return: list of results
         """
 
-        assert hasattr(querylist, '__iter__'), 'You passed a non-iterable to mass_action: {0!r}'.format(querylist)
+        # noinspection PyUnresolvedReferences
+        assert hasattr(query_list, "__iter__"), _("You passed a non-iterable to mass_action: {0!r}").format(query_list)
 
         # remove None types
-        querylist = [i for i in querylist if i]
+        query_list = [i for i in query_list if i]
 
         sql_results = []
         attempt = 0
 
         with db_locks[self.filename]:
             self._set_row_factory()
-            while attempt < 5:
+            while attempt <= self.MAX_ATTEMPTS:
                 try:
-                    for qu in querylist:
+                    log_level = (logger.DB, logger.DEBUG)[log_transaction]
+                    for qu in query_list:
                         if len(qu) == 1:
-                            if logTransaction:
-                                logger.log(qu[0], logger.DEBUG)
+                            # noinspection PyUnresolvedReferences
+                            logger.log(_("{filename}: {query}").format(filename=self.filename, query=qu[0]), log_level)
                             sql_results.append(self._execute(qu[0], fetchall=fetchall))
                         elif len(qu) > 1:
-                            if logTransaction:
-                                logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
+                            # noinspection PyUnresolvedReferences
+                            logger.log(_("{filename}: {query} with args {args!s}").format(filename=self.filename, query=qu[0], args=qu[1]), log_level)
                             sql_results.append(self._execute(qu[0], qu[1], fetchall=fetchall))
                     self.connection.commit()
-                    logger.log("Transaction with " + str(len(querylist)) + " queries executed", logger.DEBUG)
+                    # noinspection PyUnresolvedReferences
+                    logger.log(_("Transaction with {count!s} of queries executed successfully").format(count=len(query_list)), log_level)
 
                     # finished
                     break
-                except sqlite3.OperationalError as e:
-                    sql_results = []
+                except (sqlite3.OperationalError, sqlite3.DatabaseError) as e:
+                    sql_results = []  # Reset results because of rollback
                     if self.connection:
                         self.connection.rollback()
-                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
-                        logger.log("DB error: " + ex(e), logger.WARNING)
-                        attempt += 1
-                        time.sleep(1)
-                    else:
-                        logger.log("DB error: " + ex(e), logger.ERROR)
+                    severity = (logger.ERROR, logger.WARNING)[self._is_locked_or_denied(e) and attempt < self.MAX_ATTEMPTS]
+                    self._error_log_helper(e, severity, locals(), attempt, "db.mass_action")
+                    if severity == logger.ERROR:
                         raise
-                except sqlite3.DatabaseError as e:
+                    time.sleep(1)
+                except Exception as e:
                     sql_results = []
                     if self.connection:
                         self.connection.rollback()
-                    logger.log("Fatal error executing query: " + ex(e), logger.ERROR)
+                    self._error_log_helper(e, logger.ERROR, locals(), attempt, "db.mass_action")
                     raise
 
-            # time.sleep(0.02)
-
+                attempt += 1
             return sql_results
 
     def action(self, query, args=None, fetchall=False, fetchone=False):
@@ -246,36 +265,44 @@ class DBConnection(object):
         if query is None:
             return
 
-        sql_results = None
+        # noinspection PyUnresolvedReferences
+        assert not (fetchall and fetchone), _("Cannot fetch all and only one at the same time!")
+
+        sql_results = []
         attempt = 0
 
         with db_locks[self.filename]:
             self._set_row_factory()
-            while attempt < 5:
+            while attempt <= self.MAX_ATTEMPTS:
                 try:
                     if args is None:
                         logger.log(self.filename + ": " + query, logger.DB)
                     else:
-                        logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB)
+                        logger.log("{filename}: {query} with args {args!s}".format(filename=self.filename, query=query, args=args), logger.DB)
 
                     sql_results = self._execute(query, args, fetchall=fetchall, fetchone=fetchone)
                     self.connection.commit()
 
                     # get out of the connection attempt loop since we were successful
                     break
-                except sqlite3.OperationalError as e:
-                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
-                        logger.log("DB error: " + ex(e), logger.WARNING)
-                        attempt += 1
-                        time.sleep(1)
-                    else:
-                        logger.log("DB error: " + ex(e), logger.ERROR)
+                except (sqlite3.OperationalError, sqlite3.DatabaseError) as e:
+                    sql_results = []  # Reset results because of rollback
+                    if self.connection:
+                        self.connection.rollback()
+
+                    severity = (logger.ERROR, logger.WARNING)[self._is_locked_or_denied(e) and attempt < self.MAX_ATTEMPTS]
+                    self._error_log_helper(e, severity, locals(), attempt, "db.action")
+                    if severity == logger.ERROR:
                         raise
-                except sqlite3.DatabaseError as e:
-                    logger.log("Fatal error executing query: " + ex(e), logger.ERROR)
+                    time.sleep(1)
+                except Exception as e:
+                    sql_results = []
+                    if self.connection:
+                        self.connection.rollback()
+                    self._error_log_helper(e, logger.ERROR, locals(), attempt, "db.action")
                     raise
 
-            # time.sleep(0.02)
+                attempt += 1
 
             return sql_results
 
@@ -295,7 +322,7 @@ class DBConnection(object):
 
         return sql_results
 
-    def selectOne(self, query, args=None):
+    def select_one(self, query, args=None):
         """
         Perform single select query on database, returning one result
 
@@ -310,42 +337,49 @@ class DBConnection(object):
 
         return sql_results
 
-    def upsert(self, tableName, valueDict, keyDict):
+    def upsert(self, table_name, value_dict, key_dict):
         """
         Update values, or if no updates done, insert values
         TODO: Make this return true/false on success/error
 
-        :param tableName: table to update/insert
-        :param valueDict: values in table to update/insert
-        :param keyDict:  columns in table to update/insert
+        :param table_name: table to update/insert
+        :param value_dict: values in table to update/insert
+        :param key_dict:  columns in table to update/insert
         """
 
         changesBefore = self.connection.total_changes
 
-        def genParams(my_dict): return [x + " = ?" for x in my_dict.keys()]
+        # noinspection PyUnresolvedReferences
+        assert None not in key_dict.values(), _("Control dict to upsert cannot have values of None!")
+        if key_dict:
+            def make_string(my_dict, separator):
+                return separator.join([x + " = ?" for x in my_dict.keys()])
 
-        query = "UPDATE [" + tableName + "] SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(
-            genParams(keyDict))
+            query = "UPDATE [{table}] SET {pairs} WHERE {control}".format(
+                table=table_name, pairs=make_string(value_dict, ", "), control=make_string(key_dict, " AND ")
+            )
 
-        self.action(query, list(valueDict.values()) + keyDict.values())
+            self.action(query, list(value_dict.values()) + list(key_dict.values()))
 
         if self.connection.total_changes == changesBefore:
-            query = "INSERT INTO [" + tableName + "] (" + ", ".join(list(valueDict.keys()) + keyDict.keys()) + ")" + \
-                    " VALUES (" + ", ".join(["?"] * len(list(valueDict.keys()) + keyDict.keys())) + ")"
-            self.action(query, list(valueDict.values()) + keyDict.values())
+            keys = list(value_dict.keys()) + list(key_dict.keys())
+            count = len(keys)
+            columns = ", ".join(keys)
+            replacements = ", ".join(["?"] * count)
+            values = list(value_dict.values()) + list(key_dict.values())
 
-    def tableInfo(self, tableName):
+            query = "INSERT INTO '{table}' ({columns}) VALUES ({replacements})".format(table=table_name, columns=columns, replacements=replacements)
+
+            self.action(query, values)
+
+    def table_info(self, table_name):
         """
         Return information on a database table
 
-        :param tableName: name of table
+        :param table_name: name of table
         :return: array of name/type info
         """
-        sql_results = self.select("PRAGMA table_info(`{0}`)".format(tableName))
-        columns = {}
-        for column in sql_results:
-            columns[column[b'name']] = {'type': column[b'type']}
-        return columns
+        return {column[b"name"]: {"type": column[b"type"]} for column in self.select("PRAGMA table_info(`{0}`)".format(table_name))}
 
     @staticmethod
     def _unicode_text_factory(x):
@@ -355,53 +389,51 @@ class DBConnection(object):
         :param x: text to parse
         :return: six.text_type result
         """
+        # noinspection PyBroadException
         try:
             # Just revert to the old code for now, until we can fix unicode
-            return six.text_type(x, 'utf-8')
+            return six.text_type(x, "utf-8")
         except Exception:
             return six.text_type(x, sickbeard.SYS_ENCODING, errors="ignore")
 
     @staticmethod
     def _dict_factory(cursor, row):
-        d = {}
-        for idx, col in enumerate(cursor.description):
-            d[col[0]] = row[idx]
-        return d
+        return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}
 
-    def hasTable(self, tableName):
+    def has_table(self, table_name):
         """
         Check if a table exists in database
 
-        :param tableName: table name to check
+        :param table_name: table name to check
         :return: True if table exists, False if it does not
         """
-        return len(self.select("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, ))) > 0
+        return len(self.select("SELECT 1 FROM sqlite_master WHERE name = ?;", (table_name, ))) > 0
 
-    def hasColumn(self, tableName, column):
+    def has_column(self, table_name, column):
         """
         Check if a table has a column
 
-        :param tableName: Table to check
+        :param table_name: Table to check
         :param column: Column to check for
         :return: True if column exists, False if it does not
         """
-        return column in self.tableInfo(tableName)
+        return column in self.table_info(table_name)
 
-    def addColumn(self, table, column, col_type="NUMERIC", default=0):
+    def add_column(self, table, column, col_type="NUMERIC", default=0):
         """
         Adds a column to a table, default column type is NUMERIC
         TODO: Make this return true/false on success/failure
 
         :param table: Table to add column too
         :param column: Column name to add
-        :param type: Column type to add
+        :param col_type: Column type to add
         :param default: Default value for column
         """
         self.action("ALTER TABLE [{0}] ADD {1} {2}".format(table, column, col_type))
         self.action("UPDATE [{0}] SET {1} = ?".format(table, column), (default,))
 
 
-def sanityCheckDatabase(connection, sanity_check):
+def sanity_check_database(connection, sanity_check):
     sanity_check(connection).check()
 
 
@@ -417,7 +449,7 @@ class DBSanityCheck(object):  # pylint: disable=too-few-public-methods
 # = Upgrade API =
 # ===============
 
-def upgradeDatabase(connection, schema):
+def upgrade_database(connection, schema):
     """
     Perform database upgrade and provide logging
 
@@ -425,14 +457,14 @@ def upgradeDatabase(connection, schema):
     :param schema: New schema to upgrade to
     """
     logger.log("Checking database structure..." + connection.filename, logger.DEBUG)
-    _processUpgrade(connection, schema)
+    _process_upgrade(connection, schema)
 
 
-def prettyName(class_name):
-    return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])
+def pretty_name(class_name):
+    return " ".join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])
 
 
-def restoreDatabase(version):
+def restore_database(version):
     """
     Restores a database to a previous version (backup file of version must still exist)
 
@@ -440,30 +472,30 @@ def restoreDatabase(version):
     :return: True if restore succeeds, False if it fails
     """
     logger.log("Restoring database before trying upgrade again")
-    if not sickbeard.helpers.restoreVersionedFile(dbFilename(suffix='v' + str(version)), version):
+    if not sickbeard.helpers.restoreVersionedFile(db_full_path(suffix="v" + str(version)), version):
         logger.log_error_and_exit("Database restore failed, abort upgrading database")
         return False
     else:
         return True
 
 
-def _processUpgrade(connection, upgradeClass):
-    instance = upgradeClass(connection)
-    # logger.log("Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
+def _process_upgrade(connection, upgrade_class):
+    instance = upgrade_class(connection)
+    # logger.log("Checking " + pretty_name(upgrade_class.__name__) + " database upgrade", logger.DEBUG)
     if not instance.test():
-        logger.log("Database upgrade required: " + prettyName(upgradeClass.__name__), logger.DEBUG)
+        logger.log("Database upgrade required: " + pretty_name(upgrade_class.__name__), logger.DEBUG)
         try:
             instance.execute()
         except Exception as e:
-            logger.log("Error in " + str(upgradeClass.__name__) + ": " + ex(e), logger.ERROR)
+            logger.log("Error in " + str(upgrade_class.__name__) + ": " + ex(e), logger.ERROR)
             raise
 
-        logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG)
+        logger.log(upgrade_class.__name__ + " upgrade completed", logger.DEBUG)
     # else:
-    #     logger.log(upgradeClass.__name__ + " upgrade not required", logger.DEBUG)
+    #     logger.log(upgrade_class.__name__ + " upgrade not required", logger.DEBUG)
 
-    for upgradeSubClass in upgradeClass.__subclasses__():
-        _processUpgrade(connection, upgradeSubClass)
+    for upgradeSubClass in upgrade_class.__subclasses__():
+        _process_upgrade(connection, upgradeSubClass)
 
 
 # Base migration class. All future DB changes should be subclassed from this class
@@ -471,20 +503,32 @@ class SchemaUpgrade(object):
     def __init__(self, connection):
         self.connection = connection
 
-    def hasTable(self, tableName):
-        return len(self.connection.select("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, ))) > 0
+    def has_table(self, table_name):
+        return self.connection.has_table(table_name=table_name)
 
-    def hasColumn(self, tableName, column):
-        return column in self.connection.tableInfo(tableName)
+    def has_column(self, table_name, column):
+        return self.connection.has_column(table_name=table_name, column=column)
 
-    def addColumn(self, table, column, col_type="NUMERIC", default=0):
-        self.connection.action("ALTER TABLE [{0}] ADD {1} {2}".format(table, column, col_type))
-        self.connection.action("UPDATE [{0}] SET {1} = ?".format(table, column), (default,))
+    def add_column(self, table, column, col_type="NUMERIC", default=0):
+        self.connection.add_column(table=table, column=column, col_type=col_type, default=default)
 
-    def checkDBVersion(self):
-        return self.connection.checkDBVersion()
+    def check_db_version(self):
+        return self.connection.check_db_version()
 
-    def incDBVersion(self):
-        new_version = self.checkDBVersion() + 1
+    def increment_db_version(self):
+        new_version = self.check_db_version() + 1
         self.connection.action("UPDATE db_version SET db_version = ?", [new_version])
         return new_version
+
+    def inc_major_version(self):
+        major_version, minor_version = self.connection.version
+        major_version += 1
+        minor_version = 0
+        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version])
+        return self.connection.version
+
+    def inc_minor_version(self):
+        major_version, minor_version = self.connection.version
+        minor_version += 1
+        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version])
+        return self.connection.version
diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py
index 46fffd3a08c871b9f65d38951fd23875cb4bd91b..cce513d875e20db24d34b3ce7a35cbd45f09c68b 100644
--- a/sickbeard/metadata/generic.py
+++ b/sickbeard/metadata/generic.py
@@ -255,7 +255,7 @@ class GenericMetadata(object):
 
     def create_episode_metadata(self, ep_obj):
         if self.episode_metadata and ep_obj and not self._has_episode_metadata(ep_obj):
-            logger.log("Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(),
+            logger.log("Metadata provider " + self.name + " creating episode metadata for " + ep_obj.pretty_name(),
                        logger.DEBUG)
             return self.write_ep_file(ep_obj)
         return False
@@ -315,7 +315,7 @@ class GenericMetadata(object):
 
     def create_episode_thumb(self, ep_obj):
         if self.episode_thumbnails and ep_obj and not self._has_episode_thumb(ep_obj):
-            logger.log("Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.prettyName(),
+            logger.log("Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.pretty_name(),
                        logger.DEBUG)
             return self.save_thumbnail(ep_obj)
         return False
diff --git a/sickbeard/metadata/wdtv.py b/sickbeard/metadata/wdtv.py
index a44f8086489454c20a2314741eacb44c900fd6ee..6e97c2b48cad617cc62f1e51bc91a9230f0d8ff2 100644
--- a/sickbeard/metadata/wdtv.py
+++ b/sickbeard/metadata/wdtv.py
@@ -235,7 +235,7 @@ class WDTVMetadata(generic.GenericMetadata):
             episodeID.text = str(curEpToWrite.indexerid)
 
             title = etree.SubElement(episode, "title")
-            title.text = ep_obj.prettyName()
+            title.text = ep_obj.pretty_name()
 
             if getattr(myShow, 'seriesname', None):
                 seriesName = etree.SubElement(episode, "series_name")
diff --git a/sickbeard/search.py b/sickbeard/search.py
index f10f243de8c1836f04c30966216d8f75c09846ac..ce38509ee8b48591e75376648ba9ee05014126e4 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -54,18 +54,18 @@ def _downloadResult(result):
     elif result.resultType == GenericProvider.NZBDATA:
 
         # get the final file path to the nzb
-        fileName = ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
+        file_name = ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
 
-        logger.log("Saving NZB to " + fileName)
+        logger.log("Saving NZB to " + file_name)
 
         newResult = True
 
         # save the data to disk
         try:
-            with ek(open, fileName, 'w') as fileOut:
+            with ek(open, file_name, 'w') as fileOut:
                 fileOut.write(result.extraInfo[0])
 
-            helpers.chmodAsParent(fileName)
+            helpers.chmodAsParent(file_name)
 
         except EnvironmentError as e:
             logger.log("Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
@@ -393,14 +393,14 @@ def searchForNeededEpisodes():
         # pick a single result for each episode, respecting existing results
         for curEp in curFoundResults:
             if not curEp.show or curEp.show.paused:
-                logger.log("Skipping {0} because the show is paused ".format(curEp.prettyName()), logger.DEBUG)
+                logger.log("Skipping {0} because the show is paused ".format(curEp.pretty_name()), logger.DEBUG)
                 continue
 
             bestResult = pickBestResult(curFoundResults[curEp], curEp.show)
 
             # if all results were rejected move on to the next episode
             if not bestResult:
-                logger.log("All found results for " + curEp.prettyName() + " were rejected.", logger.DEBUG)
+                logger.log("All found results for " + curEp.pretty_name() + " were rejected.", logger.DEBUG)
                 continue
 
             # if it's already in the list (from another provider) and the newly found quality is no better then skip it
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 332c007c9bc09a9a9a892b424bcc065153e2ba49..2878cc6f48738541b06ed2633eb1b560670f0c21 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -171,7 +171,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
         super(ManualSearchQueueItem, self).run()
 
         try:
-            logger.log("Beginning manual search for: [" + self.segment.prettyName() + "]")
+            logger.log("Beginning manual search for: [" + self.segment.pretty_name() + "]")
             self.started = True
 
             searchResult = search.searchProviders(self.show, [self.segment], True, self.downCurQuality)
@@ -186,9 +186,9 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
 
             else:
                 ui.notifications.message('No downloads were found',
-                                         "Couldn't find a download for <i>{0}</i>".format(self.segment.prettyName()))
+                                         "Couldn't find a download for <i>{0}</i>".format(self.segment.pretty_name()))
 
-                logger.log("Unable to find a download for: [" + self.segment.prettyName() + "]")
+                logger.log("Unable to find a download for: [" + self.segment.pretty_name() + "]")
 
         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
@@ -255,7 +255,7 @@ class FailedQueueItem(generic_queue.QueueItem):
         try:
             for epObj in self.segment:
 
-                logger.log("Marking episode as bad: [" + epObj.prettyName() + "]")
+                logger.log("Marking episode as bad: [" + epObj.pretty_name() + "]")
 
                 failed_history.markFailed(epObj)
 
@@ -265,7 +265,7 @@ class FailedQueueItem(generic_queue.QueueItem):
                     history.logFailed(epObj, release, provider)
 
                 failed_history.revertEpisode(epObj)
-                logger.log("Beginning failed download search for: [" + epObj.prettyName() + "]")
+                logger.log("Beginning failed download search for: [" + epObj.pretty_name() + "]")
 
             # If it is wanted, self.downCurQuality doesnt matter
             # if it isnt wanted, we need to make sure to not overwrite the existing ep that we reverted to!
@@ -281,7 +281,7 @@ class FailedQueueItem(generic_queue.QueueItem):
                     time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
             else:
                 pass
-                # logger.log(u"No valid episode found to retry for: [" + self.segment.prettyName() + "]")
+                # logger.log(u"No valid episode found to retry for: [" + self.segment.pretty_name() + "]")
         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
 
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index fd7afbe0ac78558b547ee2b9c811cd35619de0ad..e7ada10653e64734251ed62ee855c7ae82248ff3 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1174,11 +1174,8 @@ class TVShow(object):  # pylint: disable=too-many-instance-attributes, too-many-
         helpers.update_anime_support()
 
         if self.imdbid:
-            controlValueDict = {"indexer_id": self.indexerid}
-            newValueDict = self.imdb_info
-
             main_db_con = db.DBConnection()
-            main_db_con.upsert("imdb_info", newValueDict, controlValueDict)
+            main_db_con.upsert("imdb_info", self.imdb_info, {"indexer_id": self.indexerid})
 
     def __str__(self):
         toReturn = ""
@@ -1439,7 +1436,7 @@ class TVEpisode(object):  # pylint: disable=too-many-instance-attributes, too-ma
                        (id=self.show.indexerid, subtitles=subtitle_list, show=self.show.name,
                         ep=episode_num(self.season, self.episode)), logger.DEBUG)
 
-            notifiers.notify_subtitle_download(self.prettyName(), subtitle_list)
+            notifiers.notify_subtitle_download(self.pretty_name(), subtitle_list)
         else:
             logger.log("{id}: No subtitles downloaded for {show} {ep}".format
                        (id=self.show.indexerid, show=self.show.name,
@@ -2005,7 +2002,7 @@ class TVEpisode(object):  # pylint: disable=too-many-instance-attributes, too-ma
             return strings
         return self._format_pattern(pattern)
 
-    def prettyName(self):
+    def pretty_name(self):
         """
         Returns the name of this episode in a "pretty" human-readable format. Used for logging
         and notifications and such.
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 71c1c8d88f5c1d53fa091762afded88e751bc307..f23f3e0f6ec7eb253163b950209659ad3374b73b 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -39,7 +39,7 @@ class CacheDBConnection(db.DBConnection):
 
         # Create the table if it's not already there
         try:
-            if not self.hasTable(provider_name):
+            if not self.has_table(provider_name):
                 self.action(
                     "CREATE TABLE [" + provider_name + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT, release_group TEXT)")
             else:
@@ -52,12 +52,12 @@ class CacheDBConnection(db.DBConnection):
             self.action("CREATE UNIQUE INDEX IF NOT EXISTS idx_url ON [" + provider_name + "] (url)")
 
             # add release_group column to table if missing
-            if not self.hasColumn(provider_name, 'release_group'):
-                self.addColumn(provider_name, 'release_group', "TEXT", "")
+            if not self.has_column(provider_name, 'release_group'):
+                self.add_column(provider_name, 'release_group', "TEXT", "")
 
             # add version column to table if missing
-            if not self.hasColumn(provider_name, 'version'):
-                self.addColumn(provider_name, 'version', "NUMERIC", "-1")
+            if not self.has_column(provider_name, 'version'):
+                self.add_column(provider_name, 'version', "NUMERIC", "-1")
 
         except Exception as e:
             if str(e) != "table [" + provider_name + "] already exists":
@@ -65,7 +65,7 @@ class CacheDBConnection(db.DBConnection):
 
         # Create the table if it's not already there
         try:
-            if not self.hasTable('lastUpdate'):
+            if not self.has_table('lastUpdate'):
                 self.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")
         except Exception as e:
             if str(e) != "table lastUpdate already exists":
@@ -196,7 +196,7 @@ class TVCache(object):
     def set_last_update(self, to_date=None):
         """
         Sets the last update date for the current provider in the cache database
-        
+
         :param to_date: date to set to, or None for today
         """
         if not to_date:
diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py
index 7e2ac353894248d4b5efdb87a99b79623b3492a9..0236f174e07f0d04ed61def50685126ff990244c 100644
--- a/sickbeard/versionChecker.py
+++ b/sickbeard/versionChecker.py
@@ -218,7 +218,7 @@ class CheckVersion(object):
             match = re.search(r"MAX_DB_VERSION\s=\s(?P<version>\d{2,3})", response)
             branchDestDBversion = int(match.group('version'))
             main_db_con = db.DBConnection()
-            branchCurrDBversion = main_db_con.checkDBVersion()
+            branchCurrDBversion = main_db_con.check_db_version()
             if branchDestDBversion > branchCurrDBversion:
                 return 'upgrade'
             elif branchDestDBversion == branchCurrDBversion:
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 6e60df4d99b6eb68f05c2cd0947ec2eaf9789da2..79d16a45ff045f7696270101fcfabf0fa37cb879 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -1518,9 +1518,8 @@ class Home(WebRoot):
     @staticmethod
     def plotDetails(show, season, episode):
         main_db_con = db.DBConnection()
-        result = main_db_con.selectOne(
-            "SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
-            (int(show), int(season), int(episode)))
+        result = main_db_con.select_one(
+            "SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", (int(show), int(season), int(episode)))
         return result[b'description'] if result else 'Episode not found.'
 
     @staticmethod
diff --git a/tests/test_lib.py b/tests/test_lib.py
index c2fc37e31006a4c956071151f29c986573aabdd3..769b64cb7629cf5edce8cf702ca8fbba1b6ced6a 100644
--- a/tests/test_lib.py
+++ b/tests/test_lib.py
@@ -257,7 +257,7 @@ class TestCacheDBConnection(TestDBConnection, object):
 
         # Create the table if it's not already there
         try:
-            if not self.hasTable(provider_name):
+            if not self.has_table(provider_name):
                 sql = "CREATE TABLE [" + provider_name + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT, release_group TEXT)"
                 self.connection.execute(sql)
                 self.connection.commit()
@@ -268,8 +268,8 @@ class TestCacheDBConnection(TestDBConnection, object):
                 raise
 
             # add version column to table if missing
-            if not self.hasColumn(provider_name, 'version'):
-                self.addColumn(provider_name, 'version', "NUMERIC", "-1")
+            if not self.has_column(provider_name, 'version'):
+                self.add_column(provider_name, 'version', "NUMERIC", "-1")
 
         # Create the table if it's not already there
         try:
@@ -296,16 +296,16 @@ def setup_test_db():
     """
     # Upgrade the db to the latest version.
     # upgrading the db
-    db.upgradeDatabase(db.DBConnection(), mainDB.InitialSchema)
+    db.upgrade_database(db.DBConnection(), mainDB.InitialSchema)
 
     # fix up any db problems
-    db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck)
+    db.sanity_check_database(db.DBConnection(), mainDB.MainSanityCheck)
 
     # and for cache.db too
-    db.upgradeDatabase(db.DBConnection('cache.db'), cache_db.InitialSchema)
+    db.upgrade_database(db.DBConnection('cache.db'), cache_db.InitialSchema)
 
     # and for failed.db too
-    db.upgradeDatabase(db.DBConnection('failed.db'), failed_db.InitialSchema)
+    db.upgrade_database(db.DBConnection('failed.db'), failed_db.InitialSchema)
 
 
 def teardown_test_db():