diff --git a/gui/slick/images/network/canal d.png b/gui/slick/images/network/canal d.png
new file mode 100644
index 0000000000000000000000000000000000000000..7b1b67442f75f02c8632d5c4b330e2e325d2f609
Binary files /dev/null and b/gui/slick/images/network/canal d.png differ
diff --git a/gui/slick/images/network/tv5 monde.png b/gui/slick/images/network/tv5 monde.png
new file mode 100644
index 0000000000000000000000000000000000000000..4c16bda2f8b9221f95b6caf4d2133c833a1e18a7
Binary files /dev/null and b/gui/slick/images/network/tv5 monde.png differ
diff --git a/gui/slick/images/network/tv5.png b/gui/slick/images/network/tv5.png
new file mode 100644
index 0000000000000000000000000000000000000000..e1ddcdac75ca6320067f4fb97a194d58b0810319
Binary files /dev/null and b/gui/slick/images/network/tv5.png differ
diff --git a/gui/slick/images/network/w network.png b/gui/slick/images/network/w network.png
new file mode 100644
index 0000000000000000000000000000000000000000..2cb76544f34bd3c0d0e4aeafde4a8b2d8e997e5c
Binary files /dev/null and b/gui/slick/images/network/w network.png differ
diff --git a/gui/slick/views/config_providers.mako b/gui/slick/views/config_providers.mako
index 25186301c5f5a343a82858e879e52d3637461f22..dd7be25cf69191ad1a1137027e9766dabec5490d 100644
--- a/gui/slick/views/config_providers.mako
+++ b/gui/slick/views/config_providers.mako
@@ -165,11 +165,11 @@ $('#config-components').tabs();
                         % endif
 
                         % if hasattr(curNewznabProvider, 'enable_backlog'):
-                        <div class="field-pair">
+                        <div class="field-pair${(' hidden', '')[curNewznabProvider.supportsBacklog]}">
                             <label for="${curNewznabProvider.getID()}_enable_backlog">
                                 <span class="component-title">Enable backlog searches</span>
                                 <span class="component-desc">
-                                    <input type="checkbox" name="${curNewznabProvider.getID()}_enable_backlog" id="${curNewznabProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curNewznabProvider.enable_backlog)]}/>
+                                    <input type="checkbox" name="${curNewznabProvider.getID()}_enable_backlog" id="${curNewznabProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curNewznabProvider.enable_backlog and curNewznabProvider.supportsBacklog)]}/>
                                     <p>enable provider to perform backlog searches.</p>
                                 </span>
                             </label>
@@ -252,11 +252,11 @@ $('#config-components').tabs();
                         % endif
 
                         % if hasattr(curNzbProvider, 'enable_backlog'):
-                        <div class="field-pair">
+                        <div class="field-pair${(' hidden', '')[curNzbProvider.supportsBacklog]}">
                             <label for="${curNzbProvider.getID()}_enable_backlog">
                                 <span class="component-title">Enable backlog searches</span>
                                 <span class="component-desc">
-                                    <input type="checkbox" name="${curNzbProvider.getID()}_enable_backlog" id="${curNzbProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curNzbProvider.enable_backlog)]}/>
+                                    <input type="checkbox" name="${curNzbProvider.getID()}_enable_backlog" id="${curNzbProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curNzbProvider.enable_backlog and curNzbProvider.supportsBacklog)]}/>
                                     <p>enable provider to perform backlog searches.</p>
                                 </span>
                             </label>
@@ -511,11 +511,11 @@ $('#config-components').tabs();
                         % endif
 
                         % if hasattr(curTorrentProvider, 'enable_backlog'):
-                        <div class="field-pair">
+                        <div class="field-pair${(' hidden', '')[curTorrentProvider.supportsBacklog]}">
                             <label for="${curTorrentProvider.getID()}_enable_backlog">
                                 <span class="component-title">Enable backlog searches</span>
                                 <span class="component-desc">
-                                    <input type="checkbox" name="${curTorrentProvider.getID()}_enable_backlog" id="${curTorrentProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curTorrentProvider.enable_backlog)]}/>
+                                    <input type="checkbox" name="${curTorrentProvider.getID()}_enable_backlog" id="${curTorrentProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curTorrentProvider.enable_backlog and curTorrentProvider.supportsBacklog)]}/>
                                     <p>enable provider to perform backlog searches.</p>
                                 </span>
                             </label>
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 58651edfb9206749b97f528a7473e8e3a60fdff2..e5070d571b5aac1afdce6e1610710b301a59abf8 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -1182,7 +1182,7 @@ def initialize(consoleLogging=True):
         DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
         TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
         TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")
-        TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'network')
+        TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'local')
         POSTER_SORTBY = check_setting_str(CFG, 'GUI', 'poster_sortby', 'name')
         POSTER_SORTDIR = check_setting_int(CFG, 'GUI', 'poster_sortdir', 1)
         FILTER_ROW =  bool(check_setting_int(CFG, 'GUI', 'filter_row', 1))
@@ -1228,10 +1228,10 @@ def initialize(consoleLogging=True):
                                                                      curTorrentProvider.getID() + '_proxy_url', '')
             if hasattr(curTorrentProvider, 'confirmed'):
                 curTorrentProvider.confirmed = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                                      curTorrentProvider.getID() + '_confirmed', 0))
+                                                                      curTorrentProvider.getID() + '_confirmed', 1))
             if hasattr(curTorrentProvider, 'ranked'):
                 curTorrentProvider.ranked = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                                      curTorrentProvider.getID() + '_ranked', 0))
+                                                                      curTorrentProvider.getID() + '_ranked', 1))
 
             if hasattr(curTorrentProvider, 'engrelease'):
                 curTorrentProvider.engrelease = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
@@ -1248,7 +1248,7 @@ def initialize(consoleLogging=True):
                                                              curTorrentProvider.getID() + '_ratio', '')
             if hasattr(curTorrentProvider, 'minseed'):
                 curTorrentProvider.minseed = check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                               curTorrentProvider.getID() + '_minseed', 0)
+                                                               curTorrentProvider.getID() + '_minseed', 1)
             if hasattr(curTorrentProvider, 'minleech'):
                 curTorrentProvider.minleech = check_setting_int(CFG, curTorrentProvider.getID().upper(),
                                                                 curTorrentProvider.getID() + '_minleech', 0)
@@ -1272,7 +1272,7 @@ def initialize(consoleLogging=True):
             if hasattr(curTorrentProvider, 'enable_backlog'):
                 curTorrentProvider.enable_backlog = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
                                                                            curTorrentProvider.getID() + '_enable_backlog',
-                                                                           1))
+                                                                           curTorrentProvider.supportsBacklog))
 
             if hasattr(curTorrentProvider, 'cat'):
                 curTorrentProvider.cat = check_setting_int(CFG, curTorrentProvider.getID().upper(),
@@ -1307,7 +1307,7 @@ def initialize(consoleLogging=True):
             if hasattr(curNzbProvider, 'enable_backlog'):
                 curNzbProvider.enable_backlog = bool(check_setting_int(CFG, curNzbProvider.getID().upper(),
                                                                        curNzbProvider.getID() + '_enable_backlog',
-                                                                       1))
+                                                                       curNzbProvider.supportsBacklog))
 
         if not os.path.isfile(CONFIG_FILE):
             logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG)
diff --git a/sickbeard/clients/generic.py b/sickbeard/clients/generic.py
index 21e42369e95e9b46cd5762b38d4dfe73a0c11fb3..18c0669006eb26399c1d17d75ce2b88b72b9bb19 100644
--- a/sickbeard/clients/generic.py
+++ b/sickbeard/clients/generic.py
@@ -44,7 +44,7 @@ class GenericClient(object):
             logger.DEBUG)
 
         if not self.auth:
-            logger.log(self.name + u': Authentication Failed', logger.ERROR)
+            logger.log(self.name + u': Authentication Failed', logger.WARNING)
             return False
         try:
             self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 22ea83e07d40830d421c45a4115a701ad8ab856e..238b1cd1f42231a7f85c2b50a408fb8a1f365eb3 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -1624,7 +1624,7 @@ def getURL(url, post_data=None, params={}, headers={}, timeout=30, session=None,
                     logger.DEBUG)
                     return None
 
-    except SocketTimeout:
+    except (SocketTimeout, TypeError) as e:
         logger.log(u"Connection timed out (sockets) accessing getURL %s Error: %r" % (url, ex(e)), logger.WARNING)
         return None
     except requests.exceptions.HTTPError as e:
@@ -1684,6 +1684,9 @@ def download_file(url, filename, session=None, headers={}):
             except Exception:
                 logger.log(u"Problem setting permissions or writing file to: %s" % filename, logger.WARNING)
 
+    except (SocketTimeout, TypeError) as e:
+        logger.log(u"Connection timed out (sockets) while loading download URL %s Error: %r" % (url, ex(e)), logger.WARNING)
+        return None
     except requests.exceptions.HTTPError as e:
         _remove_file_failed(filename)
         logger.log(u"HTTP error %r while loading download URL %s " % (ex(e), url ), logger.WARNING)
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index b8410174eeb76faeff0a7605d7ccdc3e7f09dfcf..33889a135ba9e0f7f5f86373383bbf9c65acce81 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -850,16 +850,16 @@ class PostProcessor(object):
         self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")
 
         if ek(os.path.isdir, self.file_path):
-            self._log(u"File " + self.file_path + " seems to be a directory")
+            self._log(u"File %s seems to be a directory" % self.file_path)
             return False
 
         if not ek(os.path.exists, self.file_path):
-            self._log(u"File " + self.file_path + " doesn't exist, did unrar fail?")
+            self._log(u"File %s doesn't exist, did unrar fail?" % self.file_path)
             return False
 
         for ignore_file in self.IGNORED_FILESTRINGS:
             if ignore_file in self.file_path:
-                self._log(u"File " + self.file_path + " is ignored type, skipping")
+                self._log(u"File %s is ignored type, skipping" % self.file_path)
                 return False
 
         # reset per-file stuff
@@ -871,12 +871,10 @@ class PostProcessor(object):
         # try to find the file info
         (show, season, episodes, quality, version) = self._find_info()
         if not show:
-            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
-                      logger.WARNING)
+            self._log(u"This show isn't in your list, you need to add it to SR before post-processing an episode")
             raise EpisodePostProcessingFailedException()
         elif season == None or not episodes:
-            self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
-            self._log(u"Quitting post-processing", logger.DEBUG)
+            self._log(u"Not enough information to determine what episode this is. Quitting post-processing")
             return False
 
         # retrieve/create the corresponding TVEpisode objects
@@ -891,7 +889,7 @@ class PostProcessor(object):
         else:
             new_ep_quality = self._get_quality(ep_obj)
 
-        logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)
+        logger.log(u"Quality of the episode we're processing: %s" % new_ep_quality, logger.DEBUG)
 
         # see if this is a priority download (is it snatched, in history, PROPER, or BEST)
         priority_download = self._is_priority(ep_obj, new_ep_quality)
@@ -913,35 +911,33 @@ class PostProcessor(object):
 
             # Not a priority and the quality is lower than what we already have
             if (new_ep_quality < old_ep_quality and new_ep_quality != common.Quality.UNKNOWN) and not existing_file_status == PostProcessor.DOESNT_EXIST:
-                self._log(u"File exists and new file quality is lower than existing, marking it unsafe to replace", logger.DEBUG)
+                self._log(u"File exists and new file quality is lower than existing, marking it unsafe to replace")
                 return False
 
             # if there's an existing file that we don't want to replace stop here
             if existing_file_status == PostProcessor.EXISTS_LARGER:
                 if self.is_proper:
                     self._log(
-                        u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace",
-                        logger.DEBUG)
+                        u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace")
                     return True
 
                 else:
-                    self._log(u"File exists and new file is smaller, marking it unsafe to replace", logger.DEBUG)
+                    self._log(u"File exists and new file is smaller, marking it unsafe to replace")
                     return False
 
             elif existing_file_status == PostProcessor.EXISTS_SAME:
-                self._log(u"File exists and new file is same size, marking it unsafe to replace", logger.DEBUG)
+                self._log(u"File exists and new file is same size, marking it unsafe to replace")
                 return False
 
         # if the file is priority then we're going to replace it even if it exists
         else:
             self._log(
-                u"This download is marked a priority download so I'm going to replace an existing file if I find one",
-                logger.DEBUG)
+                u"This download is marked a priority download so I'm going to replace an existing file if I find one")
 
         # try to find out if we have enough space to perform the copy or move action.
         if not helpers.isFileLocked(self.file_path, False):
             if not verify_freespace(self.file_path, ep_obj.show._location, [ep_obj] + ep_obj.relatedEps):
-                self._log("Not enough space to continue PP, exiting")
+                self._log("Not enough space to continue PP, exiting", logger.WARNING)
                 return False
         else:
             self._log("Unable to determine needed filespace as the source file is locked for access")
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 4dec21de9a1d9ca47295b0515dca85425b0494f8..1a42b34fca8e740f2195d27b09ede550ee7bdf8a 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -212,6 +212,11 @@ class GenericProvider:
                 self.headers.update({'Referer': '/'.join(url.split('/')[:3]) + '/'})
 
             logger.log(u"Downloading a result from " + self.name + " at " + url)
+
+            # Support for Jackett/Torznab: torrent results may arrive named like NZBs; fix the extension
+            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
+                filename = filename.rsplit('.', 1)[0] + '.' + GenericProvider.TORRENT
+
             if helpers.download_file(self.proxy._buildURL(url), filename, session=self.session, headers=self.headers):
                 if self._verify_download(filename):
                     logger.log(u"Saved result to " + filename, logger.INFO)
@@ -231,7 +236,7 @@ class GenericProvider:
         """
 
         # primitive verification of torrents, just make sure we didn't get a text file or something
-        if self.providerType == GenericProvider.TORRENT:
+        if file_name.endswith(GenericProvider.TORRENT):
             try:
                 parser = createParser(file_name)
                 if parser:
diff --git a/sickbeard/search.py b/sickbeard/search.py
index 090c8be24b6ef0a0edd1dbc3a49db054ddd710e7..15112ebd7f6c67d805315a55666174e010415427 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -27,7 +27,7 @@ import traceback
 
 import sickbeard
 
-from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT
+from sickbeard.common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT
 
 from sickbeard import logger, db, show_name_helpers, helpers
 from sickbeard import sab
@@ -107,9 +107,12 @@ def snatchEpisode(result, endStatus=SNATCHED):
         for curEp in result.episodes:
             if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                 result.priority = 1
-    if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
+    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
         endStatus = SNATCHED_PROPER
 
+    if result.url.startswith('magnet') or result.url.endswith('torrent'):
+        result.resultType = 'torrent'
+
     # NZBs can be sent straight to SAB or saved to disk
     if result.resultType in ("nzb", "nzbdata"):
         if sickbeard.NZB_METHOD == "blackhole":
@@ -328,7 +331,7 @@ def wantedEpisodes(show, fromDate):
     myDB = db.DBConnection()
 
     sqlResults = myDB.select("SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
-            [show.indexerid, fromDate.toordinal()])
+                             [show.indexerid, fromDate.toordinal()])
 
     # check through the list of statuses to see if we want any
     wanted = []
@@ -476,7 +479,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
         if search_mode == 'sponly' and manualSearch == True:
             search_mode = 'eponly'
 
-        while(True):
+        while True:
             searchCount += 1
 
             if search_mode == 'eponly':
diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py
index 70afa1feca5d91bce5688cd88b12fb8c28bacbbb..de96406bab879e9ac409d892f0e1dfe83cd02188 100644
--- a/sickbeard/subtitles.py
+++ b/sickbeard/subtitles.py
@@ -115,7 +115,7 @@ def downloadSubtitles(subtitles_info):
     languages = getNeededLanguages(existing_subtitles)
     if not languages:
         logger.log(u'%s: No missing subtitles for S%02dE%02d' % (subtitles_info['show.indexerid'], subtitles_info['season'], subtitles_info['episode']), logger.DEBUG)
-        return existing_subtitles, None
+        return (existing_subtitles, None)
 
     subtitles_path = getSubtitlesPath(subtitles_info['location']).encode(sickbeard.SYS_ENCODING)
     video_path = subtitles_info['location'].encode(sickbeard.SYS_ENCODING)
@@ -126,14 +126,14 @@ def downloadSubtitles(subtitles_info):
     except Exception:
         logger.log(u'%s: Exception caught in subliminal.scan_video for S%02dE%02d' %
         (subtitles_info['show.indexerid'], subtitles_info['season'], subtitles_info['episode']), logger.DEBUG)
-        return
+        return (existing_subtitles, None)
 
     try:
         # TODO: Add gui option for hearing_impaired parameter ?
         found_subtitles = subliminal.download_best_subtitles([video], languages=languages, hearing_impaired=False, only_one=not sickbeard.SUBTITLES_MULTI, providers=providers)
         if not found_subtitles:
             logger.log(u'%s: No subtitles found for S%02dE%02d on any provider' % (subtitles_info['show.indexerid'], subtitles_info['season'], subtitles_info['episode']), logger.DEBUG)
-            return
+            return (existing_subtitles, None)
 
         subliminal.save_subtitles(video, found_subtitles[video], directory=subtitles_path, single=not sickbeard.SUBTITLES_MULTI)
 
@@ -153,7 +153,7 @@ def downloadSubtitles(subtitles_info):
     except Exception as e:
                 logger.log("Error occurred when downloading subtitles for: %s" % video_path)
                 logger.log(traceback.format_exc(), logger.ERROR)
-                return
+                return (existing_subtitles, None)
 
     if sickbeard.SUBTITLES_HISTORY:
         for video, subtitles in found_subtitles.iteritems():
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 66d9d600798aefea8925991c099040144d3601ae..a7b90369a1749ab8e569dea64ecaeec3baa1d350 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -3686,7 +3686,10 @@ class ConfigGeneral(Config):
             sickbeard.TIME_PRESET_W_SECONDS = time_preset
             sickbeard.TIME_PRESET = sickbeard.TIME_PRESET_W_SECONDS.replace(u":%S", u"")
 
-        sickbeard.TIMEZONE_DISPLAY = timezone_display
+        # Force all users to display times in the local timezone, ignoring the submitted preference
+        #sickbeard.TIMEZONE_DISPLAY = timezone_display
+        sickbeard.TIMEZONE_DISPLAY = 'local'
+
 
         if not config.change_LOG_DIR(log_dir, web_log):
             results += ["Unable to create directory " + os.path.normpath(log_dir) + ", log directory not changed."]