diff --git a/TODO.txt b/TODO.txt
index b18ae2199fbad4053a364723d07e6ab03f2f09ec..084a6bcc9237e271abd4249a00a59e673fd35d01 100644
--- a/TODO.txt
+++ b/TODO.txt
@@ -11,9 +11,6 @@
 -Add 'add show and add another show' button to add show page
 -Change the hardcoded global ignore words to optional
 
-2014-10-08
-VAdd login page for http auth as opposed to browser dialog box
-
 2014-10-13
 -Fix broken backlog
 -Fix season searches
diff --git a/gui/slick/images/donate.jpg b/gui/slick/images/donate.jpg
index 67b8c319a2b1c6b1b95380f0b9d49e16e04e6b2a..7c4c7d7268b20bd66a2ce5e3910fd893b9d9e0de 100644
Binary files a/gui/slick/images/donate.jpg and b/gui/slick/images/donate.jpg differ
diff --git a/gui/slick/images/network/kanal 5.png b/gui/slick/images/network/kanal 5.png
new file mode 100644
index 0000000000000000000000000000000000000000..389e6372b45738a2e52e2c85ba73746f0b0d2f96
Binary files /dev/null and b/gui/slick/images/network/kanal 5.png differ
diff --git a/gui/slick/images/network/lifestyle.png b/gui/slick/images/network/lifestyle.png
new file mode 100644
index 0000000000000000000000000000000000000000..4ff7cd90d1eb9e97cafc96a55308d8fecb69c202
Binary files /dev/null and b/gui/slick/images/network/lifestyle.png differ
diff --git a/gui/slick/images/network/logo.png b/gui/slick/images/network/logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..4398a439eec3f9a7b5a19f147e80a3d832e506d1
Binary files /dev/null and b/gui/slick/images/network/logo.png differ
diff --git a/gui/slick/images/network/mtv (uk).png b/gui/slick/images/network/mtv (uk).png
new file mode 100644
index 0000000000000000000000000000000000000000..0c36f2e1ca96d36b49218a319f46b9c0e782d95d
Binary files /dev/null and b/gui/slick/images/network/mtv (uk).png differ
diff --git a/gui/slick/images/network/no network.png b/gui/slick/images/network/no network.png
new file mode 100644
index 0000000000000000000000000000000000000000..4398a439eec3f9a7b5a19f147e80a3d832e506d1
Binary files /dev/null and b/gui/slick/images/network/no network.png differ
diff --git a/gui/slick/images/network/prime (nz).png b/gui/slick/images/network/prime (nz).png
new file mode 100644
index 0000000000000000000000000000000000000000..2ad936305b788d0e38561f44c6e98f825bcca78e
Binary files /dev/null and b/gui/slick/images/network/prime (nz).png differ
diff --git a/gui/slick/images/providers/xthor.png b/gui/slick/images/providers/xthor.png
new file mode 100644
index 0000000000000000000000000000000000000000..9821afb57e3e899d25cc07d7779bd8e2b954ee7d
Binary files /dev/null and b/gui/slick/images/providers/xthor.png differ
diff --git a/gui/slick/interfaces/default/IRC.tmpl b/gui/slick/interfaces/default/IRC.tmpl
index 528f5822f3477f5543dacb24ee9aad20c12daaa0..cd38b9b498da6b0ebddf5e92bf0bc6472d89b28b 100644
--- a/gui/slick/interfaces/default/IRC.tmpl
+++ b/gui/slick/interfaces/default/IRC.tmpl
@@ -4,6 +4,12 @@
 #import os.path
 #include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl")
 
-<iframe id="extFrame" src="https://kiwiirc.com/client/irc.freenode.net/?nick=srforums|?&theme=basic#sickrage" width="100%" height="500" frameBorder="0" style="border: 1px black solid;"></iframe>
+#if $sickbeard.GIT_USERNAME
+#set $username = $sickbeard.GIT_USERNAME
+#else
+#set $username = "SickRageUI|?"
+#end if
+
+<iframe id="extFrame" src="https://kiwiirc.com/client/irc.freenode.net/?nick=$username&theme=basic#sickrage" width="100%" height="500" frameBorder="0" style="border: 1px black solid;"></iframe>
 
 #include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_bottom.tmpl")
diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl
index 792c9856be01cd3bffb291728fce373d38830997..fda5dfd1eccbeafc1f4aca5709e00005ac651995 100644
--- a/gui/slick/interfaces/default/config_general.tmpl
+++ b/gui/slick/interfaces/default/config_general.tmpl
@@ -93,7 +93,21 @@
 								</span>
 							</label>
 						</div>
-
+						<div class="field-pair">
+							<label for="default_page">
+								<span class="component-title">Initial page</span>
+								<span class="component-desc">
+									<select id="default_page" name="default_page" class="form-control input-sm">
+										<option value="news" #if $sickbeard.DEFAULT_PAGE == 'news' then 'selected="selected"' else ''#>News</option>
+										<option value="home" #if $sickbeard.DEFAULT_PAGE == 'home' then 'selected="selected"' else ''#>Home</option>
+										<option value="comingEpisodes" #if $sickbeard.DEFAULT_PAGE == 'comingEpisodes' then 'selected="selected"' else ''#>Coming Episodes</option>
+										<option value="history" #if $sickbeard.DEFAULT_PAGE == 'history' then 'selected="selected"' else ''#>History</option>
+										<option value="IRC" #if $sickbeard.DEFAULT_PAGE == 'IRC' then 'selected="selected"' else ''#>IRC</option>
+									</select>
+									<span>when launching SickRage interface</span>
+								</span>
+							</label>
+						</div>
 						<div class="field-pair">
 							<label for="showupdate_hour">
 								<span class="component-title">When to update shows</span>
diff --git a/gui/slick/interfaces/default/manage_subtitleMissed.tmpl b/gui/slick/interfaces/default/manage_subtitleMissed.tmpl
index f5962e1db98d2a576aaa77fa37cc6eba1707eb09..e3cf6050c6e01323d14b7e02cc08369488149cec 100644
--- a/gui/slick/interfaces/default/manage_subtitleMissed.tmpl
+++ b/gui/slick/interfaces/default/manage_subtitleMissed.tmpl
@@ -55,7 +55,7 @@ Download missed subtitles for selected episodes <input class="btn btn-inline" ty
 #for $cur_indexer_id in $sorted_show_ids:
  <tr id="$cur_indexer_id">
   <th><input type="checkbox" class="allCheck" id="allCheck-$cur_indexer_id" name="$cur_indexer_id-all" checked="checked" /></th>
-  <th colspan="3" style="width: 100%; text-align: left;"><a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_indexer_id">$show_names[$cur_indexer_id]</a> ($ep_counts[$cur_indexer_id]) <input type="button" class="get_more_eps btn" id="$cur_indexer_id" value="Expand" /></th>
+  <th colspan="3" style="width: 100%; text-align: left;"><a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_indexer_id">$show_names[$cur_indexer_id]</a> ($ep_counts[$cur_indexer_id]) <input type="button" class="pull-right get_more_eps btn" id="$cur_indexer_id" value="Expand" /></th>
  </tr>
 #end for
 </table>
diff --git a/gui/slick/js/manageSubtitleMissed.js b/gui/slick/js/manageSubtitleMissed.js
index 262ecbb282e38fbba962c0813ac97ca310d6ae03..08c78d890b2b38342d10ab8ac1394d285a6b2d1f 100644
--- a/gui/slick/js/manageSubtitleMissed.js
+++ b/gui/slick/js/manageSubtitleMissed.js
@@ -5,21 +5,21 @@ $(document).ready(function() {
             var checked = ' checked';
         else
             var checked = '';
-        
+
         var row = '';
-        row += ' <tr class="good">';
+        row += ' <tr class="good show-' + indexer_id + '">';
         row += '  <td align="center"><input type="checkbox" class="'+indexer_id+'-epcheck" name="'+indexer_id+'-'+season+'x'+episode+'"'+checked+'></td>';
         row += '  <td style="width: 1%;">'+season+'x'+episode+'</td>';
         row += '  <td>'+name+'</td>';
         row += '  <td style="float: right;">'; 
         	subtitles = subtitles.split(',')
-        	for (i in subtitles)
+        	for (var i in subtitles)
         	{
         		row += '   <img src="/images/subtitles/flags/'+subtitles[i]+'.png" width="16" height="11" alt="'+subtitles[i]+'" />&nbsp;';
         	}
         row += '  </td>';
         row += ' </tr>'
-        
+
         return row;
     }
 
@@ -32,21 +32,35 @@ $(document).ready(function() {
         var cur_indexer_id = $(this).attr('id');
         var checked = $('#allCheck-'+cur_indexer_id).prop('checked');
         var last_row = $('tr#'+cur_indexer_id);
-        
-        $.getJSON(sbRoot+'/manage/showSubtitleMissed',
+        var clicked = $(this).attr('data-clicked');
+        var action = $(this).attr('value');
+
+        if (!clicked) {
+            $.getJSON(sbRoot + '/manage/showSubtitleMissed',
                   {
                    indexer_id: cur_indexer_id,
                    whichSubs: $('#selectSubLang').val()
                   },
                   function (data) {
-                      $.each(data, function(season,eps){
+                      $.each(data, function(season, eps) {
                           $.each(eps, function(episode, data) {
                               //alert(season+'x'+episode+': '+name);
                               last_row.after(make_row(cur_indexer_id, season, episode, data.name, data.subtitles, checked));
                           });
                       });
                   });
-        $(this).hide();
+            $(this).attr('data-clicked', 1);
+            $(this).prop('value', 'Collapse');
+        } else {
+            if (action === 'Collapse') {
+                $('table tr').filter('.show-' + cur_indexer_id).hide();
+                $(this).prop('value', 'Expand');
+            }
+            else if (action === 'Expand') {
+                $('table tr').filter('.show-' + cur_indexer_id).show();
+                $(this).prop('value', 'Collapse');
+            }
+        }
     });
 
     // selects all visible episode checkboxes.
@@ -68,5 +82,4 @@ $(document).ready(function() {
                 this.checked = false;
         });
     });
-
 });
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index e75df4eae5cbc735f5e628650ac5cf56cdf99b02..e81ec3161a90f36e560842a8edbc92ae8c765fb0 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -37,7 +37,7 @@ from github import Github
 from sickbeard import providers, metadata, config, webserveInit
 from sickbeard.providers.generic import GenericProvider
 from providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
-    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, torrentbytes, animezb, \
+    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, xthor, torrentbytes, animezb, \
     frenchtorrentdb, freshontv, titansoftv, libertalia, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, scenetime, btdigg
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
     naming_ep_type
@@ -184,6 +184,7 @@ TRASH_ROTATE_LOGS = False
 SORT_ARTICLE = False
 DEBUG = False
 DISPLAY_ALL_SEASONS = True
+DEFAULT_PAGE = 'home'
 
 
 USE_LISTVIEW = False
@@ -585,7 +586,7 @@ def initialize(consoleLogging=True):
             POSTER_SORTBY, POSTER_SORTDIR, \
             METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, REQUIRE_WORDS, CALENDAR_UNPROTECTED, NO_RESTART, CREATE_MISSING_SHOW_DIRS, \
             ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, SUBTITLES_MULTI, EMBEDDED_SUBTITLES_ALL, SUBTITLES_EXTRA_SCRIPTS, subtitlesFinderScheduler, \
-            USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, PROXY_INDEXERS, \
+            USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, DEFAULT_PAGE, PROXY_SETTING, PROXY_INDEXERS, \
             AUTOPOSTPROCESSER_FREQUENCY, SHOWUPDATE_HOUR, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
             ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
             ANIME_SPLIT_HOME, SCENE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_ORG, GIT_REPO, GIT_USERNAME, GIT_PASSWORD, \
@@ -630,6 +631,8 @@ def initialize(consoleLogging=True):
 
         # debugging
         DEBUG = bool(check_setting_int(CFG, 'General', 'debug', 0))
+
+        DEFAULT_PAGE = check_setting_str(CFG, 'General', 'default_page', 'home')
 
         ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
         LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR))
@@ -1650,6 +1653,7 @@ def save_config():
     new_config['General']['anon_redirect'] = ANON_REDIRECT
     new_config['General']['api_key'] = API_KEY
     new_config['General']['debug'] = int(DEBUG)
+    new_config['General']['default_page'] = DEFAULT_PAGE
     new_config['General']['enable_https'] = int(ENABLE_HTTPS)
     new_config['General']['https_cert'] = HTTPS_CERT
     new_config['General']['https_key'] = HTTPS_KEY
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 053fef76092eac52921af23c1013d37d34959e3c..aaacd1fd65e219572f90f6d549194534f28f8327 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -142,6 +142,9 @@ def remove_non_release_groups(name):
                        '^\{ www\.SceneTime\.com \} - ': 'searchre',
                        '^\[ www\.TorrentDay\.com \] - ': 'searchre',
                        '^\[ www\.Cpasbien\.pw \] ': 'searchre',
+                       '^\[ www\.Cpasbien\.com \] ': 'searchre',
+                       '^\[www\.Cpasbien\.com\] ': 'searchre',
+                       '^\[www\.Cpasbien\.pe\] ': 'searchre',
                       }
 
     _name = name
@@ -427,7 +430,7 @@ def hardlinkFile(srcFile, destFile):
     try:
         ek.ek(link, srcFile, destFile)
         fixSetGroupID(destFile)
-    except Exception, e:
+    except Exception as e:
         logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ": " + ex(e) + ". Copying instead",
                    logger.ERROR)
         copyFile(srcFile, destFile)
@@ -459,16 +462,16 @@ def make_dirs(path):
     parents
     """
 
-    logger.log(u"Checking if the path " + path + " already exists", logger.DEBUG)
+    logger.log(u"Checking if the path %s already exists" % path, logger.DEBUG)
 
     if not ek.ek(os.path.isdir, path):
         # Windows, create all missing folders
         if os.name == 'nt' or os.name == 'ce':
             try:
-                logger.log(u"Folder " + path + " didn't exist, creating it", logger.DEBUG)
+                logger.log(u"Folder %s didn't exist, creating it" % path, logger.DEBUG)
                 ek.ek(os.makedirs, path)
-            except (OSError, IOError), e:
-                logger.log(u"Failed creating " + path + " : " + ex(e), logger.ERROR)
+            except (OSError, IOError) as e:
+                logger.log(u"Failed creating %s : %s" % (path, ex(e)), logger.ERROR)
                 return False
 
         # not Windows, create all missing folders and set permissions
@@ -485,14 +488,14 @@ def make_dirs(path):
                     continue
 
                 try:
-                    logger.log(u"Folder " + sofar + " didn't exist, creating it", logger.DEBUG)
+                    logger.log(u"Folder %s didn't exist, creating it" % sofar, logger.DEBUG)
                     ek.ek(os.mkdir, sofar)
                     # use normpath to remove end separator, otherwise checks permissions against itself
                     chmodAsParent(ek.ek(os.path.normpath, sofar))
                     # do the library update for synoindex
                     notifiers.synoindex_notifier.addFolder(sofar)
-                except (OSError, IOError), e:
-                    logger.log(u"Failed creating " + sofar + " : " + ex(e), logger.ERROR)
+                except (OSError, IOError) as e:
+                    logger.log(u"Failed creating %s : %s" % (sofar, ex(e)), logger.ERROR)
                     return False
 
     return True
@@ -533,10 +536,10 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
 
     # move the file
     try:
-        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
+        logger.log(u"Renaming file from %s to %s" % (cur_path, new_path))
         ek.ek(shutil.move, cur_path, new_path)
-    except (OSError, IOError), e:
-        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
+    except (OSError, IOError) as e:
+        logger.log(u"Failed renaming %s to %s : %s" % (cur_path, new_path, ex(e)), logger.ERROR)
         return False
 
     # clean up any old folders that are empty
@@ -571,7 +574,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
                 ek.ek(os.rmdir, check_empty_dir)
                 # do the library update for synoindex
                 notifiers.synoindex_notifier.deleteFolder(check_empty_dir)
-            except OSError, e:
+            except OSError as e:
                 logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING)
                 break
             check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)
@@ -780,7 +783,7 @@ def create_https_certificates(ssl_cert, ssl_key):
         from OpenSSL import crypto  # @UnresolvedImport
         from certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
             serial  # @UnresolvedImport
-    except Exception, e:
+    except Exception as e:
         logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
         return False
 
@@ -811,22 +814,22 @@ def backupVersionedFile(old_file, version):
 
     while not ek.ek(os.path.isfile, new_file):
         if not ek.ek(os.path.isfile, old_file):
-            logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
+            logger.log(u"Not creating backup, %s doesn't exist" % old_file, logger.DEBUG)
             break
 
         try:
-            logger.log(u"Trying to back up " + old_file + " to " + new_file, logger.DEBUG)
+            logger.log(u"Trying to back up %s to %s" % (old_file, new_file), logger.DEBUG)
             shutil.copy(old_file, new_file)
             logger.log(u"Backup done", logger.DEBUG)
             break
-        except Exception, e:
-            logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + ex(e), logger.WARNING)
+        except Exception as e:
+            logger.log(u"Error while trying to back up %s to %s : %s" % (old_file, new_file, ex(e)), logger.WARNING)
             numTries += 1
             time.sleep(1)
             logger.log(u"Trying again.", logger.DEBUG)
 
         if numTries >= 10:
-            logger.log(u"Unable to back up " + old_file + " to " + new_file + " please do it manually.", logger.ERROR)
+            logger.log(u"Unable to back up %s to %s please do it manually." % (old_file, new_file), logger.ERROR)
             return False
 
     return True
@@ -839,7 +842,7 @@ def restoreVersionedFile(backup_file, version):
     restore_file = new_file + '.' + 'v' + str(version)
 
     if not ek.ek(os.path.isfile, new_file):
-        logger.log(u"Not restoring, " + new_file + " doesn't exist", logger.DEBUG)
+        logger.log(u"Not restoring, %s doesn't exist" % new_file, logger.DEBUG)
         return False
 
     try:
@@ -847,7 +850,7 @@ def restoreVersionedFile(backup_file, version):
             u"Trying to backup " + new_file + " to " + new_file + "." + "r" + str(version) + " before restoring backup",
             logger.DEBUG)
         shutil.move(new_file, new_file + '.' + 'r' + str(version))
-    except Exception, e:
+    except Exception as e:
         logger.log(
             u"Error while trying to backup DB file " + restore_file + " before proceeding with restore: " + ex(e),
             logger.WARNING)
@@ -863,7 +866,7 @@ def restoreVersionedFile(backup_file, version):
             shutil.copy(restore_file, new_file)
             logger.log(u"Restore done", logger.DEBUG)
             break
-        except Exception, e:
+        except Exception as e:
             logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING)
             numTries += 1
             time.sleep(1)
@@ -1258,9 +1261,9 @@ def touchFile(fname, atime=None):
             if e.errno == errno.ENOSYS:
                 logger.log(u"File air date stamping not available on your OS", logger.DEBUG)
             elif e.errno == errno.EACCES:
-                logger.log(u"File air date stamping failed(Permission denied). Check permissions for file: {0}".format(fname), logger.ERROR)
+                logger.log(u"File air date stamping failed(Permission denied). Check permissions for file: %s" % fname, logger.ERROR)
             else:
-                logger.log(u"File air date stamping failed. The error is: {0} and the message is: {1}.".format(e.errno, e.strerror), logger.ERROR)
+                logger.log(u"File air date stamping failed. The error is: %s." % ex(e), logger.ERROR)
             pass
 
     return False
@@ -1341,9 +1344,11 @@ def headURL(url, params=None, headers={}, timeout=30, session=None, json=False,
         resp = session.head(url, timeout=timeout, allow_redirects=True, verify=session.verify)
 
         if not resp.ok:
-            logger.log(u"Requested url " + url + " returned status code is " + str(
+            logger.log(u"Requested headURL " + url + " returned status code is " + str(
                 resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
             return False
+        else:
+            logger.log(u"Requested headURL " + url + " returned status code is " + str(resp.status_code), logger.DEBUG)
 
         if proxyGlypeProxySSLwarning is not None:
             if re.search('The site you are attempting to browse is on a secure connection', resp.text):
@@ -1356,20 +1361,21 @@ def headURL(url, params=None, headers={}, timeout=30, session=None, json=False,
 
         return resp.status_code == 200
 
-    except requests.exceptions.HTTPError, e:
-        logger.log(u"HTTP error in headURL {0}. Error: {1}".format(url,e.errno), logger.WARNING)
+    except requests.exceptions.HTTPError as e:
+        logger.log(u"HTTP error in headURL %s. Error: %s" % (url, ex(e)), logger.WARNING)
         pass
-    except requests.exceptions.ConnectionError, e:
-        logger.log(u"Connection error to {0}. Error: {1}".format(url,e.message), logger.WARNING)
+    except requests.exceptions.ConnectionError as e:
+        logger.log(u"Connection error in headURL %s. Error: %s " % (url, ex(e)), logger.WARNING)
         pass
-    except requests.exceptions.Timeout, e:
-        logger.log(u"Connection timed out accessing {0}. Error: {1}".format(url,e.message), logger.WARNING)
+    except requests.exceptions.Timeout as e:
+        logger.log(u"Connection timed out accessing headURL %s. Error: %s" % (url, ex(e)), logger.WARNING)
         pass
     except requests.exceptions.ContentDecodingError:
-        logger.log(u"Content-Encoding was gzip, but content was not compressed", logger.WARNING)
+        logger.log(u"Content-Encoding was gzip, but content was not compressed. headURL: %s" % url, logger.DEBUG)
+        logger.log(traceback.format_exc(), logger.DEBUG)
         pass
     except Exception as e:
-        logger.log(u"Unknown exception in headURL {0}. Error: {1}".format(url,e.message), logger.WARNING)
+        logger.log(u"Unknown exception in headURL %s. Error: %s" % (url, ex(e)), logger.WARNING)
         logger.log(traceback.format_exc(), logger.WARNING)
         pass
 
@@ -1393,33 +1399,36 @@ def getURL(url, post_data=None, params={}, headers={}, timeout=30, session=None,
             resp = session.get(url, timeout=timeout, allow_redirects=True, verify=session.verify)
 
         if not resp.ok:
-            logger.log(u"Requested url " + url + " returned status code is " + str(
+            logger.log(u"Requested getURL " + url + " returned status code is " + str(
                 resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
             return
+        else:
+            logger.log(u"Requested getURL " + url + " returned status code is " + str(resp.status_code), logger.DEBUG)
 
         if proxyGlypeProxySSLwarning is not None:
             if re.search('The site you are attempting to browse is on a secure connection', resp.text):
                 resp = session.get(proxyGlypeProxySSLwarning, timeout=timeout, allow_redirects=True, verify=session.verify)
 
                 if not resp.ok:
-                    logger.log(u"GlypeProxySSLwarning: Requested url " + url + " returned status code is " + str(
+                    logger.log(u"GlypeProxySSLwarning: Requested getURL " + url + " returned status code is " + str(
                         resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
                     return
 
-    except requests.exceptions.HTTPError, e:
-        logger.log(u"HTTP error in getURL {0}. Error: {1}".format(url,e.errno), logger.WARNING)
+    except requests.exceptions.HTTPError as e:
+        logger.log(u"HTTP error in getURL %s Error: %s" % (url, ex(e)), logger.WARNING)
         return
-    except requests.exceptions.ConnectionError, e:
-        logger.log(u"Connection error to {0}. Error: {1}".format(url,e.message), logger.WARNING)
+    except requests.exceptions.ConnectionError as e:
+        logger.log(u"Connection error to getURL %s Error: %s" % (url, ex(e)), logger.WARNING)
         return
-    except requests.exceptions.Timeout, e:
-        logger.log(u"Connection timed out accessing {0}. Error: {1}".format(url,e.message), logger.WARNING)
+    except requests.exceptions.Timeout as e:
+        logger.log(u"Connection timed out accessing getURL %s Error: %s" % (url, ex(e)), logger.WARNING)
         return
     except requests.exceptions.ContentDecodingError:
-        logger.log(u"Content-Encoding was gzip, but content was not compressed", logger.WARNING)
+        logger.log(u"Content-Encoding was gzip, but content was not compressed. getURL: %s" % url, logger.DEBUG)
+        logger.log(traceback.format_exc(), logger.DEBUG)
         return
     except Exception as e:
-        logger.log(u"Unknown exception in getURL {0}. Error: {1}".format(url,e.message), logger.WARNING)
+        logger.log(u"Unknown exception in getURL %s Error: %s" % (url, ex(e)), logger.WARNING)
         logger.log(traceback.format_exc(), logger.WARNING)
         return
 
@@ -1434,7 +1443,7 @@ def download_file(url, filename, session=None, headers={}):
     try:
         with closing(session.get(url, allow_redirects=True, verify=session.verify)) as resp:
             if not resp.ok:
-                logger.log(u"Requested url " + url + " returned status code is " + str(
+                logger.log(u"Requested download url " + url + " returned status code is " + str(
                     resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
                 return False
 
@@ -1449,25 +1458,25 @@ def download_file(url, filename, session=None, headers={}):
             except:
                 logger.log(u"Problem setting permissions or writing file to: %s" % filename, logger.WARNING)
 
-    except requests.exceptions.HTTPError, e:
+    except requests.exceptions.HTTPError as e:
         _remove_file_failed(filename)
-        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
+        logger.log(u"HTTP error " + ex(e) + " while loading download URL " + url, logger.WARNING)
         return False
-    except requests.exceptions.ConnectionError, e:
+    except requests.exceptions.ConnectionError as e:
         _remove_file_failed(filename)
-        logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
+        logger.log(u"Connection error " + ex(e) + " while loading download URL " + url, logger.WARNING)
         return False
-    except requests.exceptions.Timeout, e:
+    except requests.exceptions.Timeout as e:
         _remove_file_failed(filename)
-        logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
+        logger.log(u"Connection timed out " + ex(e) + " while loading download URL " + url, logger.WARNING)
         return False
-    except EnvironmentError, e:
+    except EnvironmentError as e:
         _remove_file_failed(filename)
         logger.log(u"Unable to save the file: " + ex(e), logger.WARNING)
         return False
     except Exception:
         _remove_file_failed(filename)
-        logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
+        logger.log(u"Unknown exception while loading download URL " + url + ": " + traceback.format_exc(), logger.WARNING)
         return False
 
     return True
@@ -1482,7 +1491,7 @@ def get_size(start_path='.'):
             try:
                 total_size += ek.ek(os.path.getsize, fp)
             except OSError as e:
-                logger.log('Unable to get size for file {filePath}. Error msg is: {errorMsg}'.format(filePath=fp, errorMsg=str(e)), logger.ERROR)
+                logger.log('Unable to get size for file %s Error: %s' % (fp, ex(e)), logger.ERROR)
                 logger.log(traceback.format_exc(), logger.DEBUG)
     return total_size
 
diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py
index 0e7577317af58bc71f0994d9422283b5172edb82..aebf820a04efb90bf5d2a5070bd42d5b43058fe8 100644
--- a/sickbeard/processTV.py
+++ b/sickbeard/processTV.py
@@ -318,7 +318,7 @@ def validateDir(path, dirName, nzbNameOriginal, failed, result):
                 os.path.realpath, sqlShow["location"]).lower():
             result.output += logHelper(
                 u"Cannot process an episode that's already been moved to its show dir, skipping " + dirName,
-                logger.ERROR)
+                logger.WARNING)
             return False
 
     # Get the videofile list for the next checks
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index 32a1d0f5bd087e263994de02e01df13b8448a9d3..42db2237ab9acb170561830da52f4ec1788477fd 100644
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -50,6 +50,7 @@ __all__ = ['womble',
            'bluetigers',
            'cpasbien',
            'fnt',
+           'xthor',
            'scenetime',
            'btdigg',
 ]
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 16bc0b008f836512f5820d729bb1a786e9efe2d3..8d04c71017a79b02c1ea0580d279872542d401c7 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -75,7 +75,9 @@ class GenericProvider:
 
         self.btCacheURLS = [
                 'http://torcache.net/torrent/{torrent_hash}.torrent',
-                'http://torrage.com/torrent/{torrent_hash}.torrent',
+                'http://thetorrent.org/torrent/{torrent_hash}.torrent',
+                'http://btdig.com/torrent/{torrent_hash}.torrent',
+                #'http://torrage.com/torrent/{torrent_hash}.torrent',
                 #'http://itorrents.org/torrent/{torrent_hash}.torrent',
             ]
 
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 501331fd7e20c365278d256451914fcc213f06d1..865bf64ed39f26683e65c54c264f300c3e5e614d 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -149,7 +149,7 @@ class NewznabProvider(generic.NZBProvider):
         rid = helpers.mapIndexersToShow(ep_obj.show)[2]
         if rid:
             cur_params['rid'] = rid
-        elif 'rid' in params:
+        elif 'rid' in cur_params:
             cur_params.pop('rid')
 
         # add new query strings for exceptions
diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b0ab91d0dc92cde2604b7b49edd211eaa94c6db
--- /dev/null
+++ b/sickbeard/providers/xthor.py
@@ -0,0 +1,238 @@
+# -*- coding: latin-1 -*-
+# Author: adaur <adaur.underground@gmail.com>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
+
+import traceback
+import re
+import datetime
+import time
+from requests.auth import AuthBase
+import sickbeard
+import generic
+import cookielib
+import urllib
+import requests
+from requests import exceptions
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.common import Quality
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import show_name_helpers
+from sickbeard import db
+from sickbeard import helpers
+from unidecode import unidecode
+from sickbeard import classes
+from sickbeard.helpers import sanitizeSceneName
+from sickbeard.exceptions import ex
+
+class XthorProvider(generic.TorrentProvider):
+
+    def __init__(self):
+        
+        generic.TorrentProvider.__init__(self, "Xthor")
+
+        self.supportsBacklog = True
+        
+        self.cj = cookielib.CookieJar()
+        
+        self.url = "https://xthor.bz"
+        self.urlsearch = "https://xthor.bz/browse.php?search=%s%s"
+        self.categories = "&searchin=title&incldead=0"
+
+        self.enabled = False
+        self.username = None
+        self.password = None
+        
+    def isEnabled(self):
+        return self.enabled
+
+    def imageName(self):
+        return 'xthor.png'
+        
+    def _get_season_search_strings(self, ep_obj):
+
+        search_string = {'Season': []}
+        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+            if ep_obj.show.air_by_date or ep_obj.show.sports:
+                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
+            elif ep_obj.show.anime:
+                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+            else:
+                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
+
+            search_string['Season'].append(ep_string)
+
+        return [search_string]
+
+    def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+        search_string = {'Episode': []}
+
+        if not ep_obj:
+            return []
+
+        if self.show.air_by_date:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            str(ep_obj.airdate).replace('-', '|')
+                search_string['Episode'].append(ep_string)
+        elif self.show.sports:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            str(ep_obj.airdate).replace('-', '|') + '|' + \
+                            ep_obj.airdate.strftime('%b')
+                search_string['Episode'].append(ep_string)
+        elif self.show.anime:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            "%i" % int(ep_obj.scene_absolute_number)
+                search_string['Episode'].append(ep_string)
+        else:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) + '.' + \
+                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
+
+                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
+
+        return [search_string]
+    
+    def _get_title_and_url(self, item):
+
+        title, url = item
+
+        if title:
+            title = u'' + title
+            title = title.replace(' ', '.')
+
+        if url:
+            url = str(url).replace('&amp;', '&')
+
+        return (title, url)  
+    
+    def getQuality(self, item, anime=False):
+        quality = Quality.sceneQuality(item[0], anime)
+        return quality
+    
+    def _doLogin(self):
+    
+        if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
+            return True
+
+        header = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.8 (KHTML, like Gecko) Chrome/17.0.940.0 Safari/535.8'}
+        
+        login_params = {'username': self.username,
+                        'password': self.password,
+                        'submitme': 'X'
+        }
+        
+        if not self.session:
+            self.session = requests.Session()
+            
+        logger.log('Performing authentication to Xthor', logger.DEBUG)
+        
+        try:
+            response = self.session.post(self.url + '/takelogin.php', data=login_params, timeout=30, headers=header)
+        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+            logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+            return False
+
+        if re.search('donate.php', response.text):
+            logger.log(u'Login to ' + self.name + ' was successful.', logger.DEBUG)
+            return True                
+        else:
+            logger.log(u'Login to ' + self.name + ' was unsuccessful.', logger.DEBUG)                
+            return False
+
+        return True     
+
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+
+        logger.log(u"_doSearch started with ..." + str(search_params), logger.DEBUG)
+    
+        results = []
+        items = {'Season': [], 'Episode': [], 'RSS': []}
+        
+        # check for auth
+        if not self._doLogin():
+            return False
+            
+        for mode in search_params.keys():
+
+            for search_string in search_params[mode]:
+
+                if isinstance(search_string, unicode):
+                    search_string = unidecode(search_string)
+        
+                searchURL = self.urlsearch % (urllib.quote(search_string), self.categories)
+         
+                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                
+                data = self.getURL(searchURL)
+
+                if not data:
+                    continue
+
+                with BS4Parser(data, features=["html5lib", "permissive"]) as html:
+                    resultsTable = html.find("table", { "class" : "table2 table-bordered2"  })
+                    if resultsTable:
+                        rows = resultsTable.findAll("tr")
+                        for row in rows:
+                            link = row.find("a",href=re.compile("details.php"))                                                           
+                            if link:               
+                                title = link.text
+                                logger.log(u"Xthor title : " + title, logger.DEBUG)                                                                      
+                                downloadURL =  self.url + '/' + row.find("a",href=re.compile("download.php"))['href']             
+                                logger.log(u"Xthor download URL : " + downloadURL, logger.DEBUG)                                   
+                                item = title, downloadURL
+                                items[mode].append(item)
+            results += items[mode]
+        return results 
+        
+    def seedRatio(self):
+        return self.ratio
+
+    def findPropers(self, search_date=datetime.datetime.today()):
+
+        results = []
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select(
+            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+            ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+            ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+        )
+
+        if not sqlResults:
+            return []
+
+        for sqlshow in sqlResults:
+            self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+            if self.show:
+                curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+                search_params = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+                for item in self._doSearch(search_params[0]):
+                    title, url = self._get_title_and_url(item)
+                    results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
+
+        return results  
+
+provider = XthorProvider()
diff --git a/sickbeard/scene_numbering.py b/sickbeard/scene_numbering.py
index 0887278d41adc44b3dd7393cf9d3607336aca63a..686729c6ab187dedf12d526c2d2545b6d3fae2e8 100644
--- a/sickbeard/scene_numbering.py
+++ b/sickbeard/scene_numbering.py
@@ -463,16 +463,12 @@ def xem_refresh(indexer_id, indexer, force=False):
     
     @param indexer_id: int
     """
-    if indexer_id is None:
+    if not indexer_id or indexer_id < 1:
         return
 
     indexer_id = int(indexer_id)
     indexer = int(indexer)
 
-    # XEM API URL
-    url = "http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (
-    indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'])
-
     MAX_REFRESH_AGE_SECS = 86400  # 1 day
 
     myDB = db.DBConnection()
@@ -497,41 +493,48 @@ def xem_refresh(indexer_id, indexer, force=False):
 
         try:
             from .scene_exceptions import xem_session
+
+            # XEM MAP URL
+            url = "http://thexem.de/map/havemap?origin=%s" % sickbeard.indexerApi(indexer).config['xem_origin']
+            parsedJSON = sickbeard.helpers.getURL(url, session=xem_session, json=True)
+            if not parsedJSON or 'result' not in parsedJSON or 'success' not in parsedJSON['result'] or 'data' not in parsedJSON or str(indexer_id) not in parsedJSON['data']:
+                return
+
+            # XEM API URL
+            url = "http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'])
+
             parsedJSON = sickbeard.helpers.getURL(url, session=xem_session, json=True)
-            if not parsedJSON or parsedJSON == '':
+            if not parsedJSON or not 'result' in parsedJSON or not 'success' in parsedJSON['result']:
                 logger.log(u'No XEM data for show "%s on %s"' % (indexer_id, sickbeard.indexerApi(indexer).name,), logger.INFO)
                 return
 
-            if 'success' in parsedJSON['result']:
-                cl = []
-                for entry in parsedJSON['data']:
-                    if 'scene' in entry:
-                        cl.append([
-                            "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
-                            [entry['scene']['season'],
-                             entry['scene']['episode'],
-                             entry['scene']['absolute'],
-                             indexer_id,
-                             entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
-                             entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
-                            ]])
-                    if 'scene_2' in entry:  # for doubles
-                        cl.append([
-                            "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
-                            [entry['scene_2']['season'],
-                             entry['scene_2']['episode'],
-                             entry['scene_2']['absolute'],
-                             indexer_id,
-                             entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
-                             entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
-                            ]])
-
-                if len(cl) > 0:
-                    myDB = db.DBConnection()
-                    myDB.mass_action(cl)
-            else:
-                logger.log(u"Empty lookup result - no XEM data for show %s on %s" % (
-                    indexer_id, sickbeard.indexerApi(indexer).name,), logger.DEBUG)
+            cl = []
+            for entry in parsedJSON['data']:
+                if 'scene' in entry:
+                    cl.append([
+                        "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
+                        [entry['scene']['season'],
+                         entry['scene']['episode'],
+                         entry['scene']['absolute'],
+                         indexer_id,
+                         entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
+                         entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
+                        ]])
+                if 'scene_2' in entry:  # for doubles
+                    cl.append([
+                        "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
+                        [entry['scene_2']['season'],
+                         entry['scene_2']['episode'],
+                         entry['scene_2']['absolute'],
+                         indexer_id,
+                         entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
+                         entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
+                        ]])
+
+            if len(cl) > 0:
+                myDB = db.DBConnection()
+                myDB.mass_action(cl)
+
         except Exception, e:
             logger.log(
                 u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi(
diff --git a/sickbeard/search.py b/sickbeard/search.py
index dd087cf165a59e19b37e115af916ef1a9d155c13..90993728a045ada8a5477e1ad7d7f7f3cc62f048 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -593,11 +593,10 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
                             epObjs.append(show.getEpisode(season, curEpNum))
                     bestSeasonResult.episodes = epObjs
 
-                    epNum = MULTI_EP_RESULT
-                    if epNum in foundResults[curProvider.name]:
-                        foundResults[curProvider.name][epNum].append(bestSeasonResult)
+                    if MULTI_EP_RESULT in foundResults[curProvider.name]:
+                        foundResults[curProvider.name][MULTI_EP_RESULT].append(bestSeasonResult)
                     else:
-                        foundResults[curProvider.name][epNum] = [bestSeasonResult]
+                        foundResults[curProvider.name][MULTI_EP_RESULT] = [bestSeasonResult]
 
         # go through multi-ep results and see if we really want them or not, get rid of the rest
         multiResults = {}
@@ -606,27 +605,25 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
 
                 logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG)
 
-                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
-                                                                               multiResult.provider.name):
-                    logger.log(multiResult.name + u" has previously failed, rejecting this multi-ep result")
+                multiResult = pickBestResult([multiResult], show)
+                if not multiResult:
                     continue
 
                 # see how many of the eps that this result covers aren't covered by single results
                 neededEps = []
                 notNeededEps = []
                 for epObj in multiResult.episodes:
-                    epNum = epObj.episode
                     # if we have results for the episode
-                    if epNum in foundResults[curProvider.name] and len(foundResults[curProvider.name][epNum]) > 0:
-                        neededEps.append(epNum)
+                    if epObj.episode in foundResults[curProvider.name] and len(foundResults[curProvider.name][epObj.episode]) > 0:
+                        notNeededEps.append(epObj.episode)
                     else:
-                        notNeededEps.append(epNum)
+                        neededEps.append(epObj.episode)
 
                 logger.log(
                     u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps),
                     logger.DEBUG)
 
-                if not notNeededEps:
+                if not neededEps:
                     logger.log(u"All of these episodes were covered by single episode results, ignoring this multi-episode result", logger.DEBUG)
                     continue
 
@@ -634,11 +631,10 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
                 multiNeededEps = []
                 multiNotNeededEps = []
                 for epObj in multiResult.episodes:
-                    epNum = epObj.episode
-                    if epNum in multiResults:
-                        multiNotNeededEps.append(epNum)
+                    if epObj.episode in multiResults:
+                        multiNotNeededEps.append(epObj.episode)
                     else:
-                        multiNeededEps.append(epNum)
+                        multiNeededEps.append(epObj.episode)
 
                 logger.log(
                     u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str(
@@ -650,21 +646,14 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
                         logger.DEBUG)
                     continue
 
-                # if we're keeping this multi-result then remember it
-                for epObj in multiResult.episodes:
-                    if not multiResult.url.startswith('magnet'):
-                        multiResult.content = multiResult.provider.getURL(cur_result.url)
-
-                    multiResults[epObj.episode] = multiResult
-
                 # don't bother with the single result if we're going to get it with a multi result
                 for epObj in multiResult.episodes:
-                    epNum = epObj.episode
-                    if epNum in foundResults[curProvider.name]:
+                    multiResults[epObj.episode] = multiResult
+                    if epObj.episode in foundResults[curProvider.name]:
                         logger.log(
                             u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
-                                epNum) + ", removing the single-episode results from the list", logger.DEBUG)
-                        del foundResults[curProvider.name][epNum]
+                                epObj.episode) + ", removing the single-episode results from the list", logger.DEBUG)
+                        del foundResults[curProvider.name][epObj.episode]
 
         # of all the single ep results narrow it down to the best one for each episode
         finalResults += set(multiResults.values())
diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py
index df0c9475cfb287145f6cd54c71aa967f5f827dea..247de71c2663ba031aff53587ed5ae5d7f7b6b25 100644
--- a/sickbeard/show_queue.py
+++ b/sickbeard/show_queue.py
@@ -112,6 +112,8 @@ class ShowQueue(generic_queue.GenericQueue):
 
         queueItemObj = QueueItemRefresh(show, force=force)
 
+        logger.log(u"Queueing show refresh for " + show.name, logger.DEBUG)
+
         self.add_item(queueItemObj)
 
         return queueItemObj
@@ -597,6 +599,8 @@ class QueueItemUpdate(ShowQueueItem):
             logger.log(u"Launching backlog for this show since we found missing episodes")
             sickbeard.backlogSearchScheduler.action.searchBacklog([self.show])
 
+        logger.log(u"Finished update of " + self.show.name, logger.DEBUG)
+
         sickbeard.showQueueScheduler.action.refreshShow(self.show, self.force)
 
 class QueueItemForceUpdate(QueueItemUpdate):
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index deb5655fb172af006f9970c4daac38cb6baff73b..12c67056c4f0d119bb5ee755b5df447779874b0e 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -484,10 +484,12 @@ class TVShow(object):
 
         if self.lang:
             lINDEXER_API_PARMS['language'] = self.lang
+            logger.log(u"Using language: " + str(self.lang), logger.DEBUG)
 
         if self.dvdorder != 0:
             lINDEXER_API_PARMS['dvdorder'] = True
 
+        logger.log(u"lINDEXER_API_PARMS: " + str(lINDEXER_API_PARMS), logger.DEBUG)
         t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
 
         cachedShow = t[self.indexerid]
@@ -495,6 +497,7 @@ class TVShow(object):
 
         for curResult in sqlResults:
 
+            logger.log(u"loadEpisodesFromDB curResult: " + str(curResult), logger.DEBUG)
             deleteEp = False
 
             curSeason = int(curResult["season"])
@@ -509,6 +512,7 @@ class TVShow(object):
                     deleteEp = True
 
             if not curSeason in scannedEps:
+                logger.log(u"Not curSeason in scannedEps", logger.DEBUG)
                 scannedEps[curSeason] = {}
 
             logger.log(u"Loading episode S%02dE%02d from the DB" % (curSeason, curEpisode), logger.DEBUG)
@@ -530,6 +534,8 @@ class TVShow(object):
                            logger.DEBUG)
                 continue
 
+        logger.log(u"Finished loading all episodes from the DB", logger.DEBUG)
+
         return scannedEps
 
     def loadEpisodesFromIndexer(self, cache=True):
@@ -1471,7 +1477,7 @@ class TVEpisode(object):
 
             for video, subs in foundSubs.iteritems():
                 for sub in subs:
-                    subpath = subliminal.subtitle.get_subtitle_path(video.name, sub.language)
+                    subpath = subliminal.subtitle.get_subtitle_path(video.name, sub.language if sickbeard.SUBTITLES_MULTI else None)
                     if sickbeard.SUBTITLES_DIR and ek.ek(os.path.exists, sickbeard.SUBTITLES_DIR):
                         subpath = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, subpath))
                     helpers.chmodAsParent(subpath)
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 1ac8621b1435b260a2bac5e1f60ae3c0ea92ac23..644a4e77ac433b3d4c679eae6b6e113e85499a82 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -281,8 +281,9 @@ class WebHandler(BaseHandler):
 
 class LoginHandler(BaseHandler):
     def get(self, *args, **kwargs):
+
         if self.get_current_user():
-            self.redirect('/home/')
+            self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
         else:
             t = PageTemplate(rh=self, file="login.tmpl")
             self.finish(t.respond())
@@ -301,11 +302,11 @@ class LoginHandler(BaseHandler):
         if api_key:
             remember_me = int(self.get_argument('remember_me', default=0) or 0)
             self.set_secure_cookie('sickrage_user', api_key, expires_days=30 if remember_me > 0 else None)
-            logger.log('User logged into the SickRage web interface from IP: ' + self.request.remote_ip, logger.INFO)
+            logger.log('User logged into the SickRage web interface', logger.INFO)
         else:
             logger.log('User attempted a failed login to the SickRage web interface from IP: ' + self.request.remote_ip, logger.WARNING)    
 
-        self.redirect('/home/')
+        self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
 
 
 class LogoutHandler(BaseHandler):
@@ -340,7 +341,7 @@ class WebRoot(WebHandler):
         super(WebRoot, self).__init__(*args, **kwargs)
 
     def index(self):
-        return self.redirect('/home/')
+        return self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
 
     def robots_txt(self):
         """ Keep web crawlers out """
@@ -3768,7 +3769,7 @@ class ConfigGeneral(Config):
                     proxy_setting=None, proxy_indexers=None, anon_redirect=None, git_path=None, git_remote=None,
                     calendar_unprotected=None, debug=None, ssl_verify=None, no_restart=None, coming_eps_missed_range=None,
                     filter_row=None, fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
-                    indexer_timeout=None, download_url=None, rootDir=None, theme_name=None,
+                    indexer_timeout=None, download_url=None, rootDir=None, theme_name=None, default_page=None,
                     git_reset=None, git_username=None, git_password=None, git_autoissues=None, display_all_seasons=None):
 
         results = []
@@ -3858,6 +3859,8 @@ class ConfigGeneral(Config):
         sickbeard.HANDLE_REVERSE_PROXY = config.checkbox_to_value(handle_reverse_proxy)
 
         sickbeard.THEME_NAME = theme_name
+        
+        sickbeard.DEFAULT_PAGE = default_page
 
         sickbeard.save_config()