diff --git a/gui/slick/images/network/cmt.png b/gui/slick/images/network/cmt.png
new file mode 100644
index 0000000000000000000000000000000000000000..631d591accbbf895a7f7548dd441985fb0674537
Binary files /dev/null and b/gui/slick/images/network/cmt.png differ
diff --git a/gui/slick/images/network/national geographic channel.png b/gui/slick/images/network/national geographic channel.png
new file mode 100644
index 0000000000000000000000000000000000000000..b4a5f277b5081829253bf9b6564293f0ebfcc820
Binary files /dev/null and b/gui/slick/images/network/national geographic channel.png differ
diff --git a/gui/slick/images/network/univision.png b/gui/slick/images/network/univision.png
new file mode 100644
index 0000000000000000000000000000000000000000..686e0f2ef88763c4cabfa8579bba7bcc7343c9fd
Binary files /dev/null and b/gui/slick/images/network/univision.png differ
diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl
index 8ef7f7f10b26e2d42fb971f5f8673b3ad346f149..d3cc3cfd750c2164182de7ef66bc36e34a6b5cf8 100644
--- a/gui/slick/interfaces/default/config_general.tmpl
+++ b/gui/slick/interfaces/default/config_general.tmpl
@@ -84,7 +84,6 @@
 								</span>
 							</label>
 						</div>
-
 						<div class="field-pair">
 							<label for="launch_browser">
 								<span class="component-title">Launch browser</span>
@@ -305,6 +304,15 @@
 								</span>
 							</label>
 						</div>
+						<div class="field-pair">
+							<label for="coming_eps_missed_range">
+								<span class="component-title">Missed episodes range</span>
+								<span class="component-desc">
+									<input type="number" step="1" min="7" name="coming_eps_missed_range" id="coming_eps_missed_range" value="$sickbeard.COMING_EPS_MISSED_RANGE" class="form-control input-sm input75" />
+									<p>Set the range, in days, of missed episodes shown on the Coming Episodes page</p>
+								</span>
+							</label>
+						</div>
 						<div class="field-pair">
 							<label for="fuzzy_dating">
 								<span class="component-title">Display fuzzy dates</span>
@@ -604,6 +612,28 @@
 								</span>
 							</label>
 						</div>
+						<div class="field-pair">
+							<label for="ep_default_deleted_status">
+								<span class="component-title">Default deleted episode status:</span>
+									<span class="component-desc">
+#if not $sickbeard.SKIP_REMOVED_FILES or ($sickbeard.USE_TRAKT and $sickbeard.TRAKT_USE_ROLLING_DOWNLOAD)
+										<select name="ep_default_deleted_status" id="ep_default_deleted_status" class="form-control input-sm">
+										#for $defStatus in [$ARCHIVED, $IGNORED]:
+											<option value="$defStatus" #if $defStatus == $sickbeard.EP_DEFAULT_DELETED_STATUS then 'selected="selected"' else ''#>$statusStrings[$defStatus]</option>
+										#end for
+										</select>
+#else
+										<select name="ep_default_deleted_status" id="ep_default_deleted_status" class="form-control input-sm" disabled="disabled">
+										#for $defStatus in [$ARCHIVED, $IGNORED]:
+											<option value="$defStatus" #if $defStatus == $sickbeard.EP_DEFAULT_DELETED_STATUS then 'selected="selected"' else ''#>$statusStrings[$defStatus]</option>
+										#end for
+										</select>
+										<input type="hidden" name="ep_default_deleted_status" value="$sickbeard.EP_DEFAULT_DELETED_STATUS" />
+#end if
+									<span>Define the status to set for an episode whose media file has been deleted.</span>
+								</span>
+							</label>
+						</div>
 
 						<input type="submit" class="btn config_submitter" value="Save Changes" />
 					</fieldset>
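The two new General fields above need matching server-side handling. A minimal sketch of how the submitted values could be read and applied, assuming a saveGeneral-style handler; the function name and plumbing are illustrative, only config.to_int and sickbeard.save_config come from this codebase:

    import sickbeard
    from sickbeard import config
    from sickbeard.common import ARCHIVED, IGNORED

    def save_general_fields(coming_eps_missed_range=None, ep_default_deleted_status=None):
        # Missed-episodes range: integer, honouring the template's minimum of 7 days
        sickbeard.COMING_EPS_MISSED_RANGE = max(7, config.to_int(coming_eps_missed_range, default=7))

        # Deleted-episode status: accept only the two choices the select box offers
        status = config.to_int(ep_default_deleted_status, default=sickbeard.EP_DEFAULT_DELETED_STATUS)
        if status in (ARCHIVED, IGNORED):
            sickbeard.EP_DEFAULT_DELETED_STATUS = status

        sickbeard.save_config()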
diff --git a/gui/slick/interfaces/default/config_notifications.tmpl b/gui/slick/interfaces/default/config_notifications.tmpl
index f0babbb26ff37775534e945f863c0ca43d0a3501..c08b94d61068578713972d4e9f3907eb158a7b66 100644
--- a/gui/slick/interfaces/default/config_notifications.tmpl
+++ b/gui/slick/interfaces/default/config_notifications.tmpl
@@ -1548,20 +1548,6 @@
                                         <span class="component-desc">This feauture will try to snatch <i>number of episode</i> if the show is active. Whould you like to add new show in paused mode(this override previous choice)?</span>
                                     </label>    
                                 </div>
-                                <div class="field-pair">
-                                    <label for="trakt_rolling_default_watched_status">
-                                        <span class="component-title">Default watched status:</span>
-                                            <select name="trakt_rolling_default_watched_status" id="trakt_rolling_default_watched_status" class="form-control form-control-inline input-sm">
-                                            #for $defStatus in [$ARCHIVED, $IGNORED]:
-                                            <option value="$defStatus" #if $defStatus == $sickbeard.TRAKT_ROLLING_DEFAULT_WATCHED_STATUS then 'selected="selected"' else ''#>$statusStrings[$defStatus]</option>
-                                            #end for
-                                            </select>
-                                    </label>
-                                    <label>
-                                        <span class="component-title">&nbsp;</span>
-                                        <span class="component-desc">Define the status to be set for watched episode. This will be set only on show add.</span>
-                                    </label>
-                                </div>
                             </div>
                             <div class="testNotification" id="testTrakt-result">Click below to test.</div>
                             <input type="button" class="btn" value="Test Trakt" id="testTrakt" />
diff --git a/gui/slick/interfaces/default/config_postProcessing.tmpl b/gui/slick/interfaces/default/config_postProcessing.tmpl
index dbb1a4da648643bf20f45a8bbc740159ae9d5f8b..459a649dd2df17ea090ea2244009d2d33610d5d9 100644
--- a/gui/slick/interfaces/default/config_postProcessing.tmpl
+++ b/gui/slick/interfaces/default/config_postProcessing.tmpl
@@ -104,7 +104,7 @@
                             <input type="checkbox" name="skip_removed_files" id="skip_removed_files" #if $sickbeard.SKIP_REMOVED_FILES == True then "checked=\"checked\"" else ""# />
                             <label for="skip_removed_files">
                                 <span class="component-title">Skip Remove Detection</span>
-                                <span class="component-desc">Skip detection of removed files, so they don't get set to ignored?</span>
+                                <span class="component-desc">Skip detection of removed files, so they don't get set to ignored/archived?</span>
                             </label>
 							<label class="nocheck">
 						        <span class="component-title">&nbsp;</span>
diff --git a/gui/slick/interfaces/default/config_search.tmpl b/gui/slick/interfaces/default/config_search.tmpl
index 4e656fdd54e9a36691b9030c14c86665cf811e6f..2c745ab0d6baa96d22c8737f44480376da7a329b 100755
--- a/gui/slick/interfaces/default/config_search.tmpl
+++ b/gui/slick/interfaces/default/config_search.tmpl
@@ -77,16 +77,6 @@
 							</div>
 						</div>
 
-						<div class="field-pair">
-							<label>
-								<span class="component-title">Backlog search day(s)</span>
-								<span class="component-desc">
-									<input type="text" name="backlog_days" value="$sickbeard.BACKLOG_DAYS" class="form-control input-sm input75" />
-									<p>number of day(s) that the search will cover (e.g. 7)</p>
-								</span>
-							</label>
-						</div>
-
 						<div class="field-pair">
 							<label>
 								<span class="component-title">Backlog search frequency</span>
@@ -107,16 +97,6 @@
 							</label>
 						</div>
 
-						<div class="field-pair">
-							<label>
-								<span class="component-title">Missed episodes range</span>
-								<span class="component-desc">
-									<input type="number" step="1" min="7" name="coming_eps_missed_range" id="coming_eps_missed_range" value="$sickbeard.COMING_EPS_MISSED_RANGE" class="form-control input-sm input75" />
-									<p>Set the range in days of the missed episodes</p>
-								</span>
-							</label>
-						</div>
-
 						<div class="field-pair">
 							<label>
 								<span class="component-title">Usenet retention</span>
diff --git a/gui/slick/interfaces/default/home_postprocess.tmpl b/gui/slick/interfaces/default/home_postprocess.tmpl
index 404e8c7773beb8d2ab66c8fbb9e3c613d26ff0b1..dff00bd54b1dbcd757cb6093d37ed26a93c9af3d 100644
--- a/gui/slick/interfaces/default/home_postprocess.tmpl
+++ b/gui/slick/interfaces/default/home_postprocess.tmpl
@@ -33,7 +33,12 @@
 			<td>
 				<select name="process_method" id="process_method" class="form-control form-control-inline input-sm" >
 				#set $process_method_text = {'copy': "Copy", 'move': "Move", 'hardlink': "Hard Link", 'symlink' : "Symbolic Link"}
-				#for $curAction in ('copy', 'move', 'hardlink', 'symlink'):
+				#if sys.platform == 'win32'
+					#set $process_action = ('copy', 'move')
+				#else
+					#set $process_action = ('copy', 'move', 'hardlink', 'symlink')
+				#end if
+				#for $curAction in $process_action:
 				#if $sickbeard.PROCESS_METHOD == $curAction:
 					#set $process_method = "selected=\"selected\""
 				#else
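The same platform split, expressed in plain Python for clarity: hard links and symbolic links are dropped on win32 because Python 2 does not expose os.link/os.symlink there. A sketch mirroring the Cheetah block above:

    import sys

    PROCESS_METHOD_TEXT = {'copy': 'Copy', 'move': 'Move',
                           'hardlink': 'Hard Link', 'symlink': 'Symbolic Link'}

    if sys.platform == 'win32':
        process_actions = ('copy', 'move')   # os.link / os.symlink are unavailable on Python 2 win32
    else:
        process_actions = ('copy', 'move', 'hardlink', 'symlink')

    options = [(action, PROCESS_METHOD_TEXT[action]) for action in process_actions]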
diff --git a/gui/slick/js/newShow.js b/gui/slick/js/newShow.js
index b58d0cab00bfc0f8618b1742a5e51f6a927f9ca0..812f178f7ee9d1c7aeda4885989d03a302a345f9 100644
--- a/gui/slick/js/newShow.js
+++ b/gui/slick/js/newShow.js
@@ -9,12 +9,12 @@ $(document).ready(function () {
 
         if (searchRequestXhr) searchRequestXhr.abort();
 
-        var searchingFor = $('#nameToSearch').val() + ' on ' + $('#providedIndexer option:selected').text() + ' in ' + $('#indexerLangSelect').val();
+        var searchingFor = $('#nameToSearch').val().trim() + ' on ' + $('#providedIndexer option:selected').text() + ' in ' + $('#indexerLangSelect').val();
         $('#searchResults').empty().html('<img id="searchingAnim" src="' + sbRoot + '/images/loading32' + themeSpinner + '.gif" height="32" width="32" /> searching ' + searchingFor + '...');
 
         searchRequestXhr = $.ajax({
             url: sbRoot + '/home/addShows/searchIndexersForShowName',
-            data: {'search_term': $('#nameToSearch').val(), 'lang': $('#indexerLangSelect').val(), 'indexer': $('#providedIndexer').val()},
+            data: {'search_term': $('#nameToSearch').val().trim(), 'lang': $('#indexerLangSelect').val(), 'indexer': $('#providedIndexer').val()},
             timeout: parseInt($('#indexer_timeout').val(), 10) * 1000,
             dataType: 'json',
             error: function () {
diff --git a/lib/rtorrent/lib/xmlrpc/requests_transport.py b/lib/rtorrent/lib/xmlrpc/requests_transport.py
index d5e28743c860b98d5d25376c1f1e3f409fb9a121..9a4556773cff66b14e2f99e1af4f0d49157f1bb4 100644
--- a/lib/rtorrent/lib/xmlrpc/requests_transport.py
+++ b/lib/rtorrent/lib/xmlrpc/requests_transport.py
@@ -163,7 +163,7 @@ class RequestsTransport(xmlrpc_client.Transport):
             Response tuple and target method.
         """
         p, u = self.getparser()
-        p.feed(response.text)
+        p.feed(response.text.encode('utf-8'))
         p.close()
         return u.close()
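Why the body is encoded before parsing: feeding the parser UTF-8 bytes rather than the unicode response.text avoids errors that unicode input can trigger on some parser backends (for example, parsers that reject unicode strings carrying an XML encoding declaration). A hedged, standalone sketch of the same parse step, using the Python 2 stdlib xmlrpclib parser as a stand-in for self.getparser():

    import xmlrpclib

    body = u'<?xml version="1.0" encoding="UTF-8"?>' \
           u'<methodResponse><params><param><value><string>ok</string></value></param></params></methodResponse>'

    parser, unmarshaller = xmlrpclib.getparser()
    parser.feed(body.encode('utf-8'))   # bytes, matching the diff's response.text.encode('utf-8')
    parser.close()
    print unmarshaller.close()          # ('ok',)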
 
diff --git a/lib/trakt/trakt.py b/lib/trakt/trakt.py
index 3d04d99edd5843383bafbc4aaae2d4e9af392b81..819dd379eb1696db54ac9010ebbcc44cba427c34 100644
--- a/lib/trakt/trakt.py
+++ b/lib/trakt/trakt.py
@@ -10,7 +10,7 @@ class TraktAPI():
         self.password = password
         self.verify = not disable_ssl_verify
         self.timeout = timeout if timeout else None
-        self.api_url = 'https://api.trakt.tv/'
+        self.api_url = 'https://api-v2launch.trakt.tv/'
         self.headers = {
           'Content-Type': 'application/json',
           'trakt-api-version': '2',
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index e539522b18f4ef97dd15cca9f3d2a58c8ce5b788..5a4f295fcdf25f73fd8e574bd0125bcae8d4f3f4 100755
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -170,6 +170,7 @@ HTTPS_CERT = None
 HTTPS_KEY = None
 
 INDEXER_DEFAULT_LANGUAGE = None
+EP_DEFAULT_DELETED_STATUS = None
 LAUNCH_BROWSER = False
 CACHE_DIR = None
 ACTUAL_CACHE_DIR = None
@@ -233,12 +234,13 @@ UPDATE_FREQUENCY = None
 DAILYSEARCH_STARTUP = False
 BACKLOG_FREQUENCY = None
 BACKLOG_STARTUP = False
-SHOWUPDATE_HOUR = 3
+SHOWUPDATE_HOUR = None
 
 DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10
 DEFAULT_DAILYSEARCH_FREQUENCY = 40
 DEFAULT_BACKLOG_FREQUENCY = 21
 DEFAULT_UPDATE_FREQUENCY = 1
+DEFAULT_SHOWUPDATE_HOUR = 3
 
 MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
 MIN_DAILYSEARCH_FREQUENCY = 10
@@ -428,7 +430,6 @@ TRAKT_USE_ROLLING_DOWNLOAD = 0
 TRAKT_ROLLING_NUM_EP = 0
 TRAKT_ROLLING_ADD_PAUSED = 1
 TRAKT_ROLLING_FREQUENCY = 15
-TRAKT_ROLLING_DEFAULT_WATCHED_STATUS = 7
 
 USE_PYTIVO = False
 PYTIVO_NOTIFY_ONSNATCH = False
@@ -507,7 +508,7 @@ EXTRA_SCRIPTS = []
 
 IGNORE_WORDS = "german,french,core2hd,dutch,swedish,reenc,MrLss"
 REQUIRE_WORDS = ""
-SYNC_FILES = "!sync,lftp-pget-status,part,bts"
+SYNC_FILES = "!sync,lftp-pget-status,part,bts,!qb"
 
 CALENDAR_UNPROTECTED = False
 NO_RESTART = False
@@ -534,10 +535,10 @@ def initialize(consoleLogging=True):
             TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_LABEL_ANIME, TORRENT_VERIFY_CERT, TORRENT_RPCURL, TORRENT_AUTH_TYPE, \
             USE_KODI, KODI_ALWAYS_ON, KODI_NOTIFY_ONSNATCH, KODI_NOTIFY_ONDOWNLOAD, KODI_NOTIFY_ONSUBTITLEDOWNLOAD, KODI_UPDATE_FULL, KODI_UPDATE_ONLYFIRST, \
             KODI_UPDATE_LIBRARY, KODI_HOST, KODI_USERNAME, KODI_PASSWORD, BACKLOG_FREQUENCY, \
-            USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_REMOVE_WATCHLIST, TRAKT_SYNC_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, traktRollingScheduler, TRAKT_USE_RECOMMENDED, TRAKT_SYNC, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_DISABLE_SSL_VERIFY, TRAKT_TIMEOUT, TRAKT_BLACKLIST_NAME, TRAKT_USE_ROLLING_DOWNLOAD, TRAKT_ROLLING_NUM_EP, TRAKT_ROLLING_ADD_PAUSED, TRAKT_ROLLING_FREQUENCY, TRAKT_ROLLING_DEFAULT_WATCHED_STATUS, \
+            USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_REMOVE_WATCHLIST, TRAKT_SYNC_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, traktRollingScheduler, TRAKT_USE_RECOMMENDED, TRAKT_SYNC, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_DISABLE_SSL_VERIFY, TRAKT_TIMEOUT, TRAKT_BLACKLIST_NAME, TRAKT_USE_ROLLING_DOWNLOAD, TRAKT_ROLLING_NUM_EP, TRAKT_ROLLING_ADD_PAUSED, TRAKT_ROLLING_FREQUENCY, \
             USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, \
             PLEX_SERVER_HOST, PLEX_SERVER_TOKEN, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, BACKLOG_STARTUP, SKIP_REMOVED_FILES, \
-            showUpdateScheduler, __INITIALIZED__, INDEXER_DEFAULT_LANGUAGE, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, UPDATE_SHOWS_ON_SNATCH, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, SORT_ARTICLE, showList, loadingShowList, \
+            showUpdateScheduler, __INITIALIZED__, INDEXER_DEFAULT_LANGUAGE, EP_DEFAULT_DELETED_STATUS, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, UPDATE_SHOWS_ON_SNATCH, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, SORT_ARTICLE, showList, loadingShowList, \
             NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, INDEXER_DEFAULT, INDEXER_TIMEOUT, USENET_RETENTION, TORRENT_DIR, \
             QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, DAILYSEARCH_STARTUP, \
             GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, USE_FREEMOBILE, FREEMOBILE_ID, FREEMOBILE_APIKEY, FREEMOBILE_NOTIFY_ONSNATCH, FREEMOBILE_NOTIFY_ONDOWNLOAD, FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD, \
@@ -547,7 +548,7 @@ def initialize(consoleLogging=True):
             USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \
             USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \
             versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, NOTIFY_ON_UPDATE, PROCESS_AUTOMATICALLY, NO_DELETE, UNPACK, CPU_PRESET, \
-            KEEP_PROCESSED_DIR, PROCESS_METHOD, DELRARCONTENTS, TV_DOWNLOAD_DIR, MIN_DAILYSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \
+            KEEP_PROCESSED_DIR, PROCESS_METHOD, DELRARCONTENTS, TV_DOWNLOAD_DIR, MIN_DAILYSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, DEFAULT_SHOWUPDATE_HOUR, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \
             showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TIMEZONE_DISPLAY, \
             NAMING_PATTERN, NAMING_MULTI_EP, NAMING_ANIME_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_ANIME_PATTERN, NAMING_CUSTOM_ANIME, NAMING_STRIP_YEAR, \
             RENAME_EPISODES, AIRDATE_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
@@ -723,6 +724,7 @@ def initialize(consoleLogging=True):
             WEB_COOKIE_SECRET = helpers.generateCookieSecret()
 
         INDEXER_DEFAULT_LANGUAGE = check_setting_str(CFG, 'General', 'indexerDefaultLang', 'en')
+        EP_DEFAULT_DELETED_STATUS = check_setting_int(CFG, 'General', 'ep_default_deleted_status', 6)
 
         LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
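For reference on the default of 6 used above: in this codebase's common.py the episode status constants place ARCHIVED at 6 and IGNORED at 7 (the two choices the new template select offers), so deleted media defaults to Archived. A small sanity sketch, assuming those constant values:

    from sickbeard.common import ARCHIVED, IGNORED, statusStrings

    # Assumption: ARCHIVED == 6 and IGNORED == 7 in sickbeard/common.py
    assert ARCHIVED == 6 and IGNORED == 7
    print statusStrings[ARCHIVED], statusStrings[IGNORED]   # Archived Ignored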
 
@@ -832,9 +834,11 @@ def initialize(consoleLogging=True):
         if UPDATE_FREQUENCY < MIN_UPDATE_FREQUENCY:
             UPDATE_FREQUENCY = MIN_UPDATE_FREQUENCY
 
-        SHOWUPDATE_HOUR = check_setting_int(CFG, 'General', 'showupdate_hour', 3)
-        if SHOWUPDATE_HOUR > 23: SHOWUPDATE_HOUR = 0;
-        elif SHOWUPDATE_HOUR < 0: SHOWUPDATE_HOUR = 0;
+        SHOWUPDATE_HOUR = check_setting_int(CFG, 'General', 'showupdate_hour', DEFAULT_SHOWUPDATE_HOUR)
+        if SHOWUPDATE_HOUR > 23:
+            SHOWUPDATE_HOUR = 0
+        elif SHOWUPDATE_HOUR < 0:
+            SHOWUPDATE_HOUR = 0
 
         BACKLOG_DAYS = check_setting_int(CFG, 'General', 'backlog_days', 7)
 
@@ -1011,7 +1015,6 @@ def initialize(consoleLogging=True):
         TRAKT_ROLLING_NUM_EP = check_setting_int(CFG, 'Trakt', 'trakt_rolling_num_ep', 0)
         TRAKT_ROLLING_ADD_PAUSED = check_setting_int(CFG, 'Trakt', 'trakt_rolling_add_paused', 1)
         TRAKT_ROLLING_FREQUENCY = check_setting_int(CFG, 'Trakt', 'trakt_rolling_frequency', 15)
-        TRAKT_ROLLING_DEFAULT_WATCHED_STATUS = check_setting_int(CFG, 'Trakt', 'trakt_rolling_default_watched_status', 3)
 
         CheckSection(CFG, 'pyTivo')
         USE_PYTIVO = bool(check_setting_int(CFG, 'pyTivo', 'use_pytivo', 0))
@@ -1286,7 +1289,7 @@ def initialize(consoleLogging=True):
         showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
                                                   cycleTime=datetime.timedelta(hours=1),
                                                   threadName="SHOWUPDATER",
-                                                  start_time=datetime.time(hour=SHOWUPDATE_HOUR))  # 3 AM
+                                                  start_time=datetime.time(hour=SHOWUPDATE_HOUR))
 
         # searchers
         searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
@@ -1334,7 +1337,7 @@ def initialize(consoleLogging=True):
                                                     silent=not USE_TRAKT)
 
         traktRollingScheduler = scheduler.Scheduler(traktChecker.TraktRolling(),
-                                                    cycleTime=datetime.timedelta(TRAKT_ROLLING_FREQUENCY),
+                                                    cycleTime=datetime.timedelta(minutes=TRAKT_ROLLING_FREQUENCY),
                                                     threadName="TRAKTROLLING",
                                                     silent=not TRAKT_USE_ROLLING_DOWNLOAD)
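The cycleTime fix above matters because datetime.timedelta treats its first positional argument as days, so the old call produced a 15-day cycle instead of the intended 15 minutes:

    import datetime

    TRAKT_ROLLING_FREQUENCY = 15
    print datetime.timedelta(TRAKT_ROLLING_FREQUENCY)           # 15 days, 0:00:00 (old behaviour)
    print datetime.timedelta(minutes=TRAKT_ROLLING_FREQUENCY)   # 0:15:00 (intended cycle)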
 
@@ -1626,6 +1629,7 @@ def save_config():
     new_config['General']['naming_anime_multi_ep'] = int(NAMING_ANIME_MULTI_EP)
     new_config['General']['naming_anime'] = int(NAMING_ANIME)
     new_config['General']['indexerDefaultLang'] = INDEXER_DEFAULT_LANGUAGE
+    new_config['General']['ep_default_deleted_status'] = int(EP_DEFAULT_DELETED_STATUS)
     new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
     new_config['General']['update_shows_on_start'] = int(UPDATE_SHOWS_ON_START)
     new_config['General']['update_shows_on_snatch'] = int(UPDATE_SHOWS_ON_SNATCH)
@@ -1936,7 +1940,6 @@ def save_config():
     new_config['Trakt']['trakt_rolling_num_ep'] = int(TRAKT_ROLLING_NUM_EP)
     new_config['Trakt']['trakt_rolling_add_paused'] = int(TRAKT_ROLLING_ADD_PAUSED)
     new_config['Trakt']['trakt_rolling_frequency'] = int(TRAKT_ROLLING_FREQUENCY)
-    new_config['Trakt']['trakt_rolling_default_watched_status'] = int(TRAKT_ROLLING_DEFAULT_WATCHED_STATUS)
 
     new_config['pyTivo'] = {}
     new_config['pyTivo']['use_pytivo'] = int(USE_PYTIVO)
diff --git a/sickbeard/clients/rtorrent.py b/sickbeard/clients/rtorrent.py
index 3a8a865d73e6c4d24fa799b01357b8dd0a1d2154..d0e2a9fa558f50aab01977e2c653309484f90e8f 100644
--- a/sickbeard/clients/rtorrent.py
+++ b/sickbeard/clients/rtorrent.py
@@ -17,8 +17,10 @@
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
 from base64 import b64encode
+import traceback
 
 import sickbeard
+from sickbeard import logger
 from sickbeard.clients.generic import GenericClient
 from lib.rtorrent import RTorrent
 from lib.rtorrent.err import MethodError
@@ -82,7 +84,8 @@ class rTorrentAPI(GenericClient):
 
             return True
 
-        except:
+        except Exception as e:
+            logger.log(traceback.format_exc(), logger.DEBUG)
             return False
 
     def _add_torrent_file(self, result):
@@ -124,7 +127,8 @@ class rTorrentAPI(GenericClient):
 
             return True
 
-        except:
+        except Exception as e:
+            logger.log(traceback.format_exc(), logger.DEBUG)
             return False
 
     def _set_torrent_ratio(self, name):
diff --git a/sickbeard/config.py b/sickbeard/config.py
index 4b86e6f3bb6e6c3fb4b970c69636394f122f3630..2ae68c467f2f5b1695bea98be365b248cd20e7bc 100644
--- a/sickbeard/config.py
+++ b/sickbeard/config.py
@@ -185,6 +185,16 @@ def change_UPDATE_FREQUENCY(freq):
 
     sickbeard.versionCheckScheduler.cycleTime = datetime.timedelta(hours=sickbeard.UPDATE_FREQUENCY)
 
+def change_SHOWUPDATE_HOUR(freq):
+    sickbeard.SHOWUPDATE_HOUR = to_int(freq, default=sickbeard.SHOWUPDATE_HOUR)
+
+    if sickbeard.SHOWUPDATE_HOUR > 23:
+        sickbeard.SHOWUPDATE_HOUR = 0
+    elif sickbeard.SHOWUPDATE_HOUR < 0:
+        sickbeard.SHOWUPDATE_HOUR = 0
+
+    sickbeard.showUpdateScheduler.start_time = datetime.time(hour=sickbeard.SHOWUPDATE_HOUR)
+
 def change_VERSION_NOTIFY(version_notify):
     oldSetting = sickbeard.VERSION_NOTIFY
 
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 5382e42182e523fcc4622d14cc3a9565622ccef8..e6a8b20376e786dbe5528bb641c228d91a8c0d83 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -1514,7 +1514,7 @@ def verify_freespace(src, dest, oldfile=None):
     
     if oldfile:
         for file in oldfile:
-            if os.path.isfile(file.location):
+            if ek.ek(os.path.isfile, file.location):
                 diskfree += ek.ek(os.path.getsize, file.location)
         
     if diskfree > neededspace:
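The ek.ek wrapper used above routes the filesystem call through SickRage's encoding kludge so non-ASCII paths are encoded consistently across platforms. A minimal sketch of the calling convention; the import alias is an assumption, the call shape matches the diff:

    import os

    from sickbeard import encodingKludge as ek   # assumed import alias, as used in helpers.py

    location = u'/tv/Show/S\xe9ason 1/episode.mkv'   # hypothetical non-ASCII path
    if ek.ek(os.path.isfile, location):              # instead of os.path.isfile(location)
        size = ek.ek(os.path.getsize, location)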
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index 726614d2a582867e93175b601e0b5d4e518f12e4..31bb90f1f9ab68c0acf91d3d2483fec5d693399a 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -175,13 +175,13 @@ class Logger(object):
 
             # parse and submit errors to issue tracker
             for curError in sorted(classes.ErrorViewer.errors, key=lambda error: error.time, reverse=True)[:500]:
-                if not curError.title:
-                    continue
-
-                if len(curError.title) > 1024:
-                    title_Error = str(curError.title[0:1024])
-                else:
-                    title_Error = str(curError.title)
+                try:
+                    if len(str(curError.title)) > 1024:
+                        title_Error = str(curError.title)[0:1024]
+                    else:
+                        title_Error = str(curError.title)
+                except Exception as e:
+                    title_Error = u"Unable to extract title from error"
 
                 gist = None
                 regex = "^(%s)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$" % curError.time
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index 9493ef9c58e0c0292e05b95a6b8b8ee4489a599a..91d006faece1fbb2981799d1ea2d47843bb7bb66 100755
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -150,7 +150,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -224,6 +224,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/animenzb.py b/sickbeard/providers/animenzb.py
index 33f12bbd7c104b8fb986341e199be008a6badabd..b1a49692c05c96e93be4ac7fb4f68b595703fe0b 100644
--- a/sickbeard/providers/animenzb.py
+++ b/sickbeard/providers/animenzb.py
@@ -60,7 +60,7 @@ class animenzb(generic.NZBProvider):
     def _get_episode_search_strings(self, ep_obj, add_string=''):
         return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
 
-    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
         if self.show and not self.show.is_anime:
             logger.log(u"" + str(self.show.name) + " is not an anime skiping ...")
             return []
diff --git a/sickbeard/providers/animezb.py b/sickbeard/providers/animezb.py
index 3d95b01a57a08a1e5cf031a8b440700c53a47967..480eba9d3b5ce99e964a8d04ec627c75ef583bf7 100644
--- a/sickbeard/providers/animezb.py
+++ b/sickbeard/providers/animezb.py
@@ -61,7 +61,7 @@ class Animezb(generic.NZBProvider):
             search_string.append(ep_string)
         return search_string
 
-    def _doSearch(self, search_string, epcount=0, age=0):
+    def _doSearch(self, search_string, epcount=0, age=0, epObj=None):
         if self.show and not self.show.is_anime:
             logger.log(u"" + str(self.show.name) + " is not an anime skiping ...")
             return []
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
index 79f0f032867580e2554ca73e0895199ebd6412e2..790063ec20c54bcbb829f40568f91b46bb77d97c 100644
--- a/sickbeard/providers/bitsoup.py
+++ b/sickbeard/providers/bitsoup.py
@@ -150,7 +150,7 @@ class BitSoupProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -230,6 +230,7 @@ class BitSoupProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 9e929eb7ed2c2258cd26d7528df6121b1ef00a88..884e82e389e72e7bc4e31b8084215e64f8599537 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -82,7 +82,7 @@ class BTNProvider(generic.TorrentProvider):
 
         return True
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         self._checkAuth()
 
diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index ddebbca60d7a60575000af1f53a017883323239c..d5535c4be2d4de837327fa6598131f15bcaf8829 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -112,7 +112,7 @@ class EZRSSProvider(generic.TorrentProvider):
 
         return [params]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         params = {"mode": "rss"}
 
diff --git a/sickbeard/providers/eztv.py b/sickbeard/providers/eztv.py
index 19458418cb1c1e2c42ff77e8d39100fccb8bd412..a51dc404d5914ac1df7e7c78736b63bacd7eac45 100644
--- a/sickbeard/providers/eztv.py
+++ b/sickbeard/providers/eztv.py
@@ -21,6 +21,9 @@ import traceback
 import re, datetime
 
 import generic
+import sickbeard
+from sickbeard import classes
+from sickbeard import helpers
 from sickbeard import logger, tvcache, db
 from sickbeard.common import Quality
 
@@ -74,7 +77,7 @@ class EZTVProvider(generic.TorrentProvider):
         else:
             return Quality.sceneQuality(item.get('title'), anime)
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 0500d5cf047d7dfad8290f4ed6228e2fbb7cc04e..24572d406e3c5dc4ef48315ee90a1157e198b4f0 100755
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -184,7 +184,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -328,6 +328,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 2fe0459501bf49df17f7c11c2b4ad65aa5983fc3..5bb95a12284dc150cf424a025aa4127fa0dfc610 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -227,7 +227,7 @@ class GenericProvider:
         quality = Quality.sceneQuality(title, anime)
         return quality
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
         return []
 
     def _get_season_search_strings(self, episode):
@@ -286,11 +286,11 @@ class GenericProvider:
             if len(episodes) > 1:
                 # get season search results
                 for curString in self._get_season_search_strings(epObj):
-                    itemList += self._doSearch(curString, search_mode, len(episodes))
+                    itemList += self._doSearch(curString, search_mode, len(episodes), epObj=epObj)
             else:
                 # get single episode search results
                 for curString in self._get_episode_search_strings(epObj):
-                    itemList += self._doSearch(curString, 'eponly', len(episodes))
+                    itemList += self._doSearch(curString, 'eponly', len(episodes), epObj=epObj)
 
         # if we found what we needed already from cache then return results and exit
         if len(results) == len(episodes):
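What the new epObj parameter enables: a provider's _doSearch can now key its query off the show's indexer ids instead of only the name string (the concrete consumer is the reworked rarbg provider later in this diff). An illustrative override; the class name is hypothetical:

    class ExampleProvider(GenericProvider):      # hypothetical subclass, for illustration only
        def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
            if epObj is not None:
                indexer_id = epObj.show.indexerid   # e.g. TVDB/TVRage id of the show
                indexer = epObj.show.indexer
            else:
                indexer_id = indexer = None
            # ...build the provider-specific query from search_params plus the ids...
            return []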
@@ -480,6 +480,32 @@ class TorrentProvider(GenericProvider):
         GenericProvider.__init__(self, name)
 
         self.providerType = GenericProvider.TORRENT
+
+        # Don't add a rule that removes everything between brackets; it would break anime releases
+        self.removeWordsList = {'\[rartv\]$': 'searchre',
+                               '\[rarbg\]$': 'searchre',
+                               '\[eztv\]$': 'searchre',
+                               '\[ettv\]$': 'searchre',
+                               '\[GloDLS\]$': 'searchre',
+                               '\[silv4\]$': 'searchre',
+                               '\[Seedbox\]$': 'searchre',
+                               '\[AndroidTwoU\]$': 'searchre',
+                               '\.RiPSaLoT$': 'searchre',
+                              }
+
+    def _clean_title_from_provider(self, title):
+        torrent_title = title
+        for remove_string, remove_type in self.removeWordsList.iteritems():
+            if remove_type == 'search':
+                torrent_title = torrent_title.replace(remove_string, '')
+            elif remove_type == 'searchre':
+                torrent_title = re.sub(remove_string, '', torrent_title)
+
+        if torrent_title != title:
+            logger.log(u'Changed title from {old_name} to {new_name}'.format(old_name=title, new_name=torrent_title), logger.DEBUG)
+
+        return torrent_title
+
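A quick usage sketch of the new helper (the subclass and release names are illustrative): only the known group tags anchored at the end of the title are stripped, so bracketed anime release-group prefixes survive.

    class DummyTorrentProvider(TorrentProvider):     # hypothetical subclass for the example
        pass

    provider = DummyTorrentProvider('Dummy')
    print provider._clean_title_from_provider('Show.Name.S01E02.720p.HDTV.x264-GRP[rartv]')
    # -> Show.Name.S01E02.720p.HDTV.x264-GRP
    print provider._clean_title_from_provider('[HorribleSubs] Some Anime - 05 [720p]')
    # -> unchanged: the leading group tag is not in removeWordsList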
 
 class ProviderProxy:
     def __init__(self):
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 55b64bca6a1f1a2fef8667ab4b3762932bb81188..d5822bf8e851573a38825ad63bf3b5a7376fac56 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -86,12 +86,13 @@ class HDBitsProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         url = self.urls['download'] + urllib.urlencode({'id': item['id'], 'passkey': self.passkey})
 
         return (title, url)
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
         results = []
 
         self._checkAuth()
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index b5d9857ba015c3fb617e6249d2c5f9750eb7fb0c..ffc3d49854fc5a14768d005efbfeb3fca2b976a6 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -174,7 +174,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -294,6 +294,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py
index 7c64fca3d1be0656c66c5144e0e61fb1bbc79b84..caecda2616df0fa737c5140c998d8f0c1d035408 100644
--- a/sickbeard/providers/hounddawgs.py
+++ b/sickbeard/providers/hounddawgs.py
@@ -151,7 +151,7 @@ class HoundDawgsProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 113f81611957aec4e0a353bf4f1cebc88cd2c6f3..1af7e8ca8008e926371ea5fde8efaffc9990c2c6 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -326,7 +326,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
         return results
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 4751df835b1b4b9a3e45ba1e21141d8c15867feb..f07c41f44ae6f5191c21c161c66ec3fe7b0b7cfd 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -177,8 +177,8 @@ class KATProvider(generic.TorrentProvider):
                 ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) + ' -S%02d' % int(
                     ep_obj.scene_season) + 'E' + ' category:tv'  #1) showName SXX -SXXE
                 search_string['Season'].append(ep_string)
-                ep_string = show_name + ' Season ' + str(
-                    ep_obj.scene_season) + ' -Ep*' + ' category:tv'  # 2) showName Season X
+                ep_string = show_name + ' "Season ' + str(
+                    ep_obj.scene_season) + '" -Ep*' + ' category:tv'  # 2) showName "Season X"
                 search_string['Season'].append(ep_string)
 
         return [search_string]
@@ -214,7 +214,7 @@ class KATProvider(generic.TorrentProvider):
         return [search_string]
 
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -299,6 +299,7 @@ class KATProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = url.replace('&amp;', '&')
diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py
index 13af2a8275941b391770abc1f3f68d08cee0fa48..db40d4f7070208fd266177d918a3ec568b10f62f 100755
--- a/sickbeard/providers/morethantv.py
+++ b/sickbeard/providers/morethantv.py
@@ -166,7 +166,7 @@ class MoreThanTVProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -267,6 +267,7 @@ class MoreThanTVProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 15a06bbbce1029ba4e39aac42d0cff451e1d9df3..790b01d1a1a83a4031071d70d7a8fb2f6d5aaeb9 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -239,7 +239,7 @@ class NewznabProvider(generic.NZBProvider):
         else:
             logger.log(u"Unknown error given from " + self.name + ": " + err_desc, logger.ERROR)
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         self._checkAuth()
 
diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py
index c23b93f0034e0841c3735b657958ea602f7a4c5e..c3e4fd64388f208ed5f8f79ac4e791176f384f6a 100644
--- a/sickbeard/providers/nextgen.py
+++ b/sickbeard/providers/nextgen.py
@@ -185,7 +185,7 @@ class NextGenProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index f09a698267c247080fd1d7bf62677ac54ea77137..e4e590f0355cfca2cc111baa113cdb594d9049d6 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -66,7 +66,7 @@ class NyaaProvider(generic.TorrentProvider):
     def _get_episode_search_strings(self, ep_obj, add_string=''):
         return self._get_season_search_strings(ep_obj)
 
-    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
         if self.show and not self.show.is_anime:
             logger.log(u"" + str(self.show.name) + " is not an anime skiping " + str(self.name))
             return []
diff --git a/sickbeard/providers/oldpiratebay.py b/sickbeard/providers/oldpiratebay.py
index d6731e5cdabf2f7b6122f84039fa46681105b004..88155e797ba0450f50bf09d8b2c6f0a372ea3a58 100644
--- a/sickbeard/providers/oldpiratebay.py
+++ b/sickbeard/providers/oldpiratebay.py
@@ -226,7 +226,7 @@ class OldPirateBayProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -294,6 +294,7 @@ class OldPirateBayProvider(generic.TorrentProvider):
 
         if title:
             title = u'' + title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = url.replace('&amp;', '&')
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index 8c9a7f356af8517df4bfb75b0b0fc61294de8e90..d1cb1453c361194d63995acf93b7706b4509cb5c 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -101,7 +101,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
     def _get_title_and_url(self, item):
         return (item['release'], item['getnzb'])
 
-    def _doSearch(self, search, search_mode='eponly', epcount=0, retention=0):
+    def _doSearch(self, search, search_mode='eponly', epcount=0, retention=0, epObj=None):
 
         self._checkAuth()
 
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index 1c843909062efde366d0571c768c2bab78f40a9e..c6afb5d2410df9ed67da54e907212af38b4f2d69 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -1,32 +1,35 @@
-# Author: djoole <bobby.djoole@gmail.com>
-# Author: CoRpO <corpo@gruk.org>
+# coding=utf-8
+# Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage.
 #
-# Sick Beard is free software: you can redistribute it and/or modify
+# SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
-# Sick Beard is distributed in the hope that it will be useful,
+# SickRage is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+# along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
 import traceback
 import re
 import datetime
 import urllib
-
-import sickbeard
 import generic
+import json
+import time
 
 from lib import requests
+from lib.requests import exceptions
 
+import sickbeard
 from sickbeard.common import Quality
 from sickbeard import logger
 from sickbeard import tvcache
@@ -35,7 +38,10 @@ from sickbeard.bs4_parser import BS4Parser
 from sickbeard import db
 from sickbeard import helpers
 from sickbeard import classes
+from sickbeard.exceptions import ex
 from sickbeard.helpers import sanitizeSceneName
+from lib.requests.exceptions import RequestException
+from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_TVRAGE
 
 
 class RarbgProvider(generic.TorrentProvider):
@@ -44,47 +50,43 @@ class RarbgProvider(generic.TorrentProvider):
         generic.TorrentProvider.__init__(self, "Rarbg")
 
         self.enabled = False
-
+        self.session = None
         self.supportsBacklog = True
-
         self.ratio = None
-
-        self.cache = RarbgCache(self)
+        self.minseed = None
+        self.minleech = None
+        self.token = None
+        self.tokenExpireDate = None
 
         self.urls = {'url': 'https://rarbg.com',
-                     'base_url': 'https://rarbg.com/torrents.php',
-                     'search': 'https://rarbg.com/torrents.php?search=%s&category=%s&page=%s',
-                     'download': 'https://rarbg.com/download.php?id=%s&f=%s',
+                     'token': 'https://torrentapi.org/pubapi.php?get_token=get_token&format=json',
+                     'listing': 'https://torrentapi.org/pubapi.php?mode=list&token={token}',
+                     'search': 'https://torrentapi.org/pubapi.php?mode=search&search_string={search_string}&token={token}',
+                     'search_tvdb': 'https://torrentapi.org/pubapi.php?mode=search&search_tvdb={tvdb}&search_string={search_string}&token={token}',
+                     'search_tvrage': 'https://torrentapi.org/pubapi.php?mode=search&search_tvrage={tvrage}&search_string={search_string}&token={token}',
+                     'api_spec': 'https://rarbg.com/pubapi/apidocs.txt',
                      }
 
-        self.url = self.urls['base_url']
-
-        self.subcategories = [18,41]
-        self.pages = [1,2,3,4,5]
-
-        self.cookie = {
-            "version": 0,
-            "name": '7fAY799j',
-            "value": 'VtdTzG69',
-            "port": None,
-            # "port_specified": False,
-            "domain": 'rarbg.com',
-            # "domain_specified": False,
-            # "domain_initial_dot": False,
-            "path": '/',
-            # "path_specified": True,
-            "secure": False,
-            "expires": None,
-            "discard": True,
-            "comment": None,
-            "comment_url": None,
-            "rest": {},
-            "rfc2109": False
+        self.url = self.urls['listing']
+
+        self.urlOptions = {'categories': '&category={categories}',
+                           'seeders': '&min_seeders={min_seeders}',
+                           'leechers': '&min_leechers={min_leechers}',
+                           'sorting': '&sort={sorting}',
+                           'limit': '&limit={limit}',
+                           'format': '&format={format}',
+                           'ranked': '&ranked={ranked}',
+                           }
+
+        self.defaultOptions = self.urlOptions['categories'].format(categories='18;41') + \
+                                self.urlOptions['sorting'].format(sorting='last') + \
+                                self.urlOptions['limit'].format(limit='100') + \
+                                self.urlOptions['format'].format(format='json') + \
+                                self.urlOptions['ranked'].format(ranked='1')
 
-        self.session = requests.session()
-        self.session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36'})
-        self.session.cookies.set(**self.cookie)
+        self.next_request = datetime.datetime.now()
+
+        self.cache = RarbgCache(self)
 
     def isEnabled(self):
         return self.enabled
@@ -92,6 +94,36 @@ class RarbgProvider(generic.TorrentProvider):
     def imageName(self):
         return 'rarbg.png'
 
+    def _doLogin(self):
+        if self.token and self.tokenExpireDate and datetime.datetime.now() < self.tokenExpireDate:
+            return True
+
+        self.session = requests.Session()
+        resp_json = None
+
+        try:
+            response = self.session.get(self.urls['token'], timeout=30, verify=False)
+            response.raise_for_status()
+            resp_json = response.json()
+        except RequestException as e:
+            logger.log(u'Unable to connect to {name} provider: {error}'.format(name=self.name, error=ex(e)), logger.ERROR)
+            return False
+
+        if not resp_json:
+            logger.log(u'{name} provider: empty json response'.format(name=self.name), logger.ERROR)
+            return False
+        else:
+            try:
+                if resp_json['token']:
+                    self.token = resp_json['token']
+                    self.tokenExpireDate = datetime.datetime.now() + datetime.timedelta(minutes=15)
+                    return True
+            except Exception as e:
+                logger.log(u'{name} provider: No token found'.format(name=self.name), logger.ERROR)
+                logger.log(u'{name} provider: No token found: {error}'.format(name=self.name, error=ex(e)), logger.DEBUG)
+
+        return False
+
     def getQuality(self, item, anime=False):
         quality = Quality.sceneQuality(item[0], anime)
         return quality
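The new _doLogin caches a torrentapi token and treats it as valid for 15 minutes. A hedged sketch of the exchange it assumes; the JSON shape is inferred from the parsing above, not from API documentation:

    import datetime

    from lib import requests

    resp = requests.get('https://torrentapi.org/pubapi.php?get_token=get_token&format=json',
                        timeout=30, verify=False)
    data = resp.json()                 # expected shape (assumed): {"token": "abcd1234"}
    token = data['token']
    token_expires = datetime.datetime.now() + datetime.timedelta(minutes=15)   # refresh after this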
@@ -101,11 +133,11 @@ class RarbgProvider(generic.TorrentProvider):
         search_string = {'Season': []}
         for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
             if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
+                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
             elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
             else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  #1) showName.SXX
+                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  # 1) showName SXX
 
             search_string['Season'].append(ep_string)
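The search loop in the next hunk spaces API calls roughly ten seconds apart (self.next_request plus a bounded sleep) to respect torrentapi's rate limit. A minimal standalone sketch of that wait-then-stamp pattern:

    import datetime
    import time

    next_request = datetime.datetime.now()

    def throttled(fetch):
        """Wait (up to ~15s) until the previous request's cool-down has passed, then fetch."""
        global next_request
        waited = 0
        while datetime.datetime.now() < next_request and waited <= 15:
            waited += 1
            time.sleep(1)
        data = fetch()
        next_request = datetime.datetime.now() + datetime.timedelta(seconds=10)
        return data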
 
@@ -120,104 +152,150 @@ class RarbgProvider(generic.TorrentProvider):
 
         if self.show.air_by_date:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
+                ep_string = show_name + ' ' + \
                             str(ep_obj.airdate).replace('-', '|')
                 search_string['Episode'].append(ep_string)
         elif self.show.sports:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
+                ep_string = show_name + ' ' + \
                             str(ep_obj.airdate).replace('-', '|') + '|' + \
                             ep_obj.airdate.strftime('%b')
                 search_string['Episode'].append(ep_string)
         elif self.show.anime:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
+                ep_string = show_name + ' ' + \
                             "%i" % int(ep_obj.scene_absolute_number)
                 search_string['Episode'].append(ep_string)
         else:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = show_name_helpers.sanitizeSceneName(show_name) + '.' + \
+                ep_string = show_name + ' ' + \
                             sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
+                                                                  'episodenumber': ep_obj.scene_episode}
+                if add_string:
+                    ep_string = ep_string + ' %s' % add_string
 
-                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
+                search_string['Episode'].append(ep_string)
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
-        for mode in search_params.keys():
-
-            for search_string in search_params[mode]:
-
-                for sc in self.subcategories:
+        if not self._doLogin():
+            return results
 
-                    for page in self.pages:
-
-                        searchURL = self.urls['search'] % (search_string.encode('UTF-8'), sc, page)
-                        logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
-
-                        data = self.getURL(searchURL)
-                        if not data:
-                            continue
+        if epObj is not None:
+            ep_indexerid = epObj.show.indexerid
+            ep_indexer = epObj.show.indexer
+        else:
+            ep_indexerid = None
+            ep_indexer = None
 
+        for mode in search_params.keys(): #Mode = RSS, Season, Episode
+            for search_string in search_params[mode]:
+                if mode == 'RSS':
+                    searchURL = self.urls['listing'].format(token=self.token) + self.defaultOptions
+                elif mode == 'Season':
+                    if ep_indexer == INDEXER_TVDB:
+                        searchURL = self.urls['search_tvdb'].format(token=self.token, search_string=urllib.quote(search_string), tvdb=ep_indexerid) + self.defaultOptions
+                    elif ep_indexer == INDEXER_TVRAGE:
+                        searchURL = self.urls['search_tvrage'].format(token=self.token, search_string=urllib.quote(search_string), tvrage=ep_indexerid) + self.defaultOptions
+                    else:
+                        searchURL = self.urls['search'].format(token=self.token, search_string=urllib.quote(search_string)) + self.defaultOptions
+                elif mode == 'Episode':
+                    if ep_indexer == INDEXER_TVDB:
+                        searchURL = self.urls['search_tvdb'].format(token=self.token, search_string=urllib.quote(search_string), tvdb=ep_indexerid) + self.defaultOptions
+                    elif ep_indexer == INDEXER_TVRAGE:
+                        searchURL = self.urls['search_tvrage'].format(token=self.token, search_string=urllib.quote(search_string), tvrage=ep_indexerid) + self.defaultOptions
+                    else:
+                        searchURL = self.urls['search'].format(token=self.token, search_string=urllib.quote(search_string)) + self.defaultOptions
+                else:
+                    logger.log(u'{name} invalid search mode: {mode}'.format(name=self.name, mode=mode), logger.ERROR)
+                    continue
+
+                if self.minleech:
+                    searchURL += self.urlOptions['leechers'].format(min_leechers=int(self.minleech))
+
+                if self.minseed:
+                    searchURL += self.urlOptions['seeders'].format(min_seeders=int(self.minseed))
+
+                logger.log(u'{name} search page URL: {url}'.format(name=self.name, url=searchURL), logger.DEBUG)
+
+                time_out = 0
+                while (datetime.datetime.now() < self.next_request) and time_out <= 15:
+                    time_out = time_out + 1
+                    time.sleep(1)
+
+                data = self.getURL(searchURL)
+
+                self.next_request = datetime.datetime.now() + datetime.timedelta(seconds=10)
+
+                if not data:
+                    logger.log(u'{name} no data returned.'.format(name=self.name), logger.DEBUG)
+                    continue
+                if re.search('ERROR', data):
+                    logger.log(u'{name} returned an error.'.format(name=self.name), logger.DEBUG)
+                    continue
+                if re.search('No results found', data):
+                    logger.log(u'{name} no results found.'.format(name=self.name), logger.DEBUG)
+                    continue
+                if re.search('Invalid token set!', data):
+                    logger.log(u'{name} Invalid token set!'.format(name=self.name), logger.ERROR)
+                    return results
+                if re.search('Too many requests per minute. Please try again later!', data):
+                    logger.log(u'{name} Too many requests per minute.'.format(name=self.name), logger.ERROR)
+                    time.sleep(10)
+                    continue
+                if re.search('Cant find search_tvdb in database. Are you sure this imdb exists?', data):
+                    logger.log(u'{name} no results found. The tvdb id does not exist on the server.'.format(name=self.name), logger.DEBUG)
+                    continue
+                if re.search('Cant find search_tvrage in database. Are you sure this imdb exists?', data):
+                    logger.log(u'{name} no results found. The tvrage id does not exist on the server.'.format(name=self.name), logger.DEBUG)
+                    continue
+
+                try:
+                    data_json = json.loads(data)
+                except Exception as e:
+                    logger.log(u'{name} json load failed: {traceback_info}'.format(name=self.name, traceback_info=traceback.format_exc()), logger.DEBUG)
+                    logger.log(u'{name} json load failed. Data dump = {data}'.format(name=self.name, data=data), logger.DEBUG)
+                    logger.log(u'{name} json load failed.'.format(name=self.name), logger.ERROR)
+                    continue
+
+                try:
+                    for item in data_json:
                         try:
-                            with BS4Parser(data, features=["html5lib", "permissive"]) as html:
-                                resultsTable = html.find('table', attrs={'class': 'lista2t'})
-
-                                if not resultsTable:
-                                    logger.log(u"Data returned from " + self.name + " do not contains any torrent",
-                                               logger.DEBUG)
-                                    continue
-
-                                entries = resultsTable.find("tbody").findAll("tr")
-
-                                if len(entries) > 0:
-                                    for result in entries:
-
-                                        try:
-                                            link = result.find('a', title=True)
-                                            torrentName = link['title']
-                                            torrent_name = str(torrentName)
-                                            torrentId = result.find_all('td')[1].find_all('a')[0]['href'][1:].replace(
-                                                'torrent/', '')
-                                            torrent_download_url = (self.urls['download'] % (torrentId, urllib.quote(torrent_name) + '-[rarbg.com].torrent')).encode('utf8')
-                                        except (AttributeError, TypeError):
-                                            continue
-
-                                        if not torrent_name or not torrent_download_url:
-                                            continue
-
-                                        item = torrent_name, torrent_download_url
-                                        logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",
-                                                   logger.DEBUG)
-                                        items[mode].append(item)
-
-                                else:
-                                    logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                               logger.WARNING)
-                                    continue
-
-                                if len(entries) < 25:
-                                    break
-
-                        except Exception, e:
-                            logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
-                                       logger.ERROR)
+                            torrent_title = item['f']
+                            torrent_download = item['d']
+                            if torrent_title and torrent_download:
+                                items[mode].append((torrent_title, torrent_download))
+                                logger.log(u'{name} found valid result: {title}'.format(name=self.name, title=torrent_title), logger.DEBUG)
+                            else:
+                                logger.log(u'{name} skipping invalid result'.format(name=self.name), logger.DEBUG)
+                        except Exception:
+                            logger.log(u'{name} skipping invalid result: {traceback_info}'.format(name=self.name, traceback_info=traceback.format_exc()), logger.DEBUG)
+
+                except Exception:
+                    logger.log(u'{name} failed parsing data: {traceback_info}'.format(name=self.name, traceback_info=traceback.format_exc()), logger.ERROR)
             results += items[mode]
 
         return results
 
     def _get_title_and_url(self, item):
+        """
+        Retrieves the title and URL data from a result item
+
+        item: A (title, url) tuple as built in _doSearch
+
+        Returns: A tuple containing two strings representing title and URL respectively
+        """
 
         title, url = item
 
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
@@ -262,7 +340,7 @@ class RarbgCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
 
-        # only poll RARbg every 15 minutes max
-        self.minTime = 15
+        # only poll RARbg every 5 minutes max
+        self.minTime = 5
 
     def _getRSSData(self):
         search_params = {'RSS': ['']}
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index 6df14be4f308c24f143cdc8168859da08ee8531b..8f6910c10c6b123b52a8544881ab1270225e9f74 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -80,6 +80,7 @@ class TorrentRssProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         attempt_list = [lambda: item.get('torrent_magneturi'),
 
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index ec6a992e7a00d8623d23fcb0e9074b80513eb800..fc4071a1451db6342da45b8de9404826b2ce7c60 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -160,7 +160,7 @@ class SCCProvider(generic.TorrentProvider):
         else:
             return False
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -261,6 +261,7 @@ class SCCProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index 0ee22e0813b12b6ed90e2ac854c3009ecbc4fe1c..0a83d9aafd38357109f97608d8d691b4c99a505d 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -146,7 +146,7 @@ class SpeedCDProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -206,6 +206,7 @@ class SpeedCDProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py
index d0c2ee6c819cca2cefaf4c41257144c0ff575f9e..880efdf4fd086a069f7ea6087a984fbe5df404c2 100644
--- a/sickbeard/providers/t411.py
+++ b/sickbeard/providers/t411.py
@@ -20,26 +20,26 @@
 import traceback
 import re
 import datetime
+import time
+from lib.requests.auth import AuthBase
 import sickbeard
 import generic
 
 from lib import requests
 from lib.requests import exceptions
 
-from sickbeard.common import USER_AGENT, Quality, cpu_presets
+from sickbeard.common import Quality
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard import show_name_helpers
-from sickbeard.bs4_parser import BS4Parser
 from sickbeard import db
 from sickbeard import helpers
 from sickbeard import classes
-from sickbeard.helpers import sanitizeSceneName, arithmeticEval
+from sickbeard.helpers import sanitizeSceneName
 from sickbeard.exceptions import ex
 
 
 class T411Provider(generic.TorrentProvider):
-
     def __init__(self):
         generic.TorrentProvider.__init__(self, "T411")
 
@@ -48,14 +48,16 @@ class T411Provider(generic.TorrentProvider):
         self.username = None
         self.password = None
         self.ratio = None
+        self.token = None
+        self.tokenLastUpdate = None
 
         self.cache = T411Cache(self)
 
         self.urls = {'base_url': 'http://www.t411.io/',
-                'search': 'http://www.t411.io/torrents/search/?name=%s&cat=210&subcat=%s&search=%s&submit=Recherche',
-                'login_page': 'http://www.t411.io/users/login/',
-                'download': 'http://www.t411.io/torrents/download/?id=%s',
-                }
+                     'search': 'https://api.t411.io/torrents/search/%s?cid=%s&limit=100',
+                     'login_page': 'https://api.t411.io/auth',
+                     'download': 'https://api.t411.io/torrents/download/%s',
+        }
 
         self.url = self.urls['base_url']
 
@@ -72,61 +74,36 @@ class T411Provider(generic.TorrentProvider):
         return quality
 
     def _doLogin(self):
-        login_params = {'login': self.username,
-                        'password': self.password,
-        }
+
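+        # reuse the cached API token while it is still considered valid (refreshed here after 30 minutes)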
+        if self.token is not None:
+            if time.time() < (self.tokenLastUpdate + 30 * 60):
+                logger.log('T411 Authentication token is still valid', logger.DEBUG)
+                return True
+
+        login_params = {'username': self.username,
+                        'password': self.password}
 
         self.session = requests.Session()
 
+        logger.log('Performing authentication to T411', logger.DEBUG)
+
         try:
-            response = self.session.post(self.urls['login_page'], data=login_params, timeout=30, verify=False, headers=self.headers)
+            response = helpers.getURL(self.urls['login_page'], post_data=login_params, timeout=30, session=self.session, json=True)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
 
-        if re.search('confirmer le captcha', response.text.lower()):
-            logger.log(u'Too many login attempts. A captcha is displayed.', logger.INFO)
-            response = self.solveCaptcha(response, login_params)
-
-        if not re.search('/users/logout/', response.text.lower()):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+        if response and 'token' in response:
+            self.token = response['token']
+            self.tokenLastUpdate = time.time()
+            self.uid = response['uid'].encode('ascii', 'ignore')
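+            # authenticate every subsequent request made through this session with the token (see T411Auth below)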
+            self.session.auth = T411Auth(self.token)
+            logger.log('Using T411 Authorization token : ' + self.token, logger.DEBUG)
+            return True
+        else:
+            logger.log('T411 token not found in authentication response', logger.ERROR)
             return False
 
-        return True
-
-    def solveCaptcha(self, response, login_params):
-        """
-        When trying to connect too many times with wrong password, a captcha can be requested.
-        This captcha is really simple and can be solved by the provider.
-
-        <label for="pass">204 + 65 = </label>
-            <input type="text" size="40" name="captchaAnswer" id="lgn" value=""/>
-            <input type="hidden" name="captchaQuery" value="204 + 65 = ">
-            <input type="hidden" name="captchaToken" value="005d54a7428aaf587460207408e92145">
-        <br/>
-
-        :param response: initial login output
-        :return: response after captcha resolution
-        """
-        with BS4Parser(response.text, features=["html5lib", "permissive"]) as html:
-            query = html.find('input', {'name': 'captchaQuery'})
-            token = html.find('input', {'name': 'captchaToken'})
-            if not query or not token:
-                logger.log(u'Unable to solve login captcha.', logger.ERROR)
-                return response
-
-            query_expr = query.attrs['value'].strip('= ')
-            logger.log(u'Captcha query: ' + query_expr, logger.DEBUG)
-            answer = arithmeticEval(query_expr)
-
-            logger.log(u'Captcha answer: %s' % answer, logger.DEBUG)
-
-            login_params['captchaAnswer'] = answer
-            login_params['captchaQuery'] = query.attrs['value']
-            login_params['captchaToken'] = token.attrs['value']
-
-            return self.session.post(self.urls['login_page'], data=login_params, timeout=30, verify=False, headers=self.headers)
-
     def _get_season_search_strings(self, ep_obj):
 
         search_string = {'Season': []}
@@ -136,7 +113,7 @@ class T411Provider(generic.TorrentProvider):
             elif ep_obj.show.anime:
                 ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
             else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  #1) showName.SXX
+                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
 
             search_string['Season'].append(ep_string)
 
@@ -175,66 +152,53 @@ class T411Provider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+
+        logger.log(u"_doSearch started with: " + str(search_params), logger.DEBUG)
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
-        if not self._doLogin():
-            return results
-
         for mode in search_params.keys():
 
             for search_string in search_params[mode]:
 
-                if search_string == '':
-                    search_string2 = ''
-                else:
-                    search_string2 = '%40name+' + search_string + '+'
-
                 for sc in self.subcategories:
-                    searchURL = self.urls['search'] % (search_string, sc, search_string2)
+                    searchURL = self.urls['search'] % (search_string, sc)
                     logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
 
-                    data = self.getURL(searchURL)
+                    data = self.getURL(searchURL, json=True)
                     if not data:
                         continue
-
                     try:
-                        with BS4Parser(data, features=["html5lib", "permissive"]) as html:
-                            resultsTable = html.find('table', attrs={'class': 'results'})
 
-                            if not resultsTable:
-                                logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                           logger.DEBUG)
-                                continue
+                        if 'torrents' not in data:
+                            logger.log(
+                                u"The data returned from " + self.name + " does not contain any torrents: " + str(data),
+                                logger.DEBUG)
+                            continue
 
-                            entries = resultsTable.find("tbody").findAll("tr")
+                        torrents = data['torrents']
 
-                            if len(entries) > 0:
-                                for result in entries:
+                        if len(torrents) > 0:
+                            for torrent in torrents:
 
-                                    try:
-                                        link = result.find('a', title=True)
-                                        torrent_name = link['title']
-                                        torrentId = result.find_all('td')[2].find_all('a')[0]['href'][1:].replace(
-                                            'torrents/nfo/?id=', '')
-                                        torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
-                                    except (AttributeError, TypeError):
-                                        continue
+                                torrent_name = torrent['name']
+                                torrent_id = torrent['id']
+                                torrent_download_url = (self.urls['download'] % torrent_id).encode('utf8')
 
-                                    if not torrent_name or not torrent_download_url:
-                                        continue
+                                if not torrent_name or not torrent_download_url:
+                                    continue
 
-                                    item = torrent_name, torrent_download_url
-                                    logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",
-                                               logger.DEBUG)
-                                    items[mode].append(item)
+                                item = torrent_name, torrent_download_url
+                                logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",
+                                           logger.DEBUG)
+                                items[mode].append(item)
 
-                            else:
-                                logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                           logger.WARNING)
-                                continue
+                        else:
+                            logger.log(u"The data returned from " + self.name + " does not contain any torrents",
+                                       logger.WARNING)
+                            continue
 
                     except Exception, e:
                         logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
@@ -248,8 +212,9 @@ class T411Provider(generic.TorrentProvider):
         title, url = item
 
         if title:
-            title = u'' + title
+            title += u''
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
@@ -288,6 +253,16 @@ class T411Provider(generic.TorrentProvider):
         return self.ratio
 
 
+class T411Auth(AuthBase):
+    """Attaches HTTP Authentication to the given Request object."""
+    def __init__(self, token):
+        self.token = token
+
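+    # called by requests for each prepared request; sends the token in the Authorization header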
+    def __call__(self, r):
+        r.headers['Authorization'] = self.token
+        return r
+
+
 class T411Cache(tvcache.TVCache):
     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 76f4f2c53cf5717e024b66c420dad79dadca0687..2a395bbab035892b1771cf4f2f779e38ec0faaf9 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -218,7 +218,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -283,6 +283,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
 
         if title:
             title = u'' + title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = url.replace('&amp;', '&')
diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py
index 21a881dd88b15eb748aecdd5b2813a3fa11b3ded..f31b7c217db107fd9d268a918c5b2b486a75b5b1 100644
--- a/sickbeard/providers/tntvillage.py
+++ b/sickbeard/providers/tntvillage.py
@@ -19,10 +19,9 @@
 import re
 import traceback
 import datetime
-import urlparse
 import sickbeard
 import generic
-from sickbeard.common import Quality, cpu_presets
+from sickbeard.common import Quality
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard import db
@@ -32,10 +31,11 @@ from sickbeard import show_name_helpers
 from sickbeard.exceptions import ex, AuthException
 from sickbeard import clients
 from lib import requests
-from lib.requests import exceptions
+from lib.requests.exceptions import RequestException
 from sickbeard.bs4_parser import BS4Parser
 from lib.unidecode import unidecode
 from sickbeard.helpers import sanitizeSceneName
+from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 
 category_excluded = {
               'Sport' : 22,
@@ -152,7 +152,7 @@ class TNTVillageProvider(generic.TorrentProvider):
 
         try:
             response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
-        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+        except RequestException as e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
 
@@ -174,10 +174,7 @@ class TNTVillageProvider(generic.TorrentProvider):
                 ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
             else:
                 ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
             search_string['Season'].append(ep_string)
-
-
         return [search_string]
 
     def _get_episode_search_strings(self, ep_obj, add_string=''):
@@ -242,29 +239,25 @@ class TNTVillageProvider(generic.TorrentProvider):
         """
             Return The quality from the scene episode HTML row.
         """
-
         file_quality=''
-        releaser=''
 
         img_all = (torrent_rows.find_all('td'))[1].find_all('img')
         
         if len(img_all) > 0:
-            for type in img_all:
+            for img_type in img_all:
                 try:
-    
-                    file_quality = file_quality + " " + type['src'].replace("style_images/mkportal-636/","").replace(".gif","").replace(".png","")
-    
-                except Exception, e:
+                    file_quality = file_quality + " " + img_type['src'].replace("style_images/mkportal-636/","").replace(".gif","").replace(".png","")
+                except Exception:
                     logger.log(u"Failed parsing " + self.name + " Traceback: "  + traceback.format_exc(), logger.ERROR)
 
         else:
             file_quality = (torrent_rows.find_all('td'))[1].get_text()
-            logger.log(u"file_quality: " + str(file_quality), logger.DEBUG)
+            logger.log(u"Episode quality: " + str(file_quality), logger.DEBUG)
 
         checkName = lambda list, func: func([re.search(x, file_quality, re.I) for x in list])
 
         dvdOptions = checkName(["dvd", "dvdrip", "dvdmux", "DVD9", "DVD5"], any)
-        blueRayOptions = checkName(["BD","BDmux", "BDrip", "BRrip", "Bluray"], any)
+        bluRayOptions = checkName(["BD","BDmux", "BDrip", "BRrip", "Bluray"], any)
         sdOptions = checkName(["h264", "divx", "XviD", "tv", "TVrip", "SATRip", "DTTrip", "Mpeg2"], any)
         hdOptions = checkName(["720p"], any)
         fullHD = checkName(["1080p", "fullHD"], any)
@@ -274,23 +267,24 @@ class TNTVillageProvider(generic.TorrentProvider):
 
         webdl = checkName(["webdl", "webmux", "webrip", "dl-webmux", "web-dlmux", "webdl-mux", "web-dl", "webdlmux", "dlmux"], any)
 
-        logger.log(u"dvdOptions: " + str(dvdOptions) + ", blueRayOptions: " + str(blueRayOptions) + ", sdOptions: " + str(sdOptions) + ", hdOptions: " + str(hdOptions) + ", fullHD: " + str(fullHD) + ", webdl: " + str(webdl), logger.DEBUG)
+        logger.log(u"Episode options: dvdOptions: " + str(dvdOptions) + ", bluRayOptions: " + str(bluRayOptions) + \
+                   ", sdOptions: " + str(sdOptions) + ", hdOptions: " + str(hdOptions) + ", fullHD: " + str(fullHD) + ", webdl: " + str(webdl), logger.DEBUG)
 
         if sdOptions and not dvdOptions and not fullHD and not hdOptions:
             return Quality.SDTV
         elif dvdOptions:
             return Quality.SDDVD
-        elif hdOptions and not blueRayOptions and not fullHD and not webdl:
+        elif hdOptions and not bluRayOptions and not fullHD and not webdl:
             return Quality.HDTV
-        elif not hdOptions and not blueRayOptions and fullHD and not webdl:
+        elif not hdOptions and not bluRayOptions and fullHD and not webdl:
             return Quality.FULLHDTV
-        elif hdOptions and not blueRayOptions and not fullHD and webdl:
+        elif hdOptions and not bluRayOptions and not fullHD and webdl:
             return Quality.HDWEBDL
-        elif not hdOptions and not blueRayOptions and fullHD and webdl:
+        elif not hdOptions and not bluRayOptions and fullHD and webdl:
             return Quality.FULLHDWEBDL
-        elif blueRayOptions and hdOptions and not fullHD:
+        elif bluRayOptions and hdOptions and not fullHD:
             return Quality.HDBLURAY
-        elif blueRayOptions and fullHD and not hdOptions:
+        elif bluRayOptions and fullHD and not hdOptions:
             return Quality.FULLHDBLURAY
         else:
             return Quality.UNKNOWN
@@ -299,20 +293,36 @@ class TNTVillageProvider(generic.TorrentProvider):
 
         is_italian = 0
 
-        name=''
-
         span_tag = (torrent_rows.find_all('td'))[1].find('b').find('span')
 
         name = str(span_tag)
         name = name.split('sub')[0] 
 
         if re.search("ita", name, re.I):
-            logger.log(u"Found Italian Language", logger.DEBUG)
+            logger.log(u"Found Italian release", logger.DEBUG)
             is_italian=1
 
         return is_italian
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _is_season_pack(self, name):
+
+        try:
+            myParser = NameParser(tryIndexers=True, trySceneExceptions=True, convert=True)
+            parse_result = myParser.parse(name)
+        except InvalidNameException:
+            logger.log(u"Unable to parse the filename " + str(name) + " into a valid episode", logger.DEBUG)
+            return False
+        except InvalidShowException:
+            logger.log(u"Unable to parse the filename " + str(name) + " into a valid show", logger.DEBUG)
+            return False
+
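+        # treat the release as a season pack when it references as many episodes as the season has in the local database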
+        myDB = db.DBConnection()
+        sql_selection="select count(*) as count from tv_episodes where showid = ? and season = ?"
+        episodes = myDB.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
+        if int(episodes[0]['count']) == len(parse_result.episode_numbers):
+            return True
+
+        return False
+
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -340,13 +350,10 @@ class TNTVillageProvider(generic.TorrentProvider):
                 search_string = str(search_string).replace('.', ' ')
 
                 for x in range(0,y):
-				
                     z=x*20
                     if last_page:
                         break	
 
-                    logger.log(u"Page: " + str(x) + " of " + str(y), logger.DEBUG)
-
                     if mode != 'RSS':
                         searchURL = (self.urls['search_page'] + '&filter={2}').format(z,self.categories,search_string)
                     else:
@@ -356,7 +363,7 @@ class TNTVillageProvider(generic.TorrentProvider):
 
                     data = self.getURL(searchURL)
                     if not data:
-                        logger.log(u"data is empty", logger.DEBUG)
+                        logger.log(u"Received no data from the server", logger.DEBUG)
                         continue
 
                     try:
@@ -365,14 +372,13 @@ class TNTVillageProvider(generic.TorrentProvider):
                             torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
                             #Continue only if one Release is found
-                            logger.log(u"Num of Row: "+ str(len(torrent_rows)), logger.DEBUG)
-
                             if len(torrent_rows)<3:
-                                logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                           logger.DEBUG)
+                                logger.log(u"The server returned no torrents", logger.DEBUG)
                                 last_page=1
                                 continue
 
+                            logger.log(u"Parsing results from page " + str(x+1), logger.DEBUG)
+
                             if len(torrent_rows) < 42:
                                 last_page=1
 
@@ -396,7 +402,6 @@ class TNTVillageProvider(generic.TorrentProvider):
                                 if not title or not download_url:
                                     continue
 
-                                logger.log(u"name: " + title + "", logger.DEBUG)
                                 filename_qt = self._reverseQuality(self._episodeQuality(result))
                                 for text in self.hdtext:
                                     title1 = title
@@ -407,20 +412,19 @@ class TNTVillageProvider(generic.TorrentProvider):
                                 if Quality.nameQuality(title) == Quality.UNKNOWN:
                                     title += filename_qt 
 
-                                logger.log(u"name, inserted quallity: " + title + "", logger.DEBUG)
+                                if not self._is_italian(result) and not self.subtitle:
+                                    logger.log(u"Subtitled, skipping " + title + " (" + searchURL + ")", logger.DEBUG)
+                                    continue
+
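+                                # strip the episode numbers from season-pack titles so they match as full-season releases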
+                                if self._is_season_pack(title):
+                                    title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title)
 
                                 item = title, download_url, id, seeders, leechers
                                 logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
 
-                                if not self._is_italian(result) and not self.subtitle:
-                                    logger.log(u"Subtitled, Skipped", logger.DEBUG)
-                                    continue
-                                else:
-                                    logger.log(u"Not Subtitled or Forced, Got It!", logger.DEBUG)
-
                                 items[mode].append(item)
 
-                    except Exception, e:
+                    except Exception:
                         logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
 
                 #For each search mode sort all the items by seeders
@@ -437,6 +441,7 @@ class TNTVillageProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py
index 651c4586008594532b02404aae7f558d2203b1be..ba9bbd7f1a8f319f74433c6d07ffcc576fbf3c1c 100644
--- a/sickbeard/providers/tokyotoshokan.py
+++ b/sickbeard/providers/tokyotoshokan.py
@@ -82,7 +82,7 @@ class TokyoToshokanProvider(generic.TorrentProvider):
     def _get_episode_search_strings(self, ep_obj, add_string=''):
         return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
 
-    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
         if self.show and not self.show.is_anime:
             logger.log(u"" + str(self.show.name) + " is not an anime skiping " + str(self.name))
             return []
@@ -149,6 +149,7 @@ class TokyoToshokanCache(tvcache.TVCache):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         url = item.link if item.link else None
         if url:
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index 807097a3dba364d1f4ef192176af7d12f0f8c209..e52deb7fac0dc97e27595f84c7594a049c9e5760 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -147,7 +147,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -229,6 +229,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index b0d86a50a756d42f1834e99c8496fe4bf7d7a1b4..787d471b9aef6649d5fa1a737afa3c94831ebdab 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -175,7 +175,7 @@ class TorrentDayProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -238,6 +238,7 @@ class TorrentDayProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index dbd5e4688be9ada61364a31d304f77bec577777d..a52cbc1e95402ac1a3c9091ea5bc6a8300daba59 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -153,7 +153,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
 
         return [search_string]
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -231,6 +231,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
         if title:
             title = u'' + title
             title = title.replace(' ', '.')
+            title = self._clean_title_from_provider(title)
 
         if url:
             url = str(url).replace('&amp;', '&')
diff --git a/sickbeard/scheduler.py b/sickbeard/scheduler.py
index 6d69dc795a8b31ee680f70c7a6746a3fbb609f32..92e57e6aa05cbabf913f110df0c1da8b20953d91 100644
--- a/sickbeard/scheduler.py
+++ b/sickbeard/scheduler.py
@@ -51,41 +51,40 @@ class Scheduler(threading.Thread):
         return False
 
     def run(self):
-
-        while not self.stop.is_set():
-
-            current_time = datetime.datetime.now()
-            should_run = False
-
-            # check if interval has passed
-            if current_time - self.lastRun >= self.cycleTime:
-                # check if wanting to start around certain time taking interval into account
-                if self.start_time:
-                    hour_diff = current_time.time().hour - self.start_time.hour
-                    if not hour_diff < 0 and hour_diff < self.cycleTime.seconds / 3600:
-                        should_run = True
+        try:
+            while not self.stop.is_set():
+
+                current_time = datetime.datetime.now()
+                should_run = False
+
+                # check if interval has passed
+                if current_time - self.lastRun >= self.cycleTime:
+                    # check if wanting to start around certain time taking interval into account
+                    if self.start_time:
+                        hour_diff = current_time.time().hour - self.start_time.hour
+                        if not hour_diff < 0 and hour_diff < self.cycleTime.seconds / 3600:
+                            should_run = True
+                        else:
+                            # set lastRun to only check start_time after another cycleTime
+                            self.lastRun = current_time
                     else:
-                        # set lastRun to only check start_time after another cycleTime
-                        self.lastRun = current_time
-                else:
-                    should_run = True
+                        should_run = True
 
-            if should_run:
-                self.lastRun = current_time
+                if should_run:
+                    self.lastRun = current_time
 
-                try:
                     if not self.silent:
                         logger.log(u"Starting new thread: " + self.name, logger.DEBUG)
 
                     self.action.run(self.force)
-                except Exception, e:
-                    logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
-                    logger.log(repr(traceback.format_exc()), logger.DEBUG)
 
-            if self.force:
-                self.force = False
+                if self.force:
+                    self.force = False
 
-            time.sleep(1)
+                time.sleep(1)
 
-        # exiting thread
-        self.stop.clear()
\ No newline at end of file
+            # exiting thread
+            self.stop.clear()
+        except Exception, e:
+            logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
+            logger.log(repr(traceback.format_exc()), logger.DEBUG)
\ No newline at end of file
diff --git a/sickbeard/search.py b/sickbeard/search.py
index 58df654e115821e11ea129d77d53835f88fa58c2..11b40043f25ee4246b1119fd8ed026ff2bd7c3d8 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -662,6 +662,8 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
 
                 # if we're keeping this multi-result then remember it
                 for epObj in multiResult.episodes:
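+                    # for non-magnet links, fetch the torrent file contents up front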
+                    if not multiResult.url.startswith('magnet'):
+                        multiResult.content = multiResult.provider.getURL(multiResult.url)
                     multiResults[epObj.episode] = multiResult
 
                 # don't bother with the single result if we're going to get it with a multi result
diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py
index bab65b89781fe600c2b7f926f17acb57c3818568..921e37c7b081265f51a65fbb4246e84942707f82 100644
--- a/sickbeard/traktChecker.py
+++ b/sickbeard/traktChecker.py
@@ -391,14 +391,18 @@ class TraktChecker():
         else:
             watchlist = self.ShowWatchlist
 
+        trakt_id = sickbeard.indexerApi(show_obj.indexer).config['trakt_id']
+        
         for watchlist_el in watchlist:
 
-            trakt_id = sickbeard.indexerApi(show_obj.indexer).config['trakt_id']
             if trakt_id == 'tvdb_id':
                 indexer_id = int(watchlist_el['show']['ids']["tvdb"])
             else:
-                indexer_id = int(watchlist_el['show']['ids']["tvrage"])
-
+                if watchlist_el['show']['ids']["tvrage"] is not None:
+                    indexer_id = int(watchlist_el['show']['ids']["tvrage"])
+                else:
+                    indexer_id = 0
+
             if indexer_id == show_obj.indexerid and season is None and episode is None:
                 found=True
                 break
@@ -556,10 +560,9 @@ class TraktRolling():
                 if epObj.status != SKIPPED:
                     return
 
-                logger.log(u"Setting episode s" + str(s) + "e" + str(e) + " of show " + show.name + " to wanted")
-                # figure out what segment the episode is in and remember it so we can backlog it
+                logger.log(u"Setting episode s" + str(s) + "e" + str(e) + " of show " + show.name + " to " + statusStrings[sickbeard.EP_DEFAULT_DELETED_STATUS])
 
-                epObj.status = sickbeard.TRAKT_ROLLING_DEFAULT_WATCHED_STATUS
+                epObj.status = sickbeard.EP_DEFAULT_DELETED_STATUS
                 epObj.saveToDB()
 
     def _num_ep_for_season(self, show, season, episode):
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index 58a6bdbd534427ab8eb0f22e707df41347a503db..90882e45cb11090ae7bd7c4618bb159d2accee79 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1082,12 +1082,12 @@ class TVShow(object):
                 # check if downloaded files still exist, update our data if this has changed
                 if not sickbeard.SKIP_REMOVED_FILES:
                     with curEp.lock:
-                        # if it used to have a file associated with it and it doesn't anymore then set it to ARCHIVED
+                        # if it used to have a file associated with it and it doesn't anymore then set it to sickbeard.EP_DEFAULT_DELETED_STATUS
                         if curEp.location and curEp.status in Quality.DOWNLOADED:
                             logger.log(str(self.indexerid) + u": Location for " + str(season) + "x" + str(
-                                episode) + " doesn't exist, removing it and changing our status to ARCHIVED",
+                                episode) + " doesn't exist, removing it and changing our status to " + statusStrings[sickbeard.EP_DEFAULT_DELETED_STATUS],
                                        logger.DEBUG)
-                            curEp.status = ARCHIVED
+                            curEp.status = sickbeard.EP_DEFAULT_DELETED_STATUS
                             curEp.subtitles = list()
                             curEp.subtitles_searchcount = 0
                             curEp.subtitles_lastsearch = str(datetime.datetime.min)
@@ -1737,7 +1737,7 @@ class TVEpisode(object):
         if not ek.ek(os.path.isdir,
                      self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS and not sickbeard.ADD_SHOWS_WO_DIR:
             logger.log(
-                u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid")
+                u"The show dir " + str(self.show._location) + " is missing, not bothering to change the episode statuses since it'd probably be invalid")
             return
 
         if self.location:
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index c3a0cfb29f7507276c51731c0fb77503f288ab87..39391c241b201b5e12c2e5bb13315206eaca1f73 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -597,13 +597,16 @@ class CalendarHandler(BaseHandler):
                 ical = ical + 'DTEND:' + air_date_time_end.strftime(
                     "%Y%m%d") + 'T' + air_date_time_end.strftime(
                     "%H%M%S") + 'Z\r\n'
-                ical = ical + 'SUMMARY:' + show['show_name'] + ' - ' + str(
-                    episode['season']) + "x" + str(episode['episode']) + " - " + episode['name'] + '\r\n'
+                ical = ical + u'SUMMARY: {0} - {1}x{2} - {3}\r\n'.format(
+                       show['show_name'],
+                       episode['season'],
+                       episode['episode'],
+                       episode['name'])
                 ical = ical + 'UID:Sick-Beard-' + str(datetime.date.today().isoformat()) + '-' + show[
                     'show_name'].replace(" ", "-") + '-E' + str(episode['episode']) + 'S' + str(
                     episode['season']) + '\r\n'
                 if episode['description']:
-                    ical = ical + 'DESCRIPTION: {0} on {1} \\n\\n {2}\r\n'.format(
+                    ical = ical + u'DESCRIPTION: {0} on {1} \\n\\n {2}\r\n'.format(
                         (show['airs'] or '(Unknown airs)'),
                         (show['network'] or 'Unknown network'),
                         episode['description'].splitlines()[0])
@@ -1324,6 +1327,7 @@ class Home(WebRoot):
                     except Exception as e:
                         anidb_failed = True
                         ui.notifications.error('Unable to retreive Fansub Groups from AniDB.')
+                        logger.log('Unable to retrieve Fansub Groups from AniDB. Error is {0}'.format(str(e)), logger.DEBUG)
 
             with showObj.lock:
                 t.show = showObj
@@ -1393,6 +1397,7 @@ class Home(WebRoot):
                         except Exception as e:
                             anidb_failed = True
                             ui.notifications.error('Unable to retreive data from AniDB.')
+                            logger.log('Unable to retrieve data from AniDB. Error is {0}'.format(str(e)), logger.DEBUG)
                             shortWhiteList = whitelist
                     else:
                         shortWhiteList = whitelist
@@ -1416,6 +1421,7 @@ class Home(WebRoot):
                         except Exception as e:
                             anidb_failed = True
                             ui.notifications.error('Unable to retreive data from AniDB.')
+                            logger.log('Unable to retrieve data from AniDB. Error is {0}'.format(str(e)), logger.DEBUG)
                             shortBlacklist = blacklist
                     else:
                         shortBlacklist = blacklist
@@ -3627,12 +3633,12 @@ class ConfigGeneral(Config):
 
     def saveGeneral(self, log_dir=None, log_nr = 5, log_size = 1048576, web_port=None, web_log=None, encryption_version=None, web_ipv6=None,
                     update_shows_on_start=None, update_shows_on_snatch=None, trash_remove_show=None, trash_rotate_logs=None, update_frequency=None,
-                    indexerDefaultLang='en', launch_browser=None, showupdate_hour=3, web_username=None,
+                    indexerDefaultLang='en', ep_default_deleted_status=None, launch_browser=None, showupdate_hour=3, web_username=None,
                     api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None,
                     web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
                     handle_reverse_proxy=None, sort_article=None, auto_update=None, notify_on_update=None,
                     proxy_setting=None, proxy_indexers=None, anon_redirect=None, git_path=None, git_remote=None,
-                    calendar_unprotected=None, debug=None, no_restart=None,
+                    calendar_unprotected=None, debug=None, no_restart=None, coming_eps_missed_range=None,
                     display_filesize=None, fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
                     indexer_timeout=None, download_url=None, rootDir=None, theme_name=None,
                     git_reset=None, git_username=None, git_password=None, git_autoissues=None):
@@ -3642,23 +3648,9 @@ class ConfigGeneral(Config):
         # Misc
         sickbeard.DOWNLOAD_URL = download_url
         sickbeard.INDEXER_DEFAULT_LANGUAGE = indexerDefaultLang
+        sickbeard.EP_DEFAULT_DELETED_STATUS = ep_default_deleted_status
         sickbeard.LAUNCH_BROWSER = config.checkbox_to_value(launch_browser)
-        if sickbeard.SHOWUPDATE_HOUR != config.to_int(showupdate_hour):
-            sickbeard.showUpdateScheduler.stop.set()
-            logger.log(u"Waiting for the SHOWUPDATER thread to exit so we can set new start hour")
-            try:
-                sickbeard.showUpdateScheduler.join(10) # Wait 10 sec for the thread to exit
-            except:
-                pass
-            if  sickbeard.showUpdateScheduler.isAlive():
-                logger.log(u"Unable to stop SHOWUPDATER thread, the new configuration will be applied after a restart", logger.WARNING)
-            else:
-                logger.log(u"Starting SHOWUPDATER thread with the new start hour: " + str(config.to_int(showupdate_hour)))
-                sickbeard.showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
-                                              cycleTime=datetime.timedelta(hours=1),
-                                              threadName="SHOWUPDATER",
-                                              start_time=datetime.time(hour=config.to_int(showupdate_hour)))            
-        sickbeard.SHOWUPDATE_HOUR = config.to_int(showupdate_hour)
+        config.change_SHOWUPDATE_HOUR(showupdate_hour)
         config.change_VERSION_NOTIFY(config.checkbox_to_value(version_notify))
         sickbeard.AUTO_UPDATE = config.checkbox_to_value(auto_update)
         sickbeard.NOTIFY_ON_UPDATE = config.checkbox_to_value(notify_on_update)
@@ -3686,6 +3678,7 @@ class ConfigGeneral(Config):
         sickbeard.NO_RESTART = config.checkbox_to_value(no_restart)
         sickbeard.DEBUG = config.checkbox_to_value(debug)
         # sickbeard.LOG_DIR is set in config.change_LOG_DIR()
+        sickbeard.COMING_EPS_MISSED_RANGE = config.to_int(coming_eps_missed_range, default=7)
 
         sickbeard.WEB_PORT = config.to_int(web_port)
         sickbeard.WEB_IPV6 = config.checkbox_to_value(web_ipv6)
@@ -3823,13 +3816,13 @@ class ConfigSearch(Config):
     def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None,
                    sab_apikey=None, sab_category=None, sab_category_anime=None, sab_host=None, nzbget_username=None,
                    nzbget_password=None, nzbget_category=None, nzbget_category_anime=None, nzbget_priority=None,
-                   nzbget_host=None, nzbget_use_https=None, backlog_days=None, backlog_frequency=None,
+                   nzbget_host=None, nzbget_use_https=None, backlog_frequency=None,
                    dailysearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
                    download_propers=None, check_propers_interval=None, allow_high_priority=None, sab_forced=None,
                    randomize_providers=None, backlog_startup=None, use_failed_downloads=None, delete_failed=None,
                    dailysearch_startup=None, torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
                    torrent_label=None, torrent_label_anime=None, torrent_path=None, torrent_verify_cert=None,
-                   torrent_seed_time=None, torrent_paused=None, torrent_high_bandwidth=None, coming_eps_missed_range=None,
+                   torrent_seed_time=None, torrent_paused=None, torrent_high_bandwidth=None,
                    torrent_rpcurl=None, torrent_auth_type = None, ignore_words=None, require_words=None):
 
         results = []
@@ -3844,8 +3837,6 @@ class ConfigSearch(Config):
         
 
         config.change_BACKLOG_FREQUENCY(backlog_frequency)
-        sickbeard.BACKLOG_DAYS = config.to_int(backlog_days, default=7)
-        sickbeard.COMING_EPS_MISSED_RANGE = config.to_int(coming_eps_missed_range,default=7)
 
         sickbeard.USE_NZBS = config.checkbox_to_value(use_nzbs)
         sickbeard.USE_TORRENTS = config.checkbox_to_value(use_torrents)
@@ -4629,7 +4620,7 @@ class ConfigNotifications(Config):
                           trakt_remove_watchlist=None, trakt_sync_watchlist=None, trakt_method_add=None,
                           trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None,
                           trakt_default_indexer=None, trakt_remove_serieslist=None, trakt_disable_ssl_verify=None, trakt_timeout=None, trakt_blacklist_name=None,
-                          trakt_use_rolling_download=None, trakt_rolling_num_ep=None, trakt_rolling_add_paused=None, trakt_rolling_frequency=None, trakt_rolling_default_watched_status=None, 
+                          trakt_use_rolling_download=None, trakt_rolling_num_ep=None, trakt_rolling_add_paused=None, trakt_rolling_frequency=None,
                           use_synologynotifier=None, synologynotifier_notify_onsnatch=None,
                           synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None,
                           use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None,
@@ -4758,7 +4749,6 @@ class ConfigNotifications(Config):
         sickbeard.TRAKT_ROLLING_NUM_EP = int(trakt_rolling_num_ep)
         sickbeard.TRAKT_ROLLING_ADD_PAUSED = config.checkbox_to_value(trakt_rolling_add_paused)
         sickbeard.TRAKT_ROLLING_FREQUENCY = int(trakt_rolling_frequency)
-        sickbeard.TRAKT_ROLLING_DEFAULT_WATCHED_STATUS = int(trakt_rolling_default_watched_status)
 
         if sickbeard.USE_TRAKT:
             sickbeard.traktCheckerScheduler.silent = False