diff --git a/SickBeard.py b/SickBeard.py
index a3a32124034f4a00d927cec5186b43a26948bcdf..833ebff5e8703fb41c7ccaaf2687c148fa220630 100755
--- a/SickBeard.py
+++ b/SickBeard.py
@@ -110,9 +110,9 @@ class SickRage(object):
         help_msg += "    -q          --quiet             Disables logging to console\n"
         help_msg += "                --nolaunch          Suppress launching web browser on startup\n"
 
-        if sys.platform == 'win32':
+        if sys.platform == 'win32' or sys.platform == 'darwin':
             help_msg += "    -d          --daemon            Running as real daemon is not supported on Windows\n"
-            help_msg += "                                    On Windows, --daemon is substituted with: --quiet --nolaunch\n"
+            help_msg += "                                    On Windows and Mac, --daemon is substituted with: --quiet --nolaunch\n"
         else:
             help_msg += "    -d          --daemon            Run as double forked daemon (includes options --quiet --nolaunch)\n"
             help_msg += "                --pidfile=<path>    Combined with --daemon creates a pidfile (full path including filename)\n"
@@ -208,7 +208,7 @@ class SickRage(object):
                 self.consoleLogging = False
                 self.noLaunch = True
 
-                if sys.platform == 'win32':
+                if sys.platform == 'win32' or sys.platform == 'darwin':
                     self.runAsDaemon = False
 
             # Write a pidfile if requested
@@ -521,9 +521,11 @@ class SickRage(object):
                     if '--nolaunch' not in popen_list:
                         popen_list += ['--nolaunch']
                     logger.log(u"Restarting SickRage with " + str(popen_list))
+                    logger.shutdown() #shutdown the logger to make sure it's released the logfile BEFORE it restarts SR.
                     subprocess.Popen(popen_list, cwd=os.getcwd())
 
         # system exit
+        logger.shutdown() #Make sure the logger has stopped, just in case
         os._exit(0)
 
 
diff --git a/gui/slick/images/network/al jazeera america.png b/gui/slick/images/network/al jazeera america.png
new file mode 100644
index 0000000000000000000000000000000000000000..d8549d78adb2c17d40ab564af97e8fd744907200
Binary files /dev/null and b/gui/slick/images/network/al jazeera america.png differ
diff --git a/gui/slick/images/network/al jazeera.png b/gui/slick/images/network/al jazeera.png
new file mode 100644
index 0000000000000000000000000000000000000000..d8549d78adb2c17d40ab564af97e8fd744907200
Binary files /dev/null and b/gui/slick/images/network/al jazeera.png differ
diff --git a/gui/slick/images/network/audience network.png b/gui/slick/images/network/audience network.png
new file mode 100644
index 0000000000000000000000000000000000000000..e5e2b8a0a075cb9c5db274812f56e2d0441922fb
Binary files /dev/null and b/gui/slick/images/network/audience network.png differ
diff --git a/gui/slick/images/network/bet.png b/gui/slick/images/network/bet.png
new file mode 100644
index 0000000000000000000000000000000000000000..ff1adacfef789164f3ad79019059e36c728b2edb
Binary files /dev/null and b/gui/slick/images/network/bet.png differ
diff --git a/gui/slick/images/network/bnn.png b/gui/slick/images/network/bnn.png
index 9267610f299d22af11b9df0aee6af7e4c342b18e..e491c238170f4f04c8c6f840022e66a3a4e79cb9 100644
Binary files a/gui/slick/images/network/bnn.png and b/gui/slick/images/network/bnn.png differ
diff --git a/gui/slick/images/network/br-alpha.png b/gui/slick/images/network/br-alpha.png
new file mode 100644
index 0000000000000000000000000000000000000000..2f7f78a469ebfd13b3130d894cbc3fb3c0b41b95
Binary files /dev/null and b/gui/slick/images/network/br-alpha.png differ
diff --git a/gui/slick/images/network/challenge.png b/gui/slick/images/network/challenge.png
new file mode 100644
index 0000000000000000000000000000000000000000..f76436b42267ac92567b5cafc83ba880a9e006ad
Binary files /dev/null and b/gui/slick/images/network/challenge.png differ
diff --git a/gui/slick/images/network/espn classic.png b/gui/slick/images/network/espn classic.png
new file mode 100644
index 0000000000000000000000000000000000000000..e450b79716ec885376a1b27b95afd13c98a93eff
Binary files /dev/null and b/gui/slick/images/network/espn classic.png differ
diff --git a/gui/slick/images/network/fox business.png b/gui/slick/images/network/fox business.png
new file mode 100644
index 0000000000000000000000000000000000000000..f15b69593c59fd7cdbc096bba8311a29c96177e1
Binary files /dev/null and b/gui/slick/images/network/fox business.png differ
diff --git a/gui/slick/images/network/fox sports 1.png b/gui/slick/images/network/fox sports 1.png
new file mode 100644
index 0000000000000000000000000000000000000000..1eb9d2053e625e8480fda9af2fc9e6b35c58672b
Binary files /dev/null and b/gui/slick/images/network/fox sports 1.png differ
diff --git a/gui/slick/images/network/fox sports.png b/gui/slick/images/network/fox sports.png
new file mode 100644
index 0000000000000000000000000000000000000000..d99c7cc10c2394e06fda2adf962f371b2541948c
Binary files /dev/null and b/gui/slick/images/network/fox sports.png differ
diff --git a/gui/slick/images/network/fuji television.png b/gui/slick/images/network/fuji television.png
new file mode 100644
index 0000000000000000000000000000000000000000..f0afab1517658cade93fee7fa73ab43dfb8c76f5
Binary files /dev/null and b/gui/slick/images/network/fuji television.png differ
diff --git a/gui/slick/images/network/fuse.png b/gui/slick/images/network/fuse.png
new file mode 100644
index 0000000000000000000000000000000000000000..4f5ff8f671b343b35eb701163dd0db23895d3f77
Binary files /dev/null and b/gui/slick/images/network/fuse.png differ
diff --git a/gui/slick/images/network/history television.png b/gui/slick/images/network/history television.png
new file mode 100644
index 0000000000000000000000000000000000000000..a2d4b25abb357db6a9bc4e443a155987db5c73ec
Binary files /dev/null and b/gui/slick/images/network/history television.png differ
diff --git a/gui/slick/images/network/kanaaltwee.png b/gui/slick/images/network/kanaaltwee.png
new file mode 100644
index 0000000000000000000000000000000000000000..7fbd9fe0377b98c4e8803b1585a1c1b57e13f865
Binary files /dev/null and b/gui/slick/images/network/kanaaltwee.png differ
diff --git a/gui/slick/images/network/nippon television.png b/gui/slick/images/network/nippon television.png
new file mode 100644
index 0000000000000000000000000000000000000000..95b18315b43fd2ca2e1c42f65d27f2e8332245db
Binary files /dev/null and b/gui/slick/images/network/nippon television.png differ
diff --git a/gui/slick/images/network/playstation network.png b/gui/slick/images/network/playstation network.png
new file mode 100644
index 0000000000000000000000000000000000000000..a9ab7a21341b6edd67f2abcbf09c76c1b4ee064c
Binary files /dev/null and b/gui/slick/images/network/playstation network.png differ
diff --git a/gui/slick/images/network/rooster teeth.png b/gui/slick/images/network/rooster teeth.png
new file mode 100644
index 0000000000000000000000000000000000000000..594d712c081a40a681d8bec12cf09e0ee2a813c5
Binary files /dev/null and b/gui/slick/images/network/rooster teeth.png differ
diff --git a/gui/slick/images/network/sky sports.png b/gui/slick/images/network/sky sports.png
new file mode 100644
index 0000000000000000000000000000000000000000..4d3658d652625caca3c3d9ef470eac594660056b
Binary files /dev/null and b/gui/slick/images/network/sky sports.png differ
diff --git a/gui/slick/images/network/speed.png b/gui/slick/images/network/speed.png
new file mode 100644
index 0000000000000000000000000000000000000000..5b7edbd88f9d2205a625f895fd3350903ff0b12d
Binary files /dev/null and b/gui/slick/images/network/speed.png differ
diff --git a/gui/slick/images/network/sportsman channel.png b/gui/slick/images/network/sportsman channel.png
new file mode 100644
index 0000000000000000000000000000000000000000..1ff6131da2d047c4110f6c997f44d8ec17f61cbf
Binary files /dev/null and b/gui/slick/images/network/sportsman channel.png differ
diff --git a/gui/slick/images/network/star plus.png b/gui/slick/images/network/star plus.png
new file mode 100644
index 0000000000000000000000000000000000000000..61d0e71db9869ce1d5f7c472ae809ea039a2519f
Binary files /dev/null and b/gui/slick/images/network/star plus.png differ
diff --git a/gui/slick/images/network/uktv history.png b/gui/slick/images/network/uktv history.png
new file mode 100644
index 0000000000000000000000000000000000000000..e72ae28ecaa1755aac6b31efa4e3016c7f4c0702
Binary files /dev/null and b/gui/slick/images/network/uktv history.png differ
diff --git a/gui/slick/images/network/vh1 classics.png b/gui/slick/images/network/vh1 classics.png
new file mode 100644
index 0000000000000000000000000000000000000000..ce30485a927ac764708858eeb524fc7c432c4c1d
Binary files /dev/null and b/gui/slick/images/network/vh1 classics.png differ
diff --git a/gui/slick/interfaces/default/comingEpisodes.tmpl b/gui/slick/interfaces/default/comingEpisodes.tmpl
index 6d8ef27d992b22dd022890afed62556b7b7ffb73..66632982dea6d44e6a5d2f63ad3668c13c3ab013 100644
--- a/gui/slick/interfaces/default/comingEpisodes.tmpl
+++ b/gui/slick/interfaces/default/comingEpisodes.tmpl
@@ -502,11 +502,15 @@
             #set $airday = $cur_result['localtime'].date()
 
             #if $airday == $day:
-                #set $day_has_show = True
-                #set $airtime = $sbdatetime.sbdatetime.fromtimestamp($time.mktime($cur_result['localtime'].timetuple())).sbftime().decode($sickbeard.SYS_ENCODING)
-                #if $sickbeard.TRIM_ZERO:
-                #set $airtime = re.sub(r'0(\d:\d\d)', r'\1', $airtime, 0, re.IGNORECASE | re.MULTILINE)
-                #end if
+                #try
+                    #set $day_has_show = True
+                    #set $airtime = $sbdatetime.sbdatetime.fromtimestamp($time.mktime($cur_result['localtime'].timetuple())).sbftime().decode($sickbeard.SYS_ENCODING)
+                    #if $sickbeard.TRIM_ZERO:
+                    #set $airtime = re.sub(r'0(\d:\d\d)', r'\1', $airtime, 0, re.IGNORECASE | re.MULTILINE)
+                    #end if
+                #except OverflowError
+                    #set $airtime = "Invalid"
+                #end try
 
 				<tr>
 					<td class="calendarShow">
diff --git a/gui/slick/interfaces/default/config.tmpl b/gui/slick/interfaces/default/config.tmpl
index 992654561530809b639e51ea9b7599d88633aaa7..268416899223a44e88554736b30a713acdf2e18e 100644
--- a/gui/slick/interfaces/default/config.tmpl
+++ b/gui/slick/interfaces/default/config.tmpl
@@ -1,3 +1,7 @@
+#compiler-settings
+useLegacyImportMode = False
+#end compiler-settings
+
 #import sickbeard
 #from sickbeard import db
 #from sickbeard.helpers import anon_url
@@ -38,6 +42,27 @@
         You don't have version checking turned on. Please turn on "Check for Update" in Config > General.<br />
 #end if
 	</td></tr>
+
+#set $sr_user = None
+#try
+#import pwd
+#set $sr_user = $pwd.getpwuid(os.getuid()).pw_name
+#except ImportError
+#import getpass
+#set $sr_user = $getpass.getuser()
+#end try
+#if $sr_user:
+    <tr><td class="infoTableHeader">SR User:</td><td class="infoTableCell">$sr_user</td></tr>
+#end if
+
+#try
+#import locale
+#set $sr_locale = $locale.getdefaultlocale()
+    <tr><td class="infoTableHeader">SR Locale:</td><td class="infoTableCell">$sr_locale</td></tr>
+#except
+#pass
+#end try
+
     <tr><td class="infoTableHeader">SR Config file:</td><td class="infoTableCell">$sickbeard.CONFIG_FILE</td></tr>
     <tr><td class="infoTableHeader">SR Database file:</td><td class="infoTableCell">$db.dbFilename()</td></tr>
     <tr><td class="infoTableHeader">SR Cache Dir:</td><td class="infoTableCell">$sickbeard.CACHE_DIR</td></tr>
diff --git a/gui/slick/interfaces/default/config_postProcessing.tmpl b/gui/slick/interfaces/default/config_postProcessing.tmpl
index d6b70956eccb2f0a87a7074bff57fcbd35f0ae08..b022142d3b23de95c15f6e5c48948b3c1e3eceb6 100644
--- a/gui/slick/interfaces/default/config_postProcessing.tmpl
+++ b/gui/slick/interfaces/default/config_postProcessing.tmpl
@@ -49,7 +49,11 @@
                             </label>
                             <label class="nocheck">
                                 <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">The folder where your download client puts TV downloads.</span>
+                                <span class="component-desc">The folder where your download client puts the completed TV downloads.</span>
+                            </label>
+                            <label class="nocheck">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc"><b>NOTE:</b> Please use separate downloading and completed folders in your download client if possible. Also, if you keep seeding torrents after they finish, please set Process Method to 'copy' instead of move to prevent errors while moving files.</span>
                             </label>
                             <label class="nocheck">
                                 <span class="component-title">&nbsp;</span>
diff --git a/gui/slick/interfaces/default/config_search.tmpl b/gui/slick/interfaces/default/config_search.tmpl
index 62dd066b7c38d4ddb2e0aec540debf1668aaeb8e..b6bdedaae0fb2b1d0552bb6a2c8390f7e77069a1 100755
--- a/gui/slick/interfaces/default/config_search.tmpl
+++ b/gui/slick/interfaces/default/config_search.tmpl
@@ -122,7 +122,9 @@
 								<span class="component-title">Ignore words</span>
 								<span class="component-desc">
 									<input type="text" name="ignore_words" value="$sickbeard.IGNORE_WORDS" class="form-control input-sm input350" />
-									<div class="clear-left">results containing any word in the comma separated word list will be ignored</div>
+									<div class="clear-left">results with one or more word from this list will be ignored<br />
+									separate words with a comma, e.g. "word1,word2,word3"
+									</div>
 								</span>
 							</label>
 						</div>
@@ -132,7 +134,9 @@
 								<span class="component-title">Require words</span>
 								<span class="component-desc">
 									<input type="text" name="require_words" value="$sickbeard.REQUIRE_WORDS" class="form-control input-sm input350" />
-									<div class="clear-left">results not containing all words in the comma separated word list will be ignored</div>
+									<div class="clear-left">results with no word from this list will be ignored<br />
+									separate words with a comma, e.g. "word1,word2,word3"
+									</div>
 								</span>
 							</label>
 						</div>
@@ -493,7 +497,7 @@
 								</label>
 							</div>
 
-							<div class="field-pair" id="torrent_auth_type">
+							<div class="field-pair" id="torrent_auth_type_option">
 								<label>
 									<span class="component-title">Http Authentication</span>
 									<span class="component-desc">
@@ -501,7 +505,7 @@
 										#set $http_authtype = {'none': "None", 'basic': "Basic", 'digest': "Digest"}
 										#for $authvalue,$authname in $http_authtype.items():
     									#set $selected = $html_selected if $sickbeard.TORRENT_AUTH_TYPE == $authvalue else ''
-										    <option value="$authvalue"$selected>$authname</option>
+										    <option id="torrent_auth_type_value" value="$authvalue"$selected>$authname</option>
 										#end for
 										</select>
 										<p></p>
@@ -520,7 +524,7 @@
 								</label>
 							</div>
 
-							<div class="field-pair">
+							<div class="field-pair" id="torrent_username_option">
 								<label>
 									<span class="component-title" id="username_title">Client username</span>
 									<span class="component-desc">
@@ -530,7 +534,7 @@
 								</label>
 							</div>
 
-							<div class="field-pair">
+							<div class="field-pair" id="torrent_password_option">
 								<label>
 									<span class="component-title" id="password_title">Client password</span>
 									<span class="component-desc">
@@ -578,8 +582,8 @@
 
 							<div class="field-pair" id="torrent_seed_time_option">
 								<label>
-									<span class="component-title">Minimum seeding time is</span>
-									<span class="component-desc"><input type="number" step="0.1" name="torrent_seed_time" id="torrent_seed_time" value="$sickbeard.TORRENT_SEED_TIME" class="form-control input-sm input100" />
+									<span class="component-title" id="torrent_seed_time_label">Minimum seeding time is</span>
+									<span class="component-desc"><input type="number" step="1" name="torrent_seed_time" id="torrent_seed_time" value="$sickbeard.TORRENT_SEED_TIME" class="form-control input-sm input100" />
 									<p>hours. (default:'0' passes blank to client and '-1' passes nothing)</p></span>
 								</label>
 							</div>
diff --git a/gui/slick/interfaces/default/editShow.tmpl b/gui/slick/interfaces/default/editShow.tmpl
index a48f482fe71cf9c6af5156f53b727da0583fd6e4..75ecfd07e9851ae85c1c80fc42adeacb23c49942 100644
--- a/gui/slick/interfaces/default/editShow.tmpl
+++ b/gui/slick/interfaces/default/editShow.tmpl
@@ -120,13 +120,13 @@
 
 <b>Ignored Words:</b></br>
 <input type="text" name="rls_ignore_words" id="rls_ignore_words" value="$show.rls_ignore_words" class="form-control form-control-inline input-sm input350" /><br />
-Results with any of these words in the title will be filtered out<br />
+Results with one or more word from this list will be ignored<br />
 Separate words with a comma, e.g. "word1,word2,word3"<br />
 <br />
 
 <b>Required Words:</b></br>
 <input type="text" name="rls_require_words" id="rls_require_words" value="$show.rls_require_words" class="form-control form-control-inline input-sm input350" /><br />
-Results without one of these words in the title will be filtered out <br />
+Results with no word from this list will be ignored<br />
 Separate words with a comma, e.g. "word1,word2,word3"<br />
 <br />
 
diff --git a/gui/slick/interfaces/default/errorlogs.tmpl b/gui/slick/interfaces/default/errorlogs.tmpl
index 08b21c22e8a023cc5fdaa44ad4ff59584f432383..e2299396518e4d92d20245bd0def627ffa437004 100644
--- a/gui/slick/interfaces/default/errorlogs.tmpl
+++ b/gui/slick/interfaces/default/errorlogs.tmpl
@@ -16,9 +16,11 @@
 #end if
 <div class="align-left"><pre>
 #if $classes.ErrorViewer.errors:
-#for $curError in sorted($classes.ErrorViewer.errors, key=lambda error: error.time, reverse=True)[:500]:
+    #for $curError in sorted($classes.ErrorViewer.errors, key=lambda error: error.time, reverse=True)[:500]:
+        #filter WebSafe
 $curError.time $curError.message
-#end for
+        #end filter
+    #end for
 #end if
 </pre>
 </div>
diff --git a/gui/slick/interfaces/default/inc_addShowOptions.tmpl b/gui/slick/interfaces/default/inc_addShowOptions.tmpl
index eded7f5fc5831560b08b1f1d1ba61b16be5c60af..98a46257a93ab699aaf9a6b10e225bd870e2afa0 100644
--- a/gui/slick/interfaces/default/inc_addShowOptions.tmpl
+++ b/gui/slick/interfaces/default/inc_addShowOptions.tmpl
@@ -16,7 +16,7 @@
         
         <div class="field-pair">
             <label for="statusSelect">
-                <span class="component-title">Set the initial status<br /> of missing episodes</span>
+                <span class="component-title">Set the initial status<br /> of already aired episodes</span>
                 <span class="component-desc">
                     <select name="defaultStatus" id="statusSelect" class="form-control form-control-inline input-sm">
                     #for $curStatus in [$SKIPPED, $WANTED, $ARCHIVED, $IGNORED]:
diff --git a/gui/slick/interfaces/default/viewlogs.tmpl b/gui/slick/interfaces/default/viewlogs.tmpl
index 09daab09239ce02d795d41ef8000c59e9a09b08c..885f0c6cd9b6cfc6be23afbf16638284b759781e 100644
--- a/gui/slick/interfaces/default/viewlogs.tmpl
+++ b/gui/slick/interfaces/default/viewlogs.tmpl
@@ -83,11 +83,13 @@ Filter log by: <select name="logFilter" id="logFilter" class="form-control form-
 #end for
 </select>
 Search log by:
-<input type="text" name="logSearch" id="logSearch" value="#if $logSearch then $logSearch else ""#" class="form-control form-control-inline input-sm" />
+<input type="text" name="logSearch" placeholder="clear to reset" id="logSearch" value="#if $logSearch then $logSearch else ""#" class="form-control form-control-inline input-sm" />
 </div>
 <br />
 <div class="align-left"><pre>
+#filter WebSafe
 $logLines
+#end filter
 </pre>
 </div>
 <br />
diff --git a/gui/slick/js/configSearch.js b/gui/slick/js/configSearch.js
index 4e9aef8489b78c134a9c7c6925386110006fa84b..f586a488c832b21043c42adbc7921a3f95083776 100644
--- a/gui/slick/js/configSearch.js
+++ b/gui/slick/js/configSearch.js
@@ -34,6 +34,28 @@ $(document).ready(function(){
         }
     }
 
+    $.fn.rtorrent_scgi = function(){
+    	var selectedProvider = $('#torrent_method :selected').val();
+    	
+    	if ('rtorrent' == selectedProvider) {
+    		var hostname = $('#torrent_host').prop('value');
+    		var isMatch = hostname.substr(0, 7) == "scgi://";
+    		
+    		if (isMatch) {
+    			$('#torrent_username_option').hide();
+    			$('#torrent_username').prop('value', '');
+        		$('#torrent_password_option').hide();
+    			$('#torrent_password').prop('value', '');
+    			$('#torrent_auth_type_option').hide();
+    			$("#torrent_auth_type option[value=none]").attr('selected', 'selected');
+    		} else {
+    			$('#torrent_username_option').show();
+        		$('#torrent_password_option').show();
+        		$('#torrent_auth_type_option').show();
+    		}
+    	}
+    }
+
     $.fn.torrent_method_handler = function() {
 
         $('#options_torrent_clients').hide();
@@ -71,7 +93,7 @@ $(document).ready(function(){
             $(torrent_verify_cert_option).hide();
             $(torrent_verify_deluge).hide();
             $(torrent_verify_rtorrent).hide();
-            $(torrent_auth_type).hide();
+            $(torrent_auth_type_option).hide();
             $(torrent_path_option).show();
             $(torrent_path_option).find('.fileBrowser').show();
             $(torrent_seed_time_option).hide();
@@ -81,14 +103,17 @@ $(document).ready(function(){
             $(path_synology).hide();
             $(torrent_paused_option).show();
             $(torrent_rpcurl_option).hide();
+            $(this).rtorrent_scgi
 
             if ('utorrent' == selectedProvider) {
                 client = 'uTorrent';
                 $(torrent_path_option).hide();
+                $('#torrent_seed_time_label').text('Minimum seeding time is');
                 $(torrent_seed_time_option).show();
                 $('#host_desc_torrent').text('URL to your uTorrent client (e.g. http://localhost:8000)');
             } else if ('transmission' == selectedProvider){
                 client = 'Transmission';
+                $('#torrent_seed_time_label').text('Stop seeding when inactive for');
                 $(torrent_seed_time_option).show();
                 $(torrent_high_bandwidth_option).show();
                 $(torrent_label_option).hide();
@@ -103,6 +128,8 @@ $(document).ready(function(){
                 $(torrent_verify_rtorrent).hide();
                 $(label_warning_deluge).show();
                 $(label_anime_warning_deluge).show();
+                $('#torrent_username_option').hide();
+                $('#torrent_username').prop('value', '');
                 $('#host_desc_torrent').text('URL to your Deluge client (e.g. http://localhost:8112)');
                 //$('#directory_title').text(client + directory);
             } else if ('download_station' == selectedProvider){
@@ -117,11 +144,11 @@ $(document).ready(function(){
             } else if ('rtorrent' == selectedProvider){
                 client = 'rTorrent';
                 $(torrent_paused_option).hide();
-                $('#host_desc_torrent').text('URL to your rTorrent client (e.g. scgi://localhost:5000 </br> or https://localhost/rutorrent/plugins/httprpc/action.php)');
+                $('#host_desc_torrent').text('URL to your rTorrent client (e.g. scgi://localhost:5000 <br/> or https://localhost/rutorrent/plugins/httprpc/action.php)');
                 $(torrent_verify_cert_option).show();
                 $(torrent_verify_deluge).hide();
                 $(torrent_verify_rtorrent).show();
-                $(torrent_auth_type).show();
+                $(torrent_auth_type_option).show();
                 //$('#directory_title').text(client + directory);
             }
             $('#host_title').text(client + host);
@@ -168,5 +195,6 @@ $(document).ready(function(){
         $.get(sbRoot + '/home/testTorrent', {'torrent_method': torrent_method, 'host': torrent_host, 'username': torrent_username, 'password': torrent_password},
         function (data){ $('#test_torrent_result').html(data); });
     });
-
+    
+    $('#torrent_host').change($(this).rtorrent_scgi);
 });
diff --git a/lib/feedcache/cache.py b/lib/feedcache/cache.py
index 6a3ad53e9bddfc2af44fde79dcd3191c859cc078..7520d51ae18c5416c852dd1ab5ba57975f40c0ab 100644
--- a/lib/feedcache/cache.py
+++ b/lib/feedcache/cache.py
@@ -98,7 +98,7 @@ class Cache:
                     del self.storage[url]
         return
 
-    def fetch(self, url, force_update=False, offline=False, request_headers=None, referrer=None):
+    def fetch(self, url, force_update=False, offline=False, request_headers=None, referrer=None, handlers=[]):
         """Return the feed at url.
 
         url - The URL of the feed.
@@ -116,6 +116,8 @@ class Cache:
 
         referrer=None - Added a referrer to request
 
+        handlers=[] - Urllib2 handlers
+
         If there is data for that feed in the cache already, check
         the expiration date before accessing the server.  If the
         cached data has not expired, return it without accessing the
@@ -180,7 +182,8 @@ class Cache:
                                          modified=modified,
                                          etag=etag,
                                          referrer=referrer,
-                                         request_headers=request_headers)
+                                         request_headers=request_headers,
+                                         handlers = handlers)
 
         status = parsed_result.get('status', None)
         logger.debug('HTTP status=%s' % status)
diff --git a/lib/shutil_custom/__init__.py b/lib/shutil_custom/__init__.py
index 32cdd9bf36c8b7f90c2a3d00b74d679112a6f74f..24991366296c137fc65b6b0136a33d8d4930992a 100644
--- a/lib/shutil_custom/__init__.py
+++ b/lib/shutil_custom/__init__.py
@@ -43,7 +43,7 @@ def copyfile_custom(src, dst):
         for x in iter(lambda: os.read(fin, BUFFER_SIZE), ""):
             os.write(fout, x)
     except Exception as e:
-        raise e
+        raise
     finally:
         try:
             os.close(fin)
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 0ed591483d544d01f585f3ae9052c471c82949d6..b70069de6f8416f5885bbb0d2555820b80cd6b00 100755
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -1469,7 +1469,7 @@ def halt():
                     ADBA_CONNECTION.join(10)
                 except:
                     pass
-
+            
             __INITIALIZED__ = False
             started = False
 
diff --git a/sickbeard/autoPostProcesser.py b/sickbeard/autoPostProcesser.py
index 956a13bd016a9df92abbe262de45a012e493bc44..4c43a80e218088f7892a8bd269baea72e937b527 100644
--- a/sickbeard/autoPostProcesser.py
+++ b/sickbeard/autoPostProcesser.py
@@ -17,7 +17,7 @@
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
 import os.path
-
+import threading
 import sickbeard
 
 from sickbeard import logger
@@ -26,19 +26,30 @@ from sickbeard import processTV
 
 
 class PostProcesser():
+    def __init__(self):
+        self.lock = threading.Lock()
+        self.amActive = False
+
     def run(self, force=False):
+
+        self.amActive = True
+        
         if not ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
             logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist",
                        logger.ERROR)
+            self.amActive = False
             return
 
         if not ek.ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR):
             logger.log(
                 u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)",
                 logger.ERROR)
+            self.amActive = False   
             return
 
         processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
+        
+        self.amActive = False
 
     def __del__(self):
         pass
diff --git a/sickbeard/clients/deluge.py b/sickbeard/clients/deluge.py
index 6946abd9e01629758fe6cc1cc05f210879b8d053..48282a5070aaeeb1dd34538355074f7f91921312 100644
--- a/sickbeard/clients/deluge.py
+++ b/sickbeard/clients/deluge.py
@@ -97,8 +97,7 @@ class DelugeAPI(GenericClient):
     def _add_torrent_uri(self, result):
 
         post_data = json.dumps({"method": "core.add_torrent_magnet",
-                                "params": [result.url, {"move_completed": "true",
-                                                        "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
+                                "params": [result.url, {}],
                                 "id": 2
         })
         self._request(method='post', data=post_data)
@@ -110,9 +109,7 @@ class DelugeAPI(GenericClient):
     def _add_torrent_file(self, result):
 
         post_data = json.dumps({"method": "core.add_torrent_file",
-                                "params": [result.name + '.torrent', b64encode(result.content),
-                                           {"move_completed": "true",
-                                            "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
+                                "params": [result.name + '.torrent', b64encode(result.content), {}],
                                 "id": 2
         })
         self._request(method='post', data=post_data)
diff --git a/sickbeard/clients/generic.py b/sickbeard/clients/generic.py
index aebecded3f039212402729e42910de7fbd336a0c..662eecbf5c73143417beb2e7615bc2114cd95e76 100644
--- a/sickbeard/clients/generic.py
+++ b/sickbeard/clients/generic.py
@@ -10,6 +10,7 @@ from sickbeard.clients import http_error_code
 from lib.bencode import bencode, bdecode
 from lib import requests
 from lib.requests import exceptions
+from lib.bencode.BTL import BTFailure
 
 class GenericClient(object):
     def __init__(self, name, host=None, username=None, password=None):
@@ -148,7 +149,17 @@ class GenericClient(object):
             if len(result.hash) == 32:
                 result.hash = b16encode(b32decode(result.hash)).lower()
         else:
-            info = bdecode(result.content)["info"]
+            try:
+                torrent_bdecode = bdecode(result.content)
+            except BTFailure as e:
+                logger.log('Unable to bdecode torrent', logger.ERROR)
+                logger.log('Torrent bencoded data: {0}'.format(str(result.content)), logger.DEBUG)
+                raise
+            try:
+                info = torrent_bdecode["info"]
+            except Exception as e:
+                logger.log('Unable to find info field in torrent', logger.ERROR)
+                raise
             result.hash = sha1(bencode(info)).hexdigest()
 
         return result
diff --git a/sickbeard/db.py b/sickbeard/db.py
index 8b119054e1042f77eb7cb2fca089c1600a42f0cc..6f1bc32eb6dee187c1a7a3a2868f987c200b616d 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -78,7 +78,7 @@ class DBConnection(object):
                 return self.connection.cursor().execute(query)
             return self.connection.cursor().execute(query, args)
         except Exception as e:
-            raise e
+            raise
 
     def execute(self, query, args=None, fetchall=False, fetchone=False):
         try:
@@ -89,7 +89,7 @@ class DBConnection(object):
             else:
                 return self._execute(query, args)
         except Exception as e:
-            raise e
+            raise
 
     def checkDBVersion(self):
 
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 63673b8842a361dc6f89c004bcc10bae8df9327d..773617c5481cf8ee13ada85e95e2f071b2186c62 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -36,6 +36,8 @@ import base64
 import zipfile
 import datetime
 import errno
+import ast
+import operator
 
 import sickbeard
 import subliminal
@@ -353,8 +355,8 @@ def listMediaFiles(path):
     return files
 
 
-def copyFile(srcFile, destFile):
-    ek.ek(shutil.copyfile, srcFile, destFile)
+def copyFile(srcFile, destFile):
+    ek.ek(shutil.copyfile, srcFile, destFile)
     try:
         ek.ek(shutil.copymode, srcFile, destFile)
     except OSError:
@@ -705,6 +707,38 @@ def sanitizeSceneName(name, ezrss=False, anime=False):
         return ''
 
 
+_binOps = {
+    ast.Add: operator.add,
+    ast.Sub: operator.sub,
+    ast.Mult: operator.mul,
+    ast.Div: operator.div,
+    ast.Mod: operator.mod
+}
+
+
+def arithmeticEval(s):
+    """
+    A safe eval supporting basic arithmetic operations.
+
+    :param s: expression to evaluate
+    :return: value
+    """
+    node = ast.parse(s, mode='eval')
+
+    def _eval(node):
+        if isinstance(node, ast.Expression):
+            return _eval(node.body)
+        elif isinstance(node, ast.Str):
+            return node.s
+        elif isinstance(node, ast.Num):
+            return node.n
+        elif isinstance(node, ast.BinOp):
+            return _binOps[type(node.op)](_eval(node.left), _eval(node.right))
+        else:
+            raise Exception('Unsupported type {}'.format(node))
+
+    return _eval(node.body)
+
 def create_https_certificates(ssl_cert, ssl_key):
     """
     Create self-signed HTTPS certificares and store in paths 'ssl_cert' and 'ssl_key'
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index 9b2b7dfa1ab68d54cb41c081cbbbe4272cd7552b..1d3de0a2284429c81bc37d3482c584e597c41230 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -24,6 +24,7 @@ import logging
 import logging.handlers
 import threading
 import platform
+import locale
 
 import sickbeard
 from sickbeard import classes, encodingKludge as ek
@@ -120,7 +121,11 @@ class Logger(object):
 
             for logger in self.loggers:
                 logger.addHandler(rfh)
-
+                
+    def shutdown(self):
+        
+        logging.shutdown()
+        
     def log(self, msg, level=INFO, *args, **kwargs):
         meThread = threading.currentThread().getName()
         message = meThread + u" :: " + msg
@@ -156,16 +161,28 @@ class Logger(object):
         try:
             # read log file
             log_data = None
-            if self.logFile and os.path.isfile(self.logFile):
+
+            if os.path.isfile(self.logFile):
                 with ek.ek(codecs.open, *[self.logFile, 'r', 'utf-8']) as f:
                     log_data = f.readlines()
-                log_data = [line for line in reversed(log_data)]
+                    
+            for i in range (1 , int(sickbeard.LOG_NR)):
+                if os.path.isfile(self.logFile + "." + str(i)) and (log_data is not None and len(log_data) <= 500):
+                    with ek.ek(codecs.open, *[self.logFile + "." + str(i), 'r', 'utf-8']) as f:
+                            log_data += f.readlines()
+
+            log_data = [line for line in reversed(log_data or [])]
 
             # parse and submit errors to issue tracker
             for curError in sorted(classes.ErrorViewer.errors, key=lambda error: error.time, reverse=True)[:500]:
                 if not curError.title:
                     continue
 
+                if len(curError.title) > 1024:
+                    title_Error = str(curError.title[0:1024])
+                else:
+                    title_Error = str(curError.title)
+
                 gist = None
                 regex = "^(%s)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$" % curError.time
                 for i, x in enumerate(log_data):
@@ -178,14 +195,23 @@ class Logger(object):
                             if paste_data:
                                 gist = gh.get_user().create_gist(True, {"sickrage.log": InputFileContent(paste_data)})
                             break
+                    else:
+                        gist = 'No ERROR found'
 
                 message = u"### INFO\n"
                 message += u"Python Version: **" + sys.version[:120] + "**\n"
                 message += u"Operating System: **" + platform.platform() + "**\n"
+                if not 'Windows' in platform.platform():
+                    try:
+                        message += u"Locale: " + locale.getdefaultlocale()[1] + "\n"
+                    except:
+                        message += u"Locale: unknown" + "\n"                        
                 message += u"Branch: **" + sickbeard.BRANCH + "**\n"
                 message += u"Commit: SiCKRAGETV/SickRage@" + sickbeard.CUR_COMMIT_HASH + "\n"
-                if gist:
+                if gist and gist != 'No ERROR found':
                     message += u"Link to Log: " + gist.html_url + "\n"
+                else:
+                    message += u"No Log available with ERRORS: " + "\n"
                 message += u"### ERROR\n"
                 message += u"```\n"
                 message += curError.message + "\n"
@@ -193,7 +219,7 @@ class Logger(object):
                 message += u"---\n"
                 message += u"_STAFF NOTIFIED_: @SiCKRAGETV/owners @SiCKRAGETV/moderators"
 
-                issue = gh.get_organization(gh_org).get_repo(gh_repo).create_issue("[APP SUBMITTED]: " + str(curError.title), message)
+                issue = gh.get_organization(gh_org).get_repo(gh_repo).create_issue("[APP SUBMITTED]: " + title_Error, message)
                 if issue:
                     self.log('Your issue ticket #%s was submitted successfully!' % issue.number)
 
diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py
index 0133df688e539b16d954ef6d7eaa45839103968e..6070c10950fb2d93547d24300e188f88bbaf4c1f 100644
--- a/sickbeard/metadata/generic.py
+++ b/sickbeard/metadata/generic.py
@@ -767,7 +767,8 @@ class GenericMetadata():
             indexer_show_obj = t[show_obj.indexerid]
         except (sickbeard.indexer_error, IOError), e:
             logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
-                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
+                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING)
+            logger.log(u"Indexer " + sickbeard.indexerApi(show_obj.indexer).name + " may be experiencing some problems. Try again later", logger.DEBUG)
             return None
 
         if image_type not in ('fanart', 'poster', 'banner', 'poster_thumb', 'banner_thumb'):
@@ -836,7 +837,8 @@ class GenericMetadata():
             indexer_show_obj = t[show_obj.indexerid]
         except (sickbeard.indexer_error, IOError), e:
             logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
-                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
+                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING)
+            logger.log(u"Indexer " + sickbeard.indexerApi(show_obj.indexer).name + " may be experiencing some problems. Try again later", logger.DEBUG)
             return result
 
         # if we have no season banners then just finish
@@ -889,7 +891,8 @@ class GenericMetadata():
             indexer_show_obj = t[show_obj.indexerid]
         except (sickbeard.indexer_error, IOError), e:
             logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
-                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
+                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING)
+            logger.log(u"Indexer " + sickbeard.indexerApi(show_obj.indexer).name + " may be experiencing some problems. Try again later", logger.DEBUG)
             return result
 
         # if we have no season banners then just finish
@@ -1032,4 +1035,4 @@ class GenericMetadata():
         except Exception as e:
             pass
 
-        logger.log(u"Could not find any " + type + " images on Fanart.tv for " + show.name, logger.DEBUG)
\ No newline at end of file
+        logger.log(u"Could not find any " + type + " images on Fanart.tv for " + show.name, logger.DEBUG)
diff --git a/sickbeard/metadata/mede8er.py b/sickbeard/metadata/mede8er.py
index c71395a7cf6dc36fb557143fb9fb6c4661a94d52..b4a9d7f645a6efea07da282fcd7f225d406df168 100644
--- a/sickbeard/metadata/mede8er.py
+++ b/sickbeard/metadata/mede8er.py
@@ -17,6 +17,7 @@
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
 import datetime
+import os.path
 
 import sickbeard
 
@@ -24,6 +25,7 @@ import mediabrowser
 
 from sickbeard import logger, exceptions, helpers
 from sickbeard.exceptions import ex
+from sickbeard import encodingKludge as ek
 
 try:
     import xml.etree.cElementTree as etree
@@ -355,6 +357,93 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata):
 
         return data
 
+    def write_show_file(self, show_obj):
+        """
+        Generates and writes show_obj's metadata under the given path to the
+        filename given by get_show_file_path()
+
+        show_obj: TVShow object for which to create the metadata
+
+        path: An absolute or relative path where we should put the file. Note that
+                the file name will be the default show_file_name.
+
+        Note that this method expects that _show_data will return an ElementTree
+        object. If your _show_data returns data in another format you'll need to
+        override this method.
+        """
+
+        data = self._show_data(show_obj)
+
+        if not data:
+            return False
+
+        nfo_file_path = self.get_show_file_path(show_obj)
+        nfo_file_dir = ek.ek(os.path.dirname, nfo_file_path)
+
+        try:
+            if not ek.ek(os.path.isdir, nfo_file_dir):
+                logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG)
+                ek.ek(os.makedirs, nfo_file_dir)
+                helpers.chmodAsParent(nfo_file_dir)
+
+            logger.log(u"Writing show nfo file to " + nfo_file_path, logger.DEBUG)
+
+            nfo_file = ek.ek(open, nfo_file_path, 'w')
+
+            data.write(nfo_file, encoding="utf-8", xml_declaration=True)
+            nfo_file.close()
+            helpers.chmodAsParent(nfo_file_path)
+        except IOError, e:
+            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
+                       logger.ERROR)
+            return False
+
+        return True
+    
+    def write_ep_file(self, ep_obj):
+        """
+        Generates and writes ep_obj's metadata under the given path with the
+        given filename root. Uses the episode's name with the extension in
+        _ep_nfo_extension.
+
+        ep_obj: TVEpisode object for which to create the metadata
+
+        file_name_path: The file name to use for this metadata. Note that the extension
+                will be automatically added based on _ep_nfo_extension. This should
+                include an absolute path.
+
+        Note that this method expects that _ep_data will return an ElementTree
+        object. If your _ep_data returns data in another format you'll need to
+        override this method.
+        """
+
+        data = self._ep_data(ep_obj)
+
+        if not data:
+            return False
+
+        nfo_file_path = self.get_episode_file_path(ep_obj)
+        nfo_file_dir = ek.ek(os.path.dirname, nfo_file_path)
+
+        try:
+            if not ek.ek(os.path.isdir, nfo_file_dir):
+                logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG)
+                ek.ek(os.makedirs, nfo_file_dir)
+                helpers.chmodAsParent(nfo_file_dir)
+
+            logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG)
+
+            nfo_file = ek.ek(open, nfo_file_path, 'w')
+
+            data.write(nfo_file, encoding="utf-8", xml_declaration = True)
+            nfo_file.close()
+            helpers.chmodAsParent(nfo_file_path)
+        except IOError, e:
+            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
+                       logger.ERROR)
+            return False
+
+        return True
 
 # present a standard "interface" from the module
 metadata_class = Mede8erMetadata
diff --git a/sickbeard/name_parser/regexes.py b/sickbeard/name_parser/regexes.py
index 74852087e857d5dc7de5030c18b9a3a178aa120c..b1a9d6eba4212883d85be187a75d46c426d365dc 100644
--- a/sickbeard/name_parser/regexes.py
+++ b/sickbeard/name_parser/regexes.py
@@ -221,8 +221,24 @@ anime_regexes = [
      (?:(?:(?:[\[\(])(?P<extra_info>\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)(?:[\]\)]))|(?:\d{3,4}[xp]))
      (?:[ ._]?\[(?P<crc>\w+)\])?
      .*?
-     """
-    ),
+     """),
+
+    ('anime_Kaerizaki-Fansub',
+     # [Kaerizaki-Fansub]_One_Piece_679_[VOSTFR][HD_1280x720].mp4
+     # [Kaerizaki-Fansub]_One_Piece_681_[VOSTFR][HD_1280x720]_V2.mp4
+     # [Kaerizaki-Fansub] High School DxD New 04 VOSTFR HD (1280x720) V2.mp4
+     # [Kaerizaki-Fansub] One Piece 603 VOSTFR PS VITA (960x544) V2.mp4
+     '''
+     ^\[(?P<release_group>Kaerizaki-Fansub?)\][ ._-]*                         # Release Group and separator
+     (?P<series_name>.+?)[ ._-]+                                              # Show_Name and separator
+     (?P<ep_ab_num>((?!\[VOSTFR|VOSTFR))\d{1,3})                              # Episode number
+     (-(?P<extra_ab_ep_num>((?!\[VOSTFR|VOSTFR))\d{1,3}))?                    # Extra episode number
+     ([ ._](\[VOSTFR\]|VOSTFR))?
+     (\[|[ ._])?(?P<extra_info>([SH]D_\d{3,4}x\d{3,4}|((SD|HD|PS\sVITA)[ ._]\(\d{3,4}x\d{3,4}\))))(\])?         # Extra info
+     ([ ._][vV](?P<version>[0-9]))?                                           # Version
+     .*?                                                                      # Separator and EOL
+     '''),
+
     ('anime_standard',
      # [Group Name] Show Name.13-14
      # [Group Name] Show Name - 13-14
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index 056dc4c27c7194ce9469c1a767fb3304a770b38b..fcfb9418d3186d4001fa2a623d3d35c45943f8a6 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -327,7 +327,7 @@ class PostProcessor(object):
                 helpers.chmodAsParent(new_file_path)
             except (IOError, OSError), e:
                 self._log("Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e), logger.ERROR)
-                raise e
+                raise
 
         self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move,
                                       subtitles=subtitles)
@@ -348,7 +348,7 @@ class PostProcessor(object):
                 helpers.chmodAsParent(new_file_path)
             except (IOError, OSError), e:
                 logger.log("Unable to copy file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
-                raise e
+                raise
 
         self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy,
                                       subtitles=subtitles)
@@ -370,7 +370,7 @@ class PostProcessor(object):
                 helpers.chmodAsParent(new_file_path)
             except (IOError, OSError), e:
                 self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
-                raise e
+                raise
 
         self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link)
 
@@ -390,7 +390,7 @@ class PostProcessor(object):
                 helpers.chmodAsParent(new_file_path)
             except (IOError, OSError), e:
                 self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
-                raise e
+                raise
 
         self._combined_file_operation(file_path, new_path, new_base_name, associated_files,
                                       action=_int_move_and_sym_link)
diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py
index 4243b5cb8821dc17b66b18f9310605ac1523435c..131d3e3559af4c17d0ffc6b2ebe0e7ada9655ae3 100644
--- a/sickbeard/processTV.py
+++ b/sickbeard/processTV.py
@@ -61,15 +61,22 @@ def delete_folder(folder, check_empty=True):
     if check_empty:
         check_files = ek.ek(os.listdir, folder)
         if check_files:
+            logger.log(u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO)
+            return False
+        
+        try:
+            logger.log(u"Deleting folder (if it's empty): " + folder)
+            os.rmdir(folder)
+        except (OSError, IOError), e:
+            logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
+            return False
+    else:
+        try:
+            logger.log(u"Deleting folder: " + folder)
+            shutil.rmtree(folder)
+        except (OSError, IOError), e:
+            logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
             return False
-
-    # try deleting folder
-    try:
-        logger.log(u"Deleting folder: " + folder)
-        shutil.rmtree(folder)
-    except (OSError, IOError), e:
-        logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
-        return False
 
     return True
 
@@ -151,6 +158,7 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
     # Don't post process if files are still being synced and option is activated
     if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
         result.output += logHelper(u"Found temporary sync files, skipping post processing", logger.WARNING)
+        result.output += logHelper(u"Sync Files: " + str(SyncFiles) + " in path " + path, logger.WARNING)
         return result.output
 
     result.output += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
@@ -198,12 +206,16 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
         result.result = True
 
         for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False):
-
+            
+            if (not validateDir(path, processPath, nzbNameOriginal, failed, result)):
+                continue
+            
             SyncFiles = filter(helpers.isSyncFile, fileList)
 
             # Don't post process if files are still being synced and option is activated
             if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
                 result.output += logHelper(u"Found temporary sync files, skipping post processing", logger.WARNING)
+                result.output += logHelper(u"Sync Files: " + str(SyncFiles) + " in path " + processPath, logger.WARNING)
                 return result.output
 
             rarFiles = filter(helpers.isRarFile, fileList)
@@ -212,6 +224,7 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
             videoFiles = filter(helpers.isMediaFile, fileList)
             videoInRar = filter(helpers.isMediaFile, rarContent)
             notwantedFiles = [x for x in fileList if x not in videoFiles]
+            result.output += logHelper(u"Found unwanted files: " + str(notwantedFiles), logger.INFO)
 
             #Don't Link media when the media is extracted from a rar in the same path
             if process_method in ('hardlink', 'symlink') and videoInRar:
@@ -237,7 +250,7 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
                 if process_method == "move" and \
                                 ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath,
                                                                               sickbeard.TV_DOWNLOAD_DIR):
-                    if delete_folder(processPath, check_empty=False):
+                    if delete_folder(processPath, check_empty=True):
                         result.output += logHelper(u"Deleted folder: " + processPath, logger.DEBUG)
 
     if result.result:
@@ -372,7 +385,7 @@ def unRAR(path, rarFiles, force, result):
                 result.result = False
                 continue
             except NoFileToExtract:
-                result.output += logHelper(u"Failed Unrar archive {0}: Unrar: No file to extract, file already exist?".format(archive), logger.ERROR)
+                result.output += logHelper(u"Failed Unrar archive {0}: Unrar: No file extracted, check the parent folder and destination file permissions.".format(archive), logger.ERROR)
                 result.result = False
                 continue
             except GenericRARError:
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 246db5c3d5c857bcf7da8950d70d4c8088849525..21da6aa58cc757f6a37d074198c980aa99e8a428 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -20,6 +20,8 @@ import re
 import traceback
 import datetime
 import urlparse
+import itertools
+
 import sickbeard
 import generic
 from sickbeard.common import Quality
@@ -37,6 +39,7 @@ from sickbeard.bs4_parser import BS4Parser
 from lib.unidecode import unidecode
 from sickbeard.helpers import sanitizeSceneName
 from sickbeard.show_name_helpers import allPossibleShowNames
+from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 
 
 class IPTorrentsProvider(generic.TorrentProvider):
@@ -102,21 +105,6 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
     def _get_episode_search_strings(self, ep_obj, add_string=''):
 
         search_string = {'Episode': []}
@@ -151,7 +139,192 @@ class IPTorrentsProvider(generic.TorrentProvider):
         return [search_string]
 
     def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
-        return generic.TorrentProvider.findSearchResults(self, show, episodes, 'eponly', manualSearch)
+
+        self._checkAuth()
+        self.show = show
+
+        results = {}
+        itemList = []
+
+        if search_mode == 'sponly':
+            logger.log(u"This provider doesn't support season pack. Consider setting Season search mode to episodes only and unchecking Season search fallback", logger.WARNING)
+            search_mode = 'eponly'
+
+        for epObj in episodes:
+            # search cache for episode result
+            cacheResult = self.cache.searchCache(epObj, manualSearch)
+            if cacheResult:
+                if epObj.episode not in results:
+                    results[epObj.episode] = cacheResult
+                else:
+                    results[epObj.episode].extend(cacheResult)
+
+                # found result, search next episode
+                continue
+
+            for curString in self._get_episode_search_strings(epObj):
+                itemList += self._doSearch(curString, 'eponly', len(episodes))
+
+        # if we found what we needed already from cache then return results and exit
+        if len(results) == len(episodes):
+            return results
+
+        # sort list by quality
+        if len(itemList):
+            items = {}
+            itemsUnknown = []
+            for item in itemList:
+                quality = self.getQuality(item, anime=show.is_anime)
+                if quality == Quality.UNKNOWN:
+                    itemsUnknown += [item]
+                else:
+                    if quality not in items:
+                        items[quality] = [item]
+                    else:
+                        items[quality].append(item)
+
+            itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
+            itemList += itemsUnknown if itemsUnknown else []
+
+        # filter results
+        cl = []
+        for item in itemList:
+            (title, url) = self._get_title_and_url(item)
+
+            # parse the file name
+            try:
+                myParser = NameParser(False, convert=True)
+                parse_result = myParser.parse(title)
+            except InvalidNameException:
+                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)
+                continue
+            except InvalidShowException:
+                logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
+                continue
+
+            showObj = parse_result.show
+            quality = parse_result.quality
+            release_group = parse_result.release_group
+            version = parse_result.version
+
+            addCacheEntry = False
+            if not (showObj.air_by_date or showObj.sports):
+                if search_mode == 'sponly': 
+                    if len(parse_result.episode_numbers):
+                        logger.log(
+                            u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
+                            logger.DEBUG)
+                        addCacheEntry = True
+                    if len(parse_result.episode_numbers) and (
+                                    parse_result.season_number not in set([ep.season for ep in episodes]) or not [ep for ep in episodes if
+                                                                                 ep.scene_episode in parse_result.episode_numbers]):
+                        logger.log(
+                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
+                            logger.DEBUG)
+                        addCacheEntry = True
+                else:
+                    if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in
+                                                                                                     episodes if
+                                                                                                     ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
+                        logger.log(
+                            u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
+                            logger.DEBUG)
+                        addCacheEntry = True
+                    elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
+                                                                    ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
+                        logger.log(
+                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
+                            logger.DEBUG)
+                        addCacheEntry = True
+
+                if not addCacheEntry:
+                    # we just use the existing info for normal searches
+                    actual_season = parse_result.season_number
+                    actual_episodes = parse_result.episode_numbers
+            else:
+                if not (parse_result.is_air_by_date):
+                    logger.log(
+                        u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
+                        logger.DEBUG)
+                    addCacheEntry = True
+                else:
+                    airdate = parse_result.air_date.toordinal()
+                    myDB = db.DBConnection()
+                    sql_results = myDB.select(
+                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
+                        [showObj.indexerid, airdate])
+
+                    if len(sql_results) != 1:
+                        logger.log(
+                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
+                            logger.WARNING)
+                        addCacheEntry = True
+
+                if not addCacheEntry:
+                    actual_season = int(sql_results[0]["season"])
+                    actual_episodes = [int(sql_results[0]["episode"])]
+
+            # add parsed result to cache for usage later on
+            if addCacheEntry:
+                logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
+                ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
+                if ci is not None:
+                    cl.append(ci)
+                continue
+
+            # make sure we want the episode
+            wantEp = True
+            for epNo in actual_episodes:
+                if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch):
+                    wantEp = False
+                    break
+
+            if not wantEp:
+                logger.log(
+                    u"Ignoring result " + title + " because we don't want an episode that is " +
+                    Quality.qualityStrings[
+                        quality], logger.DEBUG)
+
+                continue
+
+            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
+
+            # make a result object
+            epObj = []
+            for curEp in actual_episodes:
+                epObj.append(showObj.getEpisode(actual_season, curEp))
+
+            result = self.getResult(epObj)
+            result.show = showObj
+            result.url = url
+            result.name = title
+            result.quality = quality
+            result.release_group = release_group
+            result.version = version
+            result.content = None
+
+            if len(epObj) == 1:
+                epNum = epObj[0].episode
+                logger.log(u"Single episode result.", logger.DEBUG)
+            elif len(epObj) > 1:
+                epNum = MULTI_EP_RESULT
+                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
+                    parse_result.episode_numbers), logger.DEBUG)
+            elif len(epObj) == 0:
+                epNum = SEASON_RESULT
+                logger.log(u"Separating full season result to check for later", logger.DEBUG)
+
+            if epNum not in results:
+                results[epNum] = [result]
+            else:
+                results[epNum].append(result)
+
+        # check if we have items to add to cache
+        if len(cl) > 0:
+            myDB = self.cache._getDB()
+            myDB.mass_action(cl)
+
+        return results
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
 
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index 60400539b9685e8dcb6872162069ffba51fc933b..1c843909062efde366d0571c768c2bab78f40a9e 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -1,4 +1,3 @@
-# -*- coding: latin-1 -*-
 # Author: djoole <bobby.djoole@gmail.com>
 # Author: CoRpO <corpo@gruk.org>
 # URL: http://code.google.com/p/sickbeard/
@@ -19,19 +18,16 @@
 # along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
 
 import traceback
-import time
 import re
 import datetime
+import urllib
+
 import sickbeard
 import generic
-import cookielib
-import urllib
-import urllib2
 
 from lib import requests
-from lib.requests import exceptions
 
-from sickbeard.common import USER_AGENT, Quality, cpu_presets
+from sickbeard.common import Quality
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard import show_name_helpers
@@ -40,7 +36,6 @@ from sickbeard import db
 from sickbeard import helpers
 from sickbeard import classes
 from sickbeard.helpers import sanitizeSceneName
-from sickbeard.exceptions import ex
 
 
 class RarbgProvider(generic.TorrentProvider):
@@ -48,36 +43,48 @@ class RarbgProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, "Rarbg")
 
-        self.supportsBacklog = True
         self.enabled = False
 
-        self.cache = RarbgCache(self)
+        self.supportsBacklog = True
 
         self.ratio = None
 
-        self.cookies = cookielib.CookieJar()
-	self.cookie = cookielib.Cookie(version=0, name='7fAY799j', value='VtdTzG69', port=None, port_specified=False, domain='rarbg.com', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
-	self.cookies.set_cookie(self.cookie)
-        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookies))
-        self.opener.addheaders=[('User-agent', 'Mozilla/5.0')]
+        self.cache = RarbgCache(self)
 
-        self.urls = {'base_url': 'https://rarbg.com/torrents.php',
-                'search': 'https://rarbg.com/torrents.php?search=%s&category=%s&page=%s',
-                'download': 'https://rarbg.com/download.php?id=%s&f=%s',
-                }
+        self.urls = {'url': 'https://rarbg.com',
+                     'base_url': 'https://rarbg.com/torrents.php',
+                     'search': 'https://rarbg.com/torrents.php?search=%s&category=%s&page=%s',
+                     'download': 'https://rarbg.com/download.php?id=%s&f=%s',
+                     }
 
         self.url = self.urls['base_url']
 
         self.subcategories = [18,41]
         self.pages = [1,2,3,4,5]
 
-
-    def getURL(self, url, post_data=None, params=None, timeout=30, json=False):
-        logger.log(u"Rarbg downloading url :" + url, logger.DEBUG)
-	request = urllib2.Request(url)
-	content = self.opener.open(request)
-	return content.read()
-
+        self.cookie = {
+            "version": 0,
+            "name": '7fAY799j',
+            "value": 'VtdTzG69',
+            "port": None,
+            # "port_specified": False,
+            "domain": 'rarbg.com',
+            # "domain_specified": False,
+            # "domain_initial_dot": False,
+            "path": '/',
+            # "path_specified": True,
+            "secure": False,
+            "expires": None,
+            "discard": True,
+            "comment": None,
+            "comment_url": None,
+            "rest": {},
+            "rfc2109": False
+        }
+
+        self.session = requests.session()
+        self.session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36'})
+        self.session.cookies.set(**self.cookie)
 
     def isEnabled(self):
         return self.enabled
@@ -89,26 +96,6 @@ class RarbgProvider(generic.TorrentProvider):
         quality = Quality.sceneQuality(item[0], anime)
         return quality
 
-#    def _doLogin(self):
-#        login_params = {'login': self.username,
-#                        'password': self.password,
-#        }
-#
-#        self.session = requests.Session()
-#
-#        try:
-#            response = self.session.post(self.urls['login_page'], data=login_params, timeout=30, verify=False)
-#            response = self.session.get(self.urls['base_url'], timeout=30, verify=False)
-#        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
-#            logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
-#            return False
-#
-#        if not re.search('/users/logout/', response.text.lower()):
-#            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
-#            return False
-#
-#        return True
-
     def _get_season_search_strings(self, ep_obj):
 
         search_string = {'Season': []}
@@ -162,12 +149,6 @@ class RarbgProvider(generic.TorrentProvider):
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
-	# Get cookie
-	#dummy = self.getURL(self.url)
-
-#        if not self._doLogin():
-#            return results
-
         for mode in search_params.keys():
 
             for search_string in search_params[mode]:
@@ -176,7 +157,7 @@ class RarbgProvider(generic.TorrentProvider):
 
                     for page in self.pages:
 
-                        searchURL = self.urls['search'] % (urllib.quote(search_string.encode('UTF-8')), sc, page)
+                        searchURL = self.urls['search'] % (search_string.encode('UTF-8'), sc, page)
                         logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
 
                         data = self.getURL(searchURL)
@@ -188,7 +169,7 @@ class RarbgProvider(generic.TorrentProvider):
                                 resultsTable = html.find('table', attrs={'class': 'lista2t'})
 
                                 if not resultsTable:
-                                    logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+                                    logger.log(u"Data returned from " + self.name + " do not contains any torrent",
                                                logger.DEBUG)
                                     continue
 
@@ -274,12 +255,14 @@ class RarbgProvider(generic.TorrentProvider):
     def seedRatio(self):
         return self.ratio
 
+
 class RarbgCache(tvcache.TVCache):
     def __init__(self, provider):
+
         tvcache.TVCache.__init__(self, provider)
 
-        # Only poll Rarbg every 30 minutes max
-        self.minTime = 30
+        # Only poll RARbg every 15 minutes max
+        self.minTime = 15
 
     def _getRSSData(self):
         search_params = {'RSS': ['']}
diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py
index 97f2ffae658463db14723fb5999802bce3ebfbd9..1ed51f0a0ad0cda365abfaefdfd0086824ce4487 100644
--- a/sickbeard/providers/t411.py
+++ b/sickbeard/providers/t411.py
@@ -34,7 +34,7 @@ from sickbeard.bs4_parser import BS4Parser
 from sickbeard import db
 from sickbeard import helpers
 from sickbeard import classes
-from sickbeard.helpers import sanitizeSceneName
+from sickbeard.helpers import sanitizeSceneName, arithmeticEval
 from sickbeard.exceptions import ex
 
 
@@ -84,12 +84,49 @@ class T411Provider(generic.TorrentProvider):
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
 
+        if re.search('confirmer le captcha', response.text.lower()):
+            logger.log(u'Too many login attempts. A captcha is displayed.', logger.INFO)
+            response = self.solveCaptcha(response, login_params)
+
         if not re.search('/users/logout/', response.text.lower()):
             logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
             return False
 
         return True
 
+    def solveCaptcha(self, response, login_params):
+        """
+        When logging in too many times with a wrong password, a captcha can be requested.
+        This captcha is simple arithmetic and can be solved programmatically.
+
+        <label for="pass">204 + 65 = </label>
+            <input type="text" size="40" name="captchaAnswer" id="lgn" value=""/>
+            <input type="hidden" name="captchaQuery" value="204 + 65 = ">
+            <input type="hidden" name="captchaToken" value="005d54a7428aaf587460207408e92145">
+        <br/>
+
+        :param response: initial login output
+        :return: the login response obtained after submitting the captcha answer
+        """
+        with BS4Parser(response.text, features=["html5lib", "permissive"]) as html:
+            query = html.find('input', {'name': 'captchaQuery'})
+            token = html.find('input', {'name': 'captchaToken'})
+            if not query or not token:
+                logger.log(u'Unable to solve login captcha.', logger.ERROR)
+                return response
+
+            query_expr = query.attrs['value'].strip('= ')
+            logger.log(u'Captcha query: ' + query_expr, logger.DEBUG)
+            answer = arithmeticEval(query_expr)
+
+            logger.log(u'Captcha answer: %s' % answer, logger.DEBUG)
+
+            login_params['captchaAnswer'] = answer
+            login_params['captchaQuery'] = query.attrs['value']
+            login_params['captchaToken'] = token.attrs['value']
+
+            return self.session.post(self.urls['login_page'], data=login_params, timeout=30, verify=False, headers=self.headers)
+
     def _get_season_search_strings(self, ep_obj):
 
         search_string = {'Season': []}
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index e74d77ba47640de6e4de5c6d39f82e1d2b4f8acd..b0d86a50a756d42f1834e99c8496fe4bf7d7a1b4 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -54,10 +54,10 @@ class TorrentDayProvider(generic.TorrentProvider):
 
         self.cache = TorrentDayCache(self)
 
-        self.urls = {'base_url': 'https://torrentday.eu',
-                'login': 'https://torrentday.eu/torrents/',
-                'search': 'https://torrentday.eu/V3/API/API.php',
-                'download': 'https://torrentday.eu/download.php/%s/%s'
+        self.urls = {'base_url': 'https://tdonline.org',
+                'login': 'https://tdonline.org/torrents/',
+                'search': 'https://tdonline.org/V3/API/API.php',
+                'download': 'https://tdonline.org/download.php/%s/%s'
         }
 
         self.url = self.urls['base_url']
@@ -200,11 +200,13 @@ class TorrentDayProvider(generic.TorrentProvider):
 
                 parsedJSON = self.getURL(self.urls['search'], post_data=post_data, json=True)
                 if not parsedJSON:
+                    logger.log(u"No result returned for {0}".format(search_string), logger.DEBUG)
                     continue
 
                 try:
                     torrents = parsedJSON.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
                 except:
+                    logger.log(u"No torrents found in JSON for {0}".format(search_string), logger.DEBUG)
                     continue
 
                 for torrent in torrents:
@@ -214,10 +216,12 @@ class TorrentDayProvider(generic.TorrentProvider):
                     seeders = int(torrent['seed'])
                     leechers = int(torrent['leech'])
 
-                    if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+                    if not title or not url:
+                        logger.log(u"Discarding torrent because there's no title or url", logger.DEBUG)
                         continue
 
-                    if not title or not url:
+                    if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                         continue
 
                     item = title, url, seeders, leechers
diff --git a/sickbeard/rssfeeds.py b/sickbeard/rssfeeds.py
index a3bc3432b01eaba731c5cf2db83846f31318342e..f87587bc63bb9de957e918a7ad3bddcf208495fd 100644
--- a/sickbeard/rssfeeds.py
+++ b/sickbeard/rssfeeds.py
@@ -31,14 +31,14 @@ class RSSFeeds:
         finally:
             self.rssDB.close()
 
-    def getFeed(self, url, post_data=None, request_headers=None, items=None):
+    def getFeed(self, url, post_data=None, request_headers=None, items=None, handlers=[]):
 
 
         if post_data:
             url += urllib.urlencode(post_data)
 
         try:
-            resp = Cache(self.rssDB).fetch(url, force_update=True, request_headers=request_headers)
+            resp = Cache(self.rssDB).fetch(url, force_update=True, request_headers=request_headers, handlers=handlers)
         finally:
             self.rssDB.close()
 
diff --git a/sickbeard/search.py b/sickbeard/search.py
index aa6995b0673d0fa9e8e1f9d1c1cdc19e9b782d7d..e1759f413b25cb21f5f506f6aa8daeffb254d0bf 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -181,25 +181,6 @@ def snatchEpisode(result, endStatus=SNATCHED):
     return True
 
 
-def filter_release_name(name, filter_words):
-    """
-    Filters out results based on filter_words
-
-    name: name to check
-    filter_words : Words to filter on, separated by comma
-
-    Returns: False if the release name is OK, True if it contains one of the filter_words
-    """
-    if filter_words:
-        filters = [re.compile('.*%s.*' % filter.strip(), re.I) for filter in filter_words.split(',')]
-        for regfilter in filters:
-            if regfilter.search(name):
-                logger.log(u"" + name + " contains pattern: " + regfilter.pattern, logger.DEBUG)
-                return True
-
-    return False
-
-
 def pickBestResult(results, show, quality_list=None):
     results = results if isinstance(results, list) else [results]
 
@@ -240,12 +221,12 @@ def pickBestResult(results, show, quality_list=None):
             logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
             continue
 
-        if show.rls_ignore_words and filter_release_name(cur_result.name, cur_result.show.rls_ignore_words):
+        if show.rls_ignore_words and show_name_helpers.containsAtLeastOneWord(cur_result.name, cur_result.show.rls_ignore_words):
             logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
                        logger.INFO)
             continue
 
-        if show.rls_require_words and not filter_release_name(cur_result.name, cur_result.show.rls_require_words):
+        if show.rls_require_words and not show_name_helpers.containsAtLeastOneWord(cur_result.name, cur_result.show.rls_require_words):
             logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
                        logger.INFO)
             continue
diff --git a/sickbeard/showUpdater.py b/sickbeard/showUpdater.py
index d4b08f8b9334a75777051e5904f66768007833ab..669974e4d381a44d7b198f624c51802a859a2f03 100644
--- a/sickbeard/showUpdater.py
+++ b/sickbeard/showUpdater.py
@@ -18,7 +18,7 @@
 
 import datetime
 import os
-
+import threading
 import sickbeard
 
 from sickbeard import logger
@@ -31,8 +31,13 @@ from sickbeard import network_timezones
 from sickbeard import failed_history
 
 class ShowUpdater():
+    def __init__(self):
+        self.lock = threading.Lock()
+        self.amActive = False
 
     def run(self, force=False):
+ 
+        self.amActive = True
 
         update_datetime = datetime.datetime.now()
         update_date = update_datetime.date()
@@ -87,6 +92,8 @@ class ShowUpdater():
         ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator("Daily Update", piList))
 
         logger.log(u"Completed full update on all shows")
+        
+        self.amActive = False
 
     def __del__(self):
         pass
diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py
index 3ffbf14270876958802e5e0e03536d60e5c76687..3c46d1d67db68d9c76d7b71ce7c6897639e46d78 100644
--- a/sickbeard/show_name_helpers.py
+++ b/sickbeard/show_name_helpers.py
@@ -37,6 +37,25 @@ resultFilters = ["sub(bed|ed|pack|s)", "(dk|fin|heb|kor|nor|nordic|pl|swe)sub(be
                  "(dir|sample|sub|nfo)fix", "sample", "(dvd)?extras",
                  "dub(bed)?"]
 
+
+def containsAtLeastOneWord(name, words):
+    """
+    Filters out results based on filter_words
+
+    name: name to check
+    words : string of words separated by a ',' or list of words
+
+    Returns: False if the name doesn't contain any word of words list, or the found word from the list.
+    """
+    if isinstance(words, basestring):
+        words = words.split(',')
+    items = [(re.compile('(^|[\W_])%s($|[\W_])' % re.escape(word.strip()), re.I), word.strip()) for word in words]
+    for regexp, word in items:
+        if regexp.search(name):
+            return word
+    return False
+
+
 def filterBadReleases(name, parse=True):
     """
     Filters out non-english and just all-around stupid releases by comparing them
@@ -59,24 +78,21 @@ def filterBadReleases(name, parse=True):
     #    return False
 
     # if any of the bad strings are in the name then say no
+    ignore_words = list(resultFilters)
     if sickbeard.IGNORE_WORDS:
-        resultFilters.extend(sickbeard.IGNORE_WORDS.split(','))
-    filters = [re.compile('(^|[\W_])%s($|[\W_])' % re.escape(filter.strip()), re.I) for filter in resultFilters]
-    for regfilter in filters:
-        if regfilter.search(name):
-            logger.log(u"Invalid scene release: " + name + " contained: " + regfilter.pattern + ", ignoring it",
-                       logger.DEBUG)
-            return False
+        ignore_words.extend(sickbeard.IGNORE_WORDS.split(','))
+    word = containsAtLeastOneWord(name, ignore_words)
+    if word:
+        logger.log(u"Invalid scene release: " + name + " contains " + word + ", ignoring it", logger.DEBUG)
+        return False
 
     # if any of the good strings aren't in the name then say no
     if sickbeard.REQUIRE_WORDS:
-        require_words = sickbeard.REQUIRE_WORDS.split(',')
-        filters = [re.compile('(^|[\W_])%s($|[\W_])' % re.escape(filter.strip()), re.I) for filter in require_words]
-        for regfilter in filters:
-            if not regfilter.search(name):
-                logger.log(u"Invalid scene release: " + name + " doesn't contain: " + regfilter.pattern + ", ignoring it",
-                           logger.DEBUG)
-                return False
+        require_words = sickbeard.REQUIRE_WORDS
+        if not containsAtLeastOneWord(name, require_words):
+            logger.log(u"Invalid scene release: " + name + " doesn't contain any of " + sickbeard.REQUIRE_WORDS +
+                       ", ignoring it", logger.DEBUG)
+            return False
 
     return True
 
diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py
index 8a26aee4a371408dc0b694693490824194c69e69..ac3fb3315e4fee1ec5f1a7b2f980f389b3586070 100644
--- a/sickbeard/show_queue.py
+++ b/sickbeard/show_queue.py
@@ -78,16 +78,13 @@ class ShowQueue(generic_queue.GenericQueue):
     def updateShow(self, show, force=False):
 
         if self.isBeingAdded(show):
-            raise exceptions.CantUpdateException(
-                "Show is still being added, wait until it is finished before you update.")
+            logger.log(str(show.name) + u" is still being added, wait until it is finished before you update.",logger.DEBUG)
 
         if self.isBeingUpdated(show):
-            raise exceptions.CantUpdateException(
-                "This show is already being updated, can't update again until it's done.")
+            logger.log(str(show.name) + u" is already being updated by Post-processor or manually started, can't update again until it's done.",logger.DEBUG)
 
         if self.isInUpdateQueue(show):
-            raise exceptions.CantUpdateException(
-                "This show is already being updated, can't update again until it's done.")
+            logger.log(str(show.name) + u" is in process of being updated by Post-processor or manually started, can't update again until it's done.",logger.DEBUG)
 
         if not force:
             queueItemObj = QueueItemUpdate(show)
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index a7fef0de60bbb6afda2823720eeae2c1cc25697d..854c4248e4fcbacf7ab63715055a2a12ce4b8447 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1264,7 +1264,7 @@ class TVShow(object):
 
     def getOverview(self, epStatus):
 
-        if epStatus == WANTED:
+        if epStatus == WANTED and not self.paused:
             return Overview.WANTED
         elif epStatus in (UNAIRED, UNKNOWN):
             return Overview.UNAIRED
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index f3c529e17d7532ef8a35c7e3e0ff13b266ed5806..595da3f9de97adb4b47f96197cad5b8ff228ae1c 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -22,6 +22,7 @@ import time
 import datetime
 import itertools
 import traceback
+import urllib2
 
 import sickbeard
 
@@ -137,9 +138,26 @@ class TVCache():
             logger.log(traceback.format_exc(), logger.DEBUG)
 
     def getRSSFeed(self, url, post_data=None, items=[]):
+        handlers = []
+
         if self.provider.proxy.isEnabled():
             self.provider.headers.update({'Referer': self.provider.proxy.getProxyURL()})
-        return RSSFeeds(self.providerID).getFeed(self.provider.proxy._buildURL(url), post_data, self.provider.headers, items)
+        elif sickbeard.PROXY_SETTING:
+            logger.log("Using proxy for url: " + url, logger.DEBUG)
+            scheme, address = urllib2.splittype(sickbeard.PROXY_SETTING)
+            if not scheme:
+                scheme = 'http'
+                address = 'http://' + sickbeard.PROXY_SETTING
+            else:
+                address = sickbeard.PROXY_SETTING
+            handlers = [urllib2.ProxyHandler({scheme: address})]
+
+        return RSSFeeds(self.providerID).getFeed(
+            self.provider.proxy._buildURL(url),
+            post_data,
+            self.provider.headers,
+            items,
+            handlers=handlers)
 
     def _translateTitle(self, title):
         return u'' + title.replace(' ', '.')
diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py
index 3631d310be9f471e0bea5fa97856c696268803d6..619a212381fe56ec00a93696c0b546433756fc84 100644
--- a/sickbeard/versionChecker.py
+++ b/sickbeard/versionChecker.py
@@ -24,13 +24,17 @@ import urllib
 import tarfile
 import stat
 import traceback
+import db
+import time
 
 import sickbeard
 from sickbeard import notifiers
 from sickbeard import ui
-from sickbeard import logger
+from sickbeard import logger, helpers
 from sickbeard.exceptions import ex
 from sickbeard import encodingKludge as ek
+from lib import requests
+from lib.requests.exceptions import RequestException
 
 import shutil
 import lib.shutil_custom
@@ -55,6 +59,7 @@ class CheckVersion():
                 self.updater = SourceUpdateManager()
 
     def run(self, force=False):
+
         if self.updater:
             # set current branch version
             sickbeard.BRANCH = self.get_branch()
@@ -63,10 +68,142 @@ class CheckVersion():
                 if sickbeard.AUTO_UPDATE:
                     logger.log(u"New update found for SickRage, starting auto-updater ...")
                     ui.notifications.message('New update found for SickRage, starting auto-updater')
-                    if sickbeard.versionCheckScheduler.action.update():
-                        logger.log(u"Update was successful!")
-                        ui.notifications.message('Update was successful')
-                        sickbeard.events.put(sickbeard.events.SystemEvent.RESTART)
+                    if self.safe_to_update() == True and self._runbackup() == True:
+                        if sickbeard.versionCheckScheduler.action.update():
+                            logger.log(u"Update was successful!")
+                            ui.notifications.message('Update was successful')
+                            sickbeard.events.put(sickbeard.events.SystemEvent.RESTART)
+
+    def _runbackup(self):
+        # Do a system backup before update
+        logger.log(u"Config backup in progress...")
+        ui.notifications.message('Backup', 'Config backup in progress...')
+        try:
+            backupDir = os.path.join(sickbeard.DATA_DIR, 'backup')
+            if not os.path.isdir(backupDir):
+                os.mkdir(backupDir)
+    
+            if self._keeplatestbackup(backupDir) == True and self._backup(backupDir) == True:
+                logger.log(u"Config backup successful, updating...")
+                ui.notifications.message('Backup', 'Config backup successful, updating...')
+                return True
+            else:
+                logger.log(u"Config backup failed, aborting update",logger.ERROR)
+                ui.notifications.message('Backup', 'Config backup failed, aborting update')
+                return False
+        except Exception as e:
+            logger.log('Update: Config backup failed. Error: {0}'.format(ex(e)),logger.ERROR)
+            ui.notifications.message('Backup', 'Config backup failed, aborting update')
+            return False
+
+    def _keeplatestbackup(self,backupDir=None):
+        if backupDir:
+            import glob
+            files = glob.glob(os.path.join(backupDir,'*.zip'))
+            if not files:
+                return True
+            now = time.time()
+            newest = files[0], now - os.path.getctime(files[0])
+            for file in files[1:]:
+                age = now - os.path.getctime(file)
+                if age < newest[1]:
+                    newest = file, age
+            files.remove(newest[0])
+            
+            for file in files:
+                os.remove(file)
+            return True
+        else:
+            return False
+    
+    # TODO: Merge with backup in helpers
+    def _backup(self,backupDir=None):
+        if backupDir:
+            source = [os.path.join(sickbeard.DATA_DIR, 'sickbeard.db'), sickbeard.CONFIG_FILE]
+            source.append(os.path.join(sickbeard.DATA_DIR, 'failed.db'))
+            source.append(os.path.join(sickbeard.DATA_DIR, 'cache.db'))
+            target = os.path.join(backupDir, 'sickrage-' + time.strftime('%Y%m%d%H%M%S') + '.zip')
+
+            for (path, dirs, files) in os.walk(sickbeard.CACHE_DIR, topdown=True):
+                for dirname in dirs:
+                    if path == sickbeard.CACHE_DIR and dirname not in ['images']:
+                        dirs.remove(dirname)
+                for filename in files:
+                    source.append(os.path.join(path, filename))
+
+            if helpers.backupConfigZip(source, target, sickbeard.DATA_DIR):
+                return True
+            else:
+                return False
+        else:
+            return False
+
+    def safe_to_update(self):
+
+        def db_safe(self):
+            try:
+                result = self.getDBcompare(sickbeard.BRANCH)
+                if result == 'equal':
+                    logger.log(u"We can proceed with the update. New update has same DB version", logger.DEBUG)
+                    return True
+                elif result == 'upgrade':
+                    logger.log(u"We can't proceed with the update. New update has a new DB version. Please manually update", logger.WARNING)
+                    return False
+                elif result == 'downgrade':
+                    logger.log(u"We can't proceed with the update. New update has a old DB version. It's not possible to downgrade", logger.ERROR)
+                    return False
+                else:
+                    logger.log(u"We can't proceed with the update. Unable to check remote DB version", logger.ERROR)
+                    return False
+            except:
+                logger.log(u"We can't proceed with the update. Unable to compare DB version", logger.ERROR)
+                return False
+        
+        def postprocessor_safe(self):
+            if not sickbeard.autoPostProcesserScheduler.action.amActive:
+                logger.log(u"We can proceed with the update. Post-Processor is not running", logger.DEBUG)
+                return True
+            else:
+                logger.log(u"We can't proceed with the update. Post-Processor is running", logger.DEBUG)
+                return False
+        
+        def showupdate_safe(self):
+            if not sickbeard.showUpdateScheduler.action.amActive:
+                logger.log(u"We can proceed with the update. Shows are not being updated", logger.DEBUG)
+                return True
+            else:
+                logger.log(u"We can't proceed with the update. Shows are being updated", logger.DEBUG)
+                return False
+
+        db_safe = db_safe(self)
+        postprocessor_safe = postprocessor_safe(self)
+        showupdate_safe = showupdate_safe(self)
+
+        if db_safe == True and postprocessor_safe == True and showupdate_safe == True:
+            logger.log(u"Proceeding with auto update", logger.DEBUG)
+            return True
+        else:
+            logger.log(u"Auto update aborted", logger.DEBUG)
+            return False
+
+    def getDBcompare(self, branchDest):
+        try:
+            response = requests.get("https://raw.githubusercontent.com/SICKRAGETV/SickRage/" + str(branchDest) +"/sickbeard/databases/mainDB.py", verify=False)
+            response.raise_for_status()
+            match = re.search(r"MAX_DB_VERSION\s=\s(?P<version>\d{2,3})",response.text)
+            branchDestDBversion = int(match.group('version'))
+            myDB = db.DBConnection()
+            branchCurrDBversion = myDB.checkDBVersion()
+            if branchDestDBversion > branchCurrDBversion:
+                return 'upgrade'
+            elif branchDestDBversion == branchCurrDBversion:
+                return 'equal'
+            else:
+                return 'downgrade'
+        except RequestException as e:
+            return 'error'
+        except Exception as e:
+            return 'error'
 
     def find_install_type(self):
         """
@@ -108,12 +245,10 @@ class CheckVersion():
         if not self.updater.need_update():
             sickbeard.NEWEST_VERSION_STRING = None
 
-            if not sickbeard.AUTO_UPDATE:
+            if force:
+                ui.notifications.message('No update needed')
                 logger.log(u"No update needed")
 
-                if force:
-                    ui.notifications.message('No update needed')
-
             # no updates needed
             return False
 
@@ -187,6 +322,7 @@ class GitUpdateManager(UpdateManager):
 
         # trying alternatives
 
+
         alternative_git = []
 
         # osx people who start sr from launchd have a broken path, so try a hail-mary attempt for them
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 7faf558189a37bb0bb56cec9a5eb94da87a97768..64749f52d17081aef23a89de7d3a9a4d270cbec2 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -57,6 +57,7 @@ from lib.unrar2 import RarFile
 from lib import adba, subliminal
 from lib.trakt import TraktAPI
 from lib.trakt.exceptions import traktException, traktAuthException, traktServerBusy
+from versionChecker import CheckVersion
 
 try:
     import json
@@ -1073,76 +1074,26 @@ class Home(WebRoot):
         return self.redirect('/home/')
 
     def update(self, pid=None):
+        
         if str(pid) != str(sickbeard.PID):
             return self.redirect('/home/')
+            
+        checkversion = CheckVersion()
+        backup = checkversion._runbackup()
 
-        def _keeplatestbackup(backupDir):
-            import glob
-            files = glob.glob(os.path.join(backupDir,'*.zip'))
-            if not files:
-                return True
-            now = time.time()
-            newest = files[0], now - os.path.getctime(files[0])
-            for file in files[1:]:
-                age = now - os.path.getctime(file)
-                if age < newest[1]:
-                    newest = file, age
-            files.remove(newest[0])
-
-            for file in files:
-                os.remove(file)
-        
-        # TODO: Merge with backup in helpers
-        def _backup(backupDir=None):
-            if backupDir:
-                source = [os.path.join(sickbeard.DATA_DIR, 'sickbeard.db'), sickbeard.CONFIG_FILE]
-                source.append(os.path.join(sickbeard.DATA_DIR, 'failed.db'))
-                source.append(os.path.join(sickbeard.DATA_DIR, 'cache.db'))
-                target = os.path.join(backupDir, 'sickrage-' + time.strftime('%Y%m%d%H%M%S') + '.zip')
-
-                for (path, dirs, files) in os.walk(sickbeard.CACHE_DIR, topdown=True):
-                    for dirname in dirs:
-                        if path == sickbeard.CACHE_DIR and dirname not in ['images']:
-                            dirs.remove(dirname)
-                    for filename in files:
-                        source.append(os.path.join(path, filename))
-
-                if helpers.backupConfigZip(source, target, sickbeard.DATA_DIR):
-                    return True
-                else:
-                    return False
-            else:
-                return False
-
-        # Do a system backup
-        ui.notifications.message('Backup', 'Config backup in progress...')
-        try:
-            backupDir = os.path.join(sickbeard.DATA_DIR, 'backup')
-            if not os.path.isdir(backupDir):
-                os.mkdir(backupDir)
-
-            _keeplatestbackup(backupDir)
+        if backup == True:
 
-            if _backup(backupDir):
-                ui.notifications.message('Backup', 'Config backup successful, updating...')
+            if sickbeard.versionCheckScheduler.action.update():
+                # do a hard restart
+                sickbeard.events.put(sickbeard.events.SystemEvent.RESTART)
+            
+                t = PageTemplate(rh=self, file="restart_bare.tmpl")
+                return t.respond()
             else:
-                ui.notifications.message('Backup', 'Config backup failed, aborting update')
-                return self.redirect('/home/')
-
-        except Exception as e:
-            ui.notifications.message('Backup', 'Config backup failed, aborting update')
-            logger.log('Update: Config backup failed. Error: {0}'.format(ex(e)),logger.DEBUG)
-            return self.redirect('/home/')
-
-        if sickbeard.versionCheckScheduler.action.update():
-            # do a hard restart
-            sickbeard.events.put(sickbeard.events.SystemEvent.RESTART)
-
-            t = PageTemplate(rh=self, file="restart_bare.tmpl")
-            return t.respond()
+                return self._genericMessage("Update Failed",
+                                            "Update wasn't successful, not restarting. Check your log for more information.")
         else:
-            return self._genericMessage("Update Failed",
-                                        "Update wasn't successful, not restarting. Check your log for more information.")
+            return self.redirect('/home/')
 
     def branchCheckout(self, branch):
         if sickbeard.BRANCH != branch:
@@ -1154,31 +1105,21 @@ class Home(WebRoot):
             return self.redirect('/home')
 
     def getDBcompare(self, branchDest=None):
-        from lib import requests
-        from lib.requests.exceptions import RequestException
-        if not branchDest:
-            return json.dumps({ "status": "error", 'message': 'branchDest empty' })
-        try:
-            response = requests.get("https://raw.githubusercontent.com/SICKRAGETV/SickRage/" + str(branchDest) +"/sickbeard/databases/mainDB.py", verify=False)
-            response.raise_for_status()
-            match = re.search(r"MAX_DB_VERSION\s=\s(?P<version>\d{2,3})",response.text)
-            branchDestDBversion = int(match.group('version'))
-            myDB = db.DBConnection()
-            branchCurrDBversion = myDB.checkDBVersion()
-            if branchDestDBversion > branchCurrDBversion:
-                logger.log(u"Checkout branch has a new DB version - Upgrade", logger.DEBUG)
-                return json.dumps({ "status": "success", 'message': 'upgrade' })
-            elif branchDestDBversion == branchCurrDBversion:
-                logger.log(u"Checkout branch has the same DB version - Equal", logger.DEBUG)
-                return json.dumps({ "status": "success", 'message': 'equal' })
-            else:
-                logger.log(u"Checkout branch has an old DB version - Downgrade", logger.DEBUG)
-                return json.dumps({ "status": "success", 'message': 'downgrade' })
-        except RequestException as e:
-            logger.log(u"Checkout branch couldn't compare DB version - Requests error", logger.ERROR)
-            return json.dumps({ "status": "error", 'message': 'Requests error' })
-        except Exception as e:
-            logger.log(u"Checkout branch couldn't compare DB version - General exception", logger.ERROR)
+
+        checkversion = CheckVersion()
+        db_status = checkversion.getDBcompare(branchDest)
+
+        if db_status == 'upgrade':
+            logger.log(u"Checkout branch has a new DB version - Upgrade", logger.DEBUG)
+            return json.dumps({ "status": "success", 'message': 'upgrade' })
+        elif db_status == 'equal':
+            logger.log(u"Checkout branch has the same DB version - Equal", logger.DEBUG)
+            return json.dumps({ "status": "success", 'message': 'equal' })
+        elif db_status == 'downgrade':
+            logger.log(u"Checkout branch has an old DB version - Downgrade", logger.DEBUG)
+            return json.dumps({ "status": "success", 'message': 'downgrade' })
+        else:
+            logger.log(u"Checkout branch couldn't compare DB version.", logger.ERROR)
             return json.dumps({ "status": "error", 'message': 'General exception' })
 
     def displayShow(self, show=None):
@@ -1766,7 +1707,11 @@ class Home(WebRoot):
                             u"Refusing to change status of " + curEp + " to FAILED because it's not SNATCHED/DOWNLOADED",
                             logger.ERROR)
                         continue
-
+                    
+                    if epObj.status in Quality.DOWNLOADED and int(status) == WANTED:
+                        logger.log(u"Removing release_name for episode as you want to set a downloaded episode back to wanted, so obviously you want it replaced")
+                        epObj.release_name = ""
+                        
                     epObj.status = int(status)
 
                     # mass add to database
@@ -1791,7 +1736,7 @@ class Home(WebRoot):
                 myDB = db.DBConnection()
                 myDB.mass_action(sql_l)
 
-        if int(status) == WANTED:
+        if int(status) == WANTED and not showObj.paused:
             msg = "Backlog was automatically started for the following seasons of <b>" + showObj.name + "</b>:<br />"
             msg += '<ul>'
 
@@ -1807,7 +1752,9 @@ class Home(WebRoot):
 
             if segments:
                 ui.notifications.message("Backlog started", msg)
-
+        elif int(status) == WANTED and showObj.paused:
+            logger.log(u"Some episodes were set to wanted, but " + showObj.name + " is paused. Not adding to Backlog until show is unpaused")
+            
         if int(status) == FAILED:
             msg = "Retrying Search was automatically started for the following season of <b>" + showObj.name + "</b>:<br />"
             msg += '<ul>'