diff --git a/.build/package.json b/.build/package.json
index 502f6995823ce8ab760914ea2cfe868ae1061b54..ab73bd48a706d57dd108d5cfbaf4618023aea6b7 100644
--- a/.build/package.json
+++ b/.build/package.json
@@ -29,6 +29,6 @@
     "grunt-sass": "^1.1.0",
     "load-grunt-tasks": "^3.3.0",
     "mocha": "^2.3.4",
-    "snyk": "^1.3.1"
+    "snyk": "^1.6.1"
   }
 }
diff --git a/.gitignore b/.gitignore
index 8bdce657c72756a00055c9a4dce2d90d19f50d50..6e8bfdc5885393743103fbf172d14337ccd361e6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,6 +25,7 @@ server.key
 #  Compiled source   #
 ######################
 *.py[co]
+ua.json
 
 #  IDE specific      #
 ######################
diff --git a/.jshintrc b/.jshintrc
index 189830acc77e451853115739a34afeef0b15636a..054c5c9176fa54af36d624d0d89fd444ad0f73a0 100644
--- a/.jshintrc
+++ b/.jshintrc
@@ -30,7 +30,6 @@
         "topImageHtml": true,
         "generateBlackWhiteList": true,
         "_": true,
-        "bootbox": true,
         "PNotify": true,
         "anonURL": true,
         "window": true
diff --git a/gui/slick/css/style.css b/gui/slick/css/style.css
index 733539b7f3403e08db5d85f0e926aab97eabd756..7617360f8b1e73f49d34bbd9316b64ba7c40b17b 100644
--- a/gui/slick/css/style.css
+++ b/gui/slick/css/style.css
@@ -2960,7 +2960,6 @@ li.token-input-token {
 }
 
 li.token-input-token img {
-    padding-top: 3px;
     padding-right: 4px;
     float: left;
 }
diff --git a/gui/slick/images/providers/danishbits.png b/gui/slick/images/providers/danishbits.png
new file mode 100644
index 0000000000000000000000000000000000000000..2fc398bdf98a0a0443aeb873088efdd0e6b686a5
Binary files /dev/null and b/gui/slick/images/providers/danishbits.png differ
diff --git a/gui/slick/js/addShowOptions.js b/gui/slick/js/addShowOptions.js
index ac0605c69f60aad70bf37bed977231d9b23ca5eb..8fa692b068226c5de4539cc6d78358e8bd6e4437 100644
--- a/gui/slick/js/addShowOptions.js
+++ b/gui/slick/js/addShowOptions.js
@@ -18,7 +18,6 @@ $(document).ready(function () {
             anime: $('#anime').prop('checked'),
             scene: $('#scene').prop('checked'),
             defaultStatusAfter: $('#statusSelectAfter').val(),
-            archive: $('#archive').prop('checked')
         });
 
         $(this).attr('disabled', true);
@@ -29,7 +28,7 @@ $(document).ready(function () {
         });
     });
 
-    $('#statusSelect, #qualityPreset, #flatten_folders, #anyQualities, #bestQualities, #subtitles, #scene, #anime, #statusSelectAfter, #archive').change(function () {
+    $('#statusSelect, #qualityPreset, #flatten_folders, #anyQualities, #bestQualities, #subtitles, #scene, #anime, #statusSelectAfter').change(function () {
         $('#saveDefaultsButton').attr('disabled', false);
     });
 });
diff --git a/gui/slick/js/ajaxEpSubtitles.js b/gui/slick/js/ajaxEpSubtitles.js
index a61499c394dc069b5e79655c55c1ebe8f94a0ddf..4f2e7c97bee032196590d260b2897688e064ef44 100644
--- a/gui/slick/js/ajaxEpSubtitles.js
+++ b/gui/slick/js/ajaxEpSubtitles.js
@@ -12,7 +12,7 @@
                     var subtitles = data.subtitles.split(',');
                     subtitlesTd.empty();
                     $.each(subtitles,function(index, language){
-                        if (language !== "" && language !== "und") {
+                        if (language !== "") {
                             if (index !== subtitles.length - 1) {
                                 subtitlesTd.append($("<img/>").attr({"src": srRoot+"/images/subtitles/flags/"+language+".png", "alt": language, "width": 16, "height": 11}));
                             } else {
diff --git a/gui/slick/js/core.js b/gui/slick/js/core.js
index 90c927e06566a073bb6836d10f3b26d882345f87..32ba8b947abbc19be4b84b7d54ba2d762006b009 100644
--- a/gui/slick/js/core.js
+++ b/gui/slick/js/core.js
@@ -2432,17 +2432,16 @@ var SICKRAGE = {
                     4: { sorter: 'scene'},
                     5: { sorter: 'anime'},
                     6: { sorter: 'flatfold'},
-                    7: { sorter: 'archive_firstmatch'},
-                    8: { sorter: 'paused'},
-                    9: { sorter: 'subtitle'},
-                    10: { sorter: 'default_ep_status'},
-                    11: { sorter: 'status'},
+                    7: { sorter: 'paused'},
+                    8: { sorter: 'subtitle'},
+                    9: { sorter: 'default_ep_status'},
+                    10: { sorter: 'status'},
+                    11: { sorter: false},
                     12: { sorter: false},
                     13: { sorter: false},
                     14: { sorter: false},
                     15: { sorter: false},
-                    16: { sorter: false},
-                    17: { sorter: false}
+                    16: { sorter: false}
                 },
                 widgetOptions: {
                     'columnSelector_mediaquery': false
diff --git a/gui/slick/js/core.min.js b/gui/slick/js/core.min.js
index 3b682619b9711aad0da2b28bf9d569bff5ddc0e6..cafbadcdf14649b20160670b8b1aeb477f850f23 100644
Binary files a/gui/slick/js/core.min.js and b/gui/slick/js/core.min.js differ
diff --git a/gui/slick/js/massUpdate.js b/gui/slick/js/massUpdate.js
index 9539cf53a482c5c05184e77a4fac3dd7e04c8714..3d50184b64579be61bcd8038cddb7cde8da4f90d 100644
--- a/gui/slick/js/massUpdate.js
+++ b/gui/slick/js/massUpdate.js
@@ -1,5 +1,5 @@
 $(document).ready(function(){
-    $('#submitMassEdit').on('click', function(){
+    $('.submitMassEdit').on('click', function(){
         var editArr = [];
 
         $('.editCheck').each(function() {
@@ -11,7 +11,7 @@ $(document).ready(function(){
         window.location.href = 'massEdit?toEdit='+editArr.join('|');
     });
 
-    $('#submitMassUpdate').on('click', function(){
+    $('.submitMassUpdate').on('click', function(){
         var updateArr = [];
         var refreshArr = [];
         var renameArr = [];
@@ -47,22 +47,26 @@ $(document).ready(function(){
         });
 
         if(deleteCount >= 1) {
-            bootbox.confirm("You have selected to delete " + deleteCount + " show(s).  Are you sure you wish to continue? All files will be removed from your system.", function(result) {
-                if(result) {
+            $.confirm({
+                title: "Delete Shows",
+                text: "You have selected to delete " + deleteCount + " show(s).  Are you sure you wish to continue? All files will be removed from your system.",
+                confirmButton: "Yes",
+                cancelButton: "Cancel",
+                dialogClass: "modal-dialog",
+                post: false,
+                confirm: function() {
                     $('.deleteCheck').each(function() {
                         if(this.checked === true) {
                             deleteArr.push($(this).attr('id').split('-')[1]);
                         }
                     });
+                    if(updateArr.length+refreshArr.length+renameArr.length+subtitleArr.length+deleteArr.length+removeArr.length+metadataArr.length === 0) { return false; }
+                    window.location.href = 'massUpdate?toUpdate='+updateArr.join('|')+'&toRefresh='+refreshArr.join('|')+'&toRename='+renameArr.join('|')+'&toSubtitle='+subtitleArr.join('|')+'&toDelete='+deleteArr.join('|')+'&toRemove='+removeArr.join('|')+'&toMetadata='+metadataArr.join('|');
                 }
-                if(updateArr.length+refreshArr.length+renameArr.length+subtitleArr.length+deleteArr.length+removeArr.length+metadataArr.length === 0) { return false; }
-
-                window.location.href = 'massUpdate?toUpdate='+updateArr.join('|')+'&toRefresh='+refreshArr.join('|')+'&toRename='+renameArr.join('|')+'&toSubtitle='+subtitleArr.join('|')+'&toDelete='+deleteArr.join('|')+'&toRemove='+removeArr.join('|')+'&toMetadata='+metadataArr.join('|');
             });
-        } else {
-            if(updateArr.length+refreshArr.length+renameArr.length+subtitleArr.length+deleteArr.length+removeArr.length+metadataArr.length === 0) { return false; }
-            window.location.href = 'massUpdate?toUpdate='+updateArr.join('|')+'&toRefresh='+refreshArr.join('|')+'&toRename='+renameArr.join('|')+'&toSubtitle='+subtitleArr.join('|')+'&toDelete='+deleteArr.join('|')+'&toRemove='+removeArr.join('|')+'&toMetadata='+metadataArr.join('|');
         }
+        if(deleteCount >= 1 || updateArr.length+refreshArr.length+renameArr.length+subtitleArr.length+deleteArr.length+removeArr.length+metadataArr.length === 0) { return false; }
+        window.location.href = 'massUpdate?toUpdate='+updateArr.join('|')+'&toRefresh='+refreshArr.join('|')+'&toRename='+renameArr.join('|')+'&toSubtitle='+subtitleArr.join('|')+'&toDelete='+deleteArr.join('|')+'&toRemove='+removeArr.join('|')+'&toMetadata='+metadataArr.join('|');
     });
 
     ['.editCheck', '.updateCheck', '.refreshCheck', '.renameCheck', '.deleteCheck', '.removeCheck'].forEach(function(name) {
diff --git a/gui/slick/views/addShows_trendingShows.mako b/gui/slick/views/addShows_trendingShows.mako
index 408a3c86dfc12d6ff3bbd4d9925b1d16ebc5edb5..d8f3bf50a6e9f7c4a9bfc3267ab0b12b4f945627 100644
--- a/gui/slick/views/addShows_trendingShows.mako
+++ b/gui/slick/views/addShows_trendingShows.mako
@@ -44,6 +44,8 @@
     <span style="margin-left:12px">Select Trakt List:</span>
     <select id="traktlistselection" class="form-control form-control-inline input-sm">
         <option value="anticipated" ${' selected="selected"' if traktList == "anticipated" else ''}>Most Anticipated</option>
+        <option value="newshow" ${' selected="selected"' if traktList == "newshow" else ''}>New Shows</option>
+        <option value="newseason" ${' selected="selected"' if traktList == "newseason" else ''}>Season Premieres</option>
         <option value="trending" ${' selected="selected"' if traktList == "trending" else ''}>Trending</option>
         <option value="popular" ${' selected="selected"' if traktList == "popular" else ''}>Popular</option>
         <option value="watched" ${' selected="selected"' if traktList == "watched" else '' }>Most Watched</option>
diff --git a/gui/slick/views/apiBuilder.mako b/gui/slick/views/apiBuilder.mako
index 9b8ae20b036bf8784aca317c137279de4c232285..aeef6b809d7dcd29c7b5ad7b64d5facf67c04b30 100644
--- a/gui/slick/views/apiBuilder.mako
+++ b/gui/slick/views/apiBuilder.mako
@@ -209,10 +209,10 @@ var episodes = ${episodes};
                 <span class="glyphicon glyphicon-remove text-muted" title="No"></span>
             % endif
         </td>
-        <td>${parameter_help['desc'] if 'desc' in parameter_help else ''}</td>
-        <td>${parameter_help['type'] if 'type' in parameter_help else ''}</td>
-        <td>${parameter_help['defaultValue'] if 'defaultValue' in parameter_help else ''}</td>
-        <td>${parameter_help['allowed_values'] if 'allowed_values' in parameter_help else ''}</td>
+        <td>${parameter_help.get('desc', '')}</td>
+        <td>${parameter_help.get('type', '')}</td>
+        <td>${parameter_help.get('defaultValue', '')}</td>
+        <td>${parameter_help.get('allowedValues', '')}</td>
     </tr>
 % endfor
 </tbody>
@@ -223,8 +223,8 @@ var episodes = ${episodes};
     % for parameter in parameters:
     <%
         parameter_help = parameters[parameter]
-        allowed_values = parameter_help['allowed_values'] if 'allowed_values' in parameter_help else ''
-        type = parameter_help['type'] if 'type' in parameter_help else ''
+        allowed_values = parameter_help.get('allowedValues', '')
+        type = parameter_help.get('type', '')
     %>
 
     % if isinstance(allowed_values, list):
diff --git a/gui/slick/views/config_general.mako b/gui/slick/views/config_general.mako
index cc61635fd7a1fd912c8547b2fcf2ddaa70444675..0f792f56b71368ab036546b4a7df79f0cf8cb23c 100644
--- a/gui/slick/views/config_general.mako
+++ b/gui/slick/views/config_general.mako
@@ -130,8 +130,8 @@
                             <label for="log_size">
                                 <span class="component-title">Size of Log files saved</span>
                                 <span class="component-desc">
-                                    <input type="number" min="1048576" step="1048576" name="log_size" id="log_size" value="${sickbeard.LOG_SIZE}" class="form-control input-sm input75" autocapitalize="off" />
-                                    <p>maximum size of a log file saved (default: 1048576 (1MB)) (REQUIRES RESTART)</p>
+                                    <input type="number" min="0.5" step="0.1" name="log_size" id="log_size" value="${sickbeard.LOG_SIZE}" class="form-control input-sm input75" autocapitalize="off" />
+                                    <p>maximum size in MB of the log file (default: 1MB) (REQUIRES RESTART)</p>
                                 </span>
                             </label>
                         </div>
@@ -156,7 +156,7 @@
                                 <span class="component-title">Timeout show indexer at</span>
                                 <span class="component-desc">
                                     <input type="number" min="10" step="1" name="indexer_timeout" id="indexer_timeout" value="${sickbeard.INDEXER_TIMEOUT}" class="form-control input-sm input75" autocapitalize="off" />
-                                    <p>seconds of inactivity when finding new shows (default:10)</p>
+                                    <p>seconds of inactivity when finding new shows (default:20)</p>
                                 </span>
                             </label>
                         </div>
@@ -209,7 +209,7 @@
                                 <span class="component-title">Check the server every*</span>
                                 <span class="component-desc">
                                     <input type="number" min="1" step="1" name="update_frequency" id="update_frequency" value="${sickbeard.UPDATE_FREQUENCY}" class="form-control input-sm input75" autocapitalize="off" />
-                                    <p>hours for software updates (default:12)</p>
+                                    <p>hours for software updates (default:1)</p>
                                 </span>
                             </label>
                         </div>
diff --git a/gui/slick/views/config_subtitles.mako b/gui/slick/views/config_subtitles.mako
index d1cee5a2085150a8e5cef6f898ccb450f176f00d..90ab34726c0111c5cb2d77fda626dc3eb9b3fa53 100644
--- a/gui/slick/views/config_subtitles.mako
+++ b/gui/slick/views/config_subtitles.mako
@@ -192,7 +192,7 @@ $('#subtitles_dir').fileBrowser({ title: 'Select Subtitles Download Directory' }
                           </li>
                         % endfor
                         </ul>
-                        <input type="hidden" name="service_order" id="service_order" value="<%" ".join(['%s:%d' % (x['name'], x['enabled']) for x in sickbeard.subtitles.sorted_service_list()])%>"/>
+                        <input type="hidden" name="service_order" id="service_order" value="${' '.join(['%s:%d' % (x['name'], x['enabled']) for x in sickbeard.subtitles.sorted_service_list()])}"/>
 
                         <br><input type="submit" class="btn config_submitter" value="Save Changes" /><br>
                     </fieldset>
diff --git a/gui/slick/views/displayShow.mako b/gui/slick/views/displayShow.mako
index c071de73206444bf3871d404defdf3a90232274a..39b3909c39099f6715691f66465c663e948d97bf 100644
--- a/gui/slick/views/displayShow.mako
+++ b/gui/slick/views/displayShow.mako
@@ -221,7 +221,6 @@
                     <tr><td class="showLegend">Anime: </td><td><img src="${srRoot}/images/${("no16.png", "yes16.png")[bool(show.is_anime)]}" alt="${("N", "Y")[bool(show.is_anime)]}" width="16" height="16" /></td></tr>
                     <tr><td class="showLegend">DVD Order: </td><td><img src="${srRoot}/images/${("no16.png", "yes16.png")[bool(show.dvdorder)]}" alt="${("N", "Y")[bool(show.dvdorder)]}" width="16" height="16" /></td></tr>
                     <tr><td class="showLegend">Scene Numbering: </td><td><img src="${srRoot}/images/${("no16.png", "yes16.png")[bool(show.scene)]}" alt="${("N", "Y")[bool(show.scene)]}" width="16" height="16" /></td></tr>
-                    <tr><td class="showLegend">Archive First Match: </td><td><img src="${srRoot}/images/${("no16.png", "yes16.png")[bool(show.archive_firstmatch)]}" alt="${("N", "Y")[bool(show.archive_firstmatch)]}" width="16" height="16" /></td></tr>
                 </table>
             </div>
         </div>
diff --git a/gui/slick/views/editShow.mako b/gui/slick/views/editShow.mako
index 9b3be1c6814317af095969c450b549e226a7df27..58e7af47c2bf9fdc11f3c0172c38195e488becbb 100644
--- a/gui/slick/views/editShow.mako
+++ b/gui/slick/views/editShow.mako
@@ -59,25 +59,12 @@
                             <label for="qualityPreset">
                                 <span class="component-title">Preferred Quality</span>
                                 <span class="component-desc">
-                                    <%
-                                        qualities = common.Quality.splitQuality(int(show.quality))
-                                        anyQualities = qualities[0]
-                                        bestQualities = qualities[1]
-                                    %>
+                                    <% anyQualities, bestQualities = common.Quality.splitQuality(int(show.quality)) %>
                                     <%include file="/inc_qualityChooser.mako"/>
                                 </span>
                             </label>
                         </div>
 
-                        <div class="field-pair">
-                            <label for="">
-                                <span class="component-title">Archive on first match</span>
-                                <span class="component-desc">
-                                    <input type="checkbox" id="archive_firstmatch" name="archive_firstmatch" ${('', 'checked="checked"')[show.archive_firstmatch == 1]} /> archive episode after the first best match is found from your archive quality list
-                                </span>
-                            </label>
-                        </div>
-
                         <div class="field-pair">
                             <label for="defaultEpStatusSelect">
                                 <span class="component-title">Default Episode Status</span>
diff --git a/gui/slick/views/errorlogs.mako b/gui/slick/views/errorlogs.mako
index 978561785d4ad748bf533720ccbc6663a9520dd4..b00c3ce384c8d58dbdfc257809f84a18238c1216 100644
--- a/gui/slick/views/errorlogs.mako
+++ b/gui/slick/views/errorlogs.mako
@@ -2,7 +2,6 @@
 <%!
     import sickbeard
     from sickbeard import classes
-    from sickbeard.logger import reverseNames
 %>
 <%block name="css">
 <style>
@@ -23,11 +22,12 @@ pre {
         title = 'ERROR logs'
 %>
 <h1 class="header">${title}</h1>
-<div class="align-left"><pre>
+<div class="align-left">
+<pre>
 % if errors:
-    % for curError in sorted(errors, key=lambda error: error.time, reverse=True)[:500]:
+% for curError in sorted(errors, key=lambda error: error.time, reverse=True)[:500]:
 ${curError.time} ${curError.message}
-    % endfor
+% endfor
 % else:
 There are no events to display.
 % endif
diff --git a/gui/slick/views/inc_addShowOptions.mako b/gui/slick/views/inc_addShowOptions.mako
index bfeb9b4653c54aa20618f28e1b5cbf3f9b737f90..673fb4f639549ce8382f886d973516e534dd1111 100644
--- a/gui/slick/views/inc_addShowOptions.mako
+++ b/gui/slick/views/inc_addShowOptions.mako
@@ -72,19 +72,7 @@
             </label>
         </div>
 
-        <div class="field-pair alt">
-            <label for="archive" class="clearfix">
-                <span class="component-title">Archive first match</span>
-                <span class="component-desc">
-                    <input type="checkbox" name="archive" id="archive" ${('', 'checked="checked"')[bool(sickbeard.ARCHIVE_DEFAULT)]} />
-                    <p>Archive episodes after downloading first match?</p>
-                </span>
-            </label>
-        </div>
-
-        <% qualities = Quality.splitQuality(sickbeard.QUALITY_DEFAULT) %>
-        <% anyQualities = qualities[0] %>
-        <% bestQualities = qualities[1] %>
+        <% anyQualities, bestQualities = Quality.splitQuality(sickbeard.QUALITY_DEFAULT) %>
         <%include file="/inc_qualityChooser.mako"/>
 
         <br>
diff --git a/gui/slick/views/inc_qualityChooser.mako b/gui/slick/views/inc_qualityChooser.mako
index f203ea3f8ddbbff2a8850afece4cdd34ff604da7..f7e35d762f3a9419b9756df195e75f7a59a1dc9f 100644
--- a/gui/slick/views/inc_qualityChooser.mako
+++ b/gui/slick/views/inc_qualityChooser.mako
@@ -9,13 +9,11 @@ if not show is UNDEFINED:
 else:
     __quality = int(sickbeard.QUALITY_DEFAULT)
 
-qualities = Quality.splitQuality(__quality)
-anyQualities = qualities[0]
-bestQualities = qualities[1]
+anyQualities, bestQualities = Quality.splitQuality(__quality)
+overall_quality = Quality.combineQualities(anyQualities, bestQualities)
+selected = None
 %>
 
-<% overall_quality = Quality.combineQualities(anyQualities, bestQualities) %>
-<% selected = None %>
 <select id="qualityPreset" name="quality_preset" class="form-control form-control-inline input-sm">
     <option value="0">Custom</option>
     % for curPreset in sorted(qualityPresets):
@@ -25,7 +23,7 @@ bestQualities = qualities[1]
 
 <div id="customQualityWrapper">
     <div id="customQuality" style="padding-left: 0px;">
-        <p><b><u>Preferred</u></b> quality's will replace those in <b><u>allowed</u></b>, even if they are lower.</p>
+        <p><b><u>Preferred</u></b> qualities will replace those in <b><u>allowed</u></b>, even if they are lower.</p>
 
         <div style="padding-right: 40px; text-align: left; float: left;">
             <h5>Allowed</h5>
diff --git a/gui/slick/views/layouts/main.mako b/gui/slick/views/layouts/main.mako
index a839a7706455cbf7b1c11bcc940a9db95cb7da56..fda12eb262d2cf65d166987b6720b9d7ced94e81 100644
--- a/gui/slick/views/layouts/main.mako
+++ b/gui/slick/views/layouts/main.mako
@@ -23,7 +23,7 @@
         <meta charset="utf-8">
         <meta name="robots" content="noindex, nofollow">
         <meta http-equiv="X-UA-Compatible" content="IE=edge">
-        <meta name="viewport" content="width=device-width">
+        <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0">
 
         <!-- These values come from css/dark.css and css/light.css -->
         % if sickbeard.THEME_NAME == "dark":
diff --git a/gui/slick/views/manage.mako b/gui/slick/views/manage.mako
index 710a003647ba02702fba8f8f8d3e0a93d6012d39..12c52542d3326c0263ccb4cf280b6086d0bfa573 100644
--- a/gui/slick/views/manage.mako
+++ b/gui/slick/views/manage.mako
@@ -15,15 +15,15 @@
     <tr>
         <td nowrap>
             % if not header is UNDEFINED:
-                <h1 class="header" style="margin: 0;">${header}</h1>
+            <h1 class="header" style="margin: 0;">${header}</h1>
             % else:
-                <h1 class="title" style="margin: 0;">${title}</h1>
+            <h1 class="title" style="margin: 0;">${title}</h1>
             % endif
         </td>
         <td align="right">
             <div>
-                <input class="btn btn-inline" type="button" value="Edit Selected" id="submitMassEdit" />
-                <input class="btn btn-inline" type="button" value="Submit" id="submitMassUpdate" />
+                <input class="btn btn-inline submitMassEdit" type="button" value="Edit Selected" />
+                <input class="btn btn-inline submitMassUpdate" type="button" value="Submit" />
                 <span class="show-option">
                     <button id="popover" type="button" class="btn btn-inline">Select Columns <b class="caret"></b></button>
                 </span>
@@ -45,7 +45,6 @@
             <th class="col-legend">Scene</th>
             <th class="col-legend">Anime</th>
             <th class="col-legend">Season folders</th>
-            <th class="col-legend">Archive first match</th>
             <th class="col-legend">Paused</th>
             <th class="col-legend">Subtitle</th>
             <th class="col-legend">Default Ep Status</th>
@@ -61,87 +60,63 @@
             <th width="1%">Remove<br><input type="checkbox" class="bulkCheck" id="removeCheck" /></th>
         </tr>
     </thead>
-
     <tfoot>
         <tr>
-            <td rowspan="1" colspan="2" class="align-center alt"><input class="btn pull-left" type="button" value="Edit Selected" id="submitMassEdit" /></td>
-            <td rowspan="1" colspan="${(15, 16)[bool(sickbeard.USE_SUBTITLES)]}" class="align-right alt"><input class="btn pull-right" type="button" value="Submit" id="submitMassUpdate" /></td>
+            <td rowspan="1" colspan="2" class="align-center alt"><input class="btn pull-left submitMassEdit" type="button" value="Edit Selected" /></td>
+            <td rowspan="1" colspan="${(15, 16)[bool(sickbeard.USE_SUBTITLES)]}" class="align-right alt"><input class="btn pull-right submitMassUpdate" type="button" value="Submit" /></td>
         </tr>
     </tfoot>
 
     <tbody>
-        <% myShowList = sickbeard.showList %>
-        <% myShowList.sort(lambda x, y: cmp(x.name, y.name)) %>
-
-        % for curShow in myShowList:
-        <% curEp = curShow.nextaired %>
-        <% curUpdate_disabled = "" %>
-        <% curRefresh_disabled = "" %>
-        <% curRename_disabled = "" %>
-        <% curSubtitle_disabled = "" %>
-        <% curDelete_disabled = "" %>
-        <% curRemove_disabled = "" %>
-
-        % if sickbeard.showQueueScheduler.action.isBeingUpdated(curShow) or sickbeard.showQueueScheduler.action.isInUpdateQueue(curShow):
-            <% curUpdate_disabled = "disabled=\"disabled\" " %>
-        % endif
-
-        <% curUpdate = "<input type=\"checkbox\" class=\"updateCheck\" id=\"update-"+str(curShow.indexerid)+"\" "+curUpdate_disabled+"/>" %>
-
-        % if sickbeard.showQueueScheduler.action.isBeingRefreshed(curShow) or sickbeard.showQueueScheduler.action.isInRefreshQueue(curShow):
-            <% curRefresh_disabled = "disabled=\"disabled\" " %>
-        % endif
-
-        <% curRefresh = "<input type=\"checkbox\" class=\"refreshCheck\" id=\"refresh-"+str(curShow.indexerid)+"\" "+curRefresh_disabled+"/>" %>
-
-        % if sickbeard.showQueueScheduler.action.isBeingRenamed(curShow) or sickbeard.showQueueScheduler.action.isInRenameQueue(curShow):
-            <% curRename = "disabled=\"disabled\" " %>
-        % endif
+<%
+    myShowList = sickbeard.showList
+    myShowList.sort(lambda x, y: cmp(x.name, y.name))
+%>
+    % for curShow in myShowList:
+    <%
+        curEp = curShow.nextaired
 
-        <% curRename = "<input type=\"checkbox\" class=\"renameCheck\" id=\"rename-"+str(curShow.indexerid)+"\" "+curRename_disabled+"/>" %>
+        disabled = sickbeard.showQueueScheduler.action.isBeingUpdated(curShow) or sickbeard.showQueueScheduler.action.isInUpdateQueue(curShow)
+        curUpdate = "<input type=\"checkbox\" class=\"updateCheck\" id=\"update-" + str(curShow.indexerid) + "\" " + ("", "disabled=\"disabled\" ")[disabled] + "/>"
 
-        % if not curShow.subtitles or sickbeard.showQueueScheduler.action.isBeingSubtitled(curShow) or sickbeard.showQueueScheduler.action.isInSubtitleQueue(curShow):
-            <% curSubtitle_disabled = "disabled=\"disabled\" " %>
-        % endif
+        disabled = sickbeard.showQueueScheduler.action.isBeingRefreshed(curShow) or sickbeard.showQueueScheduler.action.isInRefreshQueue(curShow)
+        curRefresh = "<input type=\"checkbox\" class=\"refreshCheck\" id=\"refresh-" + str(curShow.indexerid) + "\" " + ("", "disabled=\"disabled\" ")[disabled] + "/>"
 
-        <% curSubtitle = "<input type=\"checkbox\" class=\"subtitleCheck\" id=\"subtitle-"+str(curShow.indexerid)+"\" "+curSubtitle_disabled+"/>" %>
+        disabled = sickbeard.showQueueScheduler.action.isBeingRenamed(curShow) or sickbeard.showQueueScheduler.action.isInRenameQueue(curShow)
+        curRename = "<input type=\"checkbox\" class=\"renameCheck\" id=\"rename-" + str(curShow.indexerid) + "\" " + ("", "disabled=\"disabled\" ")[disabled] + "/>"
 
-        % if sickbeard.showQueueScheduler.action.isBeingRenamed(curShow) or sickbeard.showQueueScheduler.action.isInRenameQueue(curShow) or sickbeard.showQueueScheduler.action.isInRefreshQueue(curShow):
-            <% curDelete = "disabled=\"disabled\" " %>
-        % endif
+        disabled = not curShow.subtitles or sickbeard.showQueueScheduler.action.isBeingSubtitled(curShow) or sickbeard.showQueueScheduler.action.isInSubtitleQueue(curShow)
+        curSubtitle = "<input type=\"checkbox\" class=\"subtitleCheck\" id=\"subtitle-" + str(curShow.indexerid) + "\" " + ("", "disabled=\"disabled\" ")[disabled] + "/>"
 
-        <% curDelete = "<input type=\"checkbox\" class=\"deleteCheck\" id=\"delete-"+str(curShow.indexerid)+"\" "+curDelete_disabled+"/>" %>
-
-        % if sickbeard.showQueueScheduler.action.isBeingRenamed(curShow) or sickbeard.showQueueScheduler.action.isInRenameQueue(curShow) or sickbeard.showQueueScheduler.action.isInRefreshQueue(curShow):
-            <% curRemove = "disabled=\"disabled\" " %>
-        % endif
+        disabled = sickbeard.showQueueScheduler.action.isBeingRenamed(curShow) or sickbeard.showQueueScheduler.action.isInRenameQueue(curShow) or sickbeard.showQueueScheduler.action.isInRefreshQueue(curShow)
+        curDelete = "<input type=\"checkbox\" class=\"confirm deleteCheck\" id=\"delete-" + str(curShow.indexerid) + "\" " + ("", "disabled=\"disabled\" ")[disabled] + "/>"
 
-        <% curRemove = "<input type=\"checkbox\" class=\"removeCheck\" id=\"remove-"+str(curShow.indexerid)+"\" "+curRemove_disabled+"/>" %>
-        <tr>
-            <td align="center"><input type="checkbox" class="editCheck" id="edit-${curShow.indexerid}" /></td>
-            <td class="tvShow"><a href="${srRoot}/home/displayShow?show=${curShow.indexerid}">${curShow.name}</a></td>
-            <td align="center">${renderQualityPill(curShow.quality, showTitle=True)}</td>
-            <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.is_sports) == 1]} width="16" height="16" /></td>
-            <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.is_scene) == 1]} width="16" height="16" /></td>
-            <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.is_anime) == 1]} width="16" height="16" /></td>
-            <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[not int(curShow.flatten_folders) == 1]} width="16" height="16" /></td>
-            <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.archive_firstmatch) == 1]} width="16" height="16" /></td>
-            <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.paused) == 1]} width="16" height="16" /></td>
-            <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.subtitles) == 1]} width="16" height="16" /></td>
-            <td align="center">${statusStrings[curShow.default_ep_status]}</td>
-            <td align="center">${curShow.status}</td>
-            <td align="center">${curUpdate}</td>
-            <td align="center">${curRefresh}</td>
-            <td align="center">${curRename}</td>
+        disabled = sickbeard.showQueueScheduler.action.isBeingRenamed(curShow) or sickbeard.showQueueScheduler.action.isInRenameQueue(curShow) or sickbeard.showQueueScheduler.action.isInRefreshQueue(curShow)
+        curRemove = "<input type=\"checkbox\" class=\"removeCheck\" id=\"remove-" + str(curShow.indexerid) + "\" " + ("", "disabled=\"disabled\" ")[disabled] + "/>"
+    %>
+    <tr>
+        <td align="center"><input type="checkbox" class="editCheck" id="edit-${curShow.indexerid}" /></td>
+        <td class="tvShow"><a href="${srRoot}/home/displayShow?show=${curShow.indexerid}">${curShow.name}</a></td>
+        <td align="center">${renderQualityPill(curShow.quality, showTitle=True)}</td>
+        <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.is_sports) == 1]} width="16" height="16" /></td>
+        <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.is_scene) == 1]} width="16" height="16" /></td>
+        <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.is_anime) == 1]} width="16" height="16" /></td>
+        <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[not int(curShow.flatten_folders) == 1]} width="16" height="16" /></td>
+        <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.paused) == 1]} width="16" height="16" /></td>
+        <td align="center"><img src="${srRoot}/images/${('no16.png" alt="N"', 'yes16.png" alt="Y"')[int(curShow.subtitles) == 1]} width="16" height="16" /></td>
+        <td align="center">${statusStrings[curShow.default_ep_status]}</td>
+        <td align="center">${curShow.status}</td>
+        <td align="center">${curUpdate}</td>
+        <td align="center">${curRefresh}</td>
+        <td align="center">${curRename}</td>
         % if sickbeard.USE_SUBTITLES:
-            <td align="center">${curSubtitle}</td>
+        <td align="center">${curSubtitle}</td>
         % endif
-            <td align="center">${curDelete}</td>
-            <td align="center">${curRemove}</td>
-        </tr>
-
-        % endfor
-    </tbody>
+        <td align="center">${curDelete}</td>
+        <td align="center">${curRemove}</td>
+    </tr>
+% endfor
+</tbody>
 </table>
 </form>
 </%block>
diff --git a/gui/slick/views/manage_backlogOverview.mako b/gui/slick/views/manage_backlogOverview.mako
index 9a06ea968251ad4fa2a20d056b5d399239bb01c3..3a811493e5650f851bd8e3bd1d666b0fb17e8501 100644
--- a/gui/slick/views/manage_backlogOverview.mako
+++ b/gui/slick/views/manage_backlogOverview.mako
@@ -20,7 +20,7 @@
 % endif
 
 <%
-    showQualSnatched = lambda x: Quality.splitQuality(x.quality)[1] and not x.archive_firstmatch
+    showQualSnatched = lambda x: Quality.splitQuality(x.quality)[1]
 
     totalWanted = totalQual = totalQualSnatched = 0
     backLogShows = sorted([x for x in sickbeard.showList if showCounts[x.indexerid][Overview.QUAL] + showCounts[x.indexerid][Overview.WANTED] + showCounts[x.indexerid][Overview.SNATCHED]], key=lambda x: x.name)
diff --git a/gui/slick/views/manage_massEdit.mako b/gui/slick/views/manage_massEdit.mako
index 92e880327a4d26c9e90617c40710a8fb7ecffd0a..d9ec05feb990a940383490043fad77d241ee472b 100644
--- a/gui/slick/views/manage_massEdit.mako
+++ b/gui/slick/views/manage_massEdit.mako
@@ -129,20 +129,6 @@
                             </label>
                         </div>
 
-                        <div class="field-pair">
-                            <label for="edit_archive_firstmatch">
-                                <span class="component-title">Archive on first match</span>
-                                <span class="component-desc">
-                                    <select id="edit_archive_firstmatch" name="archive_firstmatch" class="form-control form-control-inline input-sm">
-                                        <option value="keep" ${('', 'selected="selected"')[archive_firstmatch_value is None]}>&lt; Keep &gt;</option>
-                                        <option value="enable" ${('', 'selected="selected"')[archive_firstmatch_value == 1]}>Yes</option>
-                                        <option value="disable" ${('', 'selected="selected"')[archive_firstmatch_value == 0]}>No</option>
-                                    </select><br>
-                                    Archive episode after the first best match is found from your archive quality list.
-                                </span>
-                            </label>
-                        </div>
-
                         <div class="field-pair">
                             <label for="edit_flatten_folders">
                                 <span class="component-title">Season folders (<span class="separator">*</span>)</span>
diff --git a/gui/slick/views/viewlogs.mako b/gui/slick/views/viewlogs.mako
index 3c54383446dab0abaf325a97417b5e9eb9322736..31e2e53815067fb5b72cb965d23e7966ef3b21eb 100644
--- a/gui/slick/views/viewlogs.mako
+++ b/gui/slick/views/viewlogs.mako
@@ -21,26 +21,30 @@ pre {
 % endif
 
 <div class="h2footer pull-right">Minimum logging level to display: <select name="minLevel" id="minLevel" class="form-control form-control-inline input-sm">
-<% levels = reverseNames.keys() %>
-<% levels.sort(lambda x,y: cmp(reverseNames[x], reverseNames[y])) %>
-% for level in levels:
-    % if not sickbeard.DEBUG and (level == 'DEBUG' or level == 'DB'):
-       <% continue %>
-    % endif
-<option value="${reverseNames[level]}" ${('', 'selected="selected"')[minLevel == reverseNames[level]]}>${level.title()}</option>
-% endfor
-</select>
+    <%
+        levels = reverseNames.keys()
+        levels.sort(lambda x, y: cmp(reverseNames[x], reverseNames[y]))
+        if not sickbeard.DEBUG:
+            levels.remove('DEBUG')
+        if not sickbeard.DBDEBUG:
+            levels.remove('DB')
+    %>
+    % for level in levels:
+        <option value="${reverseNames[level]}" ${('', 'selected="selected"')[minLevel == reverseNames[level]]}>${level.title()}</option>
+    % endfor
+    </select>
 
-Filter log by: <select name="logFilter" id="logFilter" class="form-control form-control-inline input-sm">
-% for logNameFilter in sorted(logNameFilters):
-    <option value="${logNameFilter}" ${('', 'selected="selected"')[logFilter == logNameFilter]}>${logNameFilters[logNameFilter]}</option>
-% endfor
-</select>
-Search log by:
-<input type="text" name="logSearch" placeholder="clear to reset" id="logSearch" value="${('', logSearch)[bool(logSearch)]}" class="form-control form-control-inline input-sm" autocapitalize="off" />
+    Filter log by: <select name="logFilter" id="logFilter" class="form-control form-control-inline input-sm">
+    % for logNameFilter in sorted(logNameFilters):
+        <option value="${logNameFilter}" ${('', 'selected="selected"')[logFilter == logNameFilter]}>${logNameFilters[logNameFilter]}</option>
+    % endfor
+    </select>
+    Search log by:
+    <input type="text" name="logSearch" placeholder="clear to reset" id="logSearch" value="${('', logSearch)[bool(logSearch)]}" class="form-control form-control-inline input-sm" autocapitalize="off" />
 </div>
 <br>
-<div class="align-left"><pre>
+<div class="align-left">
+<pre>
 ${logLines}
 </pre>
 </div>
diff --git a/lib/babelfish/country.py b/lib/babelfish/country.py
index ce32d9b50519b057454e452acbc5b59d75dc5295..4c24b52b23b17fb60cf8a131efcc6354aebe5a1c 100644
--- a/lib/babelfish/country.py
+++ b/lib/babelfish/country.py
@@ -82,7 +82,10 @@ class Country(CountryMeta(str('CountryBase'), (object,), {})):
         self.alpha2 = state
 
     def __getattr__(self, name):
-        return country_converters[name].convert(self.alpha2)
+        try:
+            return country_converters[name].convert(self.alpha2)
+        except KeyError:
+            raise AttributeError(name)
 
     def __hash__(self):
         return hash(self.alpha2)
diff --git a/lib/babelfish/tests.py b/lib/babelfish/tests.py
index cf688af929e221587573c5e119ffbeb85442fc8e..beed5469418e08136bb18984029361bdc7df8cc1 100644
--- a/lib/babelfish/tests.py
+++ b/lib/babelfish/tests.py
@@ -283,6 +283,11 @@ class TestLanguage(TestCase, _Py26FixTestCase):
         self.assertTrue(hasattr(Language('fra'), 'alpha2'))
         self.assertFalse(hasattr(Language('bej'), 'alpha2'))
 
+    def test_country_hasattr(self):
+        self.assertTrue(hasattr(Country('US'), 'name'))
+        self.assertTrue(hasattr(Country('FR'), 'alpha2'))
+        self.assertFalse(hasattr(Country('BE'), 'none'))
+
     def test_country(self):
         self.assertEqual(Language('por', 'BR').country, Country('BR'))
         self.assertEqual(Language('eng', Country('US')).country, Country('US'))
diff --git a/lib/fake_useragent/__init__.py b/lib/fake_useragent/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ebc12ee7726ea7e6c78b28b3fdb768b240f369da
--- /dev/null
+++ b/lib/fake_useragent/__init__.py
@@ -0,0 +1 @@
+from .fake import UserAgent  # noqa
diff --git a/lib/fake_useragent/fake.py b/lib/fake_useragent/fake.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a55eb8a777f0f3f1d5727ac0dd23c1f0996eee2
--- /dev/null
+++ b/lib/fake_useragent/fake.py
@@ -0,0 +1,51 @@
+import random
+
+from . import settings
+from .utils import load, load_cached, update
+
+
+class UserAgent(object):
+    def __init__(self, cache=True):
+        if cache:
+            self.data = load_cached()
+        else:
+            self.data = load()
+
+        self.cache = cache
+
+    def update(self, cache=None):
+        if cache is None:
+            cache = self.cache
+
+        if self.cache:
+            update()
+
+        self.__init__(cache=cache)
+
+    def __getitem__(self, attr):
+        return self.__getattr__(attr)
+
+    def __getattr__(self, attr):
+        for replacement in settings.REPLACEMENTS:
+            attr = attr.replace(replacement, '')
+
+        attr = attr.lower()
+
+        if attr == 'random':
+            attr = self.data['randomize'][
+                str(random.randint(0, len(self.data['randomize']) - 1))
+            ]
+        else:
+            for shortcut, value in settings.SHORTCUTS:
+                if attr == shortcut:
+                    attr = value
+                    break
+
+        try:
+            return self.data['browsers'][attr][
+                random.randint(
+                    0, len(self.data['browsers'][attr]) - 1
+                )
+            ]
+        except KeyError:
+            return None
diff --git a/lib/fake_useragent/settings.py b/lib/fake_useragent/settings.py
new file mode 100644
index 0000000000000000000000000000000000000000..da1441c08b7f4e30b4857320833845de6b8dbbe1
--- /dev/null
+++ b/lib/fake_useragent/settings.py
@@ -0,0 +1,27 @@
+import os
+import tempfile
+
+DB = os.path.join(
+    tempfile.gettempdir(), 'fake_useragent.json'
+)
+
+BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
+
+BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/%s/'
+
+BROWSERS_COUNT_LIMIT = 30
+
+REPLACEMENTS = (' ', '_')
+
+SHORTCUTS = (
+    ('internet explorer', 'internetexplorer'),
+    ('ie', 'internetexplorer'),
+    ('msie', 'internetexplorer'),
+    ('google', 'chrome'),
+    ('googlechrome', 'chrome'),
+    ('ff', 'firefox')
+)
+
+OVERRIDES = (
+    ('Internet Explorer', 'IE'),
+)
diff --git a/lib/fake_useragent/utils.py b/lib/fake_useragent/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..31e0c3b5f5a2509816eeee38155e5df345bbbb99
--- /dev/null
+++ b/lib/fake_useragent/utils.py
@@ -0,0 +1,142 @@
+import os
+import re
+
+from . import settings
+
+try:  # Python 2
+    from urllib import urlopen, quote_plus
+except ImportError:  # Python 3
+    from urllib.request import urlopen
+    from urllib.parse import quote_plus
+try:
+    import json
+except ImportError:
+    import simplejson as json
+
+
+def get(url, annex=None):
+    if annex is not None:
+        url = url % (quote_plus(annex), )
+    return urlopen(url).read()
+
+
+def get_browsers():
+    """
+    very very hardcoded/dirty re/split stuff, but no dependencies
+    """
+    html = get(settings.BROWSERS_STATS_PAGE)
+    html = html.decode('windows-1252')
+    html = html.split('<table class="w3-table-all notranslate">')[1]
+    html = html.split('</table>')[0]
+
+    browsers = re.findall(r'\.asp">(.+?)<', html, re.UNICODE)
+
+    for value, override in settings.OVERRIDES:
+        browsers = [
+            value if browser == override else browser
+            for browser in browsers
+        ]
+
+    browsers_statistics = re.findall(
+        r'td\sclass="right">(.+?)\s', html, re.UNICODE
+    )
+
+    # TODO: ensure encoding
+
+    return list(zip(browsers, browsers_statistics))
+
+
+def get_browser_versions(browser):
+    """
+    very very hardcoded/dirty re/split stuff, but no dependencies
+    """
+    html = get(settings.BROWSER_BASE_PAGE, browser)
+    html = html.decode('iso-8859-1')
+    html = html.split('<div id=\'liste\'>')[1]
+    html = html.split('</div>')[0]
+
+    browsers_iter = re.finditer(r'\.php\'>(.+?)</a', html, re.UNICODE)
+
+    count = 0
+
+    browsers = []
+
+    for browser in browsers_iter:
+        if 'more' in browser.group(1).lower():
+            continue
+
+        # TODO: ensure encoding
+        browsers.append(browser.group(1))
+        count += 1
+
+        if count == settings.BROWSERS_COUNT_LIMIT:
+            break
+
+    return browsers
+
+
+def load():
+    browsers_dict = {}
+    randomize_dict = {}
+
+    for item in get_browsers():
+        browser, percent = item
+
+        browser_key = browser
+
+        for replacement in settings.REPLACEMENTS:
+            browser_key = browser_key.replace(replacement, '')
+
+        browser_key = browser_key.lower()
+
+        browsers_dict[browser_key] = get_browser_versions(browser)
+
+        for counter in range(int(float(percent))):
+            randomize_dict[str(len(randomize_dict))] = browser_key
+
+    db = {}
+    db['browsers'] = browsers_dict
+    db['randomize'] = randomize_dict
+
+    return db
+
+
+def write(data):
+    data = json.dumps(data, ensure_ascii=False)
+
+    # no codecs\with for python 2.5
+    f = open(settings.DB, 'w+')
+    f.write(data)
+    f.close()
+
+
+def read():
+    # no codecs\with for python 2.5
+    f = open(settings.DB, 'r')
+    data = f.read()
+    f.close()
+
+    return json.loads(data)
+
+
+def exist():
+    return os.path.isfile(settings.DB)
+
+
+def rm():
+    if exist():
+        os.remove(settings.DB)
+
+
+def update():
+    if exist():
+        rm()
+
+    write(load())
+
+
+def load_cached():
+    if not exist():
+        update()
+
+    return read()
diff --git a/lib/guessit/__version__.py b/lib/guessit/__version__.py
index e841082f9a196a6d78b3b8cc54a8bbaba001f425..f34e2e905544c7e0bd455fd307344fdad27df1fe 100644
--- a/lib/guessit/__version__.py
+++ b/lib/guessit/__version__.py
@@ -17,4 +17,4 @@
 # You should have received a copy of the Lesser GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 #
-__version__ = '0.11.1.dev0'
+__version__ = '1.0.2'
diff --git a/lib/guessit/language.py b/lib/guessit/language.py
index d45a1ef23231c8671057db5d2ace1a72b57dd115..b73361030f47be69ef994338feae5ad647bf9ab6 100644
--- a/lib/guessit/language.py
+++ b/lib/guessit/language.py
@@ -237,7 +237,7 @@ def find_possible_languages(string, allowed_languages=None):
                 key = 'subtitleLanguage'
         for suffix in subtitle_suffixes:
             if lang_word.endswith(suffix):
-                lang_word = lang_word[:len(suffix)]
+                lang_word = lang_word[:-len(suffix)]
                 key = 'subtitleLanguage'
         for prefix in lang_prefixes:
             if lang_word.startswith(prefix):
diff --git a/lib/guessit/test/1MB b/lib/guessit/test/1MB
deleted file mode 100644
index 66d50a84dfddf2af162389d19170d62caa342668..0000000000000000000000000000000000000000
Binary files a/lib/guessit/test/1MB and /dev/null differ
diff --git a/lib/subliminal/providers/legendastv.py b/lib/subliminal/providers/legendastv.py
index c2a66a2d1216c6950b5a8a0cd1ae058a516b7c06..888f5cad2c4fbbda4b7acd1e0bc9a69dffd62feb 100644
--- a/lib/subliminal/providers/legendastv.py
+++ b/lib/subliminal/providers/legendastv.py
@@ -1,15 +1,14 @@
 # -*- coding: utf-8 -*-
-import json
 import logging
 import os
 import re
+import io
 
 from babelfish import Language, language_converters
 from datetime import datetime
 from guessit import guess_file_info
-from rarfile import RarFile, is_rarfile
+import rarfile
 from requests import Session
-from tempfile import NamedTemporaryFile
 from zipfile import ZipFile, is_zipfile
 
 from . import ParserBeautifulSoup, Provider
@@ -65,7 +64,7 @@ class LegendasTvProvider(Provider):
     languages = {Language.fromlegendastv(l) for l in language_converters['legendastv'].codes}
     video_types = (Episode, Movie)
     server_url = 'http://legendas.tv'
-    word_split_re = re.compile('(\w+)', re.IGNORECASE)
+    word_split_re = re.compile(r'(\w+)', re.IGNORECASE)
 
     def __init__(self, username=None, password=None):
         if username is not None and password is None or username is None and password is not None:
@@ -82,8 +81,12 @@ class LegendasTvProvider(Provider):
         if self.username is not None and self.password is not None:
             logger.info('Logging in')
             data = {'_method': 'POST', 'data[User][username]': self.username, 'data[User][password]': self.password}
-            r = self.session.post('%s/login' % self.server_url, data, allow_redirects=False, timeout=TIMEOUT)
-            r.raise_for_status()
+            try:
+                r = self.session.post('%s/login' % self.server_url, data, allow_redirects=False, timeout=TIMEOUT)
+                r.raise_for_status()
+            except Exception as e:
+                logger.error('Could not login. Error: %r' % e)
+                return
 
             soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
             auth_error = soup.find('div', {'class': 'alert-error'}, text=re.compile(u'.*Usuário ou senha inválidos.*'))
@@ -98,8 +101,11 @@ class LegendasTvProvider(Provider):
         # logout
         if self.logged_in:
             logger.info('Logging out')
-            r = self.session.get('%s/users/logout' % self.server_url, timeout=TIMEOUT)
-            r.raise_for_status()
+            try:
+                r = self.session.get('%s/users/logout' % self.server_url, timeout=TIMEOUT)
+                r.raise_for_status()
+            except Exception as e:
+                logger.error('Error logging out. Error: %r' % e)
             logger.debug('Logged out')
             self.logged_in = False
 
@@ -160,7 +166,7 @@ class LegendasTvProvider(Provider):
             return expected_name == actual_name
 
         words = self.word_split_re.findall(expected_name)
-        name_regex_re = re.compile('(.*' + '\W+'.join(words) + '.*)', re.IGNORECASE)
+        name_regex_re = re.compile('(.*' + r'\W+'.join(words) + '.*)', re.IGNORECASE)
 
         return name_regex_re.match(actual_name)
 
@@ -176,10 +182,17 @@ class LegendasTvProvider(Provider):
         :rtype: : ``list`` of ``dict``
         """
 
+        candidates = []
+
         keyword = params.get('title') if params.get('type') == 'movie' else params.get('series')
         logger.info('Searching titles using the keyword %s', keyword)
-        r = self.session.get('%s/legenda/sugestao/%s' % (self.server_url, keyword), timeout=TIMEOUT)
-        r.raise_for_status()
+        try:
+            r = self.session.get('%s/legenda/sugestao/%s' % (self.server_url, keyword), timeout=TIMEOUT)
+            r.raise_for_status()
+            results = r.json()
+        except Exception as e:
+            logger.error('Could not search for %s. Error: %r' % (keyword, e))
+            return candidates
 
         # get the shows/movies out of the suggestions.
         # json sample:
@@ -230,8 +243,6 @@ class LegendasTvProvider(Provider):
         #  imdb_id: Sometimes it appears as a number and sometimes as a string prefixed with tt
         #  temporada: Sometimes is ``null`` and season information should be extracted from dsc_nome_br
 
-        results = json.loads(r.text)
-
         # type, title, series, season, year follow guessit properties names
         mapping = dict(
             id='id_filme',
@@ -252,12 +263,11 @@ class LegendasTvProvider(Provider):
         }
 
         # Regex to extract the season number. e.g.: 3\u00aa Temporada, 1a Temporada, 2nd Season
-        season_re = re.compile('.*? - (\d{1,2}).*?((emporada)|(Season))', re.IGNORECASE)
+        season_re = re.compile(r'.*? - (\d{1,2}).*?(?:(temporada|season|series))', re.IGNORECASE)
 
         # Regex to extract the IMDB id. e.g.: tt02342
-        imdb_re = re.compile('t{0,2}(\d+)')
+        imdb_re = re.compile(r't{0,2}(\d+)')
 
-        candidates = []
         for result in results:
             entry = result['_source']
             item = {k: entry.get(v) for k, v in mapping.items()}
@@ -300,16 +310,16 @@ class LegendasTvProvider(Provider):
         language_code = language.legendastv
 
         # Regex to extract rating information (number of downloads and rate). e.g.: 12345 downloads, nota 10
-        rating_info_re = re.compile('(\d*) downloads, nota (\d{0,2})')
+        rating_info_re = re.compile(r'(\d*) downloads, nota (\d{0,2})')
 
         # Regex to extract the last update timestamp. e.g.: 25/12/2014 - 19:25
-        timestamp_info_re = re.compile('(\d{1,2}/\d{1,2}/\d{2,4} \- \d{1,2}:\d{1,2})')
+        timestamp_info_re = re.compile(r'(\d{1,2}/\d{1,2}/\d{2,4} - \d{1,2}:\d{1,2})')
 
         # Regex to identify the 'pack' suffix that candidates might have. e.g.: (p)Breaking.Bad.S05.HDTV.x264
-        pack_name_re = re.compile('^\(p\)')
+        pack_name_re = re.compile(r'^\(p\)')
 
         # Regex to extract the subtitle_id from the 'href'. e.g.: /download/560014472eb4d/foo/bar
-        subtitle_href_re = re.compile('/download/(\w+)/.+')
+        subtitle_href_re = re.compile(r'/download/(\w+)/.+')
 
         subtitles = []
         # loop over matched movies/shows
@@ -320,8 +330,12 @@ class LegendasTvProvider(Provider):
             # loop over paginated results
             while page_url:
                 # query the server
-                r = self.session.get(page_url, timeout=TIMEOUT)
-                r.raise_for_status()
+                try:
+                    r = self.session.get(page_url, timeout=TIMEOUT)
+                    r.raise_for_status()
+                except Exception as e:
+                    logger.error('Could not access URL: %s. Error: %r' % (page_url, e))
+                    return subtitles
 
                 soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
                 div_tags = soup.find_all('div', {'class': 'f_left'})
@@ -432,14 +446,11 @@ class LegendasTvProvider(Provider):
     def _uncompress(self, subtitle_id, timestamp, function, *args, **kwargs):
         content = self.download_content(subtitle_id, timestamp)
 
-        # Download content might be a rar file (most common) or a zip.
-        # Unfortunately, rarfile module only works with files (no in-memory streams)
-        tmp = NamedTemporaryFile()
+        tmp = io.BytesIO(content)
         try:
-            tmp.write(content)
-            tmp.flush()
-
-            cf = RarFile(tmp.name) if is_rarfile(tmp.name) else (ZipFile(tmp.name) if is_zipfile(tmp.name) else None)
+            rarfile.PATH_SEP = '/'
+            rarfile.NEED_COMMENTS = 0
+            cf = rarfile.RarFile(io.BytesIO(content)) if rarfile.is_rarfile(tmp) else (ZipFile(tmp) if is_zipfile(tmp) else None)
 
             return function(cf, *args, **kwargs) if cf else None
         finally:
@@ -458,10 +469,13 @@ class LegendasTvProvider(Provider):
         :rtype : ``bytes``
         """
         logger.debug('Downloading subtitle_id %s. Last update on %s' % (subtitle_id, timestamp))
-        r = self.session.get('%s/downloadarquivo/%s' % (self.server_url, subtitle_id), timeout=TIMEOUT)
-        r.raise_for_status()
-
-        return r.content
+        try:
+            r = self.session.get('%s/downloadarquivo/%s' % (self.server_url, subtitle_id), timeout=TIMEOUT)
+            r.raise_for_status()
+            return r.content
+        except Exception as e:
+            logger.error('Error downloading subtitle_id %s. Error: %r' % (subtitle_id, e))
+            return
 
     def download_subtitle(self, subtitle):
         subtitle.content = self.extract_subtitle(subtitle.subtitle_id, subtitle.name, subtitle.timestamp)
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 75508c6998827408151750b16811abc869ae1849..6ebb0f92be55e53172deef461f5360e5e530cc43 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -36,7 +37,7 @@ from sickbeard import metadata
 from sickbeard import providers
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
     naming_ep_type
-from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
+from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, auto_postprocessor, \
     subtitles, traktChecker, numdict
 from sickbeard import db
 from sickbeard import helpers
@@ -47,17 +48,19 @@ from sickbeard import logger
 from sickbeard import naming
 from sickbeard import dailysearcher
 from sickbeard.indexers import indexer_api
-from sickbeard.indexers.indexer_exceptions import indexer_shownotfound, indexer_showincomplete, indexer_exception, indexer_error, \
-    indexer_episodenotfound, indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts
+from sickbeard.indexers.indexer_exceptions import indexer_shownotfound, indexer_showincomplete, indexer_exception, \
+    indexer_error, indexer_episodenotfound, indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, \
+    indexerExcepts
 from sickbeard.common import SD
 from sickbeard.common import SKIPPED
 from sickbeard.common import WANTED
+from sickbeard.providers.rsstorrent import TorrentRssProvider
 from sickbeard.databases import mainDB, cache_db, failed_db
+from sickbeard.providers.newznab import NewznabProvider
 
 from sickrage.helper.encoding import ek
 from sickrage.helper.exceptions import ex
 from sickrage.providers.GenericProvider import GenericProvider
-from sickrage.show.Show import Show
 from sickrage.system.Shutdown import Shutdown
 
 from configobj import ConfigObj
@@ -149,7 +152,7 @@ started = False
 ACTUAL_LOG_DIR = None
 LOG_DIR = None
 LOG_NR = 5
-LOG_SIZE = 1048576
+LOG_SIZE = 1
 
 SOCKET_TIMEOUT = None
 
@@ -195,6 +198,7 @@ TRASH_REMOVE_SHOW = False
 TRASH_ROTATE_LOGS = False
 SORT_ARTICLE = False
 DEBUG = False
+DBDEBUG = False
 DISPLAY_ALL_SEASONS = True
 DEFAULT_PAGE = 'home'
 
@@ -217,7 +221,6 @@ INDEXER_DEFAULT = None
 INDEXER_TIMEOUT = None
 SCENE_DEFAULT = False
 ANIME_DEFAULT = False
-ARCHIVE_DEFAULT = False
 PROVIDER_ORDER = []
 
 NAMING_MULTI_EP = False
@@ -568,6 +571,7 @@ __INITIALIZED__ = False
 
 NEWZNAB_DATA = None
 
+
 def get_backlog_cycle_time():
     cycletime = DAILYSEARCH_FREQUENCY * 2 + 7
     return max([cycletime, 720])
@@ -615,10 +619,10 @@ def initialize(consoleLogging=True):
             METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, TRACKERS_LIST, IGNORED_SUBS_LIST, REQUIRE_WORDS, CALENDAR_UNPROTECTED, CALENDAR_ICONS, NO_RESTART, \
             USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, SUBTITLES_MULTI, SUBTITLES_DOWNLOAD_IN_PP, EMBEDDED_SUBTITLES_ALL, SUBTITLES_EXTRA_SCRIPTS, SUBTITLES_PERFECT_MATCH, subtitlesFinderScheduler, \
             SUBTITLES_HEARING_IMPAIRED, ADDIC7ED_USER, ADDIC7ED_PASS, LEGENDASTV_USER, LEGENDASTV_PASS, OPENSUBTITLES_USER, OPENSUBTITLES_PASS, \
-            USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, DEBUG, DEFAULT_PAGE, PROXY_SETTING, PROXY_INDEXERS, \
+            USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, DEBUG, DBDEBUG, DEFAULT_PAGE, PROXY_SETTING, PROXY_INDEXERS, \
             AUTOPOSTPROCESSER_FREQUENCY, SHOWUPDATE_HOUR, \
             ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
-            ANIME_SPLIT_HOME, SCENE_DEFAULT, ARCHIVE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_USERNAME, GIT_PASSWORD, \
+            ANIME_SPLIT_HOME, SCENE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_USERNAME, GIT_PASSWORD, \
             GIT_AUTOISSUES, DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT
 
         if __INITIALIZED__:
@@ -661,6 +665,7 @@ def initialize(consoleLogging=True):
 
         # debugging
         DEBUG = bool(check_setting_int(CFG, 'General', 'debug', 0))
+        DBDEBUG = bool(check_setting_int(CFG, 'General', 'dbdebug', 0))
 
         DEFAULT_PAGE = check_setting_str(CFG, 'General', 'default_page', 'home')
         if DEFAULT_PAGE not in ('home', 'schedule', 'history', 'news', 'IRC'):
@@ -669,14 +674,16 @@ def initialize(consoleLogging=True):
         ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
         LOG_DIR = ek(os.path.normpath, ek(os.path.join, DATA_DIR, ACTUAL_LOG_DIR))
         LOG_NR = check_setting_int(CFG, 'General', 'log_nr', 5)  # Default to 5 backup file (sickrage.log.x)
-        LOG_SIZE = check_setting_int(CFG, 'General', 'log_size', 1048576)  # Default to max 1MB per logfile
+        LOG_SIZE = check_setting_int(CFG, 'General', 'log_size', 1)  # Default to max 1MB per logfile
+        if LOG_SIZE > 100:
+            LOG_SIZE = 1
         fileLogging = True
         if not helpers.makeDir(LOG_DIR):
             sys.stderr.write("!!! No log folder, logging to screen only!\n")
             fileLogging = False
 
         # init logging
-        logger.initLogging(consoleLogging=consoleLogging, fileLogging=fileLogging, debugLogging=DEBUG)
+        logger.initLogging(consoleLogging=consoleLogging, fileLogging=fileLogging, debugLogging=DEBUG, databaseLogging=DBDEBUG)
 
         # github api
         try:
@@ -759,7 +766,6 @@ def initialize(consoleLogging=True):
                     except Exception as e:
                         logger.log(u"Restore: Unable to remove the cache/{0} directory: {1}".format(cleanupDir, ex(e)), logger.WARNING)
 
-
         GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick')
 
         THEME_NAME = check_setting_str(CFG, 'GUI', 'theme_name', 'dark')
@@ -817,7 +823,7 @@ def initialize(consoleLogging=True):
 
         ENABLE_HTTPS = bool(check_setting_int(CFG, 'General', 'enable_https', 0))
 
-        NOTIFY_ON_LOGIN  = bool(check_setting_int(CFG, 'General', 'notify_on_login', 0))
+        NOTIFY_ON_LOGIN = bool(check_setting_int(CFG, 'General', 'notify_on_login', 0))
 
         HTTPS_CERT = check_setting_str(CFG, 'General', 'https_cert', 'server.crt')
         HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', 'server.key')
@@ -839,7 +845,6 @@ def initialize(consoleLogging=True):
         INDEXER_TIMEOUT = check_setting_int(CFG, 'General', 'indexer_timeout', 20)
         ANIME_DEFAULT = bool(check_setting_int(CFG, 'General', 'anime_default', 0))
         SCENE_DEFAULT = bool(check_setting_int(CFG, 'General', 'scene_default', 0))
-        ARCHIVE_DEFAULT = bool(check_setting_int(CFG, 'General', 'archive_default', 0))
 
         PROVIDER_ORDER = check_setting_str(CFG, 'General', 'provider_order', '').split()
 
@@ -1224,10 +1229,10 @@ def initialize(consoleLogging=True):
         providerList = providers.makeProviderList()
 
         NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
-        newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
+        newznabProviderList = NewznabProvider.get_providers_list(NEWZNAB_DATA)
 
         TORRENTRSS_DATA = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '')
-        torrentRssProviderList = providers.getTorrentRssProviderList(TORRENTRSS_DATA)
+        torrentRssProviderList = TorrentRssProvider.get_providers_list(TORRENTRSS_DATA)
 
         # dynamically load provider settings
         for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
@@ -1236,7 +1241,8 @@ def initialize(consoleLogging=True):
                                                                 curTorrentProvider.get_id(), 0))
             if hasattr(curTorrentProvider, 'custom_url'):
                 curTorrentProvider.custom_url = check_setting_str(CFG, curTorrentProvider.get_id().upper(),
-                                                                curTorrentProvider.get_id() + '_custom_url', '', censor_log=True)
+                                                                  curTorrentProvider.get_id() + '_custom_url',
+                                                                  '', censor_log=True)
             if hasattr(curTorrentProvider, 'api_key'):
                 curTorrentProvider.api_key = check_setting_str(CFG, curTorrentProvider.get_id().upper(),
                                                                curTorrentProvider.get_id() + '_api_key', '', censor_log=True)
@@ -1433,7 +1439,7 @@ def initialize(consoleLogging=True):
                                                     run_delay=update_interval)
 
         # processors
-        autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
+        autoPostProcesserScheduler = scheduler.Scheduler(auto_postprocessor.PostProcessor(),
                                                          cycleTime=datetime.timedelta(
                                                              minutes=AUTOPOSTPROCESSER_FREQUENCY),
                                                          threadName="POSTPROCESSER",
@@ -1630,6 +1636,7 @@ def save_config():
     new_config['General']['anon_redirect'] = ANON_REDIRECT
     new_config['General']['api_key'] = API_KEY
     new_config['General']['debug'] = int(DEBUG)
+    new_config['General']['dbdebug'] = int(DBDEBUG)
     new_config['General']['default_page'] = DEFAULT_PAGE
     new_config['General']['enable_https'] = int(ENABLE_HTTPS)
     new_config['General']['notify_on_login'] = int(NOTIFY_ON_LOGIN)
@@ -1660,7 +1667,6 @@ def save_config():
     new_config['General']['indexer_timeout'] = int(INDEXER_TIMEOUT)
     new_config['General']['anime_default'] = int(ANIME_DEFAULT)
     new_config['General']['scene_default'] = int(SCENE_DEFAULT)
-    new_config['General']['archive_default'] = int(ARCHIVE_DEFAULT)
     new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER)
     new_config['General']['version_notify'] = int(VERSION_NOTIFY)
     new_config['General']['auto_update'] = int(AUTO_UPDATE)
diff --git a/sickbeard/autoPostProcesser.py b/sickbeard/auto_postprocessor.py
similarity index 71%
rename from sickbeard/autoPostProcesser.py
rename to sickbeard/auto_postprocessor.py
index b66e2562796ee405d01c1c0db07fb29e76dbbc52..b9a281fbd784c1f4d036e6f51cc96e1ce655e22f 100644
--- a/sickbeard/autoPostProcesser.py
+++ b/sickbeard/auto_postprocessor.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -25,34 +26,34 @@ from sickbeard import processTV
 from sickrage.helper.encoding import ek
 
 
-class PostProcesser():
+class PostProcessor(object):
     def __init__(self):
         self.lock = threading.Lock()
         self.amActive = False
 
     def run(self, force=False):
         """
-        TODO: Rename class to PostProcessor (classname contains a typo)
         Runs the postprocessor
-        :param force: Forces postprocessing run (reserved for future use)
+
+        :param force: Forces postprocessing run
         :return: Returns when done without a return state/code
         """
         self.amActive = True
 
         if not ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
-            logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist",
-                       logger.ERROR)
+            logger.log(u"Automatic post-processing attempted but directory doesn't exist: %s" %
+                       sickbeard.TV_DOWNLOAD_DIR, logger.ERROR)
             self.amActive = False
             return
 
-        if not ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR):
-            logger.log(
-                u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)",
-                logger.ERROR)
+        if not (force or ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR)):
+            logger.log(u"Automatic post-processing attempted but directory is relatve "
+                       u"(and probably not what you really want to process): %s" %
+                       sickbeard.TV_DOWNLOAD_DIR, logger.ERROR)
             self.amActive = False
             return
 
-        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
+        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR, force=force)
 
         self.amActive = False
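
One behavioural note on the renamed processor: run(force=True) now both skips the relative-path guard and forwards the flag to processTV.processDir, so a manual run can process a directory that the scheduled run would refuse. A hedged usage sketch (it assumes sickbeard is initialised and TV_DOWNLOAD_DIR is configured):

    # Illustrative only: trigger a forced post-processing pass by hand.
    from sickbeard import auto_postprocessor

    processor = auto_postprocessor.PostProcessor()
    processor.run(force=True)
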
 
diff --git a/sickbeard/browser.py b/sickbeard/browser.py
index 7cba8f7927a1e41fb5d46a35ff907616770600d2..e4853f460bb968aa57fe4e7fc45d9067b46ab066 100644
--- a/sickbeard/browser.py
+++ b/sickbeard/browser.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io/
 # Git: https://github.com/SickRage/SickRage.git
@@ -62,7 +63,8 @@ def getFileList(path, includeFiles):
             'name': filename,
             'path': fullFilename
         }
-        if not isDir: entry['isFile'] = True
+        if not isDir:
+            entry['isFile'] = True
         fileList.append(entry)
 
     return fileList
diff --git a/sickbeard/bs4_parser.py b/sickbeard/bs4_parser.py
index 4bba8eaea295df1eb4863d9580e70dd18cdb788e..43afa8bd65f8d99475bbb6f26b6cf6e7c77b6459 100644
--- a/sickbeard/bs4_parser.py
+++ b/sickbeard/bs4_parser.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: The SickRage Dev Team
 # URL: https://sickrage.github.io
 # Repository: https://github.com/SickRage/SickRage.git
@@ -19,7 +20,8 @@
 
 from bs4 import BeautifulSoup
 
-class BS4Parser:
+
+class BS4Parser(object):
     def __init__(self, *args, **kwargs):
         self.soup = BeautifulSoup(*args, **kwargs)
 
@@ -28,4 +30,4 @@ class BS4Parser:
 
     def __exit__(self, exc_ty, exc_val, tb):
         self.soup.clear(True)
-        self.soup = None
\ No newline at end of file
+        self.soup = None
diff --git a/sickbeard/classes.py b/sickbeard/classes.py
index ee3d116631ca8758ca81533dd26976c68c7349b0..f59265c139258e431b46319427c4357f81c19380 100644
--- a/sickbeard/classes.py
+++ b/sickbeard/classes.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io/
 # Git: https://github.com/SickRage/SickRage.git
diff --git a/sickbeard/clients/generic.py b/sickbeard/clients/generic.py
index 34a5d7ee08f5dce9432bcaa141435dfaa283b8b3..bcea0118ceec9836ce9ececa6917ce590555d03b 100644
--- a/sickbeard/clients/generic.py
+++ b/sickbeard/clients/generic.py
@@ -9,6 +9,7 @@ import sickbeard
 from sickbeard import logger
 from bencode import bencode, bdecode
 import requests
+import cookielib
 from bencode.BTL import BTFailure
 from sickrage.helper.common import http_code_description
 
@@ -28,8 +29,9 @@ class GenericClient(object):
         self.last_time = time.time()
         self.session = requests.Session()
         self.session.auth = (self.username, self.password)
+        self.session.cookies = cookielib.CookieJar()
 
-    def _request(self, method='get', params=None, data=None, files=None):
+    def _request(self, method='get', params=None, data=None, files=None, cookies=None):
 
         if time.time() > self.last_time + 1800 or not self.auth:
             self.last_time = time.time()
@@ -41,9 +43,10 @@ class GenericClient(object):
 
         if not self.auth:
             logger.log(self.name + u': Authentication Failed', logger.WARNING)
+
             return False
         try:
-            self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
+            self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files, cookies=cookies,
                                                                   timeout=120, verify=False)
         except requests.exceptions.ConnectionError as e:
             logger.log(self.name + u': Unable to connect ' + str(e), logger.ERROR)
diff --git a/sickbeard/clients/qbittorrent_client.py b/sickbeard/clients/qbittorrent_client.py
index 962fd22ac7747597c34a9f33de2fd188e97457e6..a89f7ae6057d15a7f786aabb3c5f48f032520337 100644
--- a/sickbeard/clients/qbittorrent_client.py
+++ b/sickbeard/clients/qbittorrent_client.py
@@ -19,56 +19,91 @@
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
 import sickbeard
+from sickbeard import logger
 from sickbeard.clients.generic import GenericClient
 from requests.auth import HTTPDigestAuth
 
 
 class qbittorrentAPI(GenericClient):
+
     def __init__(self, host=None, username=None, password=None):
 
         super(qbittorrentAPI, self).__init__('qbittorrent', host, username, password)
 
         self.url = self.host
         self.session.auth = HTTPDigestAuth(self.username, self.password)
-
-    def _get_auth(self):
-
+
+    @property
+    def api(self):
+        self.url = self.host + 'version/api'
         try:
-            self.response = self.session.get(self.host, verify=False)
-            self.auth = self.response.content
-        except Exception:
-            return None
-
+            version = int(self.session.get(self.url, verify=sickbeard.TORRENT_VERIFY_CERT).content)
+        except Exception:
+            version = 1
+        return version
+
+    def _get_auth(self):
+
+        if self.api > 1:
+            self.url = self.host + 'login'
+            data = {'username': self.username, 'password': self.password}
+            try:
+                self.response = self.session.post(self.url, data=data)
+            except Exception:
+                return None
+
+        else:
+            try:
+                self.response = self.session.get(self.host, verify=sickbeard.TORRENT_VERIFY_CERT)
+                self.auth = self.response.content
+            except Exception:
+                return None
+
+        self.session.cookies = self.response.cookies
+        self.auth = self.response.content
+
         return self.auth if not self.response.status_code == 404 else None
 
     def _add_torrent_uri(self, result):
 
-        self.url = self.host+'command/download'
+        self.url = self.host + 'command/download'
         data = {'urls': result.url}
-        return self._request(method='post', data=data)
+        return self._request(method='post', data=data, cookies=self.session.cookies)
 
     def _add_torrent_file(self, result):
 
-        self.url = self.host+'command/upload'
+        self.url = self.host + 'command/upload'
         files = {'torrents': (result.name + '.torrent', result.content)}
-        return self._request(method='post', files=files)
+        return self._request(method='post', files=files, cookies=self.session.cookies)
+
+    def _set_torrent_label(self, result):
+
+        label = sickbeard.TORRENT_LABEL
+        if result.show.is_anime:
+            label = sickbeard.TORRENT_LABEL_ANIME
+
+        if self.api > 6:
+            self.url = self.host + 'command/setLabel'
+            data = {'hashes': result.hash, 'label': label}
+            return self._request(method='post', data=data, cookies=self.session.cookies)
+        return None
 
     def _set_torrent_priority(self, result):
 
-        self.url = self.host+'command/decreasePrio '
+        self.url = self.host + 'command/decreasePrio '
         if result.priority == 1:
-            self.url = self.host+'command/increasePrio'
+            self.url = self.host + 'command/increasePrio'
 
         data = {'hashes': result.hash}
-        return self._request(method='post', data=data)
+        return self._request(method='post', data=data, cookies=self.session.cookies)
 
     def _set_torrent_pause(self, result):
 
-        self.url = self.host+'command/resume'
+        self.url = self.host + 'command/resume'
         if sickbeard.TORRENT_PAUSED:
-            self.url = self.host+'command/pause'
+            self.url = self.host + 'command/pause'
 
         data = {'hash': result.hash}
-        return self._request(method='post', data=data)
+        return self._request(method='post', data=data, cookies=self.session.cookies)
 
 api = qbittorrentAPI()
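
The qBittorrent client above now bridges two WebUI generations: API 1 keeps HTTP digest auth against the root URL, while API >= 2 posts credentials to /login and carries the returned cookie on every later command (setting labels additionally needs API > 6); clients/generic.py grows a matching cookies parameter on _request to pass it along. A rough, standalone sketch of that probe-then-login flow, with a made-up host and credentials:

    # Illustrative only: detect the qBittorrent WebUI API version, then pick the login style.
    import requests
    from requests.auth import HTTPDigestAuth

    HOST = 'http://localhost:8080/'          # assumed WebUI address
    USERNAME, PASSWORD = 'admin', 'adminadmin'

    session = requests.Session()
    try:
        api_version = int(session.get(HOST + 'version/api', verify=False).content)
    except (requests.RequestException, ValueError):
        api_version = 1                      # old WebUI without the version endpoint

    if api_version > 1:
        # newer WebUI: form login; auth is carried by the SID cookie afterwards
        response = session.post(HOST + 'login', data={'username': USERNAME, 'password': PASSWORD})
    else:
        # legacy WebUI: digest auth on every request
        session.auth = HTTPDigestAuth(USERNAME, PASSWORD)
        response = session.get(HOST, verify=False)

    print(response.status_code)
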
diff --git a/sickbeard/clients/rtorrent_client.py b/sickbeard/clients/rtorrent_client.py
index 4bce5960d2b65f9be4fea1a33c0a018cc6b28286..a7bf52aaae5a53eaa6d8da590e39553a19d503a2 100644
--- a/sickbeard/clients/rtorrent_client.py
+++ b/sickbeard/clients/rtorrent_client.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: jkaberg <joel.kaberg@gmail.com>, based on fuzemans work (https://github.com/RuudBurger/CouchPotatoServer/blob/develop/couchpotato/core/downloaders/rtorrent/main.py)
 # URL: http://code.google.com/p/sickbeard/
 #
diff --git a/sickbeard/common.py b/sickbeard/common.py
index e416ea55a17f77e0df07aa6a7b7a1b38d0ee086c..bea39f1475958b9d62123168d078ea9be7a9e350 100644
--- a/sickbeard/common.py
+++ b/sickbeard/common.py
@@ -28,7 +28,6 @@ Common interface for Quality and Status
 import operator
 from os import path
 import platform
-from random import shuffle
 import re
 import uuid
 
@@ -36,31 +35,20 @@ from hachoir_parser import createParser  # pylint: disable=import-error
 from hachoir_metadata import extractMetadata  # pylint: disable=import-error
 from hachoir_core.log import log  # pylint: disable=import-error
 
+from fake_useragent import settings as UA_SETTINGS, UserAgent
 from sickbeard.numdict import NumDict
 from sickrage.helper.encoding import ek
 
-SPOOF_USER_AGENT = False
-
 # If some provider has an issue with functionality of SR, other than user agents, it's best to come talk to us rather than block.
 # It is no different than us going to a provider if we have questions or issues. Be a team player here.
 # This is disabled, was only added for testing, and has no config.ini or web ui setting. To enable, set SPOOF_USER_AGENT = True
-user_agents = [
-    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36',
-    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36',
-    'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0',
-    'Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0',
-    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A',
-    'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25',
-    'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko',
-    'Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'
-]
-
+SPOOF_USER_AGENT = False
 INSTANCE_ID = str(uuid.uuid1())
 USER_AGENT = ('SickRage/(' + platform.system() + '; ' + platform.release() + '; ' + INSTANCE_ID + ')')
-
+UA_SETTINGS.DB = ek(path.abspath, ek(path.join, ek(path.dirname, __file__), '../lib/fake_useragent/ua.json'))
+UA_POOL = UserAgent()
 if SPOOF_USER_AGENT:
-    shuffle(user_agents)
-    USER_AGENT = user_agents[0]
+    USER_AGENT = UA_POOL.random
 
 cpu_presets = {
     'HIGH': 5,
@@ -87,8 +75,8 @@ notifyStrings = NumDict({
     NOTIFY_SUBTITLE_DOWNLOAD: "Subtitle Download Finished",
     NOTIFY_GIT_UPDATE: "SickRage Updated",
     NOTIFY_GIT_UPDATE_TEXT: "SickRage Updated To Commit#: ",
-    NOTIFY_LOGIN : "SickRage new login",
-    NOTIFY_LOGIN_TEXT : "New login from IP: {0}. http://geomaplookup.net/?ip={0}"
+    NOTIFY_LOGIN: "SickRage new login",
+    NOTIFY_LOGIN_TEXT: "New login from IP: {0}. http://geomaplookup.net/?ip={0}"
 })
 
 # Episode statuses
@@ -144,6 +132,7 @@ class Quality(object):
     UNKNOWN = 1 << 15  # 32768
 
     qualityStrings = NumDict({
+        None: "None",
         NONE: "N/A",
         UNKNOWN: "Unknown",
         SDTV: "SDTV",
@@ -158,6 +147,7 @@ class Quality(object):
     })
 
     sceneQualityStrings = NumDict({
+        None: "None",
         NONE: "N/A",
         UNKNOWN: "Unknown",
         SDTV: "HDTV",
@@ -178,6 +168,7 @@ class Quality(object):
     })
 
     cssClassStrings = NumDict({
+        None: "None",
         NONE: "N/A",
         UNKNOWN: "Unknown",
         SDTV: "SDTV",
@@ -213,8 +204,11 @@ class Quality(object):
         """
         to_return = {}
         for quality in Quality.qualityStrings:
-            to_return[Quality.compositeStatus(status, quality)] = Quality.statusPrefixes[status] + " (" + \
-                                                           Quality.qualityStrings[quality] + ")"
+            if quality is not None:
+                stat = Quality.statusPrefixes[status]
+                qual = Quality.qualityStrings[quality]
+                comp = Quality.compositeStatus(status, quality)
+                to_return[comp] = '%s (%s)' % (stat, qual)
         return to_return
 
     @staticmethod
@@ -229,9 +223,13 @@ class Quality(object):
 
     @staticmethod
     def splitQuality(quality):
+        if quality is None:
+            quality = Quality.NONE
         any_qualities = []
         best_qualities = []
         for cur_qual in Quality.qualityStrings:
+            if cur_qual is None:
+                cur_qual = Quality.NONE
             if cur_qual & quality:
                 any_qualities.append(cur_qual)
             if cur_qual << 16 & quality:
@@ -402,6 +400,8 @@ class Quality(object):
 
     @staticmethod
     def compositeStatus(status, quality):
+        if quality is None:
+            quality = Quality.NONE
         return status + 100 * quality
 
     @staticmethod
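
Two things worth calling out in common.py: user-agent spoofing now draws from a fake_useragent pool that is pointed at a locally cached ua.json (so nothing is fetched at import time), and Quality.compositeStatus/splitQuality coerce a None quality to Quality.NONE so statuses stored without a quality no longer break the lookups. A minimal, hedged sketch of the user-agent part (the path is illustrative):

    # Sketch: point fake_useragent at a local database file and draw a random UA string.
    import os
    from fake_useragent import settings as ua_settings, UserAgent

    # assumed local cache; SickRage ships one under lib/fake_useragent/
    ua_settings.DB = os.path.abspath(os.path.join(os.path.dirname(__file__), 'ua.json'))

    ua_pool = UserAgent()
    print(ua_pool.random)    # e.g. a Chrome or Firefox user agent
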
diff --git a/sickbeard/config.py b/sickbeard/config.py
index 5125fa4c82fef6dc8620c4f68a8a0eb25ca23360..1c12d9ef855f7ddc2178a1adb3162580d7423a94 100644
--- a/sickbeard/config.py
+++ b/sickbeard/config.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -497,7 +498,7 @@ def clean_url(url):
         scheme, netloc, path, query, fragment = urlparse.urlsplit(url, 'http')
 
         if not path:
-            path = path + '/'
+            path += '/'
 
         cleaned_url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))
 
@@ -606,7 +607,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, silent=True, censor_
     return my_val
 
 
-class ConfigMigrator():
+class ConfigMigrator(object):
     def __init__(self, config_obj):
         """
         Initializes a config migrator that can take the config from the version indicated in the config
diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py
index c101340580e88da1775d772e23082dba76a5bc3d..82a70e0d6f4dc6d777ea75a34b4acc166b8d01db 100644
--- a/sickbeard/dailysearcher.py
+++ b/sickbeard/dailysearcher.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -29,7 +30,7 @@ from sickrage.show.Show import Show
 from sickrage.helper.exceptions import MultipleShowObjectsException
 
 
-class DailySearcher():
+class DailySearcher(object):
     def __init__(self):
         self.lock = threading.Lock()
         self.amActive = False
diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py
index 663fd6d1687e0d0139ba417398398191eaafa717..2c1584fe973f180a90f3fc2c479315f8fbcda749 100644
--- a/sickbeard/databases/mainDB.py
+++ b/sickbeard/databases/mainDB.py
@@ -48,10 +48,9 @@ class MainSanityCheck(db.DBSanityCheck):
         self.fix_subtitles_codes()
         self.fix_show_nfo_lang()
         self.convert_tvrage_to_tvdb()
-        self.convert_archived_to_compund()
+        self.convert_archived_to_compound()
 
-    # todo: fix spelling to compound
-    def convert_archived_to_compund(self):
+    def convert_archived_to_compound(self):
         logger.log(u'Checking for archived episodes not qualified', logger.DEBUG)
 
         query = "SELECT episode_id, showid, status, location, season, episode " + \
diff --git a/sickbeard/db.py b/sickbeard/db.py
index ee4924fc34a76ac1871eb97eecf495db7f1682cc..d015831da7b3bbc616b92e14cedba29e878086f2 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -297,7 +298,7 @@ class DBConnection(object):
             # Just revert to the old code for now, until we can fix unicode
             return unicode(x, 'utf-8')
         except:
-            return unicode(x, sickbeard.SYS_ENCODING,errors="ignore")
+            return unicode(x, sickbeard.SYS_ENCODING, errors="ignore")
 
     def _dict_factory(self, cursor, row):
         d = {}
@@ -337,9 +338,11 @@ class DBConnection(object):
         self.action("ALTER TABLE [%s] ADD %s %s" % (table, column, type))
         self.action("UPDATE [%s] SET %s = ?" % (table, column), (default,))
 
+
 def sanityCheckDatabase(connection, sanity_check):
     sanity_check(connection).check()
 
+
 class DBSanityCheck(object):
     def __init__(self, connection):
         self.connection = connection
diff --git a/sickbeard/event_queue.py b/sickbeard/event_queue.py
index 8f60d3f4e2d859456933c0f00958c52554e2e4f8..0b4290e581ee95ab74ac95ff0f01782ff26d6a4f 100644
--- a/sickbeard/event_queue.py
+++ b/sickbeard/event_queue.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 import threading
 import traceback
 from Queue import Queue, Empty
@@ -5,7 +6,7 @@ from sickbeard import logger
 from sickrage.helper.exceptions import ex
 
 
-class Event:
+class Event(object):
     def __init__(self, type):
         self._type = type
 
@@ -31,7 +32,7 @@ class Events(threading.Thread):
         Actually runs the thread to process events
         """
         try:
-            while (not self.stop.is_set()):
+            while not self.stop.is_set():
                 try:
                     # get event type
                     type = self.queue.get(True, 1)
diff --git a/sickbeard/failedProcessor.py b/sickbeard/failedProcessor.py
index be0be4b468a0ad98b6ea01694af80017a4124d3e..3b794c9ac2b0ee0d4725b690dba2772d0900cb65 100644
--- a/sickbeard/failedProcessor.py
+++ b/sickbeard/failedProcessor.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Tyler Fenby <tylerfenby@gmail.com>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
diff --git a/sickbeard/failed_history.py b/sickbeard/failed_history.py
index f504558cd7fa0866f696d02e2dd7db94815ff07a..052e7a3fa50153a098fb16fecc5b403bfe114962 100644
--- a/sickbeard/failed_history.py
+++ b/sickbeard/failed_history.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Tyler Fenby <tylerfenby@gmail.com>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -114,7 +115,7 @@ def hasFailed(release, size, provider="%"):
         "SELECT release FROM failed WHERE release=? AND size=? AND provider LIKE ? LIMIT 1",
         [release, size, provider])
 
-    return (len(sql_results) > 0)
+    return len(sql_results) > 0
 
 
 def revertEpisode(epObj):
@@ -220,7 +221,6 @@ def findRelease(epObj):
     release = None
     provider = None
 
-
     # Clear old snatches for this release if any exist
     myDB = db.DBConnection('failed.db')
     myDB.action("DELETE FROM history WHERE showid=" + str(epObj.show.indexerid) + " AND season=" + str(
@@ -242,8 +242,8 @@ def findRelease(epObj):
 
         # Found a previously failed release
         logger.log(u"Failed release found for season (%s): (%s)" % (epObj.season, result["release"]), logger.DEBUG)
-        return (release, provider)
+        return release, provider
 
     # Release was not found
     logger.log(u"No releases found for season (%s) of (%s)" % (epObj.season, epObj.show.indexerid), logger.DEBUG)
-    return (release, provider)
+    return release, provider
diff --git a/sickbeard/generic_queue.py b/sickbeard/generic_queue.py
index 61a3aa3cf3de792abcc439686cdc5b9abb942b22..e999a57895a8ad4622675a6974426927ef35c41d 100644
--- a/sickbeard/generic_queue.py
+++ b/sickbeard/generic_queue.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -23,7 +24,7 @@ import threading
 from sickbeard import logger
 
 
-class QueuePriorities:
+class QueuePriorities(object):
     LOW = 10
     NORMAL = 20
     HIGH = 30
@@ -109,6 +110,7 @@ class GenericQueue(object):
 
         self.amActive = False
 
+
 class QueueItem(threading.Thread):
     def __init__(self, name, action_id=0):
         super(QueueItem, self).__init__()
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index f97ef9172c10291d90a652a5f3670607079a50d6..b31d52a845139c5c14d47c957854de6b386fa243 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -108,44 +108,45 @@ def remove_non_release_groups(name):
     # select release_name from tv_episodes WHERE LENGTH(release_name);
     # [eSc], [SSG], [GWC] are valid release groups for non-anime
     removeWordsList = {
-        r'\[rartv\]$':       'searchre',
-        r'\[rarbg\]$':       'searchre',
-        r'\[eztv\]$':        'searchre',
-        r'\[ettv\]$':        'searchre',
-        r'\[cttv\]$':        'searchre',
-        r'\[vtv\]$':         'searchre',
-        r'\[EtHD\]$':        'searchre',
-        r'\[GloDLS\]$':      'searchre',
-        r'\[silv4\]$':       'searchre',
-        r'\[Seedbox\]$':     'searchre',
-        r'\[PublicHD\]$':    'searchre',
+        r'\[rartv\]$': 'searchre',
+        r'\[rarbg\]$': 'searchre',
+        r'\[eztv\]$': 'searchre',
+        r'\[ettv\]$': 'searchre',
+        r'\[cttv\]$': 'searchre',
+        r'\[vtv\]$': 'searchre',
+        r'\[EtHD\]$': 'searchre',
+        r'\[GloDLS\]$': 'searchre',
+        r'\[silv4\]$': 'searchre',
+        r'\[Seedbox\]$': 'searchre',
+        r'\[PublicHD\]$': 'searchre',
         r'\[AndroidTwoU\]$': 'searchre',
-        r'\[brassetv]\]$':   'searchre',
-        r'\.\[BT\]$':        'searchre',
-        r' \[1044\]$':       'searchre',
-        r'\.RiPSaLoT$':      'searchre',
-        r'\.GiuseppeTnT$':   'searchre',
-        r'\.Renc$':          'searchre',
-        r'-NZBGEEK$':        'searchre',
-        r'-Siklopentan$':    'searchre',
-        r'-Chamele0n$':      'searchre',
-        r'-Obfuscated$':     'searchre',
-        r'-\[SpastikusTV\]$':                 'searchre',
-        r'-RP$':                             'searchre',
-        r'-20-40$':                          'searchre',
-        r'\.\[www\.usabit\.com\]$':          'searchre',
-        r'^\[www\.Cpasbien\.pe\] ':          'searchre',
-        r'^\[www\.Cpasbien\.com\] ':         'searchre',
-        r'^\[ www\.Cpasbien\.pw \] ':        'searchre',
-        r'^\.www\.Cpasbien\.pw':            'searchre',
-        r'^\[www\.newpct1\.com\]':            'searchre',
-        r'^\[ www\.Cpasbien\.com \] ':       'searchre',
-        r'- \{ www\.SceneTime\.com \}$':     'searchre',
-        r'^\{ www\.SceneTime\.com \} - ':    'searchre',
-        r'^\]\.\[www\.tensiontorrent.com\] - ':      'searchre',
-        r'^\]\.\[ www\.tensiontorrent.com \] - ':    'searchre',
-        r'- \[ www\.torrentday\.com \]$':            'searchre',
-        r'^\[ www\.TorrentDay\.com \] - ':           'searchre',
+        r'\[brassetv]\]$': 'searchre',
+        r'\.\[BT\]$': 'searchre',
+        r' \[1044\]$': 'searchre',
+        r'\.RiPSaLoT$': 'searchre',
+        r'\.GiuseppeTnT$': 'searchre',
+        r'\.Renc$': 'searchre',
+        r'\.gz$': 'searchre',
+        r'-NZBGEEK$': 'searchre',
+        r'-Siklopentan$': 'searchre',
+        r'-Chamele0n$': 'searchre',
+        r'-Obfuscated$': 'searchre',
+        r'-\[SpastikusTV\]$': 'searchre',
+        r'-RP$': 'searchre',
+        r'-20-40$': 'searchre',
+        r'\.\[www\.usabit\.com\]$': 'searchre',
+        r'^\[www\.Cpasbien\.pe\] ': 'searchre',
+        r'^\[www\.Cpasbien\.com\] ': 'searchre',
+        r'^\[ www\.Cpasbien\.pw \] ': 'searchre',
+        r'^\.www\.Cpasbien\.pw': 'searchre',
+        r'^\[www\.newpct1\.com\]': 'searchre',
+        r'^\[ www\.Cpasbien\.com \] ': 'searchre',
+        r'- \{ www\.SceneTime\.com \}$': 'searchre',
+        r'^\{ www\.SceneTime\.com \} - ': 'searchre',
+        r'^\]\.\[www\.tensiontorrent.com\] - ': 'searchre',
+        r'^\]\.\[ www\.tensiontorrent.com \] - ': 'searchre',
+        r'- \[ www\.torrentday\.com \]$': 'searchre',
+        r'^\[ www\.TorrentDay\.com \] - ': 'searchre',
         r'\[NO-RAR\] - \[ www\.torrentday\.com \]$': 'searchre',
     }
 
@@ -1741,7 +1742,7 @@ def getTVDBFromID(indexer_id, indexer):
 
         except SyntaxError:
             pass
-        
+
         return tvdb_id
     elif indexer == 'ZAP2IT':
         url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?zap2it=%s" % indexer_id
@@ -1761,7 +1762,7 @@ def getTVDBFromID(indexer_id, indexer):
         url = "http://api.tvmaze.com/shows/%s" % indexer_id
         data = getURL(url, session=session, json=True)
         if data is None:
-            return tvdb_id        
+            return tvdb_id
         tvdb_id = data['externals']['thetvdb']
         return tvdb_id
     else:
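
The only functional addition to the remove_non_release_groups table above is the new r'\.gz$' entry; the rest is alignment. These 'searchre' entries are regexes stripped from the release name, so conceptually the new row behaves like the one-liner below (the real helper's control flow differs):

    # Conceptual equivalent of the new r'\.gz$' entry: drop a trailing .gz before parsing.
    import re

    name = 'Show.Name.S01E01.720p.HDTV.x264-GRP.nzb.gz'
    print(re.sub(r'\.gz$', '', name))    # Show.Name.S01E01.720p.HDTV.x264-GRP.nzb
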
diff --git a/sickbeard/history.py b/sickbeard/history.py
index e0a4b1f44f7fc841dc967057f1a8503181a49c8c..ad725382fd677ec09c5bad9bacefd565ad01ac3e 100644
--- a/sickbeard/history.py
+++ b/sickbeard/history.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
diff --git a/sickbeard/image_cache.py b/sickbeard/image_cache.py
index 6569c6e81ac0c72864b8ce63fb41800f2591268e..ebf03d950a63197f15b04f863b3c57335a0c09de 100644
--- a/sickbeard/image_cache.py
+++ b/sickbeard/image_cache.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -31,7 +32,8 @@ from hachoir_metadata import extractMetadata
 from hachoir_core.log import log
 log.use_print = False
 
-class ImageCache:
+
+class ImageCache(object):
     def __init__(self):
         pass
 
@@ -274,8 +276,7 @@ class ImageCache:
                        self.BANNER_THUMB: not self.has_banner_thumbnail(show_obj.indexerid),
                        self.FANART: not self.has_fanart(show_obj.indexerid)}
 
-        if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not \
-        need_images[self.BANNER_THUMB] and not need_images[self.FANART]:
+        if not any(need_images.values()):
             logger.log(u"No new cache images needed, not retrieving new ones", logger.DEBUG)
             return
 
@@ -307,7 +308,7 @@ class ImageCache:
                 logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)
 
         # download from indexer for missing ones
-        for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB,self.FANART]:
+        for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB, self.FANART]:
             logger.log(u"Seeing if we still need an image of type " + str(cur_image_type) + ": " + str(
                 need_images[cur_image_type]), logger.DEBUG)
             if cur_image_type in need_images and need_images[cur_image_type]:
diff --git a/sickbeard/imdbPopular.py b/sickbeard/imdbPopular.py
index 475f4c315c0ace16969989dbc7e7769b537484a3..9f462cfe05afb6a8e6f85e786a84f09a58646b1f 100644
--- a/sickbeard/imdbPopular.py
+++ b/sickbeard/imdbPopular.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 import re
 import os
 import requests
@@ -8,6 +9,7 @@ import sickbeard
 from sickbeard import helpers
 from sickrage.helper.encoding import ek
 
+
 class imdbPopular(object):
     def __init__(self):
         """Gets a list of most popular TV series from imdb"""
@@ -53,7 +55,7 @@ class imdbPopular(object):
             if td:
                 show['name'] = td.find("a").contents[0]
                 show['imdb_url'] = "http://www.imdb.com" + td.find("a")["href"]
-                show['imdb_tt'] =  show['imdb_url'][-10:][0:9]
+                show['imdb_tt'] = show['imdb_url'][-10:][0:9]
                 show['year'] = td.find("span", {"class": "year_type"}).contents[0].split(" ")[0][1:]
 
                 rating_all = td.find("div", {"class": "user_rating"})
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index a591ca6addd6abe481cfece3aca37e99185bd179..56fb098e151c818ec40c9f5f180cb93c1e41bde2 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -72,11 +73,11 @@ class CensoredFormatter(logging.Formatter, object):
         msg = super(CensoredFormatter, self).format(record)
 
         if not isinstance(msg, unicode):
-            msg = msg.decode(self.encoding, 'replace') # Convert to unicode
+            msg = msg.decode(self.encoding, 'replace')  # Convert to unicode
 
         for _, v in censoredItems.iteritems():
             if not isinstance(v, unicode):
-                v = v.decode(self.encoding, 'replace') # Convert to unicode
+                v = v.decode(self.encoding, 'replace')  # Convert to unicode
             msg = msg.replace(v, len(v) * u'*')
 
         # Needed because Newznab apikey isn't stored as key=value in a section.
@@ -84,7 +85,7 @@ class CensoredFormatter(logging.Formatter, object):
         return msg
 
 
-class Logger(object):
+class Logger(object):  # pylint: disable=too-many-instance-attributes
     def __init__(self):
         self.logger = logging.getLogger('sickrage')
 
@@ -92,21 +93,24 @@ class Logger(object):
             logging.getLogger('sickrage'),
             logging.getLogger('tornado.general'),
             logging.getLogger('tornado.application'),
+            # logging.getLogger('subliminal'),
             # logging.getLogger('tornado.access'),
         ]
 
         self.consoleLogging = False
         self.fileLogging = False
         self.debugLogging = False
+        self.databaseLogging = False
         self.logFile = None
 
         self.submitter_running = False
 
-    def initLogging(self, consoleLogging=False, fileLogging=False, debugLogging=False):
+    def initLogging(self, consoleLogging=False, fileLogging=False, debugLogging=False, databaseLogging=False):
         self.logFile = self.logFile or ek(os.path.join, sickbeard.LOG_DIR, 'sickrage.log')
         self.debugLogging = debugLogging
         self.consoleLogging = consoleLogging
         self.fileLogging = fileLogging
+        self.databaseLogging = databaseLogging
 
         # add a new logging level DB
         logging.addLevelName(DB, 'DB')
@@ -120,24 +124,26 @@ class Logger(object):
                 logger.root = self.logger
                 logger.parent = self.logger
 
+        loglevel = DB if self.databaseLogging else DEBUG if self.debugLogging else INFO
+
         # set minimum logging level allowed for loggers
         for logger in self.loggers:
-            logger.setLevel(DB)
+            logger.setLevel(loglevel)
 
         # console log handler
         if self.consoleLogging:
             console = logging.StreamHandler()
             console.setFormatter(CensoredFormatter(u'%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S', encoding='utf-8'))
-            console.setLevel(INFO if not self.debugLogging else DEBUG)
+            console.setLevel(loglevel)
 
             for logger in self.loggers:
                 logger.addHandler(console)
 
         # rotating log file handler
         if self.fileLogging:
-            rfh = logging.handlers.RotatingFileHandler(self.logFile, maxBytes=sickbeard.LOG_SIZE, backupCount=sickbeard.LOG_NR, encoding='utf-8')
+            rfh = logging.handlers.RotatingFileHandler(self.logFile, maxBytes=int(sickbeard.LOG_SIZE * 1048576), backupCount=sickbeard.LOG_NR, encoding='utf-8')
             rfh.setFormatter(CensoredFormatter(u'%(asctime)s %(levelname)-8s %(message)s', dateTimeFormat, encoding='utf-8'))
-            rfh.setLevel(INFO if not self.debugLogging else DEBUG)
+            rfh.setLevel(loglevel)
 
             for logger in self.loggers:
                 logger.addHandler(rfh)
@@ -148,6 +154,9 @@ class Logger(object):
 
     def log(self, msg, level=INFO, *args, **kwargs):
         meThread = threading.currentThread().getName()
+        if sickbeard.CUR_COMMIT_HASH and len(sickbeard.CUR_COMMIT_HASH) > 6 and level in [ERROR, WARNING]:
+            msg += ' [%s]' % sickbeard.CUR_COMMIT_HASH[:7]
+
         message = meThread + u" :: " + msg
 
         # Change the SSL error to a warning with a link to information about how to fix it.
@@ -177,7 +186,7 @@ class Logger(object):
         else:
             sys.exit(1)
 
-    def submit_errors(self): # Too many local variables, too many branches, pylint: disable=too-many-branches,too-many-locals
+    def submit_errors(self):  # Too many local variables, too many branches, pylint: disable=too-many-branches,too-many-locals
 
         submitter_result = u''
         issue_id = None
@@ -197,7 +206,7 @@ class Logger(object):
 
         if commits_behind is None or commits_behind > 0:
             submitter_result = u'Please update SickRage, unable to submit issue ticket to GitHub with an outdated version!'
-            return  submitter_result, issue_id
+            return submitter_result, issue_id
 
         if self.submitter_running:
             submitter_result = u'Issue submitter is running, please wait for it to complete'
@@ -231,7 +240,7 @@ class Logger(object):
                 try:
                     title_Error = ss(str(curError.title))
                     if not len(title_Error) or title_Error == 'None':
-                        title_Error = re.match(r"^[A-Z0-9\-\[\] :]+::\s*(.*)$", ss(curError.message)).group(1)
+                        title_Error = re.match(r"^[A-Z0-9\-\[\] :]+::\s*(.*)(?: \[[\w]{7}\])$", ss(curError.message)).group(1)
 
                     if len(title_Error) > 1000:
                         title_Error = title_Error[0:1000]
@@ -239,13 +248,13 @@ class Logger(object):
                     self.log("Unable to get error title : " + ex(e), ERROR)
 
                 gist = None
-                regex = ur"^(%s)\s+([A-Z]+)\s+([0-9A-Z\-]+)\s*(.*)$" % curError.time
+                regex = ur"^(%s)\s+([A-Z]+)\s+([0-9A-Z\-]+)\s*(.*)(?: \[[\w]{7}\])$" % curError.time
                 for i, x in enumerate(log_data):
                     match = re.match(regex, x)
                     if match:
                         level = match.group(2)
                         if reverseNames[level] == ERROR:
-                            paste_data = u"".join(log_data[i:i+50])
+                            paste_data = u"".join(log_data[i:i + 50])
                             if paste_data:
                                 gist = gh.get_user().create_gist(True, {"sickrage.log": InputFileContent(paste_data)})
                             break
@@ -326,6 +335,7 @@ class Logger(object):
 
         return submitter_result, issue_id
 
+
 # pylint: disable=too-few-public-methods
 class Wrapper(object):
     instance = Logger()
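
Two behavioural details above are easy to miss: the effective log level is now one expression (DB wins over DEBUG, which wins over INFO), and LOG_SIZE is read as megabytes, converted to bytes only when the rotating handler is built. A small sketch of that arithmetic with stand-in values (the DB constant's numeric value here is an assumption; sickbeard.logger registers its own via logging.addLevelName):

    # Stand-in values; mirrors the level/size selection in sickbeard.logger.
    import logging
    import logging.handlers

    DB = 5                                   # assumed custom level below DEBUG
    DEBUG, INFO = logging.DEBUG, logging.INFO

    database_logging, debug_logging = False, True
    loglevel = DB if database_logging else DEBUG if debug_logging else INFO

    log_size_mb = 1                          # config now stores megabytes; > 100 is clamped back to 1
    if log_size_mb > 100:
        log_size_mb = 1

    rfh = logging.handlers.RotatingFileHandler(
        'sickrage.log', maxBytes=int(log_size_mb * 1048576), backupCount=5, encoding='utf-8')
    rfh.setLevel(loglevel)
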
diff --git a/sickbeard/metadata/kodi_12plus.py b/sickbeard/metadata/kodi_12plus.py
index 673c1faa6ffb15a226888ca3f90fe789a956a594..8693e5e780eac68c6d7c973b8e263a8ca52ecccd 100644
--- a/sickbeard/metadata/kodi_12plus.py
+++ b/sickbeard/metadata/kodi_12plus.py
@@ -341,7 +341,6 @@ class KODI_12PlusMetadata(generic.GenericMetadata):
                 rating = etree.SubElement(episode, "rating")
                 rating.text = myEp['rating']
 
-
             if getattr(myEp, 'writer', None) and isinstance(myEp['writer'], basestring):
                 for writer in self._split_info(myEp['writer']):
                     cur_writer = etree.SubElement(episode, "credits")
diff --git a/sickbeard/name_cache.py b/sickbeard/name_cache.py
index 4d8d2d9dcb2e8bd2854c8c91e6de66e8aa875136..515743cc5ab0af2331d8489e2a461495b2584dab 100644
--- a/sickbeard/name_cache.py
+++ b/sickbeard/name_cache.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -24,6 +25,7 @@ from sickbeard import logger
 nameCache = {}
 nameCacheLock = threading.Lock()
 
+
 def addNameToCache(name, indexer_id=0):
     """
     Adds the show & tvdb id to the scene_names table in cache.db.
@@ -51,6 +53,7 @@ def retrieveNameFromCache(name):
     if name in nameCache:
         return int(nameCache[name])
 
+
 def clearCache(indexerid=0):
     """
     Deletes all "unknown" entries from the cache (names with indexer_id of 0).
@@ -93,4 +96,4 @@ def buildNameCache(show=None):
                     continue
 
                 nameCache[name] = int(show.indexerid)
-        logger.log(u"Internal name cache for " + show.name + " set to: [ " + u', '.join([key for key, value in nameCache.iteritems() if value == show.indexerid]) +" ]", logger.DEBUG)
+        logger.log(u"Internal name cache for " + show.name + " set to: [ " + u', '.join([key for key, value in nameCache.iteritems() if value == show.indexerid]) + " ]", logger.DEBUG)
diff --git a/sickbeard/name_parser/__init__.py b/sickbeard/name_parser/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..9bad5790a5799b96f2e164d825c0b1f8ec0c2dfb 100644
--- a/sickbeard/name_parser/__init__.py
+++ b/sickbeard/name_parser/__init__.py
@@ -0,0 +1 @@
+# coding=utf-8
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index 83f8becad1670c47401689c3d80f0225ac8daeb7..584650beaeedd1b9b505576ef46f3ae9cacf1e25 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -37,7 +37,7 @@ class NameParser(object):
     NORMAL_REGEX = 1
     ANIME_REGEX = 2
 
-    def __init__(self, file_name=True, showObj=None, tryIndexers=False, naming_pattern=False, parse_method = None):
+    def __init__(self, file_name=True, showObj=None, tryIndexers=False, naming_pattern=False, parse_method=None):
 
         self.file_name = file_name
         self.showObj = showObj
diff --git a/sickbeard/name_parser/regexes.py b/sickbeard/name_parser/regexes.py
index b12fffb4c420114ca4f56b43d8ee8ffe87438c91..d164ee8b82758cb8f1b38b70c5c0d7501295ac34 100644
--- a/sickbeard/name_parser/regexes.py
+++ b/sickbeard/name_parser/regexes.py
@@ -34,7 +34,6 @@ normal_regexes = [
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
      '''),
-
     ('fov_repeat',
      # Show.Name.1x02.1x03.Source.Quality.Etc-Group
      # Show Name - 1x02 - 1x03 - 1x04 - Ep Name
@@ -48,7 +47,6 @@ normal_regexes = [
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
      '''),
-
     ('standard',
      # Show.Name.S01E02.Source.Quality.Etc-Group
      # Show Name - S01E02 - My Ep Name
@@ -58,15 +56,14 @@ normal_regexes = [
      # Show.Name.S01.E02.E03
      r'''
      ^((?P<series_name>.+?)[. _-]+)?             # Show_Name and separator
-     (\()?s(?P<season_num>\d+)[. _-]*            # S01 and optional separator
-     e(?P<ep_num>\d+)(\))?                       # E02 and separator
+     \(?s(?P<season_num>\d+)[. _-]*              # S01 and optional separator
+     e(?P<ep_num>\d+)\)?                         # E02 and separator
      (([. _-]*e|-)                               # linking e/- char
      (?P<extra_ep_num>(?!(1080|720|480)[pi])\d+)(\))?)*   # additional E03/etc
-     [. _-]*((?P<extra_info>.+?)                 # Source_Quality_Etc-
+     ([. _-]+((?P<extra_info>.+?)                 # Source_Quality_Etc-
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
-     -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
+     -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?)?$              # Group
      '''),
-
     ('newpct',
      # American Horror Story - Temporada 4 HDTV x264[Cap.408_409]SPANISH AUDIO -NEWPCT
      # American Horror Story - Temporada 4 [HDTV][Cap.408][Espanol Castellano]
@@ -78,7 +75,6 @@ normal_regexes = [
      (?P<ep_num>\d{2})                           # Episode Number: 08
      ((_\d{1,2}(?P<extra_ep_num>\d{2}))|.*])     # Episode number2: 09
      '''),
-
     ('fov',
      # Show_Name.1x02.Source_Quality_Etc-Group
      # Show Name - 1x02 - My Ep Name
@@ -90,13 +86,12 @@ normal_regexes = [
      (?P<ep_num>\d+)                             # 02 and separator
      (([. _-]*x|-)                               # linking x/- char
      (?P<extra_ep_num>
-     (?!(1080|720|480)[pi])(?!(?<=x)264)             # ignore obviously wrong multi-eps
+     (?!(1080|720|480)[pi])(?!(?<=x)264)         # ignore obviously wrong multi-eps
      \d+))*                                      # additional x03/etc
      [\]. _-]*((?P<extra_info>.+?)               # Source_Quality_Etc-
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
      '''),
-
     ('scene_date_format',
      # Show.Name.2010.11.23.Source.Quality.Etc-Group
      # Show Name - 2010-11-23 - Ep Name
@@ -107,7 +102,6 @@ normal_regexes = [
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
      '''),
-
     ('scene_sports_format',
      # Show.Name.100.Event.2010.11.23.Source.Quality.Etc-Group
      # Show.Name.2010.11.23.Source.Quality.Etc-Group
@@ -119,7 +113,6 @@ normal_regexes = [
      ((?P<extra_info>.+?)((?<![. _-])
      (?<!WEB)-(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$
      '''),
-
     ('stupid',
      # tpz-abc102
      r'''
@@ -128,7 +121,6 @@ normal_regexes = [
      (?P<season_num>\d{1,2})                     # 1
      (?P<ep_num>\d{2})$                          # 02
      '''),
-
     ('verbose',
      # Show Name Season 1 Episode 2 Ep Name
      r'''
@@ -139,7 +131,6 @@ normal_regexes = [
      (?P<ep_num>\d+)[. _-]+                      # 02 and separator
      (?P<extra_info>.+)$                         # Source_Quality_Etc-
      '''),
-
     ('season_only',
      # Show.Name.S01.Source.Quality.Etc-Group
      r'''
@@ -149,9 +140,7 @@ normal_regexes = [
      [. _-]*((?P<extra_info>.+?)                 # Source_Quality_Etc-
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
-     '''
-     ),
-
+     '''),
     ('no_season_multi_ep',
      # Show.Name.E02-03
      # Show.Name.E02.2010
@@ -164,9 +153,7 @@ normal_regexes = [
      ([. _-]*(?P<extra_info>.+?)                 # Source_Quality_Etc-
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
-     '''
-     ),
-
+     '''),
     ('no_season_general',
      # Show.Name.E23.Test
      # Show.Name.Part.3.Source.Quality.Etc-Group
@@ -182,9 +169,16 @@ normal_regexes = [
      ([. _-]*(?P<extra_info>.+?)                 # Source_Quality_Etc-
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
-     '''
-     ),
-
+     '''),
+    ('bare',
+     # Show.Name.102.Source.Quality.Etc-Group
+     r'''
+     ^(?P<series_name>.+?)[. _-]+                # Show_Name and separator
+     (?P<season_num>\d{1,2})                     # 1
+     (?P<ep_num>\d{2})                           # 02 and separator
+     ([. _-]+(?P<extra_info>(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc-
+     (-(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$                # Group
+     '''),
     ('no_season',
      # Show Name - 01 - Ep Name
      # 01 - Ep Name
@@ -193,21 +187,10 @@ normal_regexes = [
      ^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))?    # Show_Name and separator
      (?P<ep_num>\d{1,3})                             # 02
      (?:-(?P<extra_ep_num>\d{1,3}))*                 # -03-04-05 etc
-     \s?of?\s?\d{1,3}?                               # of joiner (with or without spaces) and series total ep
+     (\s*(?:of)?\s*\d{1,3})?                         # of joiner (with or without spaces) and series total ep
      [. _-]+((?P<extra_info>.+?)                     # Source_Quality_Etc-
      ((?<![. _-])(?<!WEB)                            # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$  # Group
-     '''
-     ),
-
-    ('bare',
-     # Show.Name.102.Source.Quality.Etc-Group
-     r'''
-     ^(?P<series_name>.+?)[. _-]+                # Show_Name and separator
-     (?P<season_num>\d{1,2})                     # 1
-     (?P<ep_num>\d{2})                           # 02 and separator
-     ([. _-]+(?P<extra_info>(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc-
-     (-(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$                # Group
      '''),
 ]
 
@@ -236,7 +219,23 @@ anime_regexes = [
      (?:[ ._]?\[(?P<crc>\w+)\])?
      .*?
      '''),
-
+    ('anime_Kaerizaki-Fansub',
+     # [Kaerizaki-Fansub]_One_Piece_679_[VOSTFR][HD_1280x720].mp4
+     # [Kaerizaki-Fansub]_One_Piece_681_[VOSTFR][HD_1280x720]_V2.mp4
+     # [Kaerizaki-Fansub] High School DxD New 04 VOSTFR HD (1280x720) V2.mp4
+     # [Kaerizaki-Fansub] One Piece 603 VOSTFR PS VITA (960x544) V2.mp4
+     # [Kaerizaki-Fansub] One Piece 638 (HD 1280x720).mp4
+     # [Kaerizaki-Fansub] One Piece 721 720p.mp4
+     r'''
+     ^\[(?P<release_group>Kaerizaki-Fansub)\][ ._-]*                         # Release Group and separator
+     (?P<series_name>.+?)[ ._-]+                                             # Show_Name and separator
+     (?P<ep_ab_num>(?!\[VOSTFR\]|VOSTFR)\d{1,3})                             # Episode number
+     (-(?P<extra_ab_ep_num>(?!\[VOSTFR\]|VOSTFR)\d{1,3}))?                   # Extra episode number
+     [ ._](\[VOSTFR\]|VOSTFR)?
+     (\[|[ ._])?(?P<extra_info>(\(?(([SH]D|PS\sVITA)[ ._])?\(?\d{3,4}x\d{3,4}\)?|\d{3,4}[pP])?)(\]|\))?                                            # Extra info
+     ([ ._][vV](?P<version>[0-9]))?                                          # Version
+     .*?                                                                     # Separator and EOL
+     '''),
     ('anime_ISLAND',
      # [ISLAND]One_Piece_679_[VOSTFR]_[V1]_[8bit]_[720p]_[EB7838FC].mp4
      # [ISLAND]One_Piece_679_[VOSTFR]_[8bit]_[720p]_[EB7838FC].mp4
@@ -251,23 +250,6 @@ anime_regexes = [
      (\[(?P<crc>\w{8})\])?                                                    # CRC
      .*?
      '''),
-
-    ('anime_Kaerizaki-Fansub',
-     # [Kaerizaki-Fansub]_One_Piece_679_[VOSTFR][HD_1280x720].mp4
-     # [Kaerizaki-Fansub]_One_Piece_681_[VOSTFR][HD_1280x720]_V2.mp4
-     # [Kaerizaki-Fansub] High School DxD New 04 VOSTFR HD (1280x720) V2.mp4
-     # [Kaerizaki-Fansub] One Piece 603 VOSTFR PS VITA (960x544) V2.mp4
-     r'''
-     ^\[(?P<release_group>Kaerizaki-Fansub?)\][ ._-]*                         # Release Group and separator
-     (?P<series_name>.+?)[ ._-]+                                              # Show_Name and separator
-     (?P<ep_ab_num>((?!\[VOSTFR|VOSTFR))\d{1,3})                              # Episode number
-     (-(?P<extra_ab_ep_num>((?!\[VOSTFR|VOSTFR))\d{1,3}))?                    # Extra episode number
-     ([ ._](\[VOSTFR\]|VOSTFR))?
-     (\[|[ ._])?(?P<extra_info>([SH]D_\d{3,4}x\d{3,4}|((SD|HD|PS\sVITA)[ ._]\(\d{3,4}x\d{3,4}\))))(\])?         # Extra info
-     ([ ._][vV](?P<version>[0-9]))?                                           # Version
-     .*?                                                                      # Separator and EOL
-     '''),
-
     ('anime_standard',
      # [Group Name] Show Name.13-14
      # [Group Name] Show Name - 13-14
@@ -285,7 +267,6 @@ anime_regexes = [
      (\[(?P<crc>\w{8})\])?                                        # CRC
      .*?                                                          # Separator and EOL
      '''),
-
     ('anime_standard_round',
      # [Stratos-Subs]_Infinite_Stratos_-_12_(1280x720_H.264_AAC)_[379759DB]
      # [ShinBunBu-Subs] Bleach - 02-03 (CX 1280x720 x264 AAC)
@@ -299,7 +280,6 @@ anime_regexes = [
      (\[(?P<crc>\w{8})\])?                                                    # CRC
      .*?                                                                      # Separator and EOL
      '''),
-
     ('anime_slash',
      # [SGKK] Bleach 312v1 [720p/MKV]
      r'''
@@ -312,7 +292,6 @@ anime_regexes = [
      (\[(?P<crc>\w{8})\])?                 # CRC
      .*?                                   # Separator and EOL
      '''),
-
     ('anime_standard_codec',
      # [Ayako]_Infinite_Stratos_-_IS_-_07_[H264][720p][EB7838FC]
      # [Ayako] Infinite Stratos - IS - 07v2 [H264][720p][44419534]
@@ -329,17 +308,15 @@ anime_regexes = [
      (\[(?P<crc>\w{8})\])?
      .*?                                                          # Separator and EOL
      '''),
-
     ('anime_codec_crc',
      r'''
-    ^(?:\[(?P<release_group>.*?)\][ ._-]*)?
-    (?:(?P<series_name>.*?)[ ._-]*)?
-    (?:(?P<ep_ab_num>(((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))[ ._-]*).+?
-    (?:\[(?P<codec>.*?)\][ ._-]*)
-    (?:\[(?P<crc>\w{8})\])?
-    .*?
-    '''),
-
+     ^(?:\[(?P<release_group>.*?)\][ ._-]*)?
+     (?:(?P<series_name>.*?)[ ._-]*)?
+     (?:(?P<ep_ab_num>(((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))[ ._-]*).+?
+     (?:\[(?P<codec>.*?)\][ ._-]*)
+     (?:\[(?P<crc>\w{8})\])?
+     .*?
+     '''),
     ('anime_SxxExx',
      # Show.Name.S01E02.Source.Quality.Etc-Group
      # Show Name - S01E02 - My Ep Name
@@ -357,7 +334,6 @@ anime_regexes = [
      ((?<![. _-])(?<!WEB)                        # Make sure this is really the release group
      -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$              # Group
      '''),
-
     ('anime_and_normal',
      # Bleach - s16e03-04 - 313-314
      # Bleach.s16e03-04.313-314
@@ -373,10 +349,7 @@ anime_regexes = [
      (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?             # "-" as separator and additional absolute number, all optional
      (v(?P<version>[0-9]))?                       # the version e.g. "v2"
      .*?
-     '''
-
-     ),
-
+     '''),
     ('anime_and_normal_x',
      # Bleach - s16e03-04 - 313-314
      # Bleach.s16e03-04.313-314
@@ -392,10 +365,7 @@ anime_regexes = [
      (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?             # "-" as separator and additional absolute number, all optional
      (v(?P<version>[0-9]))?                       # the version e.g. "v2"
      .*?
-     '''
-
-     ),
-
+     '''),
     ('anime_and_normal_reverse',
      # Bleach - 313-314 - s16e03-04
      r'''
@@ -409,9 +379,7 @@ anime_regexes = [
      (([. _-]*e|-)                                # linking e/- char
      (?P<extra_ep_num>\d+))*                      # additional E03/etc
      .*?
-     '''
-     ),
-
+     '''),
     ('anime_and_normal_front',
      # 165.Naruto Shippuuden.s08e014
      r'''
@@ -424,39 +392,34 @@ anime_regexes = [
      (([. _-]*e|-)                               # linking e/- char
      (?P<extra_ep_num>\d+))*                      # additional E03/etc
      .*?
-     '''
-     ),
-
+     '''),
     ('anime_ep_name',
      r'''
-    ^(?:\[(?P<release_group>.+?)\][ ._-]*)
-    (?P<series_name>.+?)[ ._-]+
-    (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3})
-    (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?[ ._-]*?
-    (?:v(?P<version>[0-9])[ ._-]+?)?
-    (?:.+?[ ._-]+?)?
-    \[(?P<extra_info>\w+)\][ ._-]?
-    (?:\[(?P<crc>\w{8})\])?
-    .*?
-     '''
-     ),
-
+     ^(?:\[(?P<release_group>.+?)\][ ._-]*)
+     (?P<series_name>.+?)[ ._-]+
+     (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3})
+     (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?[ ._-]*?
+     (?:v(?P<version>[0-9])[ ._-]+?)?
+     (?:.+?[ ._-]+?)?
+     \[(?P<extra_info>\w+)\][ ._-]?
+     (?:\[(?P<crc>\w{8})\])?
+     .*?
+     '''),
     ('anime_WarB3asT',
      # 003. Show Name - Ep Name.ext
      # 003-004. Show Name - Ep Name.ext
      r'''
      ^(?P<ep_ab_num>\d{3,4})(-(?P<extra_ab_ep_num>\d{3,4}))?\.\s+(?P<series_name>.+?)\s-\s.*
      '''),
-
     ('anime_bare',
      # One Piece - 102
      # [ACX]_Wolf's_Spirit_001.mkv
      r'''
      ^(\[(?P<release_group>.+?)\][ ._-]*)?
      (?P<series_name>.+?)[ ._-]+                         # Show_Name and separator
-     (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3})                                      # E01
-     (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?                            # E02
-     (v(?P<version>[0-9]))?                                     # v2
-     .*?                                                         # Separator and EOL
+     (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3})            # E01
+     (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?  # E02
+     (v(?P<version>[0-9]))?                                                  # v2
+     .*?                                                                     # Separator and EOL
      ''')
 ]
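
A minimal, standalone sketch of how the relocated 'bare' pattern above matches a release name once compiled. The sample filename and the re.VERBOSE | re.IGNORECASE flags are assumptions for illustration; the name parser module owns the real compilation.

import re

bare = re.compile(r'''
    ^(?P<series_name>.+?)[. _-]+                    # Show_Name and separator
    (?P<season_num>\d{1,2})                         # 1
    (?P<ep_num>\d{2})                               # 02
    ([. _-]+(?P<extra_info>(?!\d{3}[. _-]+)[^-]+)   # Source_Quality_Etc
    (-(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$ # Group
    ''', re.VERBOSE | re.IGNORECASE)

match = bare.match('Show.Name.102.HDTV.x264-GROUP')  # illustrative sample name
if match:
    print(match.group('season_num'), match.group('ep_num'), match.group('release_group'))
    # -> 1 02 GROUP
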
diff --git a/sickbeard/naming.py b/sickbeard/naming.py
index 9b8db1d06a1492501059e061a365060d259ac407..1cf1b1e2ca7321ca858b2a909d0c8d7787c91ab4 100644
--- a/sickbeard/naming.py
+++ b/sickbeard/naming.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
diff --git a/sickbeard/network_timezones.py b/sickbeard/network_timezones.py
index db201a562a0ff0d776fc95cfd31f9a7e26eb5f27..5485004df5b45b6a3ba3ba2ede6586bbda473b78 100644
--- a/sickbeard/network_timezones.py
+++ b/sickbeard/network_timezones.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -28,13 +29,12 @@ from sickbeard import logger
 from sickrage.helper.common import try_int
 
 # regex to parse time (12/24 hour format)
-time_regex = re.compile(r'(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b', flags=re.IGNORECASE)
-am_regex = re.compile(r'(A[. ]? ?M)', flags=re.IGNORECASE)
-pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.IGNORECASE)
+time_regex = re.compile(r'(?P<hour>\d{1,2})(?:[:.](?P<minute>\d{2})?)? ?(?P<meridiem>[PA]\.? ?M?)?\b', re.I)
 
 network_dict = None
 sb_timezone = tz.tzwinlocal() if tz.tzwinlocal else tz.tzlocal()
 
+
 # update the network timezone table
 def update_network_dict():
     """Update timezone information from SR repositories"""
@@ -62,7 +62,7 @@ def update_network_dict():
 
     queries = []
     for network, timezone in d.iteritems():
-        existing = network_list.has_key(network)
+        existing = network in network_list
         if not existing:
             queries.append(['INSERT OR IGNORE INTO network_timezones VALUES (?,?);', [network, timezone]])
         elif network_list[network] is not timezone:
@@ -105,18 +105,17 @@ def get_network_timezone(network, _network_dict):
     Get a timezone of a network from a given network dict
 
     :param network: network to look up (needle)
-    :param network_dict: dict to look up in (haystack)
+    :param _network_dict: dict to look up in (haystack)
     :return:
     """
-    if network is None:
-        return sb_timezone
 
-    try:
-        n_t = tz.gettz(_network_dict[network])
-    except Exception:
-        return sb_timezone
+    # Get the name of the network's timezone from _network_dict
+    network_tz_name = _network_dict[network] if network in _network_dict else None
+
+    if network_tz_name is None:
+        logger.log(u'Network was not found in the network time zones: %s' % network, logger.ERROR)
 
-    return n_t if n_t is not None else sb_timezone
+    return (tz.gettz(network_tz_name) or sb_timezone) if network_tz_name else sb_timezone
 
 
 # parse date and time string into local time
@@ -133,47 +132,31 @@ def parse_date_time(d, t, network):
     if not network_dict:
         load_network_dict()
 
-    mo = time_regex.search(t)
-    if mo is not None and len(mo.groups()) >= 5:
-        if mo.group(5) is not None:
-            try:
-                hr = try_int(mo.group(1))
-                m = try_int(mo.group(4))
-                ap = mo.group(5)
-                # convert am/pm to 24 hour clock
-                if ap is not None:
-                    if pm_regex.search(ap) is not None and hr != 12:
-                        hr += 12
-                    elif am_regex.search(ap) is not None and hr == 12:
-                        hr -= 12
-            except Exception:
-                hr = 0
-                m = 0
-        else:
-            try:
-                hr = try_int(mo.group(1))
-                m = try_int(mo.group(6))
-            except Exception:
-                hr = 0
-                m = 0
-    else:
-        hr = 0
-        m = 0
-    if hr < 0 or hr > 23 or m < 0 or m > 59:
-        hr = 0
-        m = 0
-
-    te = datetime.datetime.fromordinal(try_int(d) or 1)
-    try:
-        foreign_timezone = get_network_timezone(network, network_dict)
-        return datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=foreign_timezone)
-    except Exception:
-        return datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=sb_timezone)
+    parsed_time = time_regex.search(t)
+    network_tz = get_network_timezone(network, network_dict)
+
+    hr = 0
+    m = 0
+
+    if parsed_time:
+        hr = try_int(parsed_time.group('hour'))
+        m = try_int(parsed_time.group('minute'))
+
+        ap = parsed_time.group('meridiem')
+        ap = ap[0].lower() if ap else ''
+
+        if ap == 'a' and hr == 12:
+            hr -= 12
+        elif ap == 'p' and hr != 12:
+            hr += 12
+
+        hr = hr if 0 <= hr <= 23 else 0
+        m = m if 0 <= m <= 59 else 0
+
+    result = datetime.datetime.fromordinal(max(try_int(d), 1))
+
+    return result.replace(hour=hr, minute=m, tzinfo=network_tz)
 
 
-def test_timeformat(t):
-    mo = time_regex.search(t)
-    if mo is None or len(mo.groups()) < 2:
-        return False
-    else:
-        return True
+def test_timeformat(time_string):
+    return time_regex.search(time_string) is not None
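
A minimal sketch of how the consolidated time_regex above, with its named hour/minute/meridiem groups, maps an airtime string onto a 24-hour clock, mirroring the new parse_date_time() logic. The helper name and the sample inputs are illustrative assumptions.

import re

time_regex = re.compile(r'(?P<hour>\d{1,2})(?:[:.](?P<minute>\d{2})?)? ?(?P<meridiem>[PA]\.? ?M?)?\b', re.I)

def to_24_hour(time_string):
    parsed = time_regex.search(time_string)
    if not parsed:
        return 0, 0
    hr = int(parsed.group('hour') or 0)
    minute = int(parsed.group('minute') or 0)
    meridiem = (parsed.group('meridiem') or '')[:1].lower()
    if meridiem == 'a' and hr == 12:      # 12:xx AM -> 00:xx
        hr -= 12
    elif meridiem == 'p' and hr != 12:    # 1:xx PM .. 11:xx PM -> 13:xx .. 23:xx
        hr += 12
    return (hr if 0 <= hr <= 23 else 0), (minute if 0 <= minute <= 59 else 0)

print(to_24_hour('8:30 PM'))   # (20, 30)
print(to_24_hour('12:05 AM'))  # (0, 5)
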
diff --git a/sickbeard/notifiers/boxcar2.py b/sickbeard/notifiers/boxcar2.py
index 28fadc43eade167bfc260bf47748d60e4abb7272..f99ce0088ed5bf4f40b0c334d71821ed622e38ee 100644
--- a/sickbeard/notifiers/boxcar2.py
+++ b/sickbeard/notifiers/boxcar2.py
@@ -53,11 +53,11 @@ class Boxcar2Notifier(object):
         curUrl = API_URL
 
         data = urllib.urlencode({
-                'user_credentials': accesstoken,
-                'notification[title]': "SickRage : " + title + ' : ' + msg,
-                'notification[long_message]': msg,
-                'notification[sound]': "notifier-2"
-            })
+            'user_credentials': accesstoken,
+            'notification[title]': "SickRage : " + title + ' : ' + msg,
+            'notification[long_message]': msg,
+            'notification[sound]': "notifier-2"
+        })
 
         # send the request to boxcar2
         try:
diff --git a/sickbeard/notifiers/kodi.py b/sickbeard/notifiers/kodi.py
index 2e5bd4ede13c97b534e7e50f3427df175c940e76..f46d41baa1d52eb479443b39eb1c25ca301bf78f 100644
--- a/sickbeard/notifiers/kodi.py
+++ b/sickbeard/notifiers/kodi.py
@@ -571,7 +571,7 @@ class KODINotifier(object):
                 else:
                     if sickbeard.KODI_ALWAYS_ON:
                         logger.log(u"Failed to detect KODI version for '" + host + "', check configuration and try again.", logger.WARNING)
-                    result = result + 1
+                    result += 1
 
             # needed for the 'update kodi' submenu command
             # as it only cares of the final result vs the individual ones
diff --git a/sickbeard/notifiers/libnotify.py b/sickbeard/notifiers/libnotify.py
index 16e5d57941ff6982abcc2f18c7255c25de06486e..644701570e2f182fc3960d1db07dd772fc41e8a9 100644
--- a/sickbeard/notifiers/libnotify.py
+++ b/sickbeard/notifiers/libnotify.py
@@ -25,6 +25,7 @@ import sickbeard
 from sickbeard import logger, common
 from sickrage.helper.encoding import ek
 
+
 def diagnose():
     """
     Check the environment for reasons libnotify isn't working.  Return a
diff --git a/sickbeard/notifiers/nma.py b/sickbeard/notifiers/nma.py
index ad5ea24d4cfdcd898d544d47065864d318aecd68..490dd6c5d74744fc0b9f0bcebb4f3f5ab5de97d5 100644
--- a/sickbeard/notifiers/nma.py
+++ b/sickbeard/notifiers/nma.py
@@ -32,14 +32,12 @@ class NMA_Notifier(object):
             title = common.notifyStrings[common.NOTIFY_GIT_UPDATE]
             self._sendNMA(nma_api=None, nma_priority=None, event=title, message=update_text + new_version)
 
-
     def notify_login(self, ipaddress=""):
         if sickbeard.USE_NMA:
             update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT]
             title = common.notifyStrings[common.NOTIFY_LOGIN]
             self._sendNMA(nma_api=None, nma_priority=None, event=title, message=update_text.format(ipaddress))
 
-
     def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False):
 
         title = 'SickRage'
diff --git a/sickbeard/notifiers/prowl.py b/sickbeard/notifiers/prowl.py
index fd47d5a6387c875c17c8ed619d8ec0953d8dcdc4..24b0f145e6d783546ce4c98bd3caa6caecd614c7 100644
--- a/sickbeard/notifiers/prowl.py
+++ b/sickbeard/notifiers/prowl.py
@@ -39,6 +39,7 @@ import ast
 from sickbeard import logger, common, db
 from sickrage.helper.encoding import ss
 
+
 class ProwlNotifier(object):
     def test_notify(self, prowl_api, prowl_priority):
         return self._send_prowl(prowl_api, prowl_priority, event="Test", message="Testing Prowl settings from SickRage", force=True)
@@ -53,7 +54,7 @@ class ProwlNotifier(object):
             else:
                 for api in recipients:
                     self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
-                                     message=ep_name+" :: "+time.strftime(sickbeard.DATE_PRESET+" "+sickbeard.TIME_PRESET))
+                                     message=ep_name + " :: " + time.strftime(sickbeard.DATE_PRESET + " " + sickbeard.TIME_PRESET))
 
     def notify_download(self, ep_name):
         ep_name = ss(ep_name)
@@ -65,7 +66,7 @@ class ProwlNotifier(object):
             else:
                 for api in recipients:
                     self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
-                                     message=ep_name+" :: "+time.strftime(sickbeard.DATE_PRESET+" "+sickbeard.TIME_PRESET))
+                                     message=ep_name + " :: " + time.strftime(sickbeard.DATE_PRESET + " " + sickbeard.TIME_PRESET))
 
     def notify_subtitle_download(self, ep_name, lang):
         ep_name = ss(ep_name)
@@ -77,7 +78,7 @@ class ProwlNotifier(object):
             else:
                 for api in recipients:
                     self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
-                                     message=ep_name+" ["+lang+"] :: "+time.strftime(sickbeard.DATE_PRESET+" "+sickbeard.TIME_PRESET))
+                                     message=ep_name + " [" + lang + "] :: " + time.strftime(sickbeard.DATE_PRESET + " " + sickbeard.TIME_PRESET))
 
     def notify_git_update(self, new_version="??"):
         if sickbeard.USE_PROWL:
diff --git a/sickbeard/notifiers/pushover.py b/sickbeard/notifiers/pushover.py
index 064ac46f3f89318306b8188c9646d2973498ae08..e2e2b89f5bae5152e1708ca7d2bb693fdebae716 100644
--- a/sickbeard/notifiers/pushover.py
+++ b/sickbeard/notifiers/pushover.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Marvin Pinto <me@marvinp.ca>
 # Author: Dennis Lutter <lad1337@gmail.com>
 # Author: Aaron Bieber <deftly@gmail.com>
@@ -67,25 +68,27 @@ class PushoverNotifier(object):
         # send the request to pushover
         try:
             if sickbeard.PUSHOVER_SOUND != "default":
-                args = {"token": apiKey,
-                        "user": userKey,
-                        "title": title.encode('utf-8'),
-                        "message": msg.encode('utf-8'),
-                        "timestamp": int(time.time()),
-                        "retry": 60,
-                        "expire": 3600,
-                        "sound": sound,
-                       }
+                args = {
+                    "token": apiKey,
+                    "user": userKey,
+                    "title": title.encode('utf-8'),
+                    "message": msg.encode('utf-8'),
+                    "timestamp": int(time.time()),
+                    "retry": 60,
+                    "expire": 3600,
+                    "sound": sound,
+                }
             else:
                 # sound is default, so don't send it
-                args = {"token": apiKey,
-                        "user": userKey,
-                        "title": title.encode('utf-8'),
-                        "message": msg.encode('utf-8'),
-                        "timestamp": int(time.time()),
-                        "retry": 60,
-                        "expire": 3600,
-                       }
+                args = {
+                    "token": apiKey,
+                    "user": userKey,
+                    "title": title.encode('utf-8'),
+                    "message": msg.encode('utf-8'),
+                    "timestamp": int(time.time()),
+                    "retry": 60,
+                    "expire": 3600,
+                }
 
             if sickbeard.PUSHOVER_DEVICE:
                 args["device"] = sickbeard.PUSHOVER_DEVICE
@@ -136,7 +139,6 @@ class PushoverNotifier(object):
         if sickbeard.PUSHOVER_NOTIFY_ONSNATCH:
             self._notifyPushover(title, ep_name)
 
-
     def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]):
         if sickbeard.PUSHOVER_NOTIFY_ONDOWNLOAD:
             self._notifyPushover(title, ep_name)
diff --git a/sickbeard/nzbget.py b/sickbeard/nzbget.py
index b8bc568eb9f8d380aa11e21ebaa0c8a232a3a8e1..0d054fca8532c89d7c59fedb26f2919475660c63 100644
--- a/sickbeard/nzbget.py
+++ b/sickbeard/nzbget.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index 5c88dcd5f17e383ec5aac0ae2aa1c2e0276b0b9c..0e393ef446b4e7f89b20e8777049a104963ba0e7 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -180,7 +181,7 @@ class PostProcessor(object):
             base_name = globbable_file_path.rpartition('.')[0]
 
         if not base_name_only:
-            base_name = base_name + '.'
+            base_name += '.'
 
         # don't strip it all and use cwd by accident
         if not base_name:
@@ -348,7 +349,10 @@ class PostProcessor(object):
                     cur_lang = cur_lang.lower()
                     if cur_lang == 'pt-br':
                         cur_lang = 'pt-BR'
-                    cur_extension = cur_lang + ek(os.path.splitext, cur_extension)[1]
+                    if new_base_name:
+                        cur_extension = cur_lang + ek(os.path.splitext, cur_extension)[1]
+                    else:
+                        cur_extension = cur_extension.rpartition('.')[2]
 
             # replace .nfo with .nfo-orig to avoid conflicts
             if cur_extension == 'nfo' and sickbeard.NFO_RENAME is True:
@@ -511,7 +515,8 @@ class PostProcessor(object):
             self.version = version
             to_return = (show, season, [], quality, version)
 
-            self._log("Found result in history for %s - Season: %s - Quality: %s - Version: %s" % (show.name, season, common.Quality.qualityStrings[quality], version), logger.DEBUG)
+            qual_str = common.Quality.qualityStrings[quality] if quality is not None else quality
+            self._log("Found result in history for %s - Season: %s - Quality: %s - Version: %s" % (show.name, season, qual_str, version), logger.DEBUG)
 
             return to_return
 
@@ -532,8 +537,8 @@ class PostProcessor(object):
 
         # if the result is complete then remember that for later
         # if the result is complete then set release name
-        if parse_result.series_name and ((parse_result.season_number is not None and parse_result.episode_numbers)
-                                         or parse_result.air_date) and parse_result.release_group:
+        if parse_result.series_name and ((parse_result.season_number is not None and parse_result.episode_numbers) or
+                                         parse_result.air_date) and parse_result.release_group:
 
             if not self.release_name:
                 self.release_name = helpers.remove_non_release_groups(remove_extension(ek(os.path.basename, parse_result.original_name)))
@@ -715,9 +720,9 @@ class PostProcessor(object):
                     season = 1
 
             if show and season and episodes:
-                return (show, season, episodes, quality, version)
+                return show, season, episodes, quality, version
 
-        return (show, season, episodes, quality, version)
+        return show, season, episodes, quality, version
 
     def _get_ep_obj(self, show, season, episodes):
         """
@@ -961,7 +966,7 @@ class PostProcessor(object):
                 self._log(u"File exists and new file is same size, marking it unsafe to replace")
                 return False
 
-            if new_ep_quality <= old_ep_quality and old_ep_quality != common.Quality.UNKNOWN and existing_file_status != PostProcessor.DOESNT_EXIST:
+            if new_ep_quality <= old_ep_quality != common.Quality.UNKNOWN and existing_file_status != PostProcessor.DOESNT_EXIST:
                 if self.is_proper and new_ep_quality == old_ep_quality:
                     self._log(u"New file is a proper/repack, marking it safe to replace")
                 else:
@@ -1040,10 +1045,7 @@ class PostProcessor(object):
                 else:
                     cur_ep.release_name = ""
 
-                if ep_obj.status in common.Quality.SNATCHED_BEST:
-                    cur_ep.status = common.Quality.compositeStatus(common.ARCHIVED, new_ep_quality)
-                else:
-                    cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
+                cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
 
                 cur_ep.subtitles = u''
 
@@ -1156,7 +1158,11 @@ class PostProcessor(object):
                     cur_ep.airdateModifyStamp()
 
         # generate nfo/tbn
-        ep_obj.createMetaFiles()
+        try:
+            ep_obj.createMetaFiles()
+        except Exception:
+            logger.log(u"Could not create/update meta files. Continuing with postProcessing...")
+
 
         # log it to history
         history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version)
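
A minimal sketch, with hypothetical stand-in names, of the two defensive patterns introduced above: guarding a possibly-None quality before indexing the quality-string table, and treating metadata creation as non-fatal so post-processing can continue.

quality_strings = {1: 'SDTV', 4: 'HDTV'}   # stand-in for common.Quality.qualityStrings

def describe_quality(quality):
    # Indexing with None would raise a KeyError; fall back to the raw value instead.
    return quality_strings[quality] if quality is not None else quality

def create_meta_files_safely(ep_obj, log):
    try:
        ep_obj.createMetaFiles()
    except Exception:
        # Metadata is optional; a failure here should not abort post-processing.
        log(u"Could not create/update meta files. Continuing with postProcessing...")
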
diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py
index 40cb0bb215adaf5c6a5c34b981ab9af5cc557c60..5970a8e15528e7f3b573f793e44c167b8fcbdeb9 100644
--- a/sickbeard/processTV.py
+++ b/sickbeard/processTV.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io/
 # Git: https://github.com/SickRage/SickRage.git
@@ -74,21 +75,22 @@ def delete_folder(folder, check_empty=True):
     if check_empty:
         check_files = ek(os.listdir, folder)
         if check_files:
-            logger.log(u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO)
+            logger.log(u"Not deleting folder %s found the following files: %s" %
+                       (folder, check_files), logger.INFO)
             return False
 
         try:
-            logger.log(u"Deleting folder (if it's empty): " + folder)
+            logger.log(u"Deleting folder (if it's empty): %s" % folder)
             ek(os.rmdir, folder)
         except (OSError, IOError) as e:
-            logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
+            logger.log(u"Warning: unable to delete folder: %s: %s" % (folder, ex(e)), logger.WARNING)
             return False
     else:
         try:
             logger.log(u"Deleting folder: " + folder)
             shutil.rmtree(folder)
         except (OSError, IOError) as e:
-            logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
+            logger.log(u"Warning: unable to delete folder: %s: %s" % (folder, ex(e)), logger.WARNING)
             return False
 
     return True
@@ -117,21 +119,22 @@ def delete_files(processPath, notwantedFiles, result, force=False):
         if not ek(os.path.isfile, cur_file_path):
             continue  # Prevent error when a notwantedfiles is an associated files
 
-        result.output += logHelper(u"Deleting file " + cur_file, logger.DEBUG)
+        result.output += logHelper(u"Deleting file: %s" % cur_file, logger.DEBUG)
 
         # check first the read-only attribute
         file_attribute = ek(os.stat, cur_file_path)[0]
         if not file_attribute & stat.S_IWRITE:
             # File is read-only, so make it writeable
-            result.output += logHelper(u"Changing ReadOnly Flag for file " + cur_file, logger.DEBUG)
+            result.output += logHelper(u"Changing ReadOnly Flag for file: %s" % cur_file, logger.DEBUG)
             try:
                 ek(os.chmod, cur_file_path, stat.S_IWRITE)
             except OSError as e:
-                result.output += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + ex(e), logger.DEBUG)
+                result.output += logHelper(u"Cannot change permissions of %s: %s" %
+                                           (cur_file_path, ex(e)), logger.DEBUG)
         try:
             ek(os.remove, cur_file_path)
         except OSError as e:
-            result.output += logHelper(u"Unable to delete file " + cur_file + ': ' + str(e.strerror), logger.DEBUG)
+            result.output += logHelper(u"Unable to delete file %s: %s" % (cur_file, e.strerror), logger.DEBUG)
 
 
 def logHelper(logMessage, logLevel=logger.INFO):
@@ -152,41 +155,38 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
 
     result = ProcessResult()
 
-    result.output += logHelper(u"Processing folder " + dirName, logger.DEBUG)
-
-    result.output += logHelper(u"TV_DOWNLOAD_DIR: " + sickbeard.TV_DOWNLOAD_DIR, logger.DEBUG)
-    postpone = False
     # if they passed us a real dir then assume it's the one we want
     if ek(os.path.isdir, dirName):
         dirName = ek(os.path.realpath, dirName)
+        result.output += logHelper(u"Processing folder %s" % dirName, logger.DEBUG)
 
-    # if the client and SickRage are not on the same machine translate the Dir in a network dir
-    elif sickbeard.TV_DOWNLOAD_DIR and ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \
-            and ek(os.path.normpath, dirName) != ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
+    # if the client and SickRage are not on the same machine translate the directory into a network directory
+    elif all([sickbeard.TV_DOWNLOAD_DIR,
+              ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR),
+              ek(os.path.normpath, dirName) != ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR)]):
         dirName = ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek(os.path.abspath, dirName).split(os.path.sep)[-1])
-        result.output += logHelper(u"Trying to use folder " + dirName, logger.DEBUG)
+        result.output += logHelper(u"Trying to use folder: %s " % dirName, logger.DEBUG)
 
     # if we didn't find a real dir then quit
     if not ek(os.path.isdir, dirName):
-        result.output += logHelper(
-            u"Unable to figure out what folder to process. If your downloader and SickRage aren't on the same PC make sure you fill out your TV download dir in the config.",
-            logger.DEBUG)
+        result.output += logHelper(u"Unable to figure out what folder to process. "
+                                   u"If your downloader and SickRage aren't on the same PC "
+                                   u"make sure you fill out your TV download dir in the config.",
+                                   logger.DEBUG)
         return result.output
 
     path, dirs, files = get_path_dir_files(dirName, nzbName, proc_type)
 
     files = [x for x in files if not is_torrent_or_nzb_file(x)]
     SyncFiles = [x for x in files if is_sync_file(x)]
+    nzbNameOriginal = nzbName
 
     # Don't post process if files are still being synced and option is activated
-    if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
-        postpone = True
-
-    nzbNameOriginal = nzbName
+    postpone = SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES
 
     if not postpone:
-        result.output += logHelper(u"PostProcessing Path: " + path, logger.INFO)
-        result.output += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)
+        result.output += logHelper(u"PostProcessing Path: %s" % path, logger.INFO)
+        result.output += logHelper(u"PostProcessing Dirs: %s" % str(dirs), logger.DEBUG)
 
         rarFiles = [x for x in files if helpers.isRarFile(x)]
         rarContent = unRAR(path, rarFiles, force, result)
@@ -194,23 +194,20 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
         videoFiles = [x for x in files if helpers.isMediaFile(x)]
         videoInRar = [x for x in rarContent if helpers.isMediaFile(x)]
 
-        result.output += logHelper(u"PostProcessing Files: " + str(files), logger.DEBUG)
-        result.output += logHelper(u"PostProcessing VideoFiles: " + str(videoFiles), logger.DEBUG)
-        result.output += logHelper(u"PostProcessing RarContent: " + str(rarContent), logger.DEBUG)
-        result.output += logHelper(u"PostProcessing VideoInRar: " + str(videoInRar), logger.DEBUG)
+        result.output += logHelper(u"PostProcessing Files: %s" % files, logger.DEBUG)
+        result.output += logHelper(u"PostProcessing VideoFiles: %s" % videoFiles, logger.DEBUG)
+        result.output += logHelper(u"PostProcessing RarContent: %s" % rarContent, logger.DEBUG)
+        result.output += logHelper(u"PostProcessing VideoInRar: %s" % videoInRar, logger.DEBUG)
 
         # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten).
-        if len(videoFiles) >= 2:
-            nzbName = None
-
-        if not process_method:
-            process_method = sickbeard.PROCESS_METHOD
+        nzbName = None if len(videoFiles) >= 2 else nzbName
 
+        process_method = process_method if process_method else sickbeard.PROCESS_METHOD
         result.result = True
 
         # Don't Link media when the media is extracted from a rar in the same path
-        if process_method in ('hardlink', 'symlink') and videoInRar:
-            process_media(path, videoInRar, nzbName, 'move', force, is_priority, result)
+        if process_method in (u'hardlink', u'symlink') and videoInRar:
+            process_media(path, videoInRar, nzbName, u'move', force, is_priority, result)
             delete_files(path, rarContent, result)
             for video in set(videoFiles) - set(videoInRar):
                 process_media(path, [video], nzbName, process_method, force, is_priority, result)
@@ -224,13 +221,12 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
                 process_media(path, [video], nzbName, process_method, force, is_priority, result)
 
     else:
-        result.output += logHelper(u"Found temporary sync files, skipping post processing for folder " + str(path))
-        result.output += logHelper(u"Sync Files: " + str(SyncFiles) + " in path: " + path)
-        result.missedfiles.append(path + " : Syncfiles found")
+        result.output += logHelper(u"Found temporary sync files: %s in path: %s" % (SyncFiles, path))
+        result.output += logHelper(u"Skipping post processing for folder: %s" % path)
+        result.missedfiles.append(u"%s : Syncfiles found" % path)
 
     # Process Video File in all TV Subdir
     for curDir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed, result)]:
-
         result.result = True
 
         for processPath, _, fileList in ek(os.walk, ek(os.path.join, path, curDir), topdown=False):
@@ -238,13 +234,10 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
             if not validateDir(path, processPath, nzbNameOriginal, failed, result):
                 continue
 
-            postpone = False
-
             SyncFiles = [x for x in fileList if is_sync_file(x)]
 
             # Don't post process if files are still being synced and option is activated
-            if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
-                postpone = True
+            postpone = SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES
 
             if not postpone:
                 rarFiles = [x for x in fileList if helpers.isRarFile(x)]
@@ -254,11 +247,11 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
                 videoInRar = [x for x in rarContent if helpers.isMediaFile(x)]
                 notwantedFiles = [x for x in fileList if x not in videoFiles]
                 if notwantedFiles:
-                    result.output += logHelper(u"Found unwanted files: " + str(notwantedFiles), logger.DEBUG)
+                    result.output += logHelper(u"Found unwanted files: %s" % notwantedFiles, logger.DEBUG)
 
                 # Don't Link media when the media is extracted from a rar in the same path
-                if process_method in ('hardlink', 'symlink') and videoInRar:
-                    process_media(processPath, videoInRar, nzbName, 'move', force, is_priority, result)
+                if process_method in (u'hardlink', u'symlink') and videoInRar:
+                    process_media(processPath, videoInRar, nzbName, u'move', force, is_priority, result)
                     process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
                                   is_priority, result)
                     delete_files(processPath, rarContent, result)
@@ -270,32 +263,33 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
                 else:
                     process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, result)
 
-                    # Delete all file not needed
-                    if process_method != "move" or not result.result \
-                            or (proc_type == "manual" and not delete_on):  # Avoid to delete files if is Manual PostProcessing
+                    # Delete all file not needed and avoid deleting files if Manual PostProcessing
+                    if process_method != u"move" or not result.result or (proc_type == u"manual" and not delete_on):
                         continue
 
                     delete_files(processPath, notwantedFiles, result)
 
-                    if (not sickbeard.NO_DELETE or proc_type == "manual") and process_method == "move" and \
-                            ek(os.path.normpath, processPath) != ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
+                    if all([not sickbeard.NO_DELETE or proc_type == u"manual",
+                            process_method == u"move",
+                            ek(os.path.normpath, processPath) != ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR)]
+                           ):
                         if delete_folder(processPath, check_empty=True):
-                            result.output += logHelper(u"Deleted folder: " + processPath, logger.DEBUG)
+                            result.output += logHelper(u"Deleted folder: %s" % processPath, logger.DEBUG)
             else:
-                result.output += logHelper(u"Found temporary sync files, skipping post processing for folder: " + str(processPath))
-                result.output += logHelper(u"Sync Files: " + str(SyncFiles) + " in path: " + processPath)
-                result.missedfiles.append(processPath + " : Syncfiles found")
+                result.output += logHelper(u"Found temporary sync files: %s in path: %s" % (SyncFiles, processPath))
+                result.output += logHelper(u"Skipping post processing for folder: %s" % processPath)
+                result.missedfiles.append(u"%s : Syncfiles found" % processPath)
 
     if result.aggresult:
         result.output += logHelper(u"Successfully processed")
         if result.missedfiles:
             result.output += logHelper(u"I did encounter some unprocessable items: ")
             for missedfile in result.missedfiles:
-                result.output += logHelper(u"[" + missedfile + "]")
+                result.output += logHelper(u"[%s]" % missedfile)
     else:
         result.output += logHelper(u"Problem(s) during processing, failed the following files/folders:  ", logger.WARNING)
         for missedfile in result.missedfiles:
-            result.output += logHelper(u"[" + missedfile + "]", logger.WARNING)
+            result.output += logHelper(u"[%s]" % missedfile, logger.WARNING)
 
     return result.output
 
@@ -314,32 +308,32 @@ def validateDir(path, dirName, nzbNameOriginal, failed, result):  # pylint: disa
 
     dirName = ss(dirName)
 
-    IGNORED_FOLDERS = ['.AppleDouble', '.@__thumb', '@eaDir']
+    IGNORED_FOLDERS = [u'.AppleDouble', u'.@__thumb', u'@eaDir']
     folder_name = ek(os.path.basename, dirName)
     if folder_name in IGNORED_FOLDERS:
         return False
 
     result.output += logHelper(u"Processing folder " + dirName, logger.DEBUG)
 
-    if folder_name.startswith('_FAILED_'):
+    if folder_name.startswith(u'_FAILED_'):
         result.output += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
         failed = True
-    elif folder_name.startswith('_UNDERSIZED_'):
+    elif folder_name.startswith(u'_UNDERSIZED_'):
         result.output += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
         failed = True
-    elif folder_name.upper().startswith('_UNPACK'):
+    elif folder_name.upper().startswith(u'_UNPACK'):
         result.output += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
-        result.missedfiles.append(dirName + " : Being unpacked")
+        result.missedfiles.append(u"%s : Being unpacked" % dirName)
         return False
 
     if failed:
         process_failed(ek(os.path.join, path, dirName), nzbNameOriginal, result)
-        result.missedfiles.append(dirName + " : Failed download")
+        result.missedfiles.append(u"%s : Failed download" % dirName)
         return False
 
     if helpers.is_hidden_folder(ek(os.path.join, path, dirName)):
-        result.output += logHelper(u"Ignoring hidden folder: " + dirName, logger.DEBUG)
-        result.missedfiles.append(dirName + " : Hidden folder")
+        result.output += logHelper(u"Ignoring hidden folder: %s" % dirName, logger.DEBUG)
+        result.missedfiles.append(u"%s : Hidden folder" % dirName)
         return False
 
     # make sure the dir isn't inside a show dir
@@ -391,7 +385,7 @@ def validateDir(path, dirName, nzbNameOriginal, failed, result):  # pylint: disa
             except (InvalidNameException, InvalidShowException):
                 pass
 
-    result.output += logHelper(dirName + " : No processable items found in folder", logger.DEBUG)
+    result.output += logHelper(u"%s : No processable items found in folder" % dirName, logger.DEBUG)
     return False
 
 
@@ -410,12 +404,13 @@ def unRAR(path, rarFiles, force, result):  # pylint: disable=too-many-branches,t
 
     if sickbeard.UNPACK and rarFiles:
 
-        result.output += logHelper(u"Packed Releases detected: " + str(rarFiles), logger.DEBUG)
+        result.output += logHelper(u"Packed Releases detected: %s" % rarFiles, logger.DEBUG)
 
         for archive in rarFiles:
 
-            result.output += logHelper(u"Unpacking archive: " + archive, logger.DEBUG)
+            result.output += logHelper(u"Unpacking archive: %s" % archive, logger.DEBUG)
 
+            failure = None
             try:
                 rar_handle = RarFile(ek(os.path.join, path, archive))
 
@@ -423,9 +418,8 @@ def unRAR(path, rarFiles, force, result):  # pylint: disable=too-many-branches,t
                 skip_file = False
                 for file_in_archive in [ek(os.path.basename, x.filename) for x in rar_handle.infolist() if not x.isdir]:
                     if already_postprocessed(path, file_in_archive, force, result):
-                        result.output += logHelper(
-                            u"Archive file already post-processed, extraction skipped: " + file_in_archive,
-                            logger.DEBUG)
+                        result.output += logHelper(u"Archive file already post-processed, extraction skipped: %s" %
+                                                   file_in_archive, logger.DEBUG)
                         skip_file = True
                         break
 
@@ -440,38 +434,27 @@ def unRAR(path, rarFiles, force, result):  # pylint: disable=too-many-branches,t
                             unpacked_files.append(basename)
                 del rar_handle
 
-            except ArchiveHeaderBroken as e:
-                result.output += logHelper(u"Failed Unrar archive {0}: Unrar: Archive Header Broken".format(archive), logger.ERROR)
-                result.result = False
-                result.missedfiles.append(archive + " : Unpacking failed because the Archive Header is Broken")
-                continue
+            except ArchiveHeaderBroken:
+                failure = (u'Archive Header Broken', u'Unpacking failed because the Archive Header is Broken')
             except IncorrectRARPassword:
-                result.output += logHelper(u"Failed Unrar archive {0}: Unrar: Incorrect Rar Password".format(archive), logger.ERROR)
-                result.result = False
-                result.missedfiles.append(archive + " : Unpacking failed because of an Incorrect Rar Password")
-                continue
+                failure = (u'Incorrect RAR Password', u'Unpacking failed because of an Incorrect Rar Password')
             except FileOpenError:
-                result.output += logHelper(u"Failed Unrar archive {0}: Unrar: File Open Error, check the parent folder and destination file permissions.".format(archive), logger.ERROR)
-                result.result = False
-                result.missedfiles.append(archive + " : Unpacking failed with a File Open Error (file permissions?)")
-                continue
+                failure = (u'File Open Error, check the parent folder and destination file permissions.',
+                           u'Unpacking failed with a File Open Error (file permissions?)')
             except InvalidRARArchiveUsage:
-                result.output += logHelper(u"Failed Unrar archive {0}: Unrar: Invalid Rar Archive Usage".format(archive), logger.ERROR)
-                result.result = False
-                result.missedfiles.append(archive + " : Unpacking Failed with Invalid Rar Archive Usage")
-                continue
+                failure = (u'Invalid Rar Archive Usage', u'Unpacking Failed with Invalid Rar Archive Usage')
             except InvalidRARArchive:
-                result.output += logHelper(u"Failed Unrar archive {0}: Unrar: Invalid Rar Archive".format(archive), logger.ERROR)
-                result.result = False
-                result.missedfiles.append(archive + " : Unpacking Failed with an Invalid Rar Archive Error")
-                continue
+                failure = (u'Invalid Rar Archive', u'Unpacking Failed with an Invalid Rar Archive Error')
             except Exception as e:
-                result.output += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR)
+                failure = (ex(e), u'Unpacking failed for an unknown reason')
+
+            if failure is not None:
+                result.output += logHelper(u'Failed Unrar archive %s: %s' % (archive, failure[0]), logger.ERROR)
+                result.missedfiles.append(u'%s : Unpacking failed: %s' % (archive, failure[1]))
                 result.result = False
-                result.missedfiles.append(archive + " : Unpacking failed for an unknown reason")
                 continue
 
-        result.output += logHelper(u"UnRar content: " + str(unpacked_files), logger.DEBUG)
+        result.output += logHelper(u"UnRar content: %s" % unpacked_files, logger.DEBUG)
 
     return unpacked_files
 
@@ -538,11 +521,12 @@ def process_media(processPath, videoFiles, nzbName, process_method, force, is_pr
         cur_video_file_path = ek(os.path.join, processPath, cur_video_file)
 
         if already_postprocessed(processPath, cur_video_file, force, result):
-            result.output += logHelper(u"Already Processed " + cur_video_file + " : Skipping", logger.DEBUG)
+            result.output += logHelper(u"Skipping already processed file: %s" % cur_video_file, logger.DEBUG)
             continue
 
         try:
             processor = postProcessor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority)
+
             # This feature prevents PP for files that do not have subtitle associated with the video file
             if sickbeard.POSTPONE_IF_NO_SUBS:
                 associatedFiles = processor.list_associated_files(cur_video_file_path, subtitles_only=True)
@@ -553,7 +537,7 @@ def process_media(processPath, videoFiles, nzbName, process_method, force, is_pr
                     result.output += logHelper(u"Found subtitles associated. Continuing the post-process of this file: %s" % cur_video_file)
 
             result.result = processor.process()
-            process_fail_message = ""
+            process_fail_message = u""
         except EpisodePostProcessingFailedException as e:
             result.result = False
             process_fail_message = ex(e)
@@ -562,10 +546,10 @@ def process_media(processPath, videoFiles, nzbName, process_method, force, is_pr
             result.output += processor.log
 
         if result.result:
-            result.output += logHelper(u"Processing succeeded for " + cur_video_file_path)
+            result.output += logHelper(u"Processing succeeded for %s" % cur_video_file_path)
         else:
-            result.output += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message, logger.WARNING)
-            result.missedfiles.append(cur_video_file_path + " : Processing failed: " + process_fail_message)
+            result.output += logHelper(u"Processing failed for %s: %s" % (cur_video_file_path, process_fail_message), logger.WARNING)
+            result.missedfiles.append(u"%s : Processing failed: %s" % (cur_video_file_path, process_fail_message))
             result.aggresult = False
 
 
@@ -578,17 +562,17 @@ def get_path_dir_files(dirName, nzbName, proc_type):
     :param proc_type: auto/manual
     :return: a tuple of (path,dirs,files)
     """
-    path = ""
+    path = u""
     dirs = []
     files = []
 
-    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or proc_type == "manual":  # Scheduled Post Processing Active
+    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or proc_type == u"manual":  # Scheduled Post Processing Active
         # Get at first all the subdir in the dirName
         for path, dirs, files in ek(os.walk, dirName):
             break
     else:
         path, dirs = ek(os.path.split, dirName)  # Script Post Processing
-        if nzbName is not None and not nzbName.endswith('.nzb') and ek(os.path.isfile, ek(os.path.join, dirName, nzbName)):  # For single torrent file without Dir
+        if not (nzbName is None or nzbName.endswith(u'.nzb')) and ek(os.path.isfile, ek(os.path.join, dirName, nzbName)):  # For single torrent file without Dir
             dirs = []
             files = [ek(os.path.join, dirName, nzbName)]
         else:
@@ -607,7 +591,7 @@ def process_failed(dirName, nzbName, result):
         try:
             processor = failedProcessor.FailedProcessor(dirName, nzbName)
             result.result = processor.process()
-            process_fail_message = ""
+            process_fail_message = u""
         except FailedPostProcessingFailedException as e:
             result.result = False
             process_fail_message = ex(e)
@@ -617,11 +601,10 @@ def process_failed(dirName, nzbName, result):
 
         if sickbeard.DELETE_FAILED and result.result:
             if delete_folder(dirName, check_empty=False):
-                result.output += logHelper(u"Deleted folder: " + dirName, logger.DEBUG)
+                result.output += logHelper(u"Deleted folder: %s" % dirName, logger.DEBUG)
 
         if result.result:
-            result.output += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
+            result.output += logHelper(u"Failed Download Processing succeeded: (%s, %s)" % (nzbName, dirName))
         else:
-            result.output += logHelper(
-                u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
-                logger.WARNING)
+            result.output += logHelper(u"Failed Download Processing failed: (%s, %s): %s" %
+                                       (nzbName, dirName, process_fail_message), logger.WARNING)
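
A minimal sketch of the consolidated unRAR error handling shown above: each exception class maps to a (log reason, missed-files note) pair that is reported once after the try block, instead of repeating the logging in every except branch. The exception classes and the extract callable are stand-ins for the RAR library's types, used here only for illustration.

class ArchiveHeaderBroken(Exception):
    pass

class IncorrectRARPassword(Exception):
    pass

def unpack_archive(archive, extract):
    """Return True on success; report a single (reason, note) pair on failure."""
    failure = None
    try:
        extract(archive)
    except ArchiveHeaderBroken:
        failure = (u'Archive Header Broken', u'Unpacking failed because the Archive Header is Broken')
    except IncorrectRARPassword:
        failure = (u'Incorrect RAR Password', u'Unpacking failed because of an Incorrect Rar Password')
    except Exception as error:
        failure = (str(error), u'Unpacking failed for an unknown reason')

    if failure is not None:
        print(u'Failed Unrar archive %s: %s' % (archive, failure[0]))
        print(u'%s : %s' % (archive, failure[1]))
        return False
    return True
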
diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py
index 44cec35c625731947a9bce4a4efcaf53fbd7d748..bb28758ae7007ae095e48e91b7800eee571393a0 100644
--- a/sickbeard/properFinder.py
+++ b/sickbeard/properFinder.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -36,7 +37,7 @@ from sickrage.show.History import History
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 
 
-class ProperFinder:
+class ProperFinder(object):
     def __init__(self):
         self.amActive = False
 
@@ -102,7 +103,7 @@ class ProperFinder:
                     continue
 
                 name = self._genericName(x.name)
-                if not name in propers:
+                if name not in propers:
                     logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                     x.provider = curProvider
                     propers[name] = x
@@ -188,7 +189,7 @@ class ProperFinder:
                 oldVersion = int(sqlResults[0]["version"])
                 oldRelease_group = (sqlResults[0]["release_group"])
 
-                if oldVersion > -1 and oldVersion < bestResult.version:
+                if -1 < oldVersion < bestResult.version:
                     logger.log(u"Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
                 else:
                     continue
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index 05dc3407394a3608b1c00bfd15bb01485ebffac3..bd0f335b86b34bf9fadbf76e466b811a89442da8 100644
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -71,134 +71,6 @@ def makeProviderList():
     return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
 
 
-def getNewznabProviderList(data):
-    defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
-    providerList = [x for x in [makeNewznabProvider(x) for x in data.split('!!!')] if x]
-
-    seen_values = set()
-    providerListDeduped = []
-    for d in providerList:
-        value = d.name
-        if value not in seen_values:
-            providerListDeduped.append(d)
-            seen_values.add(value)
-
-    providerList = providerListDeduped
-    providerDict = dict(zip([x.name for x in providerList], providerList))
-
-    for curDefault in defaultList:
-        if not curDefault:
-            continue
-
-        if curDefault.name not in providerDict:
-            curDefault.default = True
-            providerList.append(curDefault)
-        else:
-            providerDict[curDefault.name].default = True
-            providerDict[curDefault.name].name = curDefault.name
-            providerDict[curDefault.name].url = curDefault.url
-            providerDict[curDefault.name].needs_auth = curDefault.needs_auth
-            providerDict[curDefault.name].search_mode = curDefault.search_mode
-            providerDict[curDefault.name].search_fallback = curDefault.search_fallback
-            providerDict[curDefault.name].enable_daily = curDefault.enable_daily
-            providerDict[curDefault.name].enable_backlog = curDefault.enable_backlog
-
-    return [x for x in providerList if x]
-
-
-def makeNewznabProvider(configString):
-    if not configString:
-        return None
-
-    search_mode = 'eponly'
-    search_fallback = 0
-    enable_daily = 0
-    enable_backlog = 0
-
-    try:
-        values = configString.split('|')
-        if len(values) == 9:
-            name, url, key, catIDs, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values
-        else:
-            name = values[0]
-            url = values[1]
-            key = values[2]
-            catIDs = values[3]
-            enabled = values[4]
-    except ValueError:
-        logger.log(u"Skipping Newznab provider string: '" + configString + "', incorrect format", logger.ERROR)
-        return None
-
-    # newznab = sys.modules['sickbeard.providers.newznab']
-
-    newProvider = newznab.NewznabProvider(name, url, key=key, catIDs=catIDs, search_mode=search_mode,
-                                          search_fallback=search_fallback, enable_daily=enable_daily,
-                                          enable_backlog=enable_backlog)
-    newProvider.enabled = enabled == '1'
-
-    return newProvider
-
-
-def getTorrentRssProviderList(data):
-    providerList = [x for x in [makeTorrentRssProvider(x) for x in data.split('!!!')] if x]
-
-    seen_values = set()
-    providerListDeduped = []
-    for d in providerList:
-        value = d.name
-        if value not in seen_values:
-            providerListDeduped.append(d)
-            seen_values.add(value)
-
-    return [x for x in providerList if x]
-
-
-def makeTorrentRssProvider(configString):
-    if not configString:
-        return None
-
-    cookies = None
-    titleTAG = 'title'
-    search_mode = 'eponly'
-    search_fallback = 0
-    enable_daily = 0
-    enable_backlog = 0
-
-    try:
-        values = configString.split('|')
-        if len(values) == 9:
-            name, url, cookies, titleTAG, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values
-        elif len(values) == 8:
-            name, url, cookies, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values
-        else:
-            name = values[0]
-            url = values[1]
-            enabled = values[4]
-    except ValueError:
-        logger.log(u"Skipping RSS Torrent provider string: '" + configString + "', incorrect format",
-                   logger.ERROR)
-        return None
-
-    # try:
-    #     torrentRss = sys.modules['sickbeard.providers.rsstorrent']
-    # except Exception:
-    #     return
-
-    newProvider = rsstorrent.TorrentRssProvider(name, url, cookies, titleTAG, search_mode, search_fallback, enable_daily,
-                                                enable_backlog)
-    newProvider.enabled = enabled == '1'
-
-    return newProvider
-
-
-def getDefaultNewznabProviders():
-    # name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog
-    return 'NZB.Cat|https://nzb.cat/||5030,5040,5010|0|eponly|1|1|1!!!' + \
-           'NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0!!!' + \
-           'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0!!!' + \
-           'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'
-
-
 def getProviderModule(name):
     name = name.lower()
     prefix = "sickbeard.providers."
@@ -211,7 +83,7 @@ def getProviderModule(name):
 def getProviderClass(provider_id):
     providerMatch = [x for x in
                      sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if
-                     x.get_id() == provider_id]
+                     x and x.get_id() == provider_id]
 
     if len(providerMatch) != 1:
         return None
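For reference, since the Newznab factory helpers are removed here (presumably relocated), this is how one pipe-delimited provider entry decodes, using the field order from the removed comment; the sample string is one of the removed defaults:

    config = 'NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0'
    (name, url, key, catIDs, enabled, search_mode,
     search_fallback, enable_daily, enable_backlog) = config.split('|')
    assert name == 'NZBGeek' and key == '' and enabled == '0'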
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index d500d771cf24ecbda5ab576d80be7ad9c7a6bfad..39dd4d8ba6306856628f072d89dc3df4cd71a276 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -24,7 +24,7 @@ import traceback
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class AlphaRatioProvider(TorrentProvider):
@@ -111,8 +111,8 @@ class AlphaRatioProvider(TorrentProvider):
                             try:
                                 title = link.contents[0]
                                 download_url = self.urls['download'] % (url['href'])
-                                seeders = cells[len(cells)-2].contents[0]
-                                leechers = cells[len(cells)-1].contents[0]
+                                seeders = cells[len(cells) - 2].contents[0]
+                                leechers = cells[len(cells) - 1].contents[0]
                                 # FIXME
                                 size = -1
                             except (AttributeError, TypeError):
diff --git a/sickbeard/providers/animenzb.py b/sickbeard/providers/animenzb.py
index 93460b5185da94fddbf51cbd56c4a49752159db9..77a5bffb92c6a6f640e6060d1a6a4d9745c823ee 100644
--- a/sickbeard/providers/animenzb.py
+++ b/sickbeard/providers/animenzb.py
@@ -28,7 +28,7 @@ from sickbeard import show_name_helpers
 from sickbeard import logger
 
 from sickbeard import tvcache
-from sickrage.providers.NZBProvider import NZBProvider
+from sickrage.providers.nzb.NZBProvider import NZBProvider
 
 
 class animenzb(NZBProvider):
@@ -69,15 +69,16 @@ class animenzb(NZBProvider):
         searchURL = self.url + "rss?" + urllib.urlencode(params)
         logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
         results = []
-        for curItem in self.cache.getRSSFeed(searchURL)['entries'] or []:
-            (title, url) = self._get_title_and_url(curItem)
-
-            if title and url:
-                results.append(curItem)
-                logger.log(u"Found result: %s " % title, logger.DEBUG)
-
-        # For each search mode sort all the items by seeders if available if available
-        results.sort(key=lambda tup: tup[0], reverse=True)
+        if 'entries' in self.cache.getRSSFeed(searchURL):
+            for curItem in self.cache.getRSSFeed(searchURL)['entries']:
+                (title, url) = self._get_title_and_url(curItem)
+
+                if title and url:
+                    results.append(curItem)
+                    logger.log(u"Found result: %s " % title, logger.DEBUG)
+
+            # For each search mode sort all the items by seeders if available
+            results.sort(key=lambda tup: tup[0], reverse=True)
 
         return results
 
@@ -89,7 +90,7 @@ class animenzb(NZBProvider):
 
             (title, url) = self._get_title_and_url(item)
 
-            if item.has_key('published_parsed') and item['published_parsed']:
+            if 'published_parsed' in item and item['published_parsed']:
                 result_date = item.published_parsed
                 if result_date:
                     result_date = datetime.datetime(*result_date[0:6])
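The 'entries' membership test added above guards against a feed result that lacks that key, though it still fetches the feed twice. A small, self-contained sketch of a dict.get() alternative (an assumption on my part, not what the patch does):

    feed = {'entries': [{'title': u'Show.S01E01', 'link': u'http://example.invalid/nzb/1'}]}
    entries = feed.get('entries', []) if feed else []
    usable = [e for e in entries if e.get('title') and e.get('link')]
    assert len(usable) == 1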
diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py
index 7a653f2b3a5549557afde1aac90a9bf02f5a5477..c0ea97c706d4e496143ed8235f6596441f768d0e 100644
--- a/sickbeard/providers/binsearch.py
+++ b/sickbeard/providers/binsearch.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: moparisthebest <admin@moparisthebest.com>
 #
 # This file is part of Sick Beard.
@@ -20,7 +21,7 @@ import re
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.NZBProvider import NZBProvider
+from sickrage.providers.nzb.NZBProvider import NZBProvider
 
 
 class BinSearchProvider(NZBProvider):
@@ -79,7 +80,7 @@ class BinSearchCache(tvcache.TVCache):
         if url:
             url = url.replace('&amp;', '&')
 
-        return (title, url)
+        return title, url
 
     def updateCache(self):
         # check if we should update
diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py
index 69b494e9ba0dcfc39071b67dc220b889e7e357d3..12f4cdaf2d66c251b730af57956c0905962fb254 100644
--- a/sickbeard/providers/bitcannon.py
+++ b/sickbeard/providers/bitcannon.py
@@ -22,7 +22,7 @@ from urllib import urlencode
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class BitCannonProvider(TorrentProvider):
@@ -136,6 +136,7 @@ class BitCannonProvider(TorrentProvider):
 
         return True
 
+
 class BitCannonCache(tvcache.TVCache):
     def __init__(self, provider_obj):
 
diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py
index 7ff47b0171379439c0849d467f6162d3a36abffb..5e89e174debd1c478522c4f23be6a953f3d850f0 100644
--- a/sickbeard/providers/bitsnoop.py
+++ b/sickbeard/providers/bitsnoop.py
@@ -23,10 +23,10 @@ import sickbeard
 from sickbeard import logger
 from sickbeard import tvcache
 from sickrage.helper.common import try_int
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
-class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes
+class BitSnoopProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
     def __init__(self):
         TorrentProvider.__init__(self, "BitSnoop")
 
@@ -34,7 +34,7 @@ class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-att
             'index': 'http://bitsnoop.com',
             'search': 'http://bitsnoop.com/search/video/',
             'rss': 'http://bitsnoop.com/new_video.html?fmt=rss'
-            }
+        }
 
         self.url = self.urls['index']
 
@@ -47,7 +47,7 @@ class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-att
 
         self.cache = BitSnoopCache(self)
 
-    def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals
+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches,too-many-locals
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
index 5060f1504121bd742688825bb808a0a3112b38ed..639bc49bf2eb64e2547e9d89bf67b878d2c4c0ae 100644
--- a/sickbeard/providers/bitsoup.py
+++ b/sickbeard/providers/bitsoup.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -22,7 +23,7 @@ import traceback
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class BitSoupProvider(TorrentProvider):
@@ -35,7 +36,7 @@ class BitSoupProvider(TorrentProvider):
             'detail': 'https://www.bitsoup.me/details.php?id=%s',
             'search': 'https://www.bitsoup.me/browse.php',
             'download': 'https://bitsoup.me/%s',
-            }
+        }
 
         self.url = self.urls['base_url']
 
@@ -63,7 +64,7 @@ class BitSoupProvider(TorrentProvider):
             'username': self.username,
             'password': self.password,
             'ssl': 'yes'
-            }
+        }
 
         response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
         if not response:
@@ -117,8 +118,10 @@ class BitSoupProvider(TorrentProvider):
                                 title = link.getText()
                                 seeders = int(cells[10].getText().replace(',', ''))
                                 leechers = int(cells[11].getText().replace(',', ''))
-                                # FIXME
+                                torrent_size = cells[8].getText()
                                 size = -1
+                                if re.match(r"\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]", torrent_size):
+                                    size = self._convertSize(torrent_size.rstrip())
                             except (AttributeError, TypeError):
                                 continue
 
@@ -153,6 +156,23 @@ class BitSoupProvider(TorrentProvider):
     def seed_ratio(self):
         return self.ratio
 
+    def _convertSize(self, sizeString):
+        size = sizeString[:-2].strip()
+        modifier = sizeString[-2:].upper()
+        try:
+            size = float(size)
+            if modifier in 'KB':
+                size *= 1024 ** 1
+            elif modifier in 'MB':
+                size *= 1024 ** 2
+            elif modifier in 'GB':
+                size *= 1024 ** 3
+            elif modifier in 'TB':
+                size *= 1024 ** 4
+        except Exception:
+            size = -1
+        return long(size)
+
 
 class BitSoupCache(tvcache.TVCache):
     def __init__(self, provider_obj):
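The _convertSize() helper added to BitSoup above parses a human-readable size by splitting off the last two characters as the unit; note that `modifier in 'KB'` is a substring test, so a bare 'K' or 'B' also matches. A self-contained sketch of the same idea with an explicit unit map (returns int here; the patch uses Python 2's long):

    def convert_size(size_string):
        value, unit = size_string[:-2].strip(), size_string[-2:].upper()
        multipliers = {'KB': 1024, 'MB': 1024 ** 2, 'GB': 1024 ** 3, 'TB': 1024 ** 4}
        try:
            return int(float(value) * multipliers.get(unit, 1))
        except ValueError:
            return -1

    assert convert_size('1.5 GB') == int(1.5 * 1024 ** 3)
    assert convert_size('bogus') == -1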
diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py
index d17bb8027e9574061c9cba44d53cc72610acc089..5a7da1f3d660e6febd64706d862cce78a5a8836b 100644
--- a/sickbeard/providers/bluetigers.py
+++ b/sickbeard/providers/bluetigers.py
@@ -26,7 +26,7 @@ from sickbeard.bs4_parser import BS4Parser
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class BLUETIGERSProvider(TorrentProvider):
@@ -45,11 +45,11 @@ class BLUETIGERSProvider(TorrentProvider):
             'search': 'https://www.bluetigers.ca/torrents-search.php',
             'login': 'https://www.bluetigers.ca/account-login.php',
             'download': 'https://www.bluetigers.ca/torrents-details.php?id=%s&hit=1',
-            }
+        }
 
         self.search_params = {
             "c16": 1, "c10": 1, "c130": 1, "c131": 1, "c17": 1, "c18": 1, "c19": 1
-            }
+        }
 
         self.url = self.urls['base_url']
 
@@ -60,8 +60,8 @@ class BLUETIGERSProvider(TorrentProvider):
         login_params = {
             'username': self.username,
             'password': self.password,
-            'take_login' : '1'
-            }
+            'take_login': '1'
+        }
 
         response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
 
diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py
index ceb8b6010677161051207455442111b1cff745e5..099b7116b2f3467e8a36ba51c16b7668813240b0 100644
--- a/sickbeard/providers/btdigg.py
+++ b/sickbeard/providers/btdigg.py
@@ -22,7 +22,7 @@
 from urllib import urlencode
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class BTDIGGProvider(TorrentProvider):
@@ -39,7 +39,7 @@ class BTDIGGProvider(TorrentProvider):
 
         self.url = self.urls['url']
 
-        # Unsupported
+        # # Unsupported
         # self.minseed = 1
         # self.minleech = 0
 
@@ -49,51 +49,63 @@ class BTDIGGProvider(TorrentProvider):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
-        search_params = {'p': 1}
+        search_params = {'p': 0}
 
-        for mode in search_strings.keys():
+        for mode in search_strings:
             logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
+                search_params['q'] = search_string.encode('utf-8')
 
                 if mode != 'RSS':
                     logger.log(u"Search string: %s" % search_string, logger.DEBUG)
+                    search_params['order'] = '0'
+                else:
+                    search_params['order'] = '2'
 
-                search_params['q'] = search_string.encode('utf-8')
-                search_params['order'] = '1' if mode != 'RSS' else '2'
-
-                searchURL = self.urls['api'] + '?' + urlencode(search_params)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                search_url = self.urls['api'] + '?' + urlencode(search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                jdata = self.get_url(searchURL, json=True)
+                jdata = self.get_url(search_url, json=True)
                 if not jdata:
-                    logger.log(u"No data returned to be parsed!!!")
-                    return []
+                    logger.log(u"No data returned to be parsed!!!", logger.DEBUG)
+                    continue
 
                 for torrent in jdata:
-                    if not torrent['ff']:
-                        title = torrent['name']
-                        download_url = torrent['magnet'] + self._custom_trackers
-                        size = torrent['size']
-                        # FIXME
-                        seeders = 1
-                        leechers = 0
-
-                        if not all([title, download_url]):
-                            continue
-
-                        # Filter unseeded torrent (Unsupported)
-                        # if seeders < self.minseed or leechers < self.minleech:
-                        #    if mode != 'RSS':
-                        #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
-                        #    continue
-
-                        item = title, download_url, size, seeders, leechers
-                        if mode != 'RSS':
-                            logger.log(u"Found result: %s" % title, logger.DEBUG)
-
-                        items[mode].append(item)
-
-            # For each search mode sort all the items by seeders if available (Unsupported)
+                    if not torrent['name']:
+                        logger.log(u"Ignoring result since it has no name", logger.DEBUG)
+                        continue
+
+                    if torrent['ff']:
+                        logger.log(u"Ignoring result for %s since it's a fake (level = %s)" % (torrent['name'], torrent['ff']), logger.DEBUG)
+                        continue
+
+                    if not torrent['files']:
+                        logger.log(u"Ignoring result for %s without files" % torrent['name'], logger.DEBUG)
+                        continue
+
+                    download_url = torrent['magnet'] + self._custom_trackers
+
+                    if not download_url:
+                        logger.log(u"Ignoring result for %s without a url" % torrent['name'], logger.DEBUG)
+                        continue
+
+                    # FIXME
+                    seeders = 1
+                    leechers = 0
+
+                    # # Filter unseeded torrent (Unsupported)
+                    # if seeders < self.minseed or leechers < self.minleech:
+                    #    if mode != 'RSS':
+                    #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                    #    continue
+
+                    if mode != 'RSS':
+                        logger.log(u"Found result: %s" % torrent['name'], logger.DEBUG)
+
+                    item = torrent['name'], download_url, torrent['size'], seeders, leechers
+                    items[mode].append(item)
+
+            # # For each search mode sort all the items by seeders if available (Unsupported)
             # items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
@@ -109,7 +121,7 @@ class BTDiggCache(tvcache.TVCache):
 
         tvcache.TVCache.__init__(self, provider_obj)
 
-        # Cache results for a 30min ,since BTDigg takes some time to crawl
+        # Cache results for 30 minutes, since BTDigg takes some time to crawl
         self.minTime = 30
 
     def _getRSSData(self):
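The query above is built by urlencoding the search params dict onto the API endpoint. Purely as an illustration (the placeholder URL stands in for self.urls['api'], which is defined elsewhere in this file):

    from urllib import urlencode  # Python 2, matching the import at the top of btdigg.py

    search_params = {'p': 0, 'q': u'Show S01E01'.encode('utf-8'), 'order': '0'}
    search_url = 'https://example.invalid/api' + '?' + urlencode(search_params)
    assert search_url.startswith('https://example.invalid/api?')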
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 95d06605699b8372dc6b25f746a1b00bbb7c3bcd..f648fefc8019acd2f8bc887239c0a459b37b54cb 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -31,7 +31,7 @@ from sickbeard import scene_exceptions
 from sickbeard.helpers import sanitizeSceneName
 from sickbeard.common import cpu_presets
 from sickrage.helper.exceptions import AuthException, ex
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class BTNProvider(TorrentProvider):
@@ -46,7 +46,7 @@ class BTNProvider(TorrentProvider):
         self.cache = BTNCache(self)
 
         self.urls = {'base_url': u'http://api.btnapps.net',
-                     'website': u'http://broadcasthe.net/',}
+                     'website': u'http://broadcasthe.net/', }
 
         self.url = self.urls['website']
 
@@ -82,7 +82,7 @@ class BTNProvider(TorrentProvider):
 
         if search_params:
             params.update(search_params)
-            logger.log(u"Search string: %s" %  search_params, logger.DEBUG)
+            logger.log(u"Search string: %s" % search_params, logger.DEBUG)
 
         parsedJSON = self._api_call(apikey, params)
         if not parsedJSON:
@@ -190,7 +190,7 @@ class BTNProvider(TorrentProvider):
                 # unescaped / is valid in JSON, but it can be escaped
                 url = url.replace("\\/", "/")
 
-        return (title, url)
+        return title, url
 
     def _get_season_search_strings(self, ep_obj):
         search_params = []
diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py
index 3c1c1f91ee3ffc210a65297e80198b2d16248736..dd23b79963057ae7e33687172432d7dce62b1f58 100644
--- a/sickbeard/providers/cpasbien.py
+++ b/sickbeard/providers/cpasbien.py
@@ -18,11 +18,12 @@
 # along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
 
 import traceback
+import re
 
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class CpasbienProvider(TorrentProvider):
@@ -33,6 +34,8 @@ class CpasbienProvider(TorrentProvider):
 
         self.public = True
         self.ratio = None
+        self.minseed = None
+        self.minleech = None
         self.url = "http://www.cpasbien.io"
 
         self.proper_strings = ['PROPER', 'REPACK']
@@ -50,9 +53,12 @@ class CpasbienProvider(TorrentProvider):
 
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+                if mode != 'RSS':
+                    searchURL = self.url + '/recherche/' + search_string.replace('.', '-') + '.html'
+                else:
+                    searchURL = self.url + '/view_cat.php?categorie=series'
 
-                searchURL = self.url + '/recherche/'+search_string.replace('.', '-') + '.html'
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 data = self.get_url(searchURL)
 
                 if not data:
@@ -65,7 +71,7 @@ class CpasbienProvider(TorrentProvider):
                         while erlin == 0:
                             try:
                                 classlin = 'ligne' + str(lin)
-                                resultlin = html.findAll(attrs={'class' : [classlin]})
+                                resultlin = html.findAll(attrs={'class': [classlin]})
                                 if resultlin:
                                     for ele in resultlin:
                                         resultdiv.append(ele)
@@ -75,23 +81,18 @@ class CpasbienProvider(TorrentProvider):
                             except Exception:
                                 erlin = 1
 
-                        for row in resultdiv:
+                        for torrent in resultdiv:
                             try:
-                                link = row.find("a", title=True)
-                                title = link.text.lower().strip()
-                                pageURL = link['href']
-
-                                # downloadTorrentLink = torrentSoup.find("a", title.startswith('Cliquer'))
-                                tmp = pageURL.split('/')[-1].replace('.html', '.torrent')
-
-                                downloadTorrentLink = ('http://www.cpasbien.io/telechargement/%s' % tmp)
-
-                                if downloadTorrentLink:
-                                    download_url = downloadTorrentLink
-                                    # FIXME
-                                    size = -1
-                                    seeders = 1
-                                    leechers = 0
+                                title = torrent.findAll(attrs={'class': ["titre"]})[0].text.replace("HDTV", "HDTV x264-CPasBien")
+                                detail_url = torrent.find("a")['href']
+                                tmp = detail_url.split('/')[-1].replace('.html', '.torrent')
+                                download_url = (self.url + '/telechargement/%s' % tmp)
+                                torrent_size = (str(torrent.findAll(attrs={'class': ["poid"]})[0].text).rstrip('&nbsp;')).rstrip()
+                                size = -1
+                                if re.match(r"\d+([,\.]\d+)?\s*[KkMmGgTt]?[Oo]", torrent_size):
+                                    size = self._convertSize(torrent_size.rstrip())
+                                seeders = torrent.findAll(attrs={'class': ["seed_ok"]})[0].text
+                                leechers = torrent.findAll(attrs={'class': ["down"]})[0].text
 
                             except (AttributeError, TypeError):
                                 continue
@@ -99,13 +100,19 @@ class CpasbienProvider(TorrentProvider):
                             if not all([title, download_url]):
                                 continue
 
+                            # Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                continue
+
                             item = title, download_url, size, seeders, leechers
                             if mode != 'RSS':
                                 logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
-                except Exception as e:
+                except Exception:
                     logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
             # For each search mode sort all the items by seeders if available
@@ -118,16 +125,33 @@ class CpasbienProvider(TorrentProvider):
     def seed_ratio(self):
         return self.ratio
 
+    def _convertSize(self, sizeString):
+        size = sizeString[:-2].strip()
+        modifier = sizeString[-2:].upper()
+        try:
+            size = float(size)
+            if modifier in 'KO':
+                size *= 1024 ** 1
+            elif modifier in 'MO':
+                size *= 1024 ** 2
+            elif modifier in 'GO':
+                size *= 1024 ** 3
+            elif modifier in 'TO':
+                size *= 1024 ** 4
+        except Exception:
+            size = -1
+        return long(size)
+
 
 class CpasbienCache(tvcache.TVCache):
     def __init__(self, provider_obj):
 
         tvcache.TVCache.__init__(self, provider_obj)
 
-        self.minTime = 30
+        self.minTime = 20
 
     def _getRSSData(self):
-        # search_strings = {'RSS': ['']}
-        return {'entries': {}}
+        search_strings = {'RSS': ['']}
+        return {'entries': self.provider.search(search_strings)}
 
 provider = CpasbienProvider()
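The cache change above makes _getRSSData() feed real search results into the cache instead of an empty dict. A hypothetical stub (not the SickRage classes) showing the shape of that data flow:

    class StubProvider(object):
        def search(self, search_strings, age=0, ep_obj=None):
            # a provider search returns (title, url, size, seeders, leechers) tuples
            return [('Show.S01E01.HDTV.x264-CPasBien', 'http://example.invalid/1.torrent', -1, 1, 0)]

    old_rss_data = {'entries': {}}                                    # previous behaviour: nothing to cache
    new_rss_data = {'entries': StubProvider().search({'RSS': ['']})}  # new behaviour: cache populated
    assert new_rss_data['entries'] and not old_rss_data['entries']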
diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py
index 4f27fc0961fc90eeb14c8ef77cc27abe4d159cb0..b531cbb4d5992375783aef8c11aadeb6cbe26f1c 100644
--- a/sickbeard/providers/danishbits.py
+++ b/sickbeard/providers/danishbits.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: seedboy
 # URL: https://github.com/seedboy
 #
@@ -23,7 +24,7 @@ import re
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 from sickbeard.bs4_parser import BS4Parser
 
@@ -146,7 +147,7 @@ class DanishbitsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
                             seeders = int(result.find_all('td')[6].text)
                             leechers = int(result.find_all('td')[7].text)
                             size = self._convertSize(result.find_all('td')[2].text)
-                            freeleech = result.find('div', attrs={'class': 'freeleech'}) is not None
+                            freeleech = result.find('span', class_='freeleech')
                             # except (AttributeError, TypeError, KeyError):
                             #     logger.log(u"attrErr: {0}, tErr: {1}, kErr: {2}".format(AttributeError, TypeError, KeyError), logger.DEBUG)
                             #    continue
@@ -186,16 +187,18 @@ class DanishbitsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
         size = m.group(1)
 
         size, modifier = size[:-2], size[-2:]
+        size = size.replace(',', '')  # strip commas from comma separated values
+
         size = float(size)
         if modifier in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
     def seedRatio(self):
         return self.ratio
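The added size.replace(',', '') above matters because float() rejects thousands separators, which this tracker's size column apparently uses. A one-line check:

    raw = '1,019.4'
    # float(raw) would raise ValueError; stripping the comma first makes it parseable.
    assert float(raw.replace(',', '')) == 1019.4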
diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py
index bb1ab5b3c04017a9d0bf46167515764fe218d600..984e84a24781c3522ef8a384204a4a1233215f6d 100644
--- a/sickbeard/providers/elitetorrent.py
+++ b/sickbeard/providers/elitetorrent.py
@@ -25,7 +25,7 @@ from six.moves import urllib
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class elitetorrentProvider(TorrentProvider):
@@ -149,7 +149,6 @@ class elitetorrentProvider(TorrentProvider):
 
         return results
 
-
     @staticmethod
     def _processTitle(title):
 
@@ -163,7 +162,7 @@ class elitetorrentProvider(TorrentProvider):
         title = title.replace('(calidad regular)', 'DVDrip x264')
         title = title.replace('(calidad media)', 'DVDrip x264')
 
-        #Language, all results from this provider have spanish audio, we append it to title (avoid to download undesired torrents)
+        # Language: all results from this provider have Spanish audio; append it to the title to avoid downloading undesired torrents
         title += ' SPANISH AUDIO'
         title += '-ELITETORRENT'
 
diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py
index b5a75f2dfdb29bc7648511a07c2220dc95776478..ba8a2e3d40c42fe24cea8c1c63db1390541e1e73 100644
--- a/sickbeard/providers/extratorrent.py
+++ b/sickbeard/providers/extratorrent.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: duramato <matigonkas@outlook.com>
 # Author: miigotu
 # URL: https://github.com/SickRage/sickrage
@@ -25,7 +26,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.common import USER_AGENT
 from sickrage.helper.common import try_int
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class ExtraTorrentProvider(TorrentProvider):
@@ -35,7 +36,7 @@ class ExtraTorrentProvider(TorrentProvider):
         self.urls = {
             'index': 'http://extratorrent.cc',
             'rss': 'http://extratorrent.cc/rss.xml',
-            }
+        }
 
         self.url = self.urls['index']
 
@@ -43,6 +44,7 @@ class ExtraTorrentProvider(TorrentProvider):
         self.ratio = None
         self.minseed = None
         self.minleech = None
+        self.custom_url = None
 
         self.cache = ExtraTorrentCache(self)
         self.headers.update({'User-Agent': USER_AGENT})
@@ -62,7 +64,11 @@ class ExtraTorrentProvider(TorrentProvider):
 
                 try:
                     self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string})
-                    data = self.get_url(self.urls['rss'], params=self.search_params)
+                    if self.custom_url:
+                        url = self.custom_url + '/rss.xml'
+                        data = self.get_url(url, params=self.search_params)
+                    else:
+                        data = self.get_url(self.urls['rss'], params=self.search_params)
                     if not data:
                         logger.log(u"No data returned from provider", logger.DEBUG)
                         continue
@@ -87,7 +93,7 @@ class ExtraTorrentProvider(TorrentProvider):
 
                     for item in entries:
                         title = item['title'].decode('utf-8')
-                       # info_hash = item['info_hash']
+                        # info_hash = item['info_hash']
                         size = int(item['size'])
                         seeders = try_int(item['seeders'], 0)
                         leechers = try_int(item['leechers'], 0)
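The custom_url handling above lets a user-supplied mirror replace the default ExtraTorrent RSS endpoint. A small sketch of the fallback logic (function name and sample URLs are illustrative only):

    def rss_url(custom_url, default_rss):
        return custom_url.rstrip('/') + '/rss.xml' if custom_url else default_rss

    assert rss_url('http://mirror.example/', 'http://extratorrent.cc/rss.xml') == 'http://mirror.example/rss.xml'
    assert rss_url(None, 'http://extratorrent.cc/rss.xml') == 'http://extratorrent.cc/rss.xml'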
diff --git a/sickbeard/providers/fnt.py b/sickbeard/providers/fnt.py
index 29629b46bd5ca749a0fe956f7a8d62fd6e21d470..21cda3b43ca2d12c5348d9eca3455fd548777a25 100644
--- a/sickbeard/providers/fnt.py
+++ b/sickbeard/providers/fnt.py
@@ -24,7 +24,7 @@ import requests
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class FNTProvider(TorrentProvider):
@@ -39,10 +39,11 @@ class FNTProvider(TorrentProvider):
 
         self.cache = FNTCache(self)
 
-        self.urls = {'base_url': 'https://fnt.nu',
-                     'search': 'https://www.fnt.nu/torrents/recherche/',
-                     'login': 'https://fnt.nu/account-login.php',
-                    }
+        self.urls = {
+            'base_url': 'https://fnt.nu',
+            'search': 'https://www.fnt.nu/torrents/recherche/',
+            'login': 'https://fnt.nu/account-login.php',
+        }
 
         self.url = self.urls['base_url']
         self.search_params = {
@@ -50,17 +51,18 @@ class FNTProvider(TorrentProvider):
             "c137": 1, "c138": 1, "c146": 1, "c122": 1, "c110": 1, "c109": 1, "c135": 1, "c148": 1,
             "c153": 1, "c149": 1, "c150": 1, "c154": 1, "c155": 1, "c156": 1, "c114": 1,
             "visible": 1, "freeleech": 0, "nuke": 1, "3D": 0, "sort": "size", "order": "desc"
-            }
+        }
 
     def login(self):
 
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
             return True
 
-        login_params = {'username': self.username,
-                        'password': self.password,
-                        'submit' : 'Se loguer'
-                       }
+        login_params = {
+            'username': self.username,
+            'password': self.password,
+            'submit': 'Se loguer'
+        }
 
         response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
         if not response:
@@ -73,8 +75,6 @@ class FNTProvider(TorrentProvider):
             logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
-        return True
-
     def search(self, search_strings, age=0, ep_obj=None):
 
         results = []
@@ -106,7 +106,7 @@ class FNTProvider(TorrentProvider):
                             continue
 
                         if result_table:
-                            rows = result_table.findAll("tr", {"class" : "ligntorrent"})
+                            rows = result_table.findAll("tr", {"class": "ligntorrent"})
 
                             for row in rows:
                                 link = row.findAll('td')[1].find("a", href=re.compile("fiche_film"))
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 81ea9ff6a5d7397475a9d228093def608d3726a6..4ff0277dc7773a01eb854bf1a074631b8415902c 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -25,7 +26,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickrage.helper.common import try_int
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class FreshOnTVProvider(TorrentProvider):
@@ -118,7 +119,7 @@ class FreshOnTVProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (freeleech, search_string)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 init_html = self.get_url(searchURL)
                 max_page_number = 0
 
@@ -154,8 +155,7 @@ class FreshOnTVProvider(TorrentProvider):
                     logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
                     continue
 
-                data_response_list = []
-                data_response_list.append(init_html)
+                data_response_list = [init_html]
 
                 # Freshon starts counting pages from zero, even though it displays numbers from 1
                 if max_page_number > 1:
diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py
index 1a154a24073b989f1e51486ec421075eb910ae3a..79da127b9418f064b86bf7bda3e0b8cb42f4e6f4 100644
--- a/sickbeard/providers/gftracker.py
+++ b/sickbeard/providers/gftracker.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Seamus Wassman
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -17,13 +18,14 @@
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
 import re
+import requests
 import traceback
 
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickrage.helper.exceptions import AuthException
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class GFTrackerProvider(TorrentProvider):
@@ -38,19 +40,18 @@ class GFTrackerProvider(TorrentProvider):
         self.minseed = None
         self.minleech = None
 
-        self.urls = {'base_url': 'https://www.thegft.org',
-                     'login': 'https://www.thegft.org/loginsite.php',
-                     'search': 'https://www.thegft.org/browse.php?view=%s%s',
-                     'download': 'https://www.thegft.org/%s',
+        self.urls = {
+            'base_url': 'https://www.thegft.org',
+            'login': 'https://www.thegft.org/loginsite.php',
+            'search': 'https://www.thegft.org/browse.php?view=%s%s',
+            'download': 'https://www.thegft.org/%s',
         }
 
         self.url = self.urls['base_url']
 
-        self.cookies = None
-
         self.categories = "0&c26=1&c37=1&c19=1&c47=1&c17=1&c4=1&search="
 
-        self.proper_strings = ['PROPER', 'REPACK']
+        self.proper_strings = ['PROPER', 'REPACK', 'REAL']
 
         self.cache = GFTrackerCache(self)
 
@@ -62,13 +63,15 @@ class GFTrackerProvider(TorrentProvider):
         return True
 
     def login(self):
+        if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
+            return True
 
         login_params = {'username': self.username,
                         'password': self.password}
 
+        # Initialize session with get to have cookies
+        initialize = self.get_url(self.url, timeout=30)  # pylint: disable=unused-variable
         response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
-        # Save cookies from response
-        self.cookies = self.headers.get('Set-Cookie')
 
         if not response:
             logger.log(u"Unable to connect to provider", logger.WARNING)
@@ -96,10 +99,8 @@ class GFTrackerProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (self.categories, search_string)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
-                # Set cookies from response
-                self.headers.update({'Cookie': self.cookies})
                 # Returns top 30 results by default, expandable in user profile
                 data = self.get_url(searchURL)
                 if not data:
@@ -116,25 +117,19 @@ class GFTrackerProvider(TorrentProvider):
                             continue
 
                         for result in torrent_rows[1:]:
-                            cells = result.findChildren("td")
-                            title = cells[1].find("a").find_next("a")
-                            link = cells[3].find("a")
-                            shares = cells[8].get_text().split("/", 1)
-                            torrent_size = cells[7].get_text().split("/", 1)[0]
-
                             try:
-                                if title.has_key('title'):
-                                    title = title['title']
-                                else:
-                                    title = cells[1].find("a")['title']
-
-                                download_url = self.urls['download'] % (link['href'])
+                                cells = result.findChildren("td")
+                                title = cells[1].find("a").find_next("a").get('title') or cells[1].find("a").get('title')
+                                download_url = self.urls['download'] % cells[3].find("a").get('href')
+                                shares = cells[8].get_text().split("/", 1)
                                 seeders = int(shares[0])
                                 leechers = int(shares[1])
 
-                                size = -1
+                                torrent_size = cells[7].get_text().split("/", 1)[0]
                                 if re.match(r"\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]", torrent_size):
-                                    size = self._convertSize(torrent_size.rstrip())
+                                    size = self._convertSize(torrent_size.strip())
+                                else:
+                                    size = -1
 
                             except (AttributeError, TypeError):
                                 continue
@@ -154,7 +149,7 @@ class GFTrackerProvider(TorrentProvider):
 
                             items[mode].append(item)
 
-                except Exception as e:
+                except Exception:
                     logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
             # For each search mode sort all the items by seeders if available
@@ -173,16 +168,16 @@ class GFTrackerProvider(TorrentProvider):
         try:
             size = float(size)
             if modifier in 'KB':
-                size = size * 1024
+                size *= 1024 ** 1
             elif modifier in 'MB':
-                size = size * 1024**2
+                size *= 1024 ** 2
             elif modifier in 'GB':
-                size = size * 1024**3
+                size *= 1024 ** 3
             elif modifier in 'TB':
-                size = size * 1024**4
+                size *= 1024 ** 4
         except Exception:
             size = -1
-        return int(size)
+        return long(size)
 
 
 class GFTrackerCache(tvcache.TVCache):
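login() above now short-circuits when the requests session already carries cookies, instead of stashing the Set-Cookie header by hand. A minimal illustration of the check itself, using only the requests API:

    import requests

    session = requests.Session()
    assert not any(requests.utils.dict_from_cookiejar(session.cookies).values())  # fresh session: must log in
    session.cookies.set('uid', '12345')
    assert any(requests.utils.dict_from_cookiejar(session.cookies).values())      # cookie present: skip login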
diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py
index f6b6e715e045fbf8a641c24b1dfcdcffc981a1d7..4cf1526a486c0a09a7c3d601c39f5efcb0699fd3 100644
--- a/sickbeard/providers/hd4free.py
+++ b/sickbeard/providers/hd4free.py
@@ -20,7 +20,7 @@
 from urllib import urlencode
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class HD4FREEProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
@@ -41,7 +41,7 @@ class HD4FREEProvider(TorrentProvider):  # pylint: disable=too-many-instance-att
         if self.username and self.api_key:
             return True
 
-        logger.log('Your authentication credentials for %s are missing, check your config.' % self.name)
+        logger.log('Your authentication credentials for %s are missing, check your config.' % self.name, logger.WARNING)
         return False
 
     def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
@@ -49,6 +49,9 @@ class HD4FREEProvider(TorrentProvider):  # pylint: disable=too-many-instance-att
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
+        if not self._check_auth():
+            return results
+
         search_params = {
             'tv': 'true',
             'username': self.username,
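The auth guard added above needs to call _check_auth(); a bare method reference is always truthy and would never trigger the early return. A tiny demonstration of the difference:

    class Demo(object):
        def _check_auth(self):
            return False

    d = Demo()
    assert bool(d._check_auth) is True      # the method object itself is truthy
    assert bool(d._check_auth()) is False   # calling it returns the real answer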
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 888e31649df057eed21ff9d8c93791b73bea9594..bcd271ccc6dcef8d7d374262bd519de2a3864382 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
@@ -19,7 +20,7 @@ import urllib
 from sickbeard import classes
 from sickbeard import logger, tvcache
 from sickrage.helper.exceptions import AuthException
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 try:
     import json
@@ -79,7 +80,7 @@ class HDBitsProvider(TorrentProvider):
         # FIXME
         results = []
 
-        logger.log(u"Search string: %s" %  search_params, logger.DEBUG)
+        logger.log(u"Search string: %s" % search_params, logger.DEBUG)
 
         self._check_auth()
 
diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py
index f50dcebaa496a29e0634a26249cb3ed52ab96421..addac0b45e58dea7e03dfe028294a2fe40f6e9bc 100644
--- a/sickbeard/providers/hdspace.py
+++ b/sickbeard/providers/hdspace.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # Modified by jkaberg, https://github.com/jkaberg for SceneAccess
 # Modified by 7ca for HDSpace
@@ -25,7 +26,7 @@ from bs4 import BeautifulSoup
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class HDSpaceProvider(TorrentProvider):
@@ -45,12 +46,12 @@ class HDSpaceProvider(TorrentProvider):
                      'search': u'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0',
                      'rss': u'https://hd-space.org/rss_torrents.php?feed=dl'}
 
-        self.categories = [15, 21, 22, 24, 25, 40] # HDTV/DOC 1080/720, bluray, remux
+        self.categories = [15, 21, 22, 24, 25, 40]  # HDTV/DOC 1080/720, bluray, remux
         self.urls['search'] += '&category='
         for cat in self.categories:
             self.urls['search'] += str(cat) + '%%3B'
             self.urls['rss'] += '&cat[]=' + str(cat)
-        self.urls['search'] = self.urls['search'][:-4] # remove extra %%3B
+        self.urls['search'] = self.urls['search'][:-4]  # remove extra %%3B
 
         self.url = self.urls['base_url']
 
@@ -97,9 +98,9 @@ class HDSpaceProvider(TorrentProvider):
                 else:
                     searchURL = self.urls['search'] % ''
 
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 if mode != 'RSS':
-                    logger.log(u"Search string: %s" %  search_string, logger.DEBUG)
+                    logger.log(u"Search string: %s" % search_string, logger.DEBUG)
 
                 data = self.get_url(searchURL)
                 if not data or 'please try later' in data:
@@ -132,11 +133,11 @@ class HDSpaceProvider(TorrentProvider):
                         continue
 
                     try:
-                        dl_href = result.find('a', attrs={'href':re.compile(r'download.php.*')})['href']
+                        dl_href = result.find('a', attrs={'href': re.compile(r'download.php.*')})['href']
                         title = re.search('f=(.*).torrent', dl_href).group(1).replace('+', '.')
                         download_url = self.urls['base_url'] + dl_href
-                        seeders = int(result.find('span', attrs={'class':'seedy'}).find('a').text)
-                        leechers = int(result.find('span', attrs={'class':'leechy'}).find('a').text)
+                        seeders = int(result.find('span', attrs={'class': 'seedy'}).find('a').text)
+                        leechers = int(result.find('span', attrs={'class': 'leechy'}).find('a').text)
                         size = re.match(r'.*?([0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+).*', str(result), re.DOTALL).group(1)
 
                         if not all([title, download_url]):
@@ -171,14 +172,14 @@ class HDSpaceProvider(TorrentProvider):
         size, modifier = size.split(' ')
         size = float(size)
         if modifier in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
 
 class HDSpaceCache(tvcache.TVCache):
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 55ef723774b01a96b48f838898dfb52b4bdd9e69..eaa632f75b7f1df3f34afaf21cf6bd552c95acf7 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # Modified by jkaberg, https://github.com/jkaberg for SceneAccess
 # URL: http://code.google.com/p/sickbeard/
@@ -25,7 +26,7 @@ import traceback
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class HDTorrentsProvider(TorrentProvider):
@@ -96,9 +97,9 @@ class HDTorrentsProvider(TorrentProvider):
                 else:
                     searchURL = self.urls['rss'] % self.categories
 
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 if mode != 'RSS':
-                    logger.log(u"Search string: %s" %  search_string, logger.DEBUG)
+                    logger.log(u"Search string: %s" % search_string, logger.DEBUG)
 
                 data = self.get_url(searchURL)
                 if not data or 'please try later' in data:
@@ -203,14 +204,14 @@ class HDTorrentsProvider(TorrentProvider):
         size, modifier = size.split(' ')
         size = float(size)
         if modifier in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
 
 class HDTorrentsCache(tvcache.TVCache):
diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py
index 2d2b4a77438ead5f2efebcb9889b5ab3ba162d95..f6fa855453df7e21e9dc62d5396efa155c2e9013 100644
--- a/sickbeard/providers/hounddawgs.py
+++ b/sickbeard/providers/hounddawgs.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -22,7 +23,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickrage.helper.common import try_int
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class HoundDawgsProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
@@ -39,7 +40,6 @@ class HoundDawgsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
         self.freeleech = None
         self.ranked = None
 
-
         self.urls = {
             'base_url': 'https://hounddawgs.org/',
             'search': 'https://hounddawgs.org/torrents.php',
@@ -107,6 +107,9 @@ class HoundDawgsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
                 self.search_params['searchstr'] = search_string
 
                 data = self.get_url(self.urls['search'], params=self.search_params)
+                if not data:
+                    logger.log(u'URL did not return data', logger.DEBUG)
+                    continue
 
                 strTableStart = "<table class=\"torrent_table"
                 startTableIndex = data.find(strTableStart)
@@ -143,7 +146,7 @@ class HoundDawgsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
                                 if self.freeleech and not freeleech:
                                     continue
                                 title = allAs[2].string
-                                download_url = self.urls['base_url']+allAs[0].attrs['href']
+                                download_url = self.urls['base_url'] + allAs[0].attrs['href']
                                 torrent_size = result.find("td", class_="nobr").find_next_sibling("td").string
                                 if torrent_size:
                                     size = self._convertSize(torrent_size)
@@ -178,7 +181,6 @@ class HoundDawgsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
 
         return results
 
-
     @staticmethod
     def _convertSize(size):
         size = re.sub(r'[i, ]+', '', size)
@@ -190,8 +192,7 @@ class HoundDawgsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
         modifier = matches.group(2)
 
         mod = {'K': 1, 'M': 2, 'G': 3, 'T': 4}
-        return int(float(size) * 1024**mod[modifier])
-
+        return long(float(size) * 1024 ** mod[modifier])
 
     def seed_ratio(self):
         return self.ratio
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 8f1541ab502ca6ce2c73d5f64dbeaba9845bc5b9..778d4342bcc5d0717631af90c7514231c813a371 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: seedboy
 # URL: https://github.com/seedboy
 #
@@ -21,7 +22,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickrage.helper.exceptions import AuthException, ex
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class IPTorrentsProvider(TorrentProvider):
@@ -94,7 +95,7 @@ class IPTorrentsProvider(TorrentProvider):
                 # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
                 searchURL = self.urls['search'] % (self.categories, freeleech, search_string)
                 searchURL += ';o=seeders' if mode != 'RSS' else ''
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 data = self.get_url(searchURL)
                 if not data:
@@ -125,7 +126,7 @@ class IPTorrentsProvider(TorrentProvider):
                                 download_url = self.urls['base_url'] + result.find_all('td')[3].find('a')['href']
                                 size = self._convertSize(result.find_all('td')[5].text)
                                 seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).text)
-                                leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).text)
+                                leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text)
                             except (AttributeError, TypeError, KeyError):
                                 continue
 
@@ -162,14 +163,14 @@ class IPTorrentsProvider(TorrentProvider):
         size, modifier = size.split(' ')
         size = float(size)
         if modifier in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
 
 class IPTorrentsCache(tvcache.TVCache):
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index a52c600a57277c87efec82b6ab786855a1037284..2472449e29d23dd636b212b45d15e9357ccb73af 100755
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -27,7 +27,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.common import USER_AGENT
 from sickrage.helper.common import try_int
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class KATProvider(TorrentProvider):
@@ -84,7 +84,7 @@ class KATProvider(TorrentProvider):
                 try:
                     searchURL = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params)
                     if self.custom_url:
-                        searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath
+                        searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/'))  # Must use posixpath
 
                     logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                     data = self.get_url(searchURL)
diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py
index 3163a0c0c267f1a22ae9d04325d5661ddefaa229..71892f1e7766fad006478162a3a625a596b2ab2d 100644
--- a/sickbeard/providers/morethantv.py
+++ b/sickbeard/providers/morethantv.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Seamus Wassman
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -28,7 +29,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickrage.helper.exceptions import AuthException
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class MoreThanTVProvider(TorrentProvider):
@@ -108,7 +109,7 @@ class MoreThanTVProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (search_string.replace('(', '').replace(')', ''))
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 # returns top 15 results by default, expandable in user profile to 100
                 data = self.get_url(searchURL)
@@ -137,11 +138,11 @@ class MoreThanTVProvider(TorrentProvider):
                             torrent_id_long = link['href'].replace('torrents.php?action=download&id=', '')
 
                             try:
-                                if link.has_key('title'):
+                                if link.get('title', ''):
                                     title = cells[1].find('a', {'title': 'View torrent'}).contents[0].strip()
                                 else:
                                     title = link.contents[0]
-                                download_url = self.urls['download'] % (torrent_id_long)
+                                download_url = self.urls['download'] % torrent_id_long
 
                                 seeders = cells[6].contents[0]
 
@@ -154,7 +155,6 @@ class MoreThanTVProvider(TorrentProvider):
                             except (AttributeError, TypeError):
                                 continue
 
-
                             if not all([title, download_url]):
                                 continue
 
@@ -189,16 +189,16 @@ class MoreThanTVProvider(TorrentProvider):
         try:
             size = float(size)
             if modifier in 'KB':
-                size = size * 1024
+                size *= 1024 ** 1
             elif modifier in 'MB':
-                size = size * 1024**2
+                size *= 1024 ** 2
             elif modifier in 'GB':
-                size = size * 1024**3
+                size *= 1024 ** 3
             elif modifier in 'TB':
-                size = size * 1024**4
+                size *= 1024 ** 4
         except Exception:
             size = -1
-        return int(size)
+        return long(size)
 
 
 class MoreThanTVCache(tvcache.TVCache):
diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py
index a4874739365f58e7ac48578479058eb60867ddb6..2a33d293bb3372678bd57126664ef103362a4970 100644
--- a/sickbeard/providers/newpct.py
+++ b/sickbeard/providers/newpct.py
@@ -27,7 +27,7 @@ from sickbeard import helpers
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class newpctProvider(TorrentProvider):
@@ -159,7 +159,7 @@ class newpctProvider(TorrentProvider):
         """
         if need_bytes:
             data = helpers.getURL(url, post_data=None, params=None, headers=self.headers, timeout=timeout,
-                              session=self.session, json=json, need_bytes=False)
+                                  session=self.session, json=json, need_bytes=False)
             url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
 
         return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
@@ -204,15 +204,14 @@ class newpctProvider(TorrentProvider):
         size, modifier = size.split(' ')
         size = float(size)
         if modifier in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TB':
-            size = size * 1024**4
-        return int(size)
-
+            size *= 1024 ** 4
+        return long(size)
 
     @staticmethod
     def _processTitle(title):
@@ -232,7 +231,7 @@ class newpctProvider(TorrentProvider):
         title = re.sub('\[BRrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE)
         title = re.sub('\[BDrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE)
 
-        #Language
+        # Language
         title = re.sub('\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
         title = re.sub('\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
         title = re.sub(ur'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE)
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 1f6a14c8dad2681eab4aa3ce55bcd3f71d73ba7a..58736c70de227522be9d0b0062fee888fe154d71 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -39,7 +39,7 @@ from sickrage.helper.encoding import ek, ss
 from sickrage.show.Show import Show
 from sickrage.helper.common import try_int
 from sickbeard.common import USER_AGENT
-from sickrage.providers.NZBProvider import NZBProvider
+from sickrage.providers.nzb.NZBProvider import NZBProvider
 
 
 class NewznabProvider(NZBProvider):
@@ -65,7 +65,6 @@ class NewznabProvider(NZBProvider):
         self.enable_daily = enable_daily
         self.enable_backlog = enable_backlog
 
-
         # 0 in the key spot indicates that no key is needed
         self.needs_auth = self.key != '0'
         self.public = not self.needs_auth
@@ -84,6 +83,42 @@ class NewznabProvider(NZBProvider):
             int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(
                 int(self.enable_daily)) + '|' + str(int(self.enable_backlog))
 
+    @staticmethod
+    def get_providers_list(data):
+        default_list = [NewznabProvider._make_provider(x) for x in NewznabProvider._get_default_providers().split('!!!')]
+        providers_list = [x for x in [NewznabProvider._make_provider(x) for x in data.split('!!!')] if x]
+        seen_values = set()
+        providers_set = []
+
+        for provider in providers_list:
+            value = provider.name
+
+            if value not in seen_values:
+                providers_set.append(provider)
+                seen_values.add(value)
+
+        providers_list = providers_set
+        providers_dict = dict(zip([x.name for x in providers_list], providers_list))
+
+        for default in default_list:
+            if not default:
+                continue
+
+            if default.name not in providers_dict:
+                default.default = True
+                providers_list.append(default)
+            else:
+                providers_dict[default.name].default = True
+                providers_dict[default.name].name = default.name
+                providers_dict[default.name].url = default.url
+                providers_dict[default.name].needs_auth = default.needs_auth
+                providers_dict[default.name].search_mode = default.search_mode
+                providers_dict[default.name].search_fallback = default.search_fallback
+                providers_dict[default.name].enable_daily = default.enable_daily
+                providers_dict[default.name].enable_backlog = default.enable_backlog
+
+        return [x for x in providers_list if x]
+
     def image_name(self):
         """
         Checks if we have an image for this provider already.
@@ -111,7 +146,7 @@ class NewznabProvider(NZBProvider):
         if self.needs_auth and self.key:
             params['apikey'] = self.key
 
-        url = ek(os.path.join, self.url, 'api?') +  urllib.urlencode(params)
+        url = ek(os.path.join, self.url, 'api?') + urllib.urlencode(params)
         data = self.get_url(url)
         if not data:
             error_string = u"Error getting xml for [%s]" % url
@@ -121,7 +156,7 @@ class NewznabProvider(NZBProvider):
         data = BeautifulSoup(data, 'html5lib')
         if not self._checkAuthFromData(data) and data.caps and data.caps.categories:
             data.decompose()
-            error_string = u"Error parsing xml for [%s]" % (self.name)
+            error_string = u"Error parsing xml for [%s]" % self.name
             logger.log(error_string, logger.DEBUG)
             return False, return_categories, error_string
 
@@ -134,6 +169,14 @@ class NewznabProvider(NZBProvider):
         data.decompose()
         return True, return_categories, ""
 
+    @staticmethod
+    def _get_default_providers():
+        # name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog
+        return 'NZB.Cat|https://nzb.cat/||5030,5040,5010|0|eponly|1|1|1!!!' + \
+               'NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0!!!' + \
+               'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0!!!' + \
+               'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'
+
     def _get_season_search_strings(self, ep_obj):
         """
         Makes objects to pass to search for manual and backlog season pack searching
@@ -227,7 +270,40 @@ class NewznabProvider(NZBProvider):
 
         return False
 
-    def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-arguments,too-many-locals
+    @staticmethod
+    def _make_provider(config):
+        if not config:
+            return None
+
+        enable_backlog = 0
+        enable_daily = 0
+        search_fallback = 0
+        search_mode = 'eponly'
+
+        try:
+            values = config.split('|')
+
+            if len(values) == 9:
+                name, url, key, category_ids, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values
+            else:
+                category_ids = values[3]
+                enabled = values[4]
+                key = values[2]
+                name = values[0]
+                url = values[1]
+        except ValueError:
+            logger.log(u'Skipping Newznab provider string: \'%s\', incorrect format' % config, logger.ERROR)
+            return None
+
+        new_provider = NewznabProvider(
+            name, url, key=key, catIDs=category_ids, search_mode=search_mode, search_fallback=search_fallback,
+            enable_daily=enable_daily, enable_backlog=enable_backlog
+        )
+        new_provider.enabled = enabled == '1'
+
+        return new_provider
+
+    def search(self, search_params, age=0, ep_obj=None):  # pylint: disable=too-many-arguments,too-many-locals
         """
         Searches indexer using the params in search_params, either for latest releases, or a string/id search
         Returns: list of results in dict form
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index c72184fde6381549e1437a8e956b3d5bfc400565..fddc7b923db01d930122eabd6fa73dfdf8cacf40 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Mr_Orange
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -21,7 +22,7 @@ import re
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class NyaaProvider(TorrentProvider):
@@ -67,7 +68,7 @@ class NyaaProvider(TorrentProvider):
                     params["term"] = search_string.encode('utf-8')
 
                 searchURL = self.url + '?' + urllib.urlencode(params)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
                 s = re.compile(summary_regex, re.DOTALL)
@@ -110,14 +111,14 @@ class NyaaProvider(TorrentProvider):
         size, modifier = size.split(' ')
         size = float(size)
         if modifier in 'KiB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MiB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GiB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TiB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
     def seed_ratio(self):
         return self.ratio
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index 67df4f4b2a6921f8989e7e82223d21fd887b387f..cabb18cc61df0b058e0145b1e462e75fe0e1ea45 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Jordon Smith <smith@jordon.me.uk>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -25,7 +26,7 @@ from sickbeard import classes
 from sickbeard import logger
 from sickbeard import show_name_helpers
 from sickrage.helper.common import try_int
-from sickrage.providers.NZBProvider import NZBProvider
+from sickrage.providers.nzb.NZBProvider import NZBProvider
 
 
 class OmgwtfnzbsProvider(NZBProvider):
@@ -67,7 +68,7 @@ class OmgwtfnzbsProvider(NZBProvider):
                     return True
 
                 else:
-                    logger.log(u"Unknown error: %s"  % description_text, logger.DEBUG)
+                    logger.log(u"Unknown error: %s" % description_text, logger.DEBUG)
                     return False
 
             return True
@@ -79,7 +80,7 @@ class OmgwtfnzbsProvider(NZBProvider):
         return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
 
     def _get_title_and_url(self, item):
-        return (item['release'], item['getnzb'])
+        return item['release'], item['getnzb']
 
     def _get_size(self, item):
         return try_int(item['sizebytes'], -1)
@@ -100,7 +101,7 @@ class OmgwtfnzbsProvider(NZBProvider):
 
         searchURL = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params)
         logger.log(u"Search string: %s" % params, logger.DEBUG)
-        logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+        logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
         parsedJSON = self.get_url(searchURL, json=True)
         if not parsedJSON:
@@ -161,7 +162,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
         if url:
             url = url.replace('&amp;', '&')
 
-        return (title, url)
+        return title, url
 
     def _getRSSData(self):
         params = {'user': provider.username,
diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py
index e37dbbed98afac949a7c9582ccb604eeb49f5357..96fcf5ede873ffd568e784f00bf6a6a74b1b5a98 100644
--- a/sickbeard/providers/pretome.py
+++ b/sickbeard/providers/pretome.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nick Sologoub
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -23,7 +24,7 @@ import traceback
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class PretomeProvider(TorrentProvider):
@@ -93,7 +94,7 @@ class PretomeProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 data = self.get_url(searchURL)
                 if not data:
@@ -122,7 +123,7 @@ class PretomeProvider(TorrentProvider):
                             torrent_id = link['href'].replace('details.php?id=', '')
 
                             try:
-                                if link.has_key('title'):
+                                if link.get('title', ''):
                                     title = link['title']
                                 else:
                                     title = link.contents[0]
@@ -174,14 +175,14 @@ class PretomeProvider(TorrentProvider):
         modifier = sizeString[-2:]
         size = float(size)
         if modifier in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
 
 class PretomeCache(tvcache.TVCache):
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index d334b96425eaebeca6bb61efe217108c7b4d2b23..b13b62597709ebf13ce143ee834ac65588ef64d5 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -27,7 +27,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.common import USER_AGENT
 from sickbeard.indexers.indexer_config import INDEXER_TVDB
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class GetOutOfLoop(Exception):
@@ -57,18 +57,20 @@ class RarbgProvider(TorrentProvider):
 
         self.url = self.urls['listing']
 
-        self.urlOptions = {'categories': '&category={categories}',
-                           'seeders': '&min_seeders={min_seeders}',
-                           'leechers': '&min_leechers={min_leechers}',
-                           'sorting' : '&sort={sorting}',
-                           'limit': '&limit={limit}',
-                           'format': '&format={format}',
-                           'ranked': '&ranked={ranked}',
-                           'token': '&token={token}'}
+        self.urlOptions = {
+            'categories': '&category={categories}',
+            'seeders': '&min_seeders={min_seeders}',
+            'leechers': '&min_leechers={min_leechers}',
+            'sorting': '&sort={sorting}',
+            'limit': '&limit={limit}',
+            'format': '&format={format}',
+            'ranked': '&ranked={ranked}',
+            'token': '&token={token}'
+        }
 
         self.defaultOptions = self.urlOptions['categories'].format(categories='tv') + \
-                                self.urlOptions['limit'].format(limit='100') + \
-                                self.urlOptions['format'].format(format='json_extended')
+                              self.urlOptions['limit'].format(limit='100') + \
+                              self.urlOptions['format'].format(format='json_extended')
 
         self.proper_strings = ['{{PROPER|REPACK}}']
 
@@ -153,7 +155,7 @@ class RarbgProvider(TorrentProvider):
                     while retry > 0:
                         time_out = 0
                         while (datetime.datetime.now() < self.next_request) and time_out <= 15:
-                            time_out = time_out + 1
+                            time_out += 1
                             time.sleep(1)
 
                         data = self.get_url(searchURL + self.urlOptions['token'].format(token=self.token))
@@ -174,7 +176,7 @@ class RarbgProvider(TorrentProvider):
                             return results
                         if re.search('Too many requests per minute. Please try again later!', data):
                             logger.log(u"Too many requests per minute", logger.WARNING)
-                            retry = retry - 1
+                            retry -= 1
                             time.sleep(10)
                             continue
                         if re.search('Cant find search_tvdb in database. Are you sure this imdb exists?', data):
@@ -182,7 +184,7 @@ class RarbgProvider(TorrentProvider):
                             raise GetOutOfLoop
                         if re.search('Invalid token. Use get_token for a new one!', data):
                             logger.log(u"Invalid token, retrieving new token", logger.DEBUG)
-                            retry = retry - 1
+                            retry -= 1
                             self.token = None
                             self.tokenExpireDate = None
                             if not self.login():
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index 897fffaacc26f93bfbb5ba876941a80a9f8a76e4..411bc2a4c0ea26fc99807b23c63ff2de602c51cb 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Mr_Orange
 #
 # This file is part of SickRage.
@@ -28,7 +29,7 @@ from sickbeard import tvcache
 
 from sickrage.helper.encoding import ek
 from sickrage.helper.exceptions import ex
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TorrentRssProvider(TorrentProvider):
@@ -64,6 +65,21 @@ class TorrentRssProvider(TorrentProvider):
             self.enable_backlog
         )
 
+    @staticmethod
+    def get_providers_list(data):
+        providers_list = [x for x in [TorrentRssProvider._make_provider(x) for x in data.split('!!!')] if x]
+        seen_values = set()
+        providers_set = []
+
+        for provider in providers_list:
+            value = provider.name
+
+            if value not in seen_values:
+                providers_set.append(provider)
+                seen_values.add(value)
+
+        return [x for x in providers_set if x]
+
     def image_name(self):
         if ek(os.path.isfile, ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.get_id() + '.png')):
             return self.get_id() + '.png'
@@ -91,30 +107,65 @@ class TorrentRssProvider(TorrentProvider):
 
         return title, url
 
+    @staticmethod
+    def _make_provider(config):
+        if not config:
+            return None
+
+        cookies = None
+        enable_backlog = 0
+        enable_daily = 0
+        search_fallback = 0
+        search_mode = 'eponly'
+        title_tag = 'title'
+
+        try:
+            values = config.split('|')
+
+            if len(values) == 9:
+                name, url, cookies, title_tag, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values
+            elif len(values) == 8:
+                name, url, cookies, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values
+            else:
+                enabled = values[4]
+                name = values[0]
+                url = values[1]
+        except ValueError:
+            logger.log(u'Skipping RSS Torrent provider string: \'%s\', incorrect format' % config, logger.ERROR)
+            return None
+
+        new_provider = TorrentRssProvider(
+            name, url, cookies=cookies, titleTAG=title_tag, search_mode=search_mode,
+            search_fallback=search_fallback, enable_daily=enable_daily, enable_backlog=enable_backlog
+        )
+        new_provider.enabled = enabled == '1'
+
+        return new_provider
+
     def validateRSS(self):
 
         try:
             if self.cookies:
                 cookie_validator = re.compile(r"^(\w+=\w+)(;\w+=\w+)*$")
                 if not cookie_validator.match(self.cookies):
-                    return (False, 'Cookie is not correctly formatted: ' + self.cookies)
+                    return False, 'Cookie is not correctly formatted: ' + self.cookies
 
             # pylint: disable=protected-access
             # Access to a protected member of a client class
             data = self.cache._getRSSData()['entries']
             if not data:
-                return (False, 'No items found in the RSS feed ' + self.url)
+                return False, 'No items found in the RSS feed ' + self.url
 
             (title, url) = self._get_title_and_url(data[0])
 
             if not title:
-                return (False, 'Unable to get title from first item')
+                return False, 'Unable to get title from first item'
 
             if not url:
-                return (False, 'Unable to get torrent url from first item')
+                return False, 'Unable to get torrent url from first item'
 
             if url.startswith('magnet:') and re.search(r'urn:btih:([\w]{32,40})', url):
-                return (True, 'RSS feed Parsed correctly')
+                return True, 'RSS feed Parsed correctly'
             else:
                 if self.cookies:
                     requests.utils.add_dict_to_cookiejar(self.session.cookies,
@@ -124,12 +175,12 @@ class TorrentRssProvider(TorrentProvider):
                     bdecode(torrent_file)
                 except Exception as e:
                     self.dumpHTML(torrent_file)
-                    return (False, 'Torrent link is not a valid torrent file: ' + ex(e))
+                    return False, 'Torrent link is not a valid torrent file: ' + ex(e)
 
-            return (True, 'RSS feed Parsed correctly')
+            return True, 'RSS feed Parsed correctly'
 
         except Exception as e:
-            return (False, 'Error when trying to load RSS: ' + ex(e))
+            return False, 'Error when trying to load RSS: ' + ex(e)
 
     @staticmethod
     def dumpHTML(data):
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 1dab41630393e6b8d0b97da431a7e1996c8785af..0aac6ce591f12b9fcd8d9e56e8119a0a1a3af58b 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # Modified by jkaberg, https://github.com/jkaberg for SceneAccess
 # URL: http://code.google.com/p/sickbeard/
@@ -26,7 +27,7 @@ from sickbeard.common import cpu_presets
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class SCCProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
@@ -54,9 +55,9 @@ class SCCProvider(TorrentProvider):  # pylint: disable=too-many-instance-attribu
         self.url = self.urls['base_url']
 
         self.categories = {
-            'Season': 'c26=26&c44=44&c45=45', # Archive, non-scene HD, non-scene SD; need to include non-scene because WEB-DL packs get added to those categories
-            'Episode': 'c17=17&c27=27&c33=33&c34=34&c44=44&c45=45', # TV HD, TV SD, non-scene HD, non-scene SD, foreign XviD, foreign x264
-            'RSS': 'c17=17&c26=26&c27=27&c33=33&c34=34&c44=44&c45=45' # Season + Episode
+            'Season': 'c26=26&c44=44&c45=45',  # Archive, non-scene HD, non-scene SD; need to include non-scene because WEB-DL packs get added to those categories
+            'Episode': 'c17=17&c27=27&c33=33&c34=34&c44=44&c45=45',  # TV HD, TV SD, non-scene HD, non-scene SD, foreign XviD, foreign x264
+            'RSS': 'c17=17&c26=26&c27=27&c33=33&c34=34&c44=44&c45=45'  # Season + Episode
         }
 
     def login(self):
@@ -169,14 +170,14 @@ class SCCProvider(TorrentProvider):  # pylint: disable=too-many-instance-attribu
         size, base = size.split()
         size = float(size)
         if base in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif base in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif base in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif base in 'TB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
 
 class SCCCache(tvcache.TVCache):
diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py
index 77e6673c25870cd04b7f136a2f571b50038fb4d4..351761c2c91f3031597636a1900219d5de2d40f5 100644
--- a/sickbeard/providers/scenetime.py
+++ b/sickbeard/providers/scenetime.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -23,7 +24,7 @@ import traceback
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class SceneTimeProvider(TorrentProvider):
@@ -82,7 +83,7 @@ class SceneTimeProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 data = self.get_url(searchURL)
                 if not data:
diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py
index d0a7e308d36a3df6c0d10cd576c600e8b47c627e..959d9e20e7faf8111db86d124c57ac37e5653542 100644
--- a/sickbeard/providers/shazbat.py
+++ b/sickbeard/providers/shazbat.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -19,7 +20,7 @@
 from sickbeard import logger
 from sickbeard import tvcache
 from sickrage.helper.exceptions import AuthException
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class ShazbatProvider(TorrentProvider):
@@ -36,7 +37,7 @@ class ShazbatProvider(TorrentProvider):
         self.cache = ShazbatCache(self)
 
         self.urls = {'base_url': u'http://www.shazbat.tv/',
-                     'website': u'http://www.shazbat.tv/login',}
+                     'website': u'http://www.shazbat.tv/login', }
         self.url = self.urls['website']
 
     def _check_auth(self):
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index 622bf9bf0581c57714dd9cf54053b55751c9832c..d231611da1b1fa3896696c512fb76ec33e08e1a2 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Mr_Orange
 # URL: https://github.com/mr-orange/Sick-Beard
 #
@@ -20,7 +21,7 @@ import re
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class SpeedCDProvider(TorrentProvider):
diff --git a/sickbeard/providers/strike.py b/sickbeard/providers/strike.py
index a1d0f4fe30ec65fbf3ec3065918787111ac18e98..d48b0f9ea96ea9ff15ceaeab364d700c23d146db 100644
--- a/sickbeard/providers/strike.py
+++ b/sickbeard/providers/strike.py
@@ -19,7 +19,7 @@
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class STRIKEProvider(TorrentProvider):
@@ -46,7 +46,7 @@ class STRIKEProvider(TorrentProvider):
                     logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)
 
                 searchURL = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 jdata = self.get_url(searchURL, json=True)
                 if not jdata:
                     logger.log(u"No data returned from provider", logger.DEBUG)
diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py
index afc40063275bde28fc6f6b9a1195491f92d58674..9ef558461bbd259749e1500bffd3ecb908d0d6ea 100644
--- a/sickbeard/providers/t411.py
+++ b/sickbeard/providers/t411.py
@@ -23,7 +23,8 @@ from requests.auth import AuthBase
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickbeard.common import USER_AGENT
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class T411Provider(TorrentProvider):
@@ -46,6 +47,8 @@ class T411Provider(TorrentProvider):
 
         self.url = self.urls['base_url']
 
+        self.headers.update({'User-Agent': USER_AGENT})
+
         self.subcategories = [433, 637, 455, 639]
 
         self.minseed = 0
@@ -93,7 +96,7 @@ class T411Provider(TorrentProvider):
 
                 searchURLS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS']
                 for searchURL in searchURLS:
-                    logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                     data = self.get_url(searchURL, json=True)
                     if not data:
                         continue
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index ca75537abe61102eebcbb6740c952e7142fea86f..703aeea29c9e99f0fd236ec9ac2114d90d13e817 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Mr_Orange <mr_orange@hotmail.it>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -17,12 +18,12 @@
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import posixpath # Must use posixpath
+import posixpath  # Must use posixpath
 from urllib import urlencode
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.common import USER_AGENT
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class ThePirateBayProvider(TorrentProvider):
@@ -80,7 +81,7 @@ class ThePirateBayProvider(TorrentProvider):
 
                 searchURL = self.urls[('search', 'rss')[mode == 'RSS']] + '?' + urlencode(self.search_params)
                 if self.custom_url:
-                    searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath
+                    searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/'))  # Must use posixpath
 
                 logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 data = self.get_url(searchURL)
@@ -129,14 +130,14 @@ class ThePirateBayProvider(TorrentProvider):
         size, modifier = size.split('&nbsp;')
         size = float(size)
         if modifier in 'KiB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MiB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GiB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TiB':
-            size = size * 1024**4
-        return size
+            size *= 1024 ** 4
+        return long(size)
 
     def seed_ratio(self):
         return self.ratio
diff --git a/sickbeard/providers/titansoftv.py b/sickbeard/providers/titansoftv.py
index 5efb5a34b05e72a802c44449ab68e7ebce2f521e..87820802af9a3542ad10fac7f2cbb92feb27c589 100644
--- a/sickbeard/providers/titansoftv.py
+++ b/sickbeard/providers/titansoftv.py
@@ -23,7 +23,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.helpers import mapIndexersToShow
 from sickrage.helper.exceptions import AuthException
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TitansOfTVProvider(TorrentProvider):
@@ -66,7 +66,7 @@ class TitansOfTVProvider(TorrentProvider):
 
         searchURL = self.url + '?' + urllib.urlencode(params)
         logger.log(u"Search string: %s " % search_params, logger.DEBUG)
-        logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+        logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
         parsedJSON = self.get_url(searchURL, json=True)  # do search
 
@@ -109,9 +109,10 @@ class TitansOfTVProvider(TorrentProvider):
         return results
 
     def _get_season_search_strings(self, ep_obj):
-        search_params = {'limit': 100}
-
-        search_params['season'] = 'Season %02d' % ep_obj.scene_season
+        search_params = {
+            'limit': 100,
+            'season': 'Season %02d' % ep_obj.scene_season
+        }
 
         if ep_obj.show.indexer == 1:
             search_params['series_id'] = ep_obj.show.indexerid
@@ -127,10 +128,12 @@ class TitansOfTVProvider(TorrentProvider):
         if not ep_obj:
             return [{}]
 
-        search_params = {'limit': 100}
+        search_params = {
+            'limit': 100,
+            'episode': 'S%02dE%02d' % (ep_obj.scene_season, ep_obj.scene_episode)
+        }
 
         # Do a general name search for the episode, formatted like SXXEYY
-        search_params['episode'] = 'S%02dE%02d' % (ep_obj.scene_season, ep_obj.scene_episode)
 
         if ep_obj.show.indexer == 1:
             search_params['series_id'] = ep_obj.show.indexerid
diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py
index e1f598b3c05531272cbe7bf0b5275cc4c3b9ae3a..430467a283a93cefd7809c77fe45a1350ed5d8ca 100644
--- a/sickbeard/providers/tntvillage.py
+++ b/sickbeard/providers/tntvillage.py
@@ -27,7 +27,7 @@ from sickbeard import db
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from sickrage.helper.exceptions import AuthException
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 category_excluded = {'Sport': 22,
                      'Teatro': 23,
diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py
index 57227bc0915ea9dc02cc89225004d38c09fe52b0..9c8c06fba80cb6ad56e804d4dd31544735ec3e58 100644
--- a/sickbeard/providers/tokyotoshokan.py
+++ b/sickbeard/providers/tokyotoshokan.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Mr_Orange
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -23,7 +24,7 @@ from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard import show_name_helpers
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TokyoToshokanProvider(TorrentProvider):
@@ -63,7 +64,7 @@ class TokyoToshokanProvider(TorrentProvider):
         }
 
         searchURL = self.url + 'search.php?' + urllib.urlencode(params)
-        logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+        logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
         data = self.get_url(searchURL)
 
         if not data:
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index e9bf8d02dfb61b928276acf77de253cbc1e61de2..e22c5eb300f10783ee3bbd232426596813a0e9b9 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -1,4 +1,5 @@
-# Author: Idan Gutman
+# coding=utf-8
+# Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickRage.
@@ -23,7 +24,7 @@ import traceback
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TorrentBytesProvider(TorrentProvider):
@@ -86,7 +87,7 @@ class TorrentBytesProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 data = self.get_url(searchURL)
                 if not data:
@@ -122,7 +123,7 @@ class TorrentBytesProvider(TorrentProvider):
                                 continue
 
                             try:
-                                if link.has_key('title'):
+                                if link.get('title', ''):
                                     title = cells[1].find('a', {'class': 'index'})['title']
                                 else:
                                     title = link.contents[0]
@@ -173,14 +174,14 @@ class TorrentBytesProvider(TorrentProvider):
         modifier = sizeString[-2:]
         size = float(size)
         if modifier in 'KB':
-            size = size * 1024
+            size *= 1024 ** 1
         elif modifier in 'MB':
-            size = size * 1024**2
+            size *= 1024 ** 2
         elif modifier in 'GB':
-            size = size * 1024**3
+            size *= 1024 ** 3
         elif modifier in 'TB':
-            size = size * 1024**4
-        return int(size)
+            size *= 1024 ** 4
+        return long(size)
 
 
 class TorrentBytesCache(tvcache.TVCache):
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index a75bc6ac59fec4cc913ba2e3c09976c92cdb5215..9c79bbd6b0f9f2eb435452af6603dc67ab9d2e0f 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Mr_Orange <mr_orange@hotmail.it>
 #
 # This file is part of SickRage.
@@ -19,7 +20,7 @@ import re
 import requests
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TorrentDayProvider(TorrentProvider):
@@ -41,9 +42,9 @@ class TorrentDayProvider(TorrentProvider):
 
         self.urls = {
             'base_url': 'https://classic.torrentday.com',
-             'login': 'https://classic.torrentday.com/torrents/',
-             'search': 'https://classic.torrentday.com/V3/API/API.php',
-             'download': 'https://classic.torrentday.com/download.php/%s/%s'
+            'login': 'https://classic.torrentday.com/torrents/',
+            'search': 'https://classic.torrentday.com/V3/API/API.php',
+            'download': 'https://classic.torrentday.com/download.php/%s/%s'
         }
 
         self.url = self.urls['base_url']
@@ -129,7 +130,7 @@ class TorrentDayProvider(TorrentProvider):
                 for torrent in torrents:
 
                     title = re.sub(r"\[.*\=.*\].*\[/.*\]", "", torrent['name'])
-                    download_url = self.urls['download'] % ( torrent['id'], torrent['fname'])
+                    download_url = self.urls['download'] % (torrent['id'], torrent['fname'])
                     seeders = int(torrent['seed'])
                     leechers = int(torrent['leech'])
                     # FIXME
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index 7602e39595e8fc23f0c4340e95b9796eb6e2a422..5b05cd8b760906ee455c19313edb966b5bd8265d 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -23,7 +24,7 @@ import urllib
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TorrentLeechProvider(TorrentProvider):
@@ -90,7 +91,7 @@ class TorrentLeechProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 data = self.get_url(searchURL)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index 36e70cf7af91ce6b11ff8574b5f6f1475e4f8907..eedb52d6d7a23ef456417ed65d54aeba1d9f2488 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: duramato <matigonkas@outlook.com>
 # URL: https://github.com/SickRage/sickrage
 #
@@ -16,13 +17,13 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
+import posixpath  # Must use posixpath
 from urllib import quote_plus
-
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.common import USER_AGENT
 from sickrage.helper.common import try_int
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TORRENTPROJECTProvider(TorrentProvider):
@@ -31,8 +32,9 @@ class TORRENTPROJECTProvider(TorrentProvider):
 
         self.public = True
         self.ratio = 0
-        self.urls = {'api': u'https://torrentproject.se/',}
+        self.urls = {'api': u'https://torrentproject.se/', }
         self.url = self.urls['api']
+        self.custom_url = None
         self.headers.update({'User-Agent': USER_AGENT})
         self.minseed = None
         self.minleech = None
@@ -49,10 +51,11 @@ class TORRENTPROJECTProvider(TorrentProvider):
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-
                 searchURL = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8'))
+                if self.custom_url:
+                    searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/'))  # Must use posixpath
 
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 torrents = self.get_url(searchURL, json=True)
                 if not (torrents and "total_found" in torrents and int(torrents["total_found"]) > 0):
                     logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
@@ -78,6 +81,8 @@ class TORRENTPROJECTProvider(TorrentProvider):
                         assert mode != 'RSS'
                         logger.log(u"Torrent has less than 10 seeds getting dyn trackers: " + title, logger.DEBUG)
                         trackerUrl = self.urls['api'] + "" + t_hash + "/trackers_json"
+                        if self.custom_url:
+                            trackerUrl = posixpath.join(self.custom_url, trackerUrl.split(self.url)[1].lstrip('/'))  # Must use posixpath
                         jdata = self.get_url(trackerUrl, json=True)
                         assert jdata != "maintenance"
                         download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + "".join(["&tr=" + s for s in jdata])
diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py
index 38301b6306cfe188e1e0cf8c223549758a1f9da2..662e8058464d25d799a3c50b4d09fdd9ac71db0e 100644
--- a/sickbeard/providers/torrentz.py
+++ b/sickbeard/providers/torrentz.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Dustyn Gibson <miigotu@gmail.com>
 # URL: https://github.com/SickRage/SickRage
 #
@@ -27,7 +28,8 @@ import sickbeard
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.common import cpu_presets
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickbeard.common import USER_AGENT
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TORRENTZProvider(TorrentProvider):
@@ -41,6 +43,7 @@ class TORRENTZProvider(TorrentProvider):
         self.minseed = None
         self.minleech = None
         self.cache = TORRENTZCache(self)
+        self.headers.update({'User-Agent': USER_AGENT})
         self.urls = {'verified': 'https://torrentz.eu/feed_verified',
                      'feed': 'https://torrentz.eu/feed',
                      'base': 'https://torrentz.eu/'}
@@ -52,7 +55,7 @@ class TORRENTZProvider(TorrentProvider):
     @staticmethod
     def _split_description(description):
         match = re.findall(r'[0-9]+', description)
-        return (int(match[0]) * 1024**2, int(match[1]), int(match[2]))
+        return int(match[0]) * 1024 ** 2, int(match[1]), int(match[2])
 
     def search(self, search_strings, age=0, ep_obj=None):
         results = []
diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py
index 18b3e64f40af9bb6b6060e2ad31f6a0ca4eb4146..928ed49db5e625cce5b71c4c50fa9153efa8d49a 100644
--- a/sickbeard/providers/transmitthenet.py
+++ b/sickbeard/providers/transmitthenet.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
@@ -22,7 +23,7 @@ from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickrage.helper.exceptions import AuthException
 from sickrage.helper.common import try_int
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TransmitTheNetProvider(TorrentProvider):
diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py
index f719454084ec318dcc8690702efca5e8d8b23c07..874f4761a1df8bef587e1a31602c71053968d306 100644
--- a/sickbeard/providers/tvchaosuk.py
+++ b/sickbeard/providers/tvchaosuk.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
@@ -23,7 +24,7 @@ from sickbeard import show_name_helpers
 from sickbeard.helpers import sanitizeSceneName
 from sickbeard.bs4_parser import BS4Parser
 from sickrage.helper.exceptions import AuthException
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TVChaosUKProvider(TorrentProvider):
@@ -48,7 +49,7 @@ class TVChaosUKProvider(TorrentProvider):
 
         self.search_params = {
             'do': 'search',
-            'keywords':  '',
+            'keywords': '',
             'search_type': 't_name',
             'category': 0,
             'include_dead_torrents': 'no',
@@ -141,20 +142,20 @@ class TVChaosUKProvider(TorrentProvider):
                     logger.log(u"No data returned from provider", logger.DEBUG)
                     continue
 
-                with BS4Parser(data) as html:
+                with BS4Parser(data, 'html5lib') as html:
                     torrent_table = html.find(id='listtorrents')
                     if not torrent_table:
                         logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                         continue
 
                     torrent_rows = torrent_table.find_all('tr')
-
                     for torrent in torrent_rows:
                         try:
+                            cells = torrent.find_all('td')
                             freeleech = torrent.find('img', alt=re.compile('Free Torrent'))
                             if self.freeleech and not freeleech:
                                 continue
-                            title = (torrent.find(attrs={'class':'tooltip-target'}).text.strip()).replace("mp4", "x264")
+                            title = (torrent.find('div', style='text-align:left; margin-top: 5px').text.strip()).replace("mp4", "x264")
                             download_url = torrent.find(title="Click to Download this Torrent!").parent['href'].strip()
                             seeders = int(torrent.find(title='Seeders').text.strip())
                             leechers = int(torrent.find(title='Leechers').text.strip())
@@ -169,20 +170,20 @@ class TVChaosUKProvider(TorrentProvider):
                                 continue
 
                             # Chop off tracker/channel prefix or we cant parse the result!
-                            show_name_first_word = re.search(r'^[^ .]+', self.search_params['keywords']).group()
-                            if not title.startswith(show_name_first_word):
-                                title = re.match(r'(.*)(' + show_name_first_word + '.*)', title).group(2)
+                            show_name_first_word = re.search(r'^[^ .]+', self.search_params['keywords'])
+                            if show_name_first_word and not title.startswith(show_name_first_word.group()) and show_name_first_word.group() in title:
+                                title = re.match(r'.*(' + show_name_first_word.group() + '.*)', title).group(1)
 
                             # Change title from Series to Season, or we can't parse
-                            if 'Series' in self.search_params['keywords']:
+                            if 'Series' not in self.search_params['keywords']:
                                 title = re.sub(r'(?i)series', 'Season', title)
 
                             # Strip year from the end or we can't parse it!
                             title = re.sub(r'[\. ]?\(\d{4}\)', '', title)
-
-                            # FIXME
+                            torrent_size = cells[4].getText().strip()
                             size = -1
-
+                            if re.match(r"\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]", torrent_size):
+                                size = self._convertSize(torrent_size.rstrip())
                             item = title, download_url, size, seeders, leechers
                             if mode != 'RSS':
                                 logger.log(u"Found result: %s " % title, logger.DEBUG)
@@ -202,6 +203,23 @@ class TVChaosUKProvider(TorrentProvider):
     def seed_ratio(self):
         return self.ratio
 
+    def _convertSize(self, sizeString):
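+        # Parse a human-readable size such as "1.2 GB": the last two characters are the
+        # unit, the rest is the number, scaled by the matching power of 1024.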
+        size = sizeString[:-2].strip()
+        modifier = sizeString[-2:].upper()
+        try:
+            size = float(size.replace(',', '.'))
+            if modifier in 'KB':
+                size *= 1024 ** 1
+            elif modifier in 'MB':
+                size *= 1024 ** 2
+            elif modifier in 'GB':
+                size *= 1024 ** 3
+            elif modifier in 'TB':
+                size *= 1024 ** 4
+        except Exception:
+            size = -1
+        return long(size)
+
 
 class TVChaosUKCache(tvcache.TVCache):
     def __init__(self, provider_obj):
diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py
index 3f9db0f4018d335aa80745cf6bfd00aba3bb306c..5a003863467a81064d75c801f2b589d1aafe2754 100644
--- a/sickbeard/providers/womble.py
+++ b/sickbeard/providers/womble.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -18,7 +19,7 @@
 
 from sickbeard import logger
 from sickbeard import tvcache
-from sickrage.providers.NZBProvider import NZBProvider
+from sickrage.providers.nzb.NZBProvider import NZBProvider
 
 
 class WombleProvider(NZBProvider):
diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py
index b026c80b8ec6e3300369f1c2831cb5314d0866e2..83b596a7e5e8109aca2e259e702fd8f81314e449 100644
--- a/sickbeard/providers/xthor.py
+++ b/sickbeard/providers/xthor.py
@@ -24,7 +24,7 @@ import requests
 
 from sickbeard import logger
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class XthorProvider(TorrentProvider):
@@ -63,8 +63,6 @@ class XthorProvider(TorrentProvider):
             logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
-        return True
-
     def search(self, search_params, age=0, ep_obj=None):
 
         results = []
@@ -82,14 +80,14 @@ class XthorProvider(TorrentProvider):
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urlsearch % (urllib.quote(search_string), self.categories)
-                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 data = self.get_url(searchURL)
 
                 if not data:
                     continue
 
                 with BS4Parser(data, 'html5lib') as html:
-                    resultsTable = html.find("table", {"class" : "table2 table-bordered2"})
+                    resultsTable = html.find("table", {"class": "table2 table-bordered2"})
                     if not resultsTable:
                         continue
 
diff --git a/sickbeard/rssfeeds.py b/sickbeard/rssfeeds.py
index 2e7fc26fcaea6db313d5951b334e93d8a72d47de..fd36907d19b6e70182002156b80c44ca8092578f 100644
--- a/sickbeard/rssfeeds.py
+++ b/sickbeard/rssfeeds.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 import re
 import urlparse
 from feedparser.api import parse
@@ -6,6 +7,7 @@ from feedparser.util import FeedParserDict
 from sickbeard import logger
 from sickrage.helper.exceptions import ex
 
+
 def getFeed(url, request_headers=None, handlers=None):
     parsed = list(urlparse.urlparse(url))
     parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
diff --git a/sickbeard/sab.py b/sickbeard/sab.py
index cd64cd767112b1548ab7b942b7366c79879fd071..bf4a0583ca45206fdcab7c8f2507367bc141ff88 100644
--- a/sickbeard/sab.py
+++ b/sickbeard/sab.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage
@@ -17,13 +18,15 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
-import urllib, httplib
+import urllib
+import httplib
 
 import sickbeard
 import datetime
 
 import MultipartPostHandler
-import urllib2, cookielib
+import urllib2
+import cookielib
 
 try:
     import json
@@ -244,12 +247,13 @@ def testAuthentication(host=None, username=None, password=None, apikey=None):
     """
 
     # build up the URL parameters
-    params = {}
-    params['mode'] = 'queue'
-    params['output'] = 'json'
-    params['ma_username'] = username
-    params['ma_password'] = password
-    params['apikey'] = apikey
+    params = {
+        'mode': 'queue',
+        'output': 'json',
+        'ma_username': username,
+        'ma_password': password,
+        'apikey': apikey
+    }
     url = host + "api?" + urllib.urlencode(params)
 
     # send the test request
diff --git a/sickbeard/sbdatetime.py b/sickbeard/sbdatetime.py
index def4ea804d1ad0d5b7ecd6cd6d58bcacbbbfc53a..f61d5ecd3b75444a78cf433b185a1f1dc26ae623 100644
--- a/sickbeard/sbdatetime.py
+++ b/sickbeard/sbdatetime.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -89,6 +90,7 @@ date_presets = (
 
 time_presets = ('%I:%M:%S %p', '%H:%M:%S')
 
+
 # helper class
 class static_or_instance(object):
     def __init__(self, func):
diff --git a/sickbeard/scene_exceptions.py b/sickbeard/scene_exceptions.py
index c7a88f4f0a0c783f6fd0f75f257a4c3a6c8a5808..0598ee4fbf8ab5b83c830edbef8576e6dd66ee94 100644
--- a/sickbeard/scene_exceptions.py
+++ b/sickbeard/scene_exceptions.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -38,6 +39,7 @@ exceptionsSeasonCache = {}
 
 exceptionLock = threading.Lock()
 
+
 def shouldRefresh(exList):
     """
     Check if we should refresh cache for items in exList
@@ -55,6 +57,7 @@ def shouldRefresh(exList):
     else:
         return True
 
+
 def setLastRefresh(exList):
     """
     Update last cache update time for shows in list
@@ -66,6 +69,7 @@ def setLastRefresh(exList):
                 {'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple()))},
                 {'list': exList})
 
+
 def get_scene_exceptions(indexer_id, season=-1):
     """
     Given a indexer_id, return a list of all the scene exceptions.
@@ -80,7 +84,7 @@ def get_scene_exceptions(indexer_id, season=-1):
         if exceptions:
             exceptionsList = list(set([cur_exception["show_name"] for cur_exception in exceptions]))
 
-            if not indexer_id in exceptionsCache:
+            if indexer_id not in exceptionsCache:
                 exceptionsCache[indexer_id] = {}
             exceptionsCache[indexer_id][season] = exceptionsList
     else:
@@ -126,7 +130,7 @@ def get_scene_seasons(indexer_id):
         if sqlResults:
             exceptionsSeasonList = list(set([int(x["season"]) for x in sqlResults]))
 
-            if not indexer_id in exceptionsSeasonCache:
+            if indexer_id not in exceptionsSeasonCache:
                 exceptionsSeasonCache[indexer_id] = {}
 
             exceptionsSeasonCache[indexer_id] = exceptionsSeasonList
@@ -237,7 +241,7 @@ def retrieve_exceptions():
     for cur_indexer_id in exception_dict:
         sql_ex = myDB.select("SELECT show_name FROM scene_exceptions WHERE indexer_id = ?;", [cur_indexer_id])
         existing_exceptions = [x["show_name"] for x in sql_ex]
-        if not cur_indexer_id in exception_dict:
+        if cur_indexer_id not in exception_dict:
             continue
 
         for cur_exception_dict in exception_dict[cur_indexer_id]:
@@ -258,6 +262,7 @@ def retrieve_exceptions():
     anidb_exception_dict.clear()
     xem_exception_dict.clear()
 
+
 def update_scene_exceptions(indexer_id, scene_exceptions, season=-1):
     """
     Given a indexer_id, and a list of all show scene exceptions, update the db.
@@ -276,6 +281,7 @@ def update_scene_exceptions(indexer_id, scene_exceptions, season=-1):
         myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
                     [indexer_id, cur_exception, season])
 
+
 def _anidb_exceptions_fetcher():
     if shouldRefresh('anidb'):
         logger.log(u"Checking for scene exception updates for AniDB")
@@ -295,6 +301,7 @@ def _anidb_exceptions_fetcher():
 
 xem_session = requests.Session()
 
+
 def _xem_exceptions_fetcher():
     if shouldRefresh('xem'):
         for indexer in sickbeard.indexerApi().indexers:
diff --git a/sickbeard/scene_numbering.py b/sickbeard/scene_numbering.py
index 8f73628fe8b864b5b76fbf6c03e5ae7f7012b07f..b1a19387be1d2c65e5b8f1dc7ac9180e94f8e706 100644
--- a/sickbeard/scene_numbering.py
+++ b/sickbeard/scene_numbering.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -47,11 +48,11 @@ def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=Tr
     :return: (int, int) a tuple with (season, episode)
     """
     if indexer_id is None or season is None or episode is None:
-        return (season, episode)
+        return season, episode
 
     showObj = Show.find(sickbeard.showList, int(indexer_id))
     if showObj and not showObj.is_scene:
-        return (season, episode)
+        return season, episode
 
     result = find_scene_numbering(int(indexer_id), int(indexer), season, episode)
     if result:
@@ -61,7 +62,7 @@ def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=Tr
             xem_result = find_xem_numbering(int(indexer_id), int(indexer), season, episode)
             if xem_result:
                 return xem_result
-        return (season, episode)
+        return season, episode
 
 
 def find_scene_numbering(indexer_id, indexer, season, episode):
@@ -69,7 +70,7 @@ def find_scene_numbering(indexer_id, indexer, season, episode):
     Same as get_scene_numbering(), but returns None if scene numbering is not set
     """
     if indexer_id is None or season is None or episode is None:
-        return (season, episode)
+        return season, episode
 
     indexer_id = int(indexer_id)
     indexer = int(indexer)
@@ -80,7 +81,7 @@ def find_scene_numbering(indexer_id, indexer, season, episode):
         [indexer, indexer_id, season, episode])
 
     if rows:
-        return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"]))
+        return int(rows[0]["scene_season"]), int(rows[0]["scene_episode"])
 
 
 def get_scene_absolute_numbering(indexer_id, indexer, absolute_number, fallback_to_xem=True):
@@ -141,7 +142,7 @@ def get_indexer_numbering(indexer_id, indexer, sceneSeason, sceneEpisode, fallba
     (this works like the reverse of get_scene_numbering)
     """
     if indexer_id is None or sceneSeason is None or sceneEpisode is None:
-        return (sceneSeason, sceneEpisode)
+        return sceneSeason, sceneEpisode
 
     indexer_id = int(indexer_id)
     indexer = int(indexer)
@@ -152,11 +153,11 @@ def get_indexer_numbering(indexer_id, indexer, sceneSeason, sceneEpisode, fallba
         [indexer, indexer_id, sceneSeason, sceneEpisode])
 
     if rows:
-        return (int(rows[0]["season"]), int(rows[0]["episode"]))
+        return int(rows[0]["season"]), int(rows[0]["episode"])
     else:
         if fallback_to_xem:
             return get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode)
-        return (sceneSeason, sceneEpisode)
+        return sceneSeason, sceneEpisode
 
 
 def get_indexer_absolute_numbering(indexer_id, indexer, sceneAbsoluteNumber, fallback_to_xem=True, scene_season=None):
@@ -234,7 +235,7 @@ def find_xem_numbering(indexer_id, indexer, season, episode):
     :return: (int, int) a tuple of scene_season, scene_episode, or None if there is no special mapping.
     """
     if indexer_id is None or season is None or episode is None:
-        return (season, episode)
+        return season, episode
 
     indexer_id = int(indexer_id)
     indexer = int(indexer)
@@ -247,7 +248,7 @@ def find_xem_numbering(indexer_id, indexer, season, episode):
         [indexer, indexer_id, season, episode])
 
     if rows:
-        return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"]))
+        return int(rows[0]["scene_season"]), int(rows[0]["scene_episode"])
 
 
 def find_xem_absolute_numbering(indexer_id, indexer, absolute_number):
@@ -286,7 +287,7 @@ def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode
     :return: (int, int) a tuple of (season, episode)
     """
     if indexer_id is None or sceneSeason is None or sceneEpisode is None:
-        return (sceneSeason, sceneEpisode)
+        return sceneSeason, sceneEpisode
 
     indexer_id = int(indexer_id)
     indexer = int(indexer)
@@ -299,9 +300,9 @@ def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode
         [indexer, indexer_id, sceneSeason, sceneEpisode])
 
     if rows:
-        return (int(rows[0]["season"]), int(rows[0]["episode"]))
+        return int(rows[0]["season"]), int(rows[0]["episode"])
 
-    return (sceneSeason, sceneEpisode)
+    return sceneSeason, sceneEpisode
 
 
 def get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNumber, scene_season=None):
@@ -499,7 +500,7 @@ def xem_refresh(indexer_id, indexer, force=False):
             url = "http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'])
 
             parsedJSON = sickbeard.helpers.getURL(url, session=xem_session, json=True)
-            if not parsedJSON or not 'result' in parsedJSON or not 'success' in parsedJSON['result']:
+            if not parsedJSON or 'result' not in parsedJSON or 'success' not in parsedJSON['result']:
                 logger.log(u'No XEM data for show "%s on %s"' % (indexer_id, sickbeard.indexerApi(indexer).name,), logger.INFO)
                 return
 
@@ -514,7 +515,8 @@ def xem_refresh(indexer_id, indexer, force=False):
                          indexer_id,
                          entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
                          entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
-                        ]])
+                         ]
+                    ])
                 if 'scene_2' in entry:  # for doubles
                     cl.append([
                         "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
@@ -524,7 +526,8 @@ def xem_refresh(indexer_id, indexer, force=False):
                          indexer_id,
                          entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
                          entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
-                        ]])
+                         ]
+                    ])
 
             if len(cl) > 0:
                 myDB = db.DBConnection()
@@ -623,7 +626,8 @@ def fix_xem_numbering(indexer_id, indexer):
                  indexer_id,
                  season,
                  episode
-                ]])
+                 ]
+            ])
             update_absolute_number = False
 
         if update_scene_season:
@@ -633,7 +637,8 @@ def fix_xem_numbering(indexer_id, indexer):
                  indexer_id,
                  season,
                  episode
-                ]])
+                 ]
+            ])
             update_scene_season = False
 
         if update_scene_episode:
@@ -643,7 +648,8 @@ def fix_xem_numbering(indexer_id, indexer):
                  indexer_id,
                  season,
                  episode
-                ]])
+                 ]
+            ])
             update_scene_episode = False
 
         if update_scene_absolute_number:
@@ -653,7 +659,8 @@ def fix_xem_numbering(indexer_id, indexer):
                  indexer_id,
                  season,
                  episode
-                ]])
+                 ]
+            ])
             update_scene_absolute_number = False
 
     if len(cl) > 0:
diff --git a/sickbeard/scheduler.py b/sickbeard/scheduler.py
index a6907af2b7a616fa774542127d3ed34bd589c2d9..a4690cf5dda6c7727261dd6376b0622a27fd876b 100644
--- a/sickbeard/scheduler.py
+++ b/sickbeard/scheduler.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
diff --git a/sickbeard/search.py b/sickbeard/search.py
index 5059cf1125230e6cc423a3ca4ca9434e90d2e2be..43635288ca64d014eb7ee99a69ba53d2a82c06e2 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
@@ -125,7 +126,7 @@ def snatchEpisode(result, endStatus=SNATCHED):
             logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
             dlResult = False
 
-    # TORRENTs can be sent to clients or saved to disk
+    # Torrents can be sent to clients or saved to disk
     elif result.resultType == "torrent":
         # torrents are saved to disk when blackhole mode
         if sickbeard.TORRENT_METHOD == "blackhole":
@@ -209,7 +210,6 @@ def pickBestResult(results, show):
         if show and cur_result.show is not show:
             continue
 
-
         # build the black And white list
         if show.is_anime:
             if not show.release_groups.is_valid(cur_result):
@@ -271,18 +271,17 @@ def isFinalResult(result):
     """
     Checks if the given result is good enough quality that we can stop searching for other ones.
 
-    If the result is the highest quality in both the any/best quality lists then this function
-    returns True, if not then it's False
+    :param result: search result to check
+    :return: True if the result is the highest quality in both the any/best quality lists else False
     """
 
     logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)
 
     show_obj = result.episodes[0].show
 
-
     any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
 
-    # if there is a redownload that's higher than this then we definitely need to keep looking
+    # if there is a re-download that's higher than this then we definitely need to keep looking
     if best_qualities and result.quality < max(best_qualities):
         return False
 
@@ -290,7 +289,7 @@ def isFinalResult(result):
     elif show_obj.is_anime and show_obj.release_groups.is_valid(result):
         return False
 
-    # if there's no redownload that's higher (above) and this is the highest initial download then we're good
+    # if there's no re-download that's higher (above) and this is the highest initial download then we're good
     elif any_qualities and result.quality in any_qualities:
         return True
 
@@ -304,21 +303,21 @@ def isFinalResult(result):
 
 def isFirstBestMatch(result):
     """
-    Checks if the given result is a best quality match and if we want to archive the episode on first match.
+    Checks if the given result is a best quality match and if we want to stop searching providers here.
+
+    :param result: search result to check
+    :return: True if the result is the best quality match else False
     """
 
-    logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name,
+    logger.log(u"Checking if we should stop searching for a better quality for episode " + result.name,
                logger.DEBUG)
 
     show_obj = result.episodes[0].show
 
     any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
 
-    # if there is a redownload that's a match to one of our best qualities and we want to archive the episode then we are done
-    if best_qualities and show_obj.archive_firstmatch and result.quality in best_qualities:
-        return True
+    return result.quality in best_qualities if best_qualities else False
 
-    return False
 
 def wantedEpisodes(show, fromDate):
     """
@@ -327,35 +326,42 @@ def wantedEpisodes(show, fromDate):
     :param fromDate: Search from a certain date
     :return: list of wanted episodes
     """
+    wanted = []
+    if show.paused:
+        logger.log(u"Not checking for episodes of %s because the show is paused" % show.name, logger.DEBUG)
+        return wanted
 
-    anyQualities, bestQualities = common.Quality.splitQuality(show.quality) # @UnusedVariable
-    allQualities = list(set(anyQualities + bestQualities))
+    allowed_qualities, preferred_qualities = common.Quality.splitQuality(show.quality)
+    all_qualities = list(set(allowed_qualities + preferred_qualities))
 
     logger.log(u"Seeing if we need anything from " + show.name, logger.DEBUG)
-    myDB = db.DBConnection()
+    con = db.DBConnection()
 
-    sqlResults = myDB.select("SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
-                             [show.indexerid, fromDate.toordinal()])
+    sql_results = con.select(
+        "SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
+        [show.indexerid, fromDate.toordinal()]
+    )
 
     # check through the list of statuses to see if we want any
-    wanted = []
-    for result in sqlResults:
-        curCompositeStatus = int(result["status"] or -1)
-        curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
+    for result in sql_results:
+        cur_status, cur_quality = common.Quality.splitCompositeStatus(int(result["status"] or -1))
+        if cur_status not in {common.WANTED, common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER}:
+            continue
 
-        if bestQualities:
-            highestBestQuality = max(allQualities)
-        else:
-            highestBestQuality = 0
+        if cur_status != common.WANTED:
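+            # Episode is already snatched/downloaded: skip it when its quality is in the
+            # preferred list, or in the allowed list when no preferred qualities are set.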
+            if preferred_qualities:
+                if cur_quality in preferred_qualities:
+                    continue
+            elif cur_quality in allowed_qualities:
+                continue
 
-        # if we need a better one then say yes
-        if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER) and curQuality < highestBestQuality) or curStatus == common.WANTED:
-            epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
-            epObj.wantedQuality = [i for i in allQualities if (i > curQuality and i != common.Quality.UNKNOWN)]
-            wanted.append(epObj)
+        epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
+        epObj.wantedQuality = [i for i in all_qualities if i > cur_quality and i != common.Quality.UNKNOWN]
+        wanted.append(epObj)
 
     return wanted
 
+
 def searchForNeededEpisodes():
     """
     Check providers for details on wanted episodes
@@ -435,7 +441,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
     :param show: Show we are looking for
     :param episodes: Episodes we hope to find
     :param manualSearch: Boolean, is this a manual search?
-    :param downCurQuality: Boolean, should we redownload currently avaialble quality file
+    :param downCurQuality: Boolean, should we re-download currently available quality file
     :return: results for search
     """
     foundResults = {}
@@ -457,7 +463,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
 
     for curProvider in providers:
         if curProvider.anime_only and not show.is_anime:
-            logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
+            logger.log(u"" + str(show.name) + " is not an anime, skipping", logger.DEBUG)
             continue
 
         threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index cfa245844d905fb19a59d446e499e6a690523cee..be2398b002318beb9df0a5fbc496034b31d1f70f 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -41,7 +42,7 @@ class BacklogSearchScheduler(scheduler.Scheduler):
             return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)
 
 
-class BacklogSearcher:
+class BacklogSearcher(object):
     def __init__(self):
 
         self._lastBacklog = self._get_lastBacklog()
@@ -135,42 +136,41 @@ class BacklogSearcher:
         return self._lastBacklog
 
     def _get_segments(self, show, fromDate):
+        wanted = {}
         if show.paused:
-            logger.log(u"Skipping backlog for {show_name} because the show is paused".format(show_name=show.name), logger.DEBUG)
-            return {}
+            logger.log(u"Skipping backlog for %s because the show is paused" % show.name, logger.DEBUG)
+            return wanted
 
-        anyQualities, bestQualities = common.Quality.splitQuality(show.quality)  # @UnusedVariable
+        allowed_qualities, preferred_qualities = common.Quality.splitQuality(show.quality)
 
-        logger.log(u"Seeing if we need anything from {show_name}".format(show_name=show.name), logger.DEBUG)
+        logger.log(u"Seeing if we need anything from %s" % show.name, logger.DEBUG)
 
-        myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT status, season, episode FROM tv_episodes WHERE airdate > ? AND showid = ?",
-                [fromDate.toordinal(), show.indexerid])
+        con = db.DBConnection()
+        sql_results = con.select(
+            "SELECT status, season, episode FROM tv_episodes WHERE airdate > ? AND showid = ?",
+            [fromDate.toordinal(), show.indexerid]
+        )
 
         # check through the list of statuses to see if we want any
-        wanted = {}
-        for result in sqlResults:
-            curCompositeStatus = int(result["status"] or -1)
-            curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
+        for result in sql_results:
+            cur_status, cur_quality = common.Quality.splitCompositeStatus(int(result["status"] or -1))
 
-            if bestQualities:
-                highestBestQuality = max(bestQualities)
-                lowestBestQuality = min(bestQualities)
-            else:
-                highestBestQuality = 0
-                lowestBestQuality=0
+            if cur_status not in {common.WANTED, common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER}:
+                continue
 
+            if cur_status != common.WANTED:
+                if preferred_qualities:
+                    if cur_quality in preferred_qualities:
+                        continue
+                elif cur_quality in allowed_qualities:
+                    continue
 
-            # if we need a better one then say yes
-            if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER) and curQuality < highestBestQuality) or curStatus == common.WANTED:
-                epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
+            ep_obj = show.getEpisode(int(result["season"]), int(result["episode"]))
 
-                # only fetch if not archive on first match, or if show is lowest than the lower expected quality
-                if(epObj.show.archive_firstmatch == 0 or curQuality < lowestBestQuality):
-                    if epObj.season not in wanted:
-                        wanted[epObj.season] = [epObj]
-                    else:
-                        wanted[epObj.season].append(epObj)
+            if ep_obj.season not in wanted:
+                wanted[ep_obj.season] = [ep_obj]
+            else:
+                wanted[ep_obj.season].append(ep_obj)
 
         return wanted
 
@@ -186,7 +186,6 @@ class BacklogSearcher:
         else:
             myDB.action("UPDATE info SET last_backlog=" + str(when))
 
-
     def run(self, force=False):
         try:
             self.searchBacklog()
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 32738b930ab84c5b882b26ab1e6fd3583ed0239e..4b8fccb4dca3208ad96b1fb67ecc11e5dd8f5a99 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -38,6 +39,7 @@ MANUAL_SEARCH = 40
 MANUAL_SEARCH_HISTORY = []
 MANUAL_SEARCH_HISTORY_SIZE = 100
 
+
 class SearchQueue(generic_queue.GenericQueue):
     def __init__(self):
         generic_queue.GenericQueue.__init__(self)
@@ -109,7 +111,6 @@ class SearchQueue(generic_queue.GenericQueue):
                 length['failed'] += 1
         return length
 
-
     def add_item(self, item):
         if isinstance(item, DailySearchQueueItem):
             # daily searches
@@ -123,6 +124,7 @@ class SearchQueue(generic_queue.GenericQueue):
         else:
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
 
+
 class DailySearchQueueItem(generic_queue.QueueItem):
     def __init__(self):
         self.success = None
@@ -193,7 +195,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
 
-        ### Keep a list with the 100 last executed searches
+        # Keep a list with the 100 last executed searches
         fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
 
         if self.success is None:
@@ -283,7 +285,7 @@ class FailedQueueItem(generic_queue.QueueItem):
         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
 
-        ### Keep a list with the 100 last executed searches
+        # Keep a list with the 100 last executed searches
         fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
 
         if self.success is None:
@@ -291,7 +293,8 @@ class FailedQueueItem(generic_queue.QueueItem):
 
         self.finish()
 
-def fifo(myList, item, maxSize = 100):
+
+def fifo(myList, item, maxSize=100):
     if len(myList) >= maxSize:
         myList.pop(0)
     myList.append(item)
diff --git a/sickbeard/showUpdater.py b/sickbeard/showUpdater.py
index 17e725e1738cdd50b7f2950f8a462775d148f798..f995d45174593692b573230c8b9a773bb8852450 100644
--- a/sickbeard/showUpdater.py
+++ b/sickbeard/showUpdater.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -33,7 +34,8 @@ from sickrage.helper.exceptions import CantRefreshShowException, CantUpdateShowE
 from sickbeard.indexers.indexer_config import INDEXER_TVRAGE
 from sickbeard.indexers.indexer_config import INDEXER_TVDB
 
-class ShowUpdater:
+
+class ShowUpdater(object):
     def __init__(self):
         self.lock = threading.Lock()
         self.amActive = False
diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py
index 8e640b250869b481e22133cca8d40aa8bf71d4b3..f161b0569c67dcd64e910395a8dce8d969b716b9 100644
--- a/sickbeard/show_name_helpers.py
+++ b/sickbeard/show_name_helpers.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -40,7 +41,7 @@ resultFilters = [
     "dub(bed)?"
 ]
 
-if hasattr('General','ignored_subs_list') and sickbeard.IGNORED_SUBS_LIST:
+if hasattr('General', 'ignored_subs_list') and sickbeard.IGNORED_SUBS_LIST:
     resultFilters.append("(" + sickbeard.IGNORED_SUBS_LIST.replace(",", "|") + ")sub(bed|ed|s)?")
 
 
@@ -210,11 +211,10 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
                         if ep_obj.show.release_groups is not None:
                             if len(show.release_groups.whitelist) > 0:
                                 for keyword in show.release_groups.whitelist:
-                                    toReturn.append(keyword + '.' + curShow+ "." + cur_season)
+                                    toReturn.append(keyword + '.' + curShow + "." + cur_season)
                     else:
                         toReturn.append(curShow + "." + cur_season)
 
-
     return toReturn
 
 
@@ -335,6 +335,7 @@ def allPossibleShowNames(show, season=-1):
 
     return showNames
 
+
 def determineReleaseName(dir_name=None, nzb_name=None):
     """Determine a release name from an nzb and/or folder name"""
 
diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py
index af60180bc5cfd02ec2a2f06676c005e8dbdaaa1a..e9bfcf309325d8a71cf3949aedaa7a785e39ec2c 100644
--- a/sickbeard/show_queue.py
+++ b/sickbeard/show_queue.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -48,8 +49,7 @@ class ShowQueue(generic_queue.GenericQueue):
         return show.indexerid in [x.show.indexerid if x.show else 0 for x in self.queue if x.action_id in actions]
 
     def _isBeingSomethinged(self, show, actions):
-        return self.currentItem is not None and show == self.currentItem.show and \
-               self.currentItem.action_id in actions
+        return self.currentItem is not None and show == self.currentItem.show and self.currentItem.action_id in actions
 
     def isInUpdateQueue(self, show):
         return self._isInQueue(show, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE))
@@ -142,13 +142,13 @@ class ShowQueue(generic_queue.GenericQueue):
         return queueItemObj
 
     def addShow(self, indexer, indexer_id, showDir, default_status=None, quality=None, flatten_folders=None,
-                lang=None, subtitles=None, anime=None, scene=None, paused=None, blacklist=None, whitelist=None, default_status_after=None, archive=None):
+                lang=None, subtitles=None, anime=None, scene=None, paused=None, blacklist=None, whitelist=None, default_status_after=None):
 
         if lang is None:
             lang = sickbeard.INDEXER_DEFAULT_LANGUAGE
 
         queueItemObj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang,
-                                    subtitles, anime, scene, paused, blacklist, whitelist, default_status_after, archive)
+                                    subtitles, anime, scene, paused, blacklist, whitelist, default_status_after)
 
         self.add_item(queueItemObj)
 
@@ -160,7 +160,7 @@ class ShowQueue(generic_queue.GenericQueue):
 
         # remove other queued actions for this show.
         for x in self.queue:
-            if show.indexerid == x.show.indexerid and x != self.currentItem:
+            if x and x != self.currentItem and show.indexerid == x.show.indexerid:
                 self.queue.remove(x)
 
         queueItemObj = QueueItemRemove(show=show, full=full)
@@ -168,6 +168,7 @@ class ShowQueue(generic_queue.GenericQueue):
 
         return queueItemObj
 
+
 class ShowQueueActions(object):
 
     def __init__(self):
@@ -225,7 +226,7 @@ class ShowQueueItem(generic_queue.QueueItem):
 
 class QueueItemAdd(ShowQueueItem):
     def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles, anime,
-                 scene, paused, blacklist, whitelist, default_status_after, archive):
+                 scene, paused, blacklist, whitelist, default_status_after):
 
         self.indexer = indexer
         self.indexer_id = indexer_id
@@ -241,7 +242,6 @@ class QueueItemAdd(ShowQueueItem):
         self.blacklist = blacklist
         self.whitelist = whitelist
         self.default_status_after = default_status_after
-        self.archive = archive
 
         self.show = None
 
@@ -294,7 +294,7 @@ class QueueItemAdd(ShowQueueItem):
             if getattr(s, 'seriesname', None) is None:
                 logger.log(u"Show in " + self.showDir + " has no name on " + str(
                     sickbeard.indexerApi(self.indexer).name) + ", probably the wrong language used to search with.",
-                           logger.ERROR)
+                    logger.ERROR)
                 ui.notifications.error("Unable to add show",
                                        "Show in " + self.showDir + " has no name on " + str(sickbeard.indexerApi(
                                            self.indexer).name) + ", probably the wrong language. Delete .nfo and add manually in the correct language.")
@@ -357,7 +357,6 @@ class QueueItemAdd(ShowQueueItem):
             self.show.flatten_folders = self.flatten_folders if self.flatten_folders is not None else sickbeard.FLATTEN_FOLDERS_DEFAULT
             self.show.anime = self.anime if self.anime is not None else sickbeard.ANIME_DEFAULT
             self.show.scene = self.scene if self.scene is not None else sickbeard.SCENE_DEFAULT
-            self.show.archive_firstmatch = self.archive if self.archive is not None else sickbeard.ARCHIVE_DEFAULT
             self.show.paused = self.paused if self.paused is not None else False
 
             # set up default new/missing episode status
@@ -511,6 +510,7 @@ class QueueItemRefresh(ShowQueueItem):
 
         self.finish()
 
+
 class QueueItemRename(ShowQueueItem):
     def __init__(self, show=None):
         ShowQueueItem.__init__(self, ShowQueueActions.RENAME, show)
@@ -551,6 +551,7 @@ class QueueItemRename(ShowQueueItem):
 
         self.finish()
 
+
 class QueueItemSubtitle(ShowQueueItem):
     def __init__(self, show=None):
         ShowQueueItem.__init__(self, ShowQueueActions.SUBTITLE, show)
@@ -563,6 +564,7 @@ class QueueItemSubtitle(ShowQueueItem):
         self.show.download_subtitles()
         self.finish()
 
+
 class QueueItemUpdate(ShowQueueItem):
     def __init__(self, show=None):
         ShowQueueItem.__init__(self, ShowQueueActions.UPDATE, show)
diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py
index 931ca7e97b1f713563aae651400db98b02243ee0..df3ca0b5ba9896f543635d10c4792dbd21c34693 100644
--- a/sickbeard/subtitles.py
+++ b/sickbeard/subtitles.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: medariox <dariox@gmx.com>,
 # based on Antoine Bertin's <diaoulael@gmail.com> work
 # and originally written by Nyaran <nyayukko@gmail.com>
@@ -32,6 +33,7 @@ from sickbeard import logger
 from sickbeard import history
 from sickbeard import db
 from sickbeard import processTV
+from sickbeard.common import Quality
 from sickbeard.helpers import remove_non_release_groups, isMediaFile
 from sickrage.helper.common import dateTimeFormat
 from sickrage.helper.encoding import ek
@@ -59,9 +61,7 @@ ENTRY_POINTS = {
     ]
 }
 
-# pylint: disable=protected-access
-# Access to a protected member of a client class
-DISTRIBUTION._ep_map = pkg_resources.EntryPoint.parse_map(ENTRY_POINTS, DISTRIBUTION)
+DISTRIBUTION._ep_map = pkg_resources.EntryPoint.parse_map(ENTRY_POINTS, DISTRIBUTION)  # pylint: disable=protected-access
 pkg_resources.working_set.add(DISTRIBUTION)
 
 provider_manager.ENTRY_POINT_CACHE.pop('subliminal.providers')
@@ -87,8 +87,7 @@ def sorted_service_list():
     for current_service in sickbeard.SUBTITLES_SERVICES_LIST:
         if current_service in subliminal.provider_manager.names():
             new_list.append({'name': current_service,
-                             'url': PROVIDER_URLS[current_service] if current_service in
-                                    PROVIDER_URLS else lmgtfy % current_service,
+                             'url': PROVIDER_URLS[current_service] if current_service in PROVIDER_URLS else lmgtfy % current_service,
                              'image': current_service + '.png',
                              'enabled': sickbeard.SUBTITLES_SERVICES_ENABLED[current_index] == 1})
         current_index += 1
@@ -96,11 +95,9 @@ def sorted_service_list():
     for current_service in subliminal.provider_manager.names():
         if current_service not in [service['name'] for service in new_list]:
             new_list.append({'name': current_service,
-                             'url': PROVIDER_URLS[current_service] if current_service in
-                                    PROVIDER_URLS else lmgtfy % current_service,
+                             'url': PROVIDER_URLS[current_service] if current_service in PROVIDER_URLS else lmgtfy % current_service,
                              'image': current_service + '.png',
                              'enabled': False})
-
     return new_list
 
 
@@ -110,10 +107,12 @@ def enabled_service_list():
 
 def wanted_languages(sql_like=None):
     wanted = frozenset(sickbeard.SUBTITLES_LANGUAGES).intersection(subtitle_code_filter())
-    return (wanted, '%' + ','.join(sorted(wanted)) + '%')[bool(sql_like)]
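+    # With SUBTITLES_MULTI disabled the subtitle is stored under the 'und' code, so the
+    # SQL LIKE pattern has to look for 'und' instead of the configured language codes.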
+    return (wanted, '%' + ','.join(sorted(wanted)) + '%' if sickbeard.SUBTITLES_MULTI else '%und%')[bool(sql_like)]
 
 
 def get_needed_languages(subtitles):
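+    # Without SUBTITLES_MULTI an existing 'und' subtitle already covers the episode;
+    # otherwise every configured language is still wanted.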
+    if not sickbeard.SUBTITLES_MULTI:
+        return set() if 'und' in subtitles else {from_code(language) for language in wanted_languages()}
     return {from_code(language) for language in wanted_languages().difference(subtitles)}
 
 
@@ -137,10 +136,10 @@ def needs_subtitles(subtitles):
 # Hack around this for now.
 def from_code(language):
     language = language.strip()
-    if language not in language_converters['opensubtitles'].codes:
-        return Language('und')
+    if language and language in language_converters['opensubtitles'].codes:
+        return Language.fromopensubtitles(language)  # pylint: disable=no-member
 
-    return Language.fromopensubtitles(language)  # pylint: disable=no-member
+    return Language('und')
 
 
 def name_from_code(code):
@@ -151,13 +150,13 @@ def code_from_code(code):
     return from_code(code).opensubtitles
 
 
-def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals
+def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
     existing_subtitles = subtitles_info['subtitles']
 
     if not needs_subtitles(existing_subtitles):
         logger.log(u'Episode already has all needed subtitles, skipping %s S%02dE%02d'
                    % (subtitles_info['show_name'], subtitles_info['season'], subtitles_info['episode']), logger.DEBUG)
-        return (existing_subtitles, None)
+        return existing_subtitles, None
 
     # Check if we really need subtitles
     languages = get_needed_languages(existing_subtitles)
@@ -165,7 +164,7 @@ def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals
         logger.log(u'No subtitles needed for %s S%02dE%02d'
                    % (subtitles_info['show_name'], subtitles_info['season'],
                       subtitles_info['episode']), logger.DEBUG)
-        return (existing_subtitles, None)
+        return existing_subtitles, None
 
     subtitles_path = get_subtitles_path(subtitles_info['location']).encode(sickbeard.SYS_ENCODING)
     video_path = subtitles_info['location'].encode(sickbeard.SYS_ENCODING)
@@ -176,7 +175,7 @@ def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals
         logger.log(u'Exception caught in subliminal.scan_video for %s S%02dE%02d'
                    % (subtitles_info['show_name'], subtitles_info['season'],
                       subtitles_info['episode']), logger.DEBUG)
-        return (existing_subtitles, None)
+        return existing_subtitles, None
 
     providers = enabled_service_list()
     provider_configs = {'addic7ed': {'username': sickbeard.ADDIC7ED_USER,
@@ -194,7 +193,7 @@ def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals
             logger.log(u'No subtitles found for %s S%02dE%02d on any provider'
                        % (subtitles_info['show_name'], subtitles_info['season'],
                           subtitles_info['episode']), logger.DEBUG)
-            return (existing_subtitles, None)
+            return existing_subtitles, None
 
         for sub in subtitles_list:
             matches = sub.get_matches(video, hearing_impaired=False)
@@ -208,11 +207,15 @@ def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals
 
         subliminal.save_subtitles(video, found_subtitles, directory=subtitles_path,
                                   single=not sickbeard.SUBTITLES_MULTI)
-
+    except IOError as error:
+        if 'No space left on device' in ex(error):
+            logger.log(u'Not enough space on the drive to save subtitles', logger.WARNING)
+        else:
+            logger.log(traceback.format_exc(), logger.WARNING)
     except Exception:
         logger.log(u"Error occurred when downloading subtitles for: %s" % video_path)
         logger.log(traceback.format_exc(), logger.ERROR)
-        return (existing_subtitles, None)
+        return existing_subtitles, None
 
     for subtitle in found_subtitles:
         subtitle_path = subliminal.subtitle.get_subtitle_path(video.name,
@@ -220,40 +223,44 @@ def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals
                                                               subtitle.language)
         if subtitles_path is not None:
             subtitle_path = ek(os.path.join, subtitles_path, ek(os.path.split, subtitle_path)[1])
+
         sickbeard.helpers.chmodAsParent(subtitle_path)
         sickbeard.helpers.fixSetGroupID(subtitle_path)
 
-    if sickbeard.SUBTITLES_EXTRA_SCRIPTS and isMediaFile(video_path) and not sickbeard.EMBEDDED_SUBTITLES_ALL:
-        run_subs_extra_scripts(subtitles_info, found_subtitles, video, single=not sickbeard.SUBTITLES_MULTI)
-
-    current_subtitles = [subtitle.language.opensubtitles for subtitle in found_subtitles]
-    new_subtitles = frozenset(current_subtitles).difference(existing_subtitles)
-    current_subtitles += existing_subtitles
-
-    if sickbeard.SUBTITLES_HISTORY:
-        for subtitle in found_subtitles:
-            logger.log(u'history.logSubtitle %s, %s'
-                       % (subtitle.provider_name, subtitle.language.opensubtitles), logger.DEBUG)
+        if sickbeard.SUBTITLES_HISTORY:
+            logger.log(u'history.logSubtitle %s, %s' %
+                       (subtitle.provider_name, subtitle.language.opensubtitles), logger.DEBUG)
 
             history.logSubtitle(subtitles_info['show_indexerid'], subtitles_info['season'],
                                 subtitles_info['episode'], subtitles_info['status'], subtitle)
 
-    return (current_subtitles, new_subtitles)
+        if sickbeard.SUBTITLES_EXTRA_SCRIPTS and isMediaFile(video_path) and not sickbeard.EMBEDDED_SUBTITLES_ALL:
+            run_subs_extra_scripts(subtitles_info, subtitle, video, single=not sickbeard.SUBTITLES_MULTI)
+
+    new_subtitles = sorted({subtitle.language.opensubtitles for subtitle in found_subtitles})
+    current_subtitles = sorted({subtitle for subtitle in new_subtitles + existing_subtitles})
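+    # In single-language mode the subtitle is saved without a language suffix, so track
+    # it as 'und' rather than under its real opensubtitles code.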
+    if not sickbeard.SUBTITLES_MULTI and len(found_subtitles) == 1:
+        new_code = found_subtitles[0].language.opensubtitles
+        if new_code not in existing_subtitles:
+            current_subtitles.remove(new_code)
+        current_subtitles.append('und')
+
+    return current_subtitles, new_subtitles
 
 
 def refresh_subtitles(episode_info, existing_subtitles):
     video = get_video(episode_info['location'].encode(sickbeard.SYS_ENCODING))
     if not video:
         logger.log(u"Exception caught in subliminal.scan_video, subtitles couldn't be refreshed", logger.DEBUG)
-        return (existing_subtitles, None)
+        return existing_subtitles, None
     current_subtitles = get_subtitles(video)
     if existing_subtitles == current_subtitles:
         logger.log(u'No changed subtitles for %s S%02dE%02d'
                    % (episode_info['show_name'], episode_info['season'],
                       episode_info['episode']), logger.DEBUG)
-        return (existing_subtitles, None)
+        return existing_subtitles, None
     else:
-        return (current_subtitles, True)
+        return current_subtitles, True
 
 
 def get_video(video_path, subtitles_path=None):
@@ -399,7 +406,7 @@ class SubtitlesFinder(object):
                 logger.log(u"Starting post-process with default settings now that we found subtitles")
                 processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
 
-    def run(self, force=False):  # pylint: disable=unused-argument,too-many-statements,too-many-branches
+    def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements
 
         if not sickbeard.USE_SUBTITLES:
             return
@@ -411,49 +418,47 @@ class SubtitlesFinder(object):
 
         self.amActive = True
 
+        def dhm(td):
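+            # Format a timedelta as "X days, Y hours, Z minutes" for the delay log message.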
+            days = td.days
+            hours = td.seconds // 60 ** 2
+            minutes = (td.seconds // 60) % 60
+            ret = (u'', '%s days, ' % days)[days > 0] + \
+                (u'', '%s hours, ' % hours)[hours > 0] + \
+                (u'', '%s minutes' % minutes)[minutes > 0]
+            if days == 1:
+                ret = ret.replace('days', 'day')
+            if hours == 1:
+                ret = ret.replace('hours', 'hour')
+            if minutes == 1:
+                ret = ret.replace('minutes', 'minute')
+            return ret.rstrip(', ')
+
         if sickbeard.SUBTITLES_DOWNLOAD_IN_PP:
             self.subtitles_download_in_pp()
 
         logger.log(u'Checking for missed subtitles', logger.INFO)
 
-        # get episodes on which we want subtitles
-        # criteria is:
-        #  - show subtitles = 1
-        #  - episode subtitles != config wanted languages or 'und' (depends on config multi)
-        #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
-        #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
-
-        """
-        Defines the hours to wait between 2 subtitles search depending on:
-        - the episode: new or old
-        - the number of searches done so far (searchcount), represented by the index of the list
-        """
-        rules = {'old': [0, 24], 'new': [0, 4, 8, 4, 16, 24, 24]}
-
-        if sickbeard.SUBTITLES_MULTI:
-            query_languages = wanted_languages(True)
-        else:
-            query_languages = '%und%'
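+        # Only episodes that are already downloaded or archived can be missing subtitles.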
+        statuses = list({status for status in Quality.DOWNLOADED + Quality.ARCHIVED})
 
-        today = datetime.date.today().toordinal()
         database = db.DBConnection()
         sql_results = database.select(
-            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, '
-            'e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, '
-            '(? - e.airdate) AS airdate_daydiff '
-            'FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) '
-            'WHERE s.subtitles = 1 AND e.subtitles NOT LIKE ? '
-            'AND (e.subtitles_searchcount <= 2 OR (e.subtitles_searchcount <= 7 AND airdate_daydiff <= 7)) '
-            'AND e.location != ""', [today, query_languages])
-
-        if len(sql_results) == 0:
+            "SELECT s.show_name, e.showid, e.season, e.episode, "
+            "e.status, e.subtitles, e.subtitles_searchcount AS searchcount, "
+            "e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) as age "
+            "FROM tv_episodes AS e INNER JOIN tv_shows AS s "
+            "ON (e.showid = s.indexer_id) "
+            "WHERE s.subtitles = 1 AND e.subtitles NOT LIKE ? "
+            "AND e.location != '' AND e.status IN (%s) ORDER BY age ASC" %
+            ','.join(['?'] * len(statuses)),
+            [datetime.datetime.now().toordinal(), wanted_languages(True)] + statuses
+        )
+
+        if not sql_results:
             logger.log(u'No subtitles to download', logger.INFO)
             self.amActive = False
             return
 
-        now = datetime.datetime.now()
         for ep_to_sub in sql_results:
-
             if not ek(os.path.isfile, ep_to_sub['location']):
                 logger.log(u'Episode file does not exist, cannot download subtitles for %s S%02dE%02d'
                            % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode']), logger.DEBUG)
@@ -465,48 +470,51 @@ class SubtitlesFinder(object):
                 continue
 
             try:
-                try:
-                    lastsearched = datetime.datetime.strptime(ep_to_sub['lastsearch'], dateTimeFormat)
-                except ValueError:
-                    lastsearched = datetime.datetime.min
+                lastsearched = datetime.datetime.strptime(ep_to_sub['lastsearch'], dateTimeFormat)
+            except ValueError:
+                lastsearched = datetime.datetime.min
 
-                if ((ep_to_sub['airdate_daydiff'] > 7 and ep_to_sub['searchcount'] < 2 and
-                     now - lastsearched > datetime.timedelta(hours=rules['old'][ep_to_sub['searchcount']])) or
-                        (ep_to_sub['airdate_daydiff'] <= 7 and ep_to_sub['searchcount'] < 7 and
-                         now - lastsearched > datetime.timedelta(hours=rules['new'][ep_to_sub['searchcount']]))):
-
-                    logger.log(u'Started missed subtitles search for %s S%02dE%02d'
-                               % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode']), logger.INFO)
-
-                    show_object = Show.find(sickbeard.showList, int(ep_to_sub['showid']))
-                    if not show_object:
-                        logger.log(u'Show with ID %s not found in the database' % ep_to_sub['showid'], logger.DEBUG)
+            try:
+                if not force:
+                    now = datetime.datetime.now()
+                    days = int(ep_to_sub['age'])
+                    delay_time = datetime.timedelta(hours=8 if days < 10 else 7 * 24 if days < 30 else 30 * 24)
+
+                    # Search every hour for the first 24 hours since aired, then every 8 hours until 10 days passes
+                    # After 10 days, search every 7 days, after 30 days search once a month
+                    # Will always try an episode regardless of age at least 2 times
+                    if lastsearched + delay_time > now and int(ep_to_sub['searchcount']) > 2 and days:
+                        logger.log(u"Subtitle search for %s S%02dE%02d delayed for %s"
+                                   % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode'],
+                                      dhm(lastsearched + delay_time - now)), logger.DEBUG)
                         continue
 
-                    episode_object = show_object.getEpisode(int(ep_to_sub["season"]), int(ep_to_sub["episode"]))
-                    if isinstance(episode_object, str):
-                        logger.log(u'%s S%02dE%02d not found in the database'
-                                   % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode']), logger.DEBUG)
-                        continue
+                logger.log(u'Searching for missing subtitles of %s S%02dE%02d'
+                           % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode']), logger.INFO)
 
-                    existing_subtitles = episode_object.subtitles
+                show_object = Show.find(sickbeard.showList, int(ep_to_sub['showid']))
+                if not show_object:
+                    logger.log(u'Show with ID %s not found in the database' % ep_to_sub['showid'], logger.DEBUG)
+                    continue
 
-                    try:
-                        episode_object.download_subtitles()
-                    except Exception as error:
-                        logger.log(u'Unable to find subtitles for %s S%02dE%02d. Error: %r'
-                                   % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode'],
-                                      ex(error)), logger.ERROR)
-                        continue
+                episode_object = show_object.getEpisode(int(ep_to_sub["season"]), int(ep_to_sub["episode"]))
+                if isinstance(episode_object, str):
+                    logger.log(u'%s S%02dE%02d not found in the database'
+                               % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode']), logger.DEBUG)
+                    continue
 
-                    new_subtitles = frozenset(episode_object.subtitles).difference(existing_subtitles)
-                    if new_subtitles:
-                        logger.log(u'Downloaded %s subtitles for %s S%02dE%02d'
-                                   % (', '.join(new_subtitles), ep_to_sub['show_name'],
-                                      ep_to_sub["season"], ep_to_sub["episode"]))
-                else:
-                    logger.log(u"Subtitle search limit reached for %s S%02dE%02d"
-                       % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode']), logger.INFO)
+                try:
+                    new_subtitles = episode_object.download_subtitles()
+                except Exception as error:
+                    logger.log(u'Unable to find subtitles for %s S%02dE%02d. Error: %r'
+                               % (ep_to_sub['show_name'], ep_to_sub['season'], ep_to_sub['episode'],
+                                  ex(error)), logger.ERROR)
+                    continue
+
+                if new_subtitles:
+                    logger.log(u'Downloaded %s subtitles for %s S%02dE%02d'
+                               % (', '.join(new_subtitles), ep_to_sub['show_name'],
+                                  ep_to_sub["season"], ep_to_sub["episode"]))
 
             except Exception as error:
                 logger.log(u'Error while searching subtitles for %s S%02dE%02d. Error: %r'
@@ -514,31 +522,31 @@ class SubtitlesFinder(object):
                               ex(error)), logger.ERROR)
                 continue
 
+        logger.log(u'Finished checking for missed subtitles', logger.INFO)
         self.amActive = False
 
 
-def run_subs_extra_scripts(episode_object, found_subtitles, video, single=False):
+def run_subs_extra_scripts(episode_object, subtitle, video, single=False):
 
     for script_name in sickbeard.SUBTITLES_EXTRA_SCRIPTS:
         script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()]
         script_cmd[0] = ek(os.path.abspath, script_cmd[0])
         logger.log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)
 
-        for subtitle in found_subtitles:
-            subtitle_path = subliminal.subtitle.get_subtitle_path(video.name, None if single else subtitle.language)
+        subtitle_path = subliminal.subtitle.get_subtitle_path(video.name, None if single else subtitle.language)
 
-            inner_cmd = script_cmd + [video.name, subtitle_path, subtitle.language.opensubtitles,
-                                      episode_object['show_name'], str(episode_object['season']),
-                                      str(episode_object['episode']), episode_object['name'],
-                                      str(episode_object['show_indexerid'])]
+        inner_cmd = script_cmd + [video.name, subtitle_path, subtitle.language.opensubtitles,
+                                  episode_object['show_name'], str(episode_object['season']),
+                                  str(episode_object['episode']), episode_object['name'],
+                                  str(episode_object['show_indexerid'])]
 
-            # use subprocess to run the command and capture output
-            logger.log(u"Executing command: %s" % inner_cmd)
-            try:
-                process = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                                           stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
-                out, _ = process.communicate()  # @UnusedVariable
-                logger.log(u"Script result: %s" % out, logger.DEBUG)
+        # use subprocess to run the command and capture output
+        logger.log(u"Executing command: %s" % inner_cmd)
+        try:
+            process = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                                       stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
+            out, _ = process.communicate()  # @UnusedVariable
+            logger.log(u"Script result: %s" % out, logger.DEBUG)
 
-            except Exception as error:
-                logger.log(u"Unable to run subs_extra_script: " + ex(error))
+        except Exception as error:
+            logger.log(u"Unable to run subs_extra_script: " + ex(error))
diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py
index 651dcb08f51f65b4de70f924360f8a308663205e..d1976b88e71e2f98d5e5070cf7fc0475841da077 100644
--- a/sickbeard/traktChecker.py
+++ b/sickbeard/traktChecker.py
@@ -28,11 +28,7 @@ from sickbeard import logger
 from sickbeard import helpers
 from sickbeard import search_queue
 from sickbeard import db
-from sickbeard.common import ARCHIVED
-from sickbeard.common import SKIPPED
-from sickbeard.common import UNKNOWN
-from sickbeard.common import WANTED
-from sickbeard.common import Quality
+from sickbeard.common import SKIPPED, UNKNOWN, WANTED, Quality
 from sickrage.helper.common import sanitize_filename
 from sickrage.helper.encoding import ek
 from sickrage.helper.exceptions import ex
@@ -221,7 +217,7 @@ class TraktChecker(object):
             logger.log(u"COLLECTION::ADD::START - Look for Episodes to Add to Trakt Collection", logger.DEBUG)
 
             myDB = db.DBConnection()
-            sql_selection = 'select tv_shows.indexer, tv_shows.startyear, showid, show_name, season, episode from tv_episodes,tv_shows where tv_shows.indexer_id = tv_episodes.showid and tv_episodes.status in (' + ','.join([str(x) for x in Quality.DOWNLOADED + [ARCHIVED]]) + ')'
+            sql_selection = 'select tv_shows.indexer, tv_shows.startyear, showid, show_name, season, episode from tv_episodes,tv_shows where tv_shows.indexer_id = tv_episodes.showid and tv_episodes.status in (' + ','.join([str(x) for x in Quality.DOWNLOADED + Quality.ARCHIVED]) + ')'
             episodes = myDB.select(sql_selection)
 
             if episodes is not None:
@@ -469,8 +465,7 @@ class TraktChecker(object):
                                                             quality=int(sickbeard.QUALITY_DEFAULT),
                                                             flatten_folders=int(sickbeard.FLATTEN_FOLDERS_DEFAULT),
                                                             paused=sickbeard.TRAKT_START_PAUSED,
-                                                            default_status_after=status,
-                                                            archive=sickbeard.ARCHIVE_DEFAULT)
+                                                            default_status_after=status)
             else:
                 logger.log(u"There was an error creating the show, no root directory setting found", logger.WARNING)
                 return
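The query change above widens the Trakt collection lookup from `Quality.DOWNLOADED + [ARCHIVED]` (one bare status) to `Quality.DOWNLOADED + Quality.ARCHIVED` (archived episodes at every quality). Those names are lists of composite status values, a status and a quality packed into one integer. The sketch below only illustrates the idea of building such an IN clause; the packing scheme and constants here are hypothetical, not SickRage's actual encoding.

```python
# Hypothetical composite-status scheme for illustration only.
DOWNLOADED_BASE, ARCHIVED_BASE = 4, 6        # stand-in base status codes
QUALITIES = [1, 2, 4, 8]                     # stand-in quality flags


def composite_status(base, quality):
    # Pack a base status and a quality into one integer (illustrative packing).
    return base + quality * 100


DOWNLOADED = [composite_status(DOWNLOADED_BASE, q) for q in QUALITIES]
ARCHIVED = [composite_status(ARCHIVED_BASE, q) for q in QUALITIES]

# The patch builds the IN clause from every downloaded *and* archived composite,
# instead of downloaded composites plus a single bare ARCHIVED status.
status_list = ','.join(str(x) for x in DOWNLOADED + ARCHIVED)
sql = "SELECT showid, season, episode FROM tv_episodes WHERE status IN (%s)" % status_list
print(sql)
```
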
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index f5d7e7021da27a1289c35c00743cf0c98e7813d3..56d2459ed95ec6e8c3f8a5701d85ea2ae27e85a3 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -96,7 +97,6 @@ class TVShow(object):
         self._air_by_date = 0
         self._subtitles = int(sickbeard.SUBTITLES_DEFAULT)
         self._dvdorder = 0
-        self._archive_firstmatch = 0
         self._lang = lang
         self._last_update_indexer = 1
         self._sports = 0
@@ -138,7 +138,6 @@ class TVShow(object):
     air_by_date = property(lambda self: self._air_by_date, dirty_setter("_air_by_date"))
     subtitles = property(lambda self: self._subtitles, dirty_setter("_subtitles"))
     dvdorder = property(lambda self: self._dvdorder, dirty_setter("_dvdorder"))
-    archive_firstmatch = property(lambda self: self._archive_firstmatch, dirty_setter("_archive_firstmatch"))
     lang = property(lambda self: self._lang, dirty_setter("_lang"))
     last_update_indexer = property(lambda self: self._last_update_indexer, dirty_setter("_last_update_indexer"))
     sports = property(lambda self: self._sports, dirty_setter("_sports"))
@@ -207,7 +206,7 @@ class TVShow(object):
         sql_selection = "SELECT season, episode, "
 
         # subselection to detect multi-episodes early, share_location > 0
-        sql_selection = sql_selection + " (SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid AND season = tve.season AND location != '' AND location = tve.location AND episode != tve.episode) AS share_location "
+        sql_selection += " (SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid AND season = tve.season AND location != '' AND location = tve.location AND episode != tve.episode) AS share_location "
 
         sql_selection = sql_selection + " FROM tv_episodes tve WHERE showid = " + str(self.indexerid)
 
@@ -215,10 +214,10 @@ class TVShow(object):
             sql_selection = sql_selection + " AND season = " + str(season)
 
         if has_location:
-            sql_selection = sql_selection + " AND location != '' "
+            sql_selection += " AND location != '' "
 
         # need ORDER episode ASC to rename multi-episodes in order S01E01-02
-        sql_selection = sql_selection + " ORDER BY season ASC, episode ASC"
+        sql_selection += " ORDER BY season ASC, episode ASC"
 
         myDB = db.DBConnection()
         results = myDB.select(sql_selection)
@@ -244,7 +243,6 @@ class TVShow(object):
 
         return ep_list
 
-
     def getEpisode(self, season=None, episode=None, file=None, noCreate=False, absolute_number=None, forceUpdate=False):
 
         # if we get an anime get the real season and episode
@@ -268,10 +266,10 @@ class TVShow(object):
                     logger.DEBUG)
                 return None
 
-        if not season in self.episodes:
+        if season not in self.episodes:
             self.episodes[season] = {}
 
-        if not episode in self.episodes[season] or self.episodes[season][episode] is None:
+        if episode not in self.episodes[season] or self.episodes[season][episode] is None:
             if noCreate:
                 return None
 
@@ -307,7 +305,7 @@ class TVShow(object):
 
         if sql_result and sql_result[0]['last_aired'] != 0:
             last_airdate = datetime.date.fromordinal(sql_result[0]['last_aired'])
-            if last_airdate >= (update_date - graceperiod) and last_airdate <= (update_date + graceperiod):
+            if (update_date - graceperiod) <= last_airdate <= (update_date + graceperiod):
                 return True
 
         # get next upcoming UNAIRED episode to compare against today + graceperiod
@@ -439,7 +437,7 @@ class TVShow(object):
             except (InvalidNameException, InvalidShowException):
                 pass
 
-            if not ' ' in ep_file_name and parse_result and parse_result.release_group:
+            if ' ' not in ep_file_name and parse_result and parse_result.release_group:
                 logger.log(
                     u"Name " + ep_file_name + u" gave release group of " + parse_result.release_group + ", seems valid",
                     logger.DEBUG)
@@ -456,12 +454,10 @@ class TVShow(object):
 
                 sql_l.append(curEpisode.get_sql())
 
-
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
 
-
     def loadEpisodesFromDB(self):
 
         logger.log(u"Loading all episodes from the DB", logger.DEBUG)
@@ -509,7 +505,7 @@ class TVShow(object):
                                (curShowid, error.message, sickbeard.indexerApi(self.indexer).name, curShowName), logger.DEBUG)
                     deleteEp = True
 
-            if not curSeason in scannedEps:
+            if curSeason not in scannedEps:
                 logger.log(u"%s: Not curSeason in scannedEps" % curShowid, logger.DEBUG)
                 scannedEps[curSeason] = {}
 
@@ -712,7 +708,6 @@ class TVShow(object):
                     with curEp.lock:
                         curEp.status = Quality.compositeStatus(DOWNLOADED, newQuality)
 
-
             # check for status/quality changes as long as it's a new file
             elif not same_file and sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + Quality.ARCHIVED + [IGNORED]:
                 oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status)
@@ -743,7 +738,7 @@ class TVShow(object):
                     with curEp.lock:
                         logger.log(u"STATUS: we have an associated file, so setting the status from " + str(
                             curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file, anime=self.is_anime)),
-                                   logger.DEBUG)
+                            logger.DEBUG)
                         curEp.status = Quality.compositeStatus(newStatus, newQuality)
 
             with curEp.lock:
@@ -753,7 +748,6 @@ class TVShow(object):
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
 
-
         # creating metafiles on the root should be good enough
         if rootEp:
             with rootEp.lock:
@@ -802,7 +796,6 @@ class TVShow(object):
             self.scene = int(sqlResults[0]["scene"] or 0)
             self.subtitles = int(sqlResults[0]["subtitles"] or 0)
             self.dvdorder = int(sqlResults[0]["dvdorder"] or 0)
-            self.archive_firstmatch = int(sqlResults[0]["archive_firstmatch"] or 0)
             self.quality = int(sqlResults[0]["quality"] or UNKNOWN)
             self.flatten_folders = int(sqlResults[0]["flatten_folders"] or 0)
             self.paused = int(sqlResults[0]["paused"] or 0)
@@ -871,7 +864,7 @@ class TVShow(object):
                 self.name = myEp['seriesname'].strip()
             except AttributeError:
                 raise sickbeard.indexer_attributenotfound(
-                    "Found %s, but attribute 'seriesname' was empty." % (self.indexerid))
+                    "Found %s, but attribute 'seriesname' was empty." % self.indexerid)
 
             self.classification = getattr(myEp, 'classification', 'Scripted')
             self.genre = getattr(myEp, 'genre', '')
@@ -1169,7 +1162,6 @@ class TVShow(object):
                         "sports": self.sports,
                         "subtitles": self.subtitles,
                         "dvdorder": self.dvdorder,
-                        "archive_firstmatch": self.archive_firstmatch,
                         "startyear": self.startyear,
                         "lang": self.lang,
                         "imdb_id": self.imdbid,
@@ -1212,11 +1204,10 @@ class TVShow(object):
         toReturn += "anime: " + str(self.is_anime) + "\n"
         return toReturn
 
-
     def qualitiesToString(self, qualities=[]):
         result = u''
         for quality in qualities:
-            if Quality.qualityStrings.has_key(quality):
+            if quality in Quality.qualityStrings:
                 result += Quality.qualityStrings[quality] + u', '
             else:
                 logger.log(u"Bad quality value: " + str(quality))
@@ -1228,19 +1219,18 @@ class TVShow(object):
 
         return result
 
-
     def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuality=False):
 
         logger.log(u"Checking if found episode %s S%02dE%02d is wanted at quality %s" % (self.name, season or 0, episode or 0, Quality.qualityStrings[quality]), logger.DEBUG)
 
         # if the quality isn't one we want under any circumstances then just say no
-        anyQualities, bestQualities = Quality.splitQuality(self.quality)
+        allowed_qualities, preferred_qualities = Quality.splitQuality(self.quality)
         logger.log(u"Any,Best = [ %s ] [ %s ] Found = [ %s ]" %
-                   (self.qualitiesToString(anyQualities),
-                    self.qualitiesToString(bestQualities),
+                   (self.qualitiesToString(allowed_qualities),
+                    self.qualitiesToString(preferred_qualities),
                     self.qualitiesToString([quality])), logger.DEBUG)
 
-        if quality not in anyQualities + bestQualities or quality is UNKNOWN:
+        if quality not in allowed_qualities + preferred_qualities or quality is UNKNOWN:
             logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG)
             return False
 
@@ -1275,8 +1265,8 @@ class TVShow(object):
                     logger.DEBUG)
                 return True
 
-        # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have, or we only have one bestQuality and we do not have that quality yet
-        if epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER and quality in bestQualities and (quality > curQuality or curQuality not in bestQualities):
+        # if we are re-downloading then we only want it if it's in our preferred_qualities list and better than what we have, or we only have one bestQuality and we do not have that quality yet
+        if epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER and quality in preferred_qualities and (quality > curQuality or curQuality not in preferred_qualities):
             logger.log(u"Episode already exists but the found episode quality is wanted more, getting found episode",
                        logger.DEBUG)
             return True
@@ -1310,20 +1300,18 @@ class TVShow(object):
         elif epStatus in Quality.SNATCHED_BEST:
             return Overview.SNATCHED_BEST
         elif epStatus in Quality.DOWNLOADED:
-            anyQualities, bestQualities = Quality.splitQuality(self.quality)  # @UnusedVariable
-            epStatus, curQuality = Quality.splitCompositeStatus(epStatus)
+            allowed_qualities, preferred_qualities = Quality.splitQuality(self.quality)  # @UnusedVariable
+            epStatus, cur_quality = Quality.splitCompositeStatus(epStatus)
 
-            if curQuality not in anyQualities + bestQualities:
-                if curQuality != Quality.UNKNOWN and (
-                    (anyQualities and curQuality > max(anyQualities)) or
-                    (bestQualities and curQuality > max(bestQualities))
+            if cur_quality not in allowed_qualities + preferred_qualities:
+                if cur_quality != Quality.UNKNOWN and (
+                    (allowed_qualities and cur_quality > max(allowed_qualities)) or
+                    (preferred_qualities and cur_quality > max(preferred_qualities))
                 ):
                     return Overview.GOOD
                 else:
                     return Overview.QUAL
-            elif self.archive_firstmatch:
-                return Overview.GOOD
-            elif bestQualities and curQuality not in bestQualities:
+            elif preferred_qualities and cur_quality not in preferred_qualities:
                 return Overview.QUAL
             else:
                 return Overview.GOOD
@@ -1431,8 +1419,7 @@ class TVEpisode(object):
             return
 
         if not subtitles.needs_subtitles(self.subtitles):
-            logger.log(u'Episode already has all needed subtitles, skipping  episode %dx%d of show %s'
-                         % (self.season or 0,  self.episode or 0, self.show.name), logger.DEBUG)
+            logger.log(u'Episode already has all needed subtitles, skipping episode %dx%d of show %s' % (self.season or 0, self.episode or 0, self.show.name), logger.DEBUG)
             return
 
         logger.log(u"Checking subtitle candidates for %s S%02dE%02d (%s)"
@@ -1459,6 +1446,8 @@ class TVEpisode(object):
             logger.log(u"%s: No subtitles downloaded for %s S%02dE%02d" %
                        (self.show.indexerid, self.show.name, self.season or 0, self.episode or 0), logger.DEBUG)
 
+        return new_subtitles
+
     def checkForMetaFiles(self):
 
         oldhasnfo = self.hasnfo
@@ -1627,7 +1616,7 @@ class TVEpisode(object):
             if self.name:
                 logger.log(u"" + sickbeard.indexerApi(
                     self.indexer).name + " timed out but we have enough info from other sources, allowing the error",
-                           logger.DEBUG)
+                    logger.DEBUG)
                 return
             else:
                 logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " timed out, unable to create the episode",
@@ -1650,7 +1639,7 @@ class TVEpisode(object):
             # return False
 
         if getattr(myEp, 'absolute_number', None) is None:
-            logger.log(u"%s: This episode %s - S%02dE%02d has no absolute number on %s" %(self.show.indexerid, self.show.name, season or 0, episode or 0, sickbeard.indexerApi(self.indexer).name), logger.DEBUG)
+            logger.log(u"%s: This episode %s - S%02dE%02d has no absolute number on %s" % (self.show.indexerid, self.show.name, season or 0, episode or 0, sickbeard.indexerApi(self.indexer).name), logger.DEBUG)
         else:
             logger.log(u"%s: The absolute_number for S%02dE%02d is: %s " % (self.show.indexerid, season or 0, episode or 0, myEp["absolute_number"]), logger.DEBUG)
             self.absolute_number = int(myEp["absolute_number"])
@@ -1713,7 +1702,7 @@ class TVEpisode(object):
             elif self.status in [UNAIRED, UNKNOWN]:
                 # Only do UNAIRED/UNKNOWN, it could already be snatched/ignored/skipped, or downloaded/archived to disconnected media
                 logger.log(u"Episode has already aired, marking it %s" % statusStrings[self.show.default_ep_status], logger.DEBUG)
-                self.status = self.show.default_ep_status if self.season > 0 else SKIPPED # auto-skip specials
+                self.status = self.show.default_ep_status if self.season > 0 else SKIPPED  # auto-skip specials
             else:
                 logger.log(u"Not touching status [ %s ] It could be skipped/ignored/snatched/archived" % statusStrings[self.status], logger.DEBUG)
 
@@ -2109,8 +2098,9 @@ class TVEpisode(object):
             show_name = self.show.name
 
         # try to get the release group
-        rel_grp = {}
-        rel_grp["SiCKRAGE"] = 'SiCKRAGE'
+        rel_grp = {
+            "SickRage": 'SickRage'
+        }
         if hasattr(self, 'location'):  # from the location name
             rel_grp['location'] = release_group(self.show, self.location)
             if not rel_grp['location']:
@@ -2131,7 +2121,8 @@ class TVEpisode(object):
             relgrp = 'release_name'
         elif 'location' in rel_grp:
             relgrp = 'location'
-        else: relgrp = 'SiCKRAGE'
+        else:
+            relgrp = 'SickRage'
 
         # try to get the release encoder to comply with scene naming standards
         encoder = Quality.sceneQualityFromName(self.release_name.replace(rel_grp[relgrp], ""), epQual)
@@ -2481,7 +2472,6 @@ class TVEpisode(object):
             proper_related_dir = ek(os.path.dirname, ek(os.path.abspath, absolute_proper_path + file_ext))
             proper_related_path = absolute_proper_path.replace(proper_related_dir, proper_related_dir + subfolder)
 
-
             cur_result = helpers.rename_ep_file(cur_related_file, proper_related_path,
                                                 absolute_current_path_no_ext_length + len(subfolder))
             if not cur_result:
@@ -2544,14 +2534,14 @@ class TVEpisode(object):
                        "' to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime), logger.DEBUG)
             try:
                 if helpers.touchFile(self.location, time.mktime(airdatetime)):
-                    logger.log(str(self.show.indexerid) + u": Changed modify date of " + ek(os.path.basename, self.location)
-                               + " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
+                    logger.log(str(self.show.indexerid) + u": Changed modify date of " + ek(os.path.basename, self.location) +
+                               " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
                 else:
-                    logger.log(str(self.show.indexerid) + u": Unable to modify date of " + ek(os.path.basename, self.location)
-                               + " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime), logger.WARNING)
+                    logger.log(str(self.show.indexerid) + u": Unable to modify date of " + ek(os.path.basename, self.location) +
+                               " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime), logger.WARNING)
             except Exception as e:
-                logger.log(str(self.show.indexerid) + u": Failed to modify date of '" + ek(os.path.basename, self.location)
-                           + "' to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime) + ". Error: %s" % ex(e), logger.WARNING)
+                logger.log(str(self.show.indexerid) + u": Failed to modify date of '" + ek(os.path.basename, self.location) +
+                           "' to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime) + ". Error: %s" % ex(e), logger.WARNING)
 
     def __getstate__(self):
         d = dict(self.__dict__)
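The `allowed_qualities` / `preferred_qualities` split introduced above drives the wantEpisode re-download rule: any quality in either list is acceptable for a missing episode, while an existing episode is only replaced by a preferred quality that improves on what is on disk. A condensed, standalone sketch of that decision is below; it uses plain integers in place of SickRage's quality constants and composite statuses, so treat it as an approximation of the logic shown in the hunk, not the real method.

```python
def want_quality(found, current, allowed, preferred, have_file):
    """Simplified wantEpisode rule: accept anything allowed/preferred when the
    episode is missing; otherwise only upgrade to a preferred quality that is
    better than the current one (or when the current quality is not preferred)."""
    if found not in allowed + preferred:
        return False                 # quality we never want under any circumstances
    if not have_file:
        return True                  # missing episode: any acceptable quality will do
    return found in preferred and (found > current or current not in preferred)


allowed, preferred = [10, 20], [40]  # illustrative values, e.g. SD allowed, 1080p preferred
print(want_quality(40, 20, allowed, preferred, have_file=True))   # True: upgrade to preferred
print(want_quality(20, 20, allowed, preferred, have_file=True))   # False: already have it
```
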
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 9ed63c1d1e0cec1e8df3ce3e7125f1fd3f64d5d2..eba44869940a1a2aed6de900b25dd78d22d101bc 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -200,7 +201,6 @@ class TVCache(object):
 
         return datetime.datetime.fromtimestamp(lastTime)
 
-
     def setLastUpdate(self, toDate=None):
         if not toDate:
             toDate = datetime.datetime.today()
@@ -288,7 +288,6 @@ class TVCache(object):
                 "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
                 [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
 
-
     def searchCache(self, episode, manualSearch=False, downCurQuality=False):
         neededEps = self.findNeededEpisodes(episode, manualSearch, downCurQuality)
         return neededEps[episode] if episode in neededEps else []
@@ -303,7 +302,6 @@ class TVCache(object):
         propers_results = myDB.select(sql)
         return [x for x in propers_results if x['indexerid']]
 
-
     def findNeededEpisodes(self, episode, manualSearch=False, downCurQuality=False):
         neededEps = {}
         cl = []
diff --git a/sickbeard/ui.py b/sickbeard/ui.py
index 7b62dac3f416e2d5f430a96dd3768cb0179464bb..60c9273df0698ab3918f15fb63d3c7420634e88c 100644
--- a/sickbeard/ui.py
+++ b/sickbeard/ui.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -22,6 +23,7 @@ import sickbeard
 MESSAGE = 'notice'
 ERROR = 'error'
 
+
 class Notifications(object):
     """
     A queue of Notification objects.
@@ -101,7 +103,6 @@ class Notification(object):
         """
         return datetime.datetime.now() - self._when > self._timeout
 
-
     def see(self, remote_ip='127.0.0.1'):
         """
         Returns this notification object and marks it as seen by the client ip
@@ -109,13 +110,15 @@ class Notification(object):
         self._seen.append(remote_ip)
         return self
 
-class ProgressIndicator():
+
+class ProgressIndicator(object):
 
     def __init__(self, percentComplete=0, currentStatus={'title': ''}):
         self.percentComplete = percentComplete
         self.currentStatus = currentStatus
 
-class ProgressIndicators():
+
+class ProgressIndicators(object):
     _pi = {'massUpdate': [],
            'massAdd': [],
            'dailyUpdate': []
@@ -138,7 +141,8 @@ class ProgressIndicators():
     def setIndicator(name, indicator):
         ProgressIndicators._pi[name].append(indicator)
 
-class QueueProgressIndicator():
+
+class QueueProgressIndicator(object):
     """
     A class used by the UI to show the progress of the queue or a part of it.
     """
@@ -156,7 +160,7 @@ class QueueProgressIndicator():
         return len([x for x in self.queueItemList if x.isInQueue()])
 
     def nextName(self):
-        for curItem in [sickbeard.showQueueScheduler.action.currentItem]+sickbeard.showQueueScheduler.action.queue:  # @UndefinedVariable
+        for curItem in [sickbeard.showQueueScheduler.action.currentItem] + sickbeard.showQueueScheduler.action.queue:  # @UndefinedVariable
             if curItem in self.queueItemList:
                 return curItem.name
 
@@ -169,9 +173,10 @@ class QueueProgressIndicator():
         if numTotal == 0:
             return 0
         else:
-            return int(float(numFinished)/float(numTotal)*100)
+            return int(float(numFinished) / float(numTotal) * 100)
+
 
-class LoadingTVShow():
+class LoadingTVShow(object):
     def __init__(self, dir):
         self.dir = dir
-        self.show = None
\ No newline at end of file
+        self.show = None
diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py
index 447cd761818f4e05bef21bb1c7a3ac712892d9de..e61f258868c26f42fd92c9f989931a0c1cf6e673 100644
--- a/sickbeard/versionChecker.py
+++ b/sickbeard/versionChecker.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -132,15 +133,17 @@ class CheckVersion(object):
 
         return True
 
-
     # TODO: Merge with backup in helpers
     @staticmethod
     def _backup(backupDir=None):
         if not backupDir:
             return False
-        source = [ek(os.path.join, sickbeard.DATA_DIR, 'sickbeard.db'), sickbeard.CONFIG_FILE]
-        source.append(ek(os.path.join, sickbeard.DATA_DIR, 'failed.db'))
-        source.append(ek(os.path.join, sickbeard.DATA_DIR, 'cache.db'))
+        source = [
+            ek(os.path.join, sickbeard.DATA_DIR, 'sickbeard.db'),
+            sickbeard.CONFIG_FILE,
+            ek(os.path.join, sickbeard.DATA_DIR, 'failed.db'),
+            ek(os.path.join, sickbeard.DATA_DIR, 'cache.db')
+        ]
         target = ek(os.path.join, backupDir, 'sickrage-' + time.strftime('%Y%m%d%H%M%S') + '.zip')
 
         for (path, dirs, files) in ek(os.walk, sickbeard.CACHE_DIR, topdown=True):
@@ -279,7 +282,6 @@ class CheckVersion(object):
         self.updater.set_newest_text()
         return True
 
-
     def check_for_new_news(self, force=False):
         """
         Checks GitHub for the latest news.
@@ -351,6 +353,7 @@ class UpdateManager(object):
     def get_update_url():
         return sickbeard.WEB_ROOT + "/home/update/?pid=" + str(sickbeard.PID)
 
+
 class GitUpdateManager(UpdateManager):
     def __init__(self):
         self._git_path = self._find_working_git()
@@ -374,7 +377,10 @@ class GitUpdateManager(UpdateManager):
         return self._run_git(self._git_path, "describe --abbrev=0 " + self._cur_commit_hash)[0]
 
     def get_newest_version(self):
-        return self._run_git(self._git_path, "describe --abbrev=0 " + self._newest_commit_hash)[0]
+        if self._newest_commit_hash:
+            return self._run_git(self._git_path, "describe --abbrev=0 " + self._newest_commit_hash)[0]
+        else:
+            return self._run_git(self._git_path, "describe --abbrev=0 " + self._cur_commit_hash)[0]
 
     def get_num_commits_behind(self):
         return self._num_commits_behind
@@ -403,7 +409,6 @@ class GitUpdateManager(UpdateManager):
 
         # trying alternatives
 
-
         alternative_git = []
 
         # osx people who start sr from launchd have a broken path, so try a hail-mary attempt for them
@@ -441,7 +446,7 @@ class GitUpdateManager(UpdateManager):
         if not git_path:
             logger.log(u"No git specified, can't use git commands", logger.WARNING)
             exit_status = 1
-            return (output, err, exit_status)
+            return output, err, exit_status
 
         cmd = git_path + ' ' + args
 
@@ -455,7 +460,6 @@ class GitUpdateManager(UpdateManager):
             if output:
                 output = output.strip()
 
-
         except OSError:
             logger.log(u"Command " + cmd + " didn't work")
             exit_status = 1
@@ -479,7 +483,7 @@ class GitUpdateManager(UpdateManager):
             logger.log(cmd + u" returned : " + str(output) + u", treat as error for now", logger.ERROR)
             exit_status = 1
 
-        return (output, err, exit_status)
+        return output, err, exit_status
 
     def _find_installed_version(self):
         """
@@ -684,6 +688,7 @@ class GitUpdateManager(UpdateManager):
         if sickbeard.GIT_USERNAME:
             self._run_git(self._git_path, 'config remote.%s.pushurl %s' % (sickbeard.GIT_REMOTE, sickbeard.GIT_REMOTE_URL.replace(sickbeard.GIT_ORG, sickbeard.GIT_USERNAME, 1)))
 
+
 class SourceUpdateManager(UpdateManager):
     def __init__(self):
         self.github_org = self.get_github_org()
@@ -774,8 +779,8 @@ class SourceUpdateManager(UpdateManager):
                 # when _cur_commit_hash doesn't match anything _num_commits_behind == 100
                 self._num_commits_behind += 1
 
-        logger.log(u"cur_commit = " + str(self._cur_commit_hash) + u", newest_commit = " + str(self._newest_commit_hash)
-                   + u", num_commits_behind = " + str(self._num_commits_behind), logger.DEBUG)
+        logger.log(u"cur_commit = " + str(self._cur_commit_hash) + u", newest_commit = " + str(self._newest_commit_hash) +
+                   u", num_commits_behind = " + str(self._num_commits_behind), logger.DEBUG)
 
     def set_newest_text(self):
 
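The `get_newest_version` change above guards against `_newest_commit_hash` being unset (for example before the first remote check has completed) by describing the current commit instead, so `git describe` never receives an empty hash. A tiny sketch of the same fallback pattern; `run_git` here is a stand-in helper, not the real `_run_git`.

```python
import subprocess


def run_git(args):
    """Stand-in for GitUpdateManager._run_git: run git and return stripped stdout."""
    out = subprocess.check_output(["git"] + args)
    return out.decode("utf-8").strip()


def newest_version(newest_commit_hash, cur_commit_hash):
    # Prefer the newest known remote commit, fall back to the local commit
    # so "describe" always gets a usable ref.
    commit = newest_commit_hash or cur_commit_hash
    return run_git(["describe", "--abbrev=0", commit])

# Usage (inside a git checkout): newest_version(None, "HEAD") describes the local commit.
```
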
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index 29cf2c25d8110b99ba98b0a616144bdaecdae8bb..8c6bb80caa8d3e6cd67048874a951de43fec0265 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -305,10 +305,10 @@ class ApiCall(ApiHandler):
                 for paramName in paramDict:
                     if paramName not in self._help[paramType]:
                         self._help[paramType][paramName] = {}
-                    if paramDict[paramName]["allowed_values"]:
-                        self._help[paramType][paramName]["allowed_values"] = paramDict[paramName]["allowed_values"]
+                    if paramDict[paramName]["allowedValues"]:
+                        self._help[paramType][paramName]["allowedValues"] = paramDict[paramName]["allowedValues"]
                     else:
-                        self._help[paramType][paramName]["allowed_values"] = "see desc"
+                        self._help[paramType][paramName]["allowedValues"] = "see desc"
                     self._help[paramType][paramName]["defaultValue"] = paramDict[paramName]["defaultValue"]
                     self._help[paramType][paramName]["type"] = paramDict[paramName]["type"]
 
@@ -362,7 +362,7 @@ class ApiCall(ApiHandler):
                 self._requiredParams.append(key)
             except AttributeError:
                 self._missing = []
-                self._requiredParams = {key: {"allowed_values": allowed_values,
+                self._requiredParams = {key: {"allowedValues": allowed_values,
                                               "defaultValue": org_default,
                                               "type": arg_type}}
 
@@ -370,11 +370,11 @@ class ApiCall(ApiHandler):
                 self._missing.append(key)
         else:
             try:
-                self._optionalParams[key] = {"allowed_values": allowed_values,
+                self._optionalParams[key] = {"allowedValues": allowed_values,
                                              "defaultValue": org_default,
                                              "type": arg_type}
             except AttributeError:
-                self._optionalParams = {key: {"allowed_values": allowed_values,
+                self._optionalParams = {key: {"allowedValues": allowed_values,
                                               "defaultValue": org_default,
                                               "type": arg_type}}
 
@@ -962,18 +962,13 @@ class CMD_SubtitleSearch(ApiCall):
         if isinstance(ep_obj, str):
             return _responds(RESULT_FAILURE, msg="Episode not found")
 
-        # try do download subtitles for that episode
-        previous_subtitles = ep_obj.subtitles
-
         try:
-            subtitles = ep_obj.download_subtitles()
+            new_subtitles = ep_obj.download_subtitles()
         except Exception:
             return _responds(RESULT_FAILURE, msg='Unable to find subtitles')
 
-        # return the correct json value
-        new_subtitles = frozenset(ep_obj.subtitles).difference(previous_subtitles)
         if new_subtitles:
-            new_languages = [subtitles.name_from_code(code) for code in new_subtitles]
+            new_languages = [sickbeard.subtitles.name_from_code(code) for code in new_subtitles]
             status = 'New subtitles downloaded: %s' % ', '.join(new_languages)
             response = _responds(RESULT_SUCCESS, msg='New subtitles found')
         else:
@@ -1902,7 +1897,9 @@ class CMD_Show(ApiCall):
             show_dict["rls_ignore_words"] = []
 
         show_dict["scene"] = (0, 1)[show_obj.scene]
-        show_dict["archive_firstmatch"] = (0, 1)[show_obj.archive_firstmatch]
+        # show_dict["archive_firstmatch"] = (0, 1)[show_obj.archive_firstmatch]
+        # This might need to be here for 3rd party apps?
+        show_dict["archive_firstmatch"] = 1
 
         show_dict["indexerid"] = show_obj.indexerid
         show_dict["tvdbid"] = helpers.mapIndexersToShow(show_obj)[1]
@@ -2010,9 +2007,10 @@ class CMD_ShowAddExisting(ApiCall):
             new_quality = Quality.combineQualities(i_quality_id, a_quality_id)
 
         sickbeard.showQueueScheduler.action.addShow(
-            int(indexer), int(self.indexerid), self.location, default_status=sickbeard.STATUS_DEFAULT,
-            quality=new_quality, flatten_folders=int(self.flatten_folders), subtitles=self.subtitles,
-            default_status_after=sickbeard.STATUS_DEFAULT_AFTER, archive=self.archive_firstmatch
+            int(indexer), int(self.indexerid), self.location,
+            default_status=sickbeard.STATUS_DEFAULT, quality=new_quality,
+            flatten_folders=int(self.flatten_folders), subtitles=self.subtitles,
+            default_status_after=sickbeard.STATUS_DEFAULT_AFTER
         )
 
         return _responds(RESULT_SUCCESS, {"name": indexer_name}, indexer_name + " has been queued to be added")
@@ -2036,9 +2034,6 @@ class CMD_ShowAddNew(ApiCall):
             "anime": {"desc": "True to mark the show as an anime, False otherwise"},
             "scene": {"desc": "True if episodes search should be made by scene numbering, False otherwise"},
             "future_status": {"desc": "The status of future episodes"},
-            "archive_firstmatch": {
-                "desc": "True if episodes should be archived when first match is downloaded, False otherwise"
-            },
         }
     }
 
@@ -2068,8 +2063,6 @@ class CMD_ShowAddNew(ApiCall):
                                              "bool", [])
         self.future_status, args = self.check_params(args, kwargs, "future_status", None, False, "string",
                                                      ["wanted", "skipped", "ignored"])
-        self.archive_firstmatch, args = self.check_params(args, kwargs, "archive_firstmatch",
-                                                          bool(sickbeard.ARCHIVE_DEFAULT), False, "bool", [])
 
         # super, missing, help
         ApiCall.__init__(self, args, kwargs)
@@ -2183,9 +2176,10 @@ class CMD_ShowAddNew(ApiCall):
                 helpers.chmodAsParent(show_path)
 
         sickbeard.showQueueScheduler.action.addShow(
-            int(indexer), int(self.indexerid), show_path, default_status=new_status, quality=new_quality,
-            flatten_folders=int(self.flatten_folders), lang=self.lang, subtitles=self.subtitles, anime=self.anime,
-            scene=self.scene, default_status_after=default_ep_status_after, archive=self.archive_firstmatch
+            int(indexer), int(self.indexerid), show_path, default_status=new_status,
+            quality=new_quality, flatten_folders=int(self.flatten_folders),
+            lang=self.lang, subtitles=self.subtitles, anime=self.anime,
+            scene=self.scene, default_status_after=default_ep_status_after
         )
 
         return _responds(RESULT_SUCCESS, {"name": indexer_name}, indexer_name + " has been queued to be added")
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 5d9a5c247e1351f9ba08ce13197c540490349b44..237b4f47fbba99cfa7e2a535c46b5271673cd2d5 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -282,7 +282,7 @@ class LoginHandler(BaseHandler):
     def get(self, *args, **kwargs):
 
         if self.get_current_user():
-            self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
+            self.redirect('/' + sickbeard.DEFAULT_PAGE + '/')
         else:
             t = PageTemplate(rh=self, filename="login.mako")
             self.finish(t.render(title="Login", header="Login", topmenu="login"))
@@ -308,7 +308,7 @@ class LoginHandler(BaseHandler):
         else:
             logger.log(u'User attempted a failed login to the SickRage web interface from IP: ' + self.request.remote_ip, logger.WARNING)
 
-        self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
+        self.redirect('/' + sickbeard.DEFAULT_PAGE + '/')
 
 
 class LogoutHandler(BaseHandler):
@@ -344,7 +344,7 @@ class WebRoot(WebHandler):
         super(WebRoot, self).__init__(*args, **kwargs)
 
     def index(self):
-        return self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
+        return self.redirect('/' + sickbeard.DEFAULT_PAGE + '/')
 
     def robots_txt(self):
         """ Keep web crawlers out """
@@ -1007,7 +1007,7 @@ class Home(WebRoot):
         data = {}
         size = 0
         for r in rows:
-            NotifyList = {'emails':'', 'prowlAPIs':''}
+            NotifyList = {'emails': '', 'prowlAPIs': ''}
             if r['notify_list'] and len(r['notify_list']) > 0:
                 # First, handle legacy format (emails only)
                 if not r['notify_list'][0] == '{':
@@ -1015,10 +1015,12 @@ class Home(WebRoot):
                 else:
                     NotifyList = dict(ast.literal_eval(r['notify_list']))
 
-            data[r['show_id']] = {'id': r['show_id'], 'name': r['show_name'],
-                                  'list': NotifyList['emails'],
-                                  'prowl_notify_list': NotifyList['prowlAPIs']
-                                 }
+            data[r['show_id']] = {
+                'id': r['show_id'],
+                'name': r['show_name'],
+                'list': NotifyList['emails'],
+                'prowl_notify_list': NotifyList['prowlAPIs']
+            }
             size += 1
         data['_size'] = size
         return json.dumps(data)
@@ -1026,7 +1028,7 @@ class Home(WebRoot):
     @staticmethod
     def saveShowNotifyList(show=None, emails=None, prowlAPIs=None):
 
-        entries = {'emails':'', 'prowlAPIs':''}
+        entries = {'emails': '', 'prowlAPIs': ''}
         myDB = db.DBConnection()
 
         # Get current data
@@ -1162,7 +1164,7 @@ class Home(WebRoot):
                 return self._genericMessage("Update Failed",
                                             "Update wasn't successful, not restarting. Check your log for more information.")
         else:
-            return self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
+            return self.redirect('/' + sickbeard.DEFAULT_PAGE + '/')
 
     def branchCheckout(self, branch):
         if sickbeard.BRANCH != branch:
@@ -1171,7 +1173,7 @@ class Home(WebRoot):
             return self.update(sickbeard.PID, branch)
         else:
             ui.notifications.message('Already on branch: ', branch)
-            return self.redirect('/' + sickbeard.DEFAULT_PAGE +'/')
+            return self.redirect('/' + sickbeard.DEFAULT_PAGE + '/')
 
     @staticmethod
     def getDBcompare():
@@ -1355,11 +1357,12 @@ class Home(WebRoot):
             out.append("S" + str(season) + ": " + ", ".join(names))
         return "<br>".join(out)
 
-    def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[],
-                 flatten_folders=None, paused=None, directCall=False, air_by_date=None, sports=None, dvdorder=None,
-                 indexerLang=None, subtitles=None, archive_firstmatch=None, rls_ignore_words=None,
-                 rls_require_words=None, anime=None, blacklist=None, whitelist=None,
-                 scene=None, defaultEpStatus=None, quality_preset=None):
+    def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[],
+                 exceptions_list=[], flatten_folders=None, paused=None, directCall=False,
+                 air_by_date=None, sports=None, dvdorder=None, indexerLang=None,
+                 subtitles=None, rls_ignore_words=None, rls_require_words=None,
+                 anime=None, blacklist=None, whitelist=None, scene=None,
+                 defaultEpStatus=None, quality_preset=None):
 
         anidb_failed = False
         if show is None:
@@ -1413,7 +1416,6 @@ class Home(WebRoot):
 
         flatten_folders = not config.checkbox_to_value(flatten_folders)  # UI inverts this value
         dvdorder = config.checkbox_to_value(dvdorder)
-        archive_firstmatch = config.checkbox_to_value(archive_firstmatch)
         paused = config.checkbox_to_value(paused)
         air_by_date = config.checkbox_to_value(air_by_date)
         scene = config.checkbox_to_value(scene)
@@ -1476,7 +1478,6 @@ class Home(WebRoot):
         with showObj.lock:
             newQuality = Quality.combineQualities([int(q) for q in anyQualities], [int(q) for q in bestQualities])
             showObj.quality = newQuality
-            showObj.archive_firstmatch = archive_firstmatch
 
             # reversed for now
             if bool(showObj.flatten_folders) != bool(flatten_folders):
@@ -1585,6 +1586,9 @@ class Home(WebRoot):
 
             time.sleep(cpu_presets[sickbeard.CPU_PRESET])
 
+        # Remove show from 'RECENT SHOWS' in 'Shows' menu
+        sickbeard.SHOWS_RECENT = [x for x in sickbeard.SHOWS_RECENT if x['indexerid'] != show.indexerid]
+
         # Don't redirect to the default page, so the user can confirm that the show was deleted
         return self.redirect('/home/')
 
@@ -1940,7 +1944,7 @@ class Home(WebRoot):
         else:
             return json.dumps({'result': 'failure'})
 
-    ### Returns the current ep_queue_item status for the current viewed show.
+    # ## Returns the current ep_queue_item status for the currently viewed show.
     # Possible status: Downloaded, Snatched, etc...
     # Returns {'show': 279530, 'episodes' : ['episode' : 6, 'season' : 1, 'searchstatus' : 'queued', 'status' : 'running', 'quality': '4013']
     def getManualSearchStatus(self, show=None):
@@ -2002,7 +2006,7 @@ class Home(WebRoot):
                 if not [x for x in episodes if x['episodeindexid'] == searchThread.segment.indexerid]:
                     episodes += getEpisodes(searchThread, searchstatus)
             else:
-                ### These are only Failed Downloads/Retry SearchThreadItems.. lets loop through the segement/episodes
+                # ## These are only Failed Downloads/Retry SearchThreadItems. Let's loop through the segment/episodes
                 if not [i for i, j in zip(searchThread.segment, episodes) if i.indexerid == j['episodeindexid']]:
                     episodes += getEpisodes(searchThread, searchstatus)
 
@@ -2027,22 +2031,19 @@ class Home(WebRoot):
         if isinstance(ep_obj, str):
             return json.dumps({'result': 'failure'})
 
-        # try do download subtitles for that episode
-        previous_subtitles = ep_obj.subtitles
         try:
-            ep_obj.download_subtitles()
+            new_subtitles = ep_obj.download_subtitles()  # pylint: disable=no-member
         except Exception:
             return json.dumps({'result': 'failure'})
 
-        # return the correct json value
-        new_subtitles = frozenset(ep_obj.subtitles).difference(previous_subtitles)
         if new_subtitles:
             new_languages = [subtitles.name_from_code(code) for code in new_subtitles]
             status = 'New subtitles downloaded: %s' % ', '.join(new_languages)
         else:
             status = 'No subtitles downloaded'
-        ui.notifications.message(ep_obj.show.name, status)
-        return json.dumps({'result': status, 'subtitles': ','.join(ep_obj.subtitles)})
+
+        ui.notifications.message(ep_obj.show.name, status)  # pylint: disable=no-member
+        return json.dumps({'result': status, 'subtitles': ','.join(ep_obj.subtitles)})  # pylint: disable=no-member
 
     def setSceneNumbering(self, show, indexer, forSeason=None, forEpisode=None, forAbsolute=None, sceneSeason=None,
                           sceneEpisode=None, sceneAbsolute=None):
@@ -2473,6 +2474,10 @@ class HomeAddShows(Home):
             page_title = "Most Played Shows"
         elif traktList == "recommended":
             page_title = "Recommended Shows"
+        elif traktList == "newshow":
+            page_title = "New Shows"
+        elif traktList == "newseason":
+            page_title = "Season Premieres"
         else:
             page_title = "Most Anticipated Shows"
 
@@ -2505,6 +2510,10 @@ class HomeAddShows(Home):
             page_url = "shows/played"
         elif traktList == "recommended":
             page_url = "recommendations/shows"
+        elif traktList == "newshow":
+            page_url = 'calendars/all/shows/new/%s/30' % datetime.date.today().strftime("%Y-%m-%d")
+        elif traktList == "newseason":
+            page_url = 'calendars/all/shows/premieres/%s/30' % datetime.date.today().strftime("%Y-%m-%d")
         else:
             page_url = "shows/anticipated"
 
@@ -2521,8 +2530,8 @@ class HomeAddShows(Home):
                 else:
                     logger.log(u"Trakt blacklist name is empty", logger.DEBUG)
 
-            if traktList != "recommended":
-                limit_show = "?limit=" + str(50 + len(not_liked_show)) + "&"
+            if traktList not in ["recommended", "newshow", "newseason"]:
+                limit_show = "?limit=" + str(100 + len(not_liked_show)) + "&"
             else:
                 limit_show = "?"
 
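The two new Trakt lists added above ("newshow" and "newseason") map to calendar endpoints that take a start date and a day span rather than the plain list endpoints. A quick sketch of how those page URLs are composed, covering only the string building and none of the actual Trakt request handling; the function name is illustrative.

```python
import datetime


def trakt_page_url(trakt_list):
    """Mirror of the new URL mapping: calendar endpoints start today and span 30 days."""
    today = datetime.date.today().strftime("%Y-%m-%d")
    urls = {
        "newshow": "calendars/all/shows/new/%s/30" % today,
        "newseason": "calendars/all/shows/premieres/%s/30" % today,
        "recommended": "recommendations/shows",
    }
    return urls.get(trakt_list, "shows/anticipated")


print(trakt_page_url("newseason"))   # e.g. calendars/all/shows/premieres/2016-01-15/30
```
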
@@ -2645,7 +2654,6 @@ class HomeAddShows(Home):
             anime=sickbeard.ANIME_DEFAULT,
             scene=sickbeard.SCENE_DEFAULT,
             default_status_after=sickbeard.STATUS_DEFAULT_AFTER,
-            archive=sickbeard.ARCHIVE_DEFAULT
         )
 
         ui.notifications.message('Show added', 'Adding the specified show into ' + show_dir)
@@ -2656,7 +2664,7 @@ class HomeAddShows(Home):
     def addNewShow(self, whichSeries=None, indexerLang=None, rootDir=None, defaultStatus=None,
                    quality_preset=None, anyQualities=None, bestQualities=None, flatten_folders=None, subtitles=None,
                    fullShowPath=None, other_shows=None, skipShow=None, providedIndexer=None, anime=None,
-                   scene=None, blacklist=None, whitelist=None, defaultStatusAfter=None, archive=None):
+                   scene=None, blacklist=None, whitelist=None, defaultStatusAfter=None):
         """
         Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are
         provided then it forwards back to newShow, if not it goes to /home.
@@ -2744,7 +2752,6 @@ class HomeAddShows(Home):
         anime = config.checkbox_to_value(anime)
         flatten_folders = config.checkbox_to_value(flatten_folders)
         subtitles = config.checkbox_to_value(subtitles)
-        archive = config.checkbox_to_value(archive)
 
         if whitelist:
             whitelist = short_group_names(whitelist)
@@ -2764,7 +2771,7 @@ class HomeAddShows(Home):
         # add the show
         sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, int(defaultStatus), newQuality,
                                                     flatten_folders, indexerLang, subtitles, anime,
-                                                    scene, None, blacklist, whitelist, int(defaultStatusAfter), archive)
+                                                    scene, None, blacklist, whitelist, int(defaultStatusAfter))
         ui.notifications.message('Show added', 'Adding the specified show into ' + show_dir)
 
         return finishAddShow()
@@ -2829,15 +2836,16 @@ class HomeAddShows(Home):
 
             if indexer is not None and indexer_id is not None:
                 # add the show
-                sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir,
-                                                            default_status=sickbeard.STATUS_DEFAULT,
-                                                            quality=sickbeard.QUALITY_DEFAULT,
-                                                            flatten_folders=sickbeard.FLATTEN_FOLDERS_DEFAULT,
-                                                            subtitles=sickbeard.SUBTITLES_DEFAULT,
-                                                            anime=sickbeard.ANIME_DEFAULT,
-                                                            scene=sickbeard.SCENE_DEFAULT,
-                                                            default_status_after=sickbeard.STATUS_DEFAULT_AFTER,
-                                                            archive=sickbeard.ARCHIVE_DEFAULT)
+                sickbeard.showQueueScheduler.action.addShow(
+                    indexer, indexer_id, show_dir,
+                    default_status=sickbeard.STATUS_DEFAULT,
+                    quality=sickbeard.QUALITY_DEFAULT,
+                    flatten_folders=sickbeard.FLATTEN_FOLDERS_DEFAULT,
+                    subtitles=sickbeard.SUBTITLES_DEFAULT,
+                    anime=sickbeard.ANIME_DEFAULT,
+                    scene=sickbeard.SCENE_DEFAULT,
+                    default_status_after=sickbeard.STATUS_DEFAULT_AFTER
+                )
                 num_added += 1
 
         if num_added:
@@ -3130,9 +3138,6 @@ class Manage(Home, WebRoot):
                 showList.append(showObj)
                 showNames.append(showObj.name)
 
-        archive_firstmatch_all_same = True
-        last_archive_firstmatch = None
-
         flatten_folders_all_same = True
         last_flatten_folders = None
 
@@ -3168,13 +3173,6 @@ class Manage(Home, WebRoot):
             if cur_root_dir not in root_dir_list:
                 root_dir_list.append(cur_root_dir)
 
-            if archive_firstmatch_all_same:
-                # if we had a value already and this value is different then they're not all the same
-                if last_archive_firstmatch not in (None, curShow.archive_firstmatch):
-                    archive_firstmatch_all_same = False
-                else:
-                    last_archive_firstmatch = curShow.archive_firstmatch
-
             # if we know they're not all the same then no point even bothering
             if paused_all_same:
                 # if we had a value already and this value is different then they're not all the same
@@ -3232,7 +3230,6 @@ class Manage(Home, WebRoot):
                 else:
                     last_air_by_date = curShow.air_by_date
 
-        archive_firstmatch_value = last_archive_firstmatch if archive_firstmatch_all_same else None
         default_ep_status_value = last_default_ep_status if default_ep_status_all_same else None
         paused_value = last_paused if paused_all_same else None
         anime_value = last_anime if anime_all_same else None
@@ -3244,12 +3241,12 @@ class Manage(Home, WebRoot):
         air_by_date_value = last_air_by_date if air_by_date_all_same else None
         root_dir_list = root_dir_list
 
-        return t.render(showList=toEdit, showNames=showNames, archive_firstmatch_value=archive_firstmatch_value, default_ep_status_value=default_ep_status_value,
+        return t.render(showList=toEdit, showNames=showNames, default_ep_status_value=default_ep_status_value,
                         paused_value=paused_value, anime_value=anime_value, flatten_folders_value=flatten_folders_value,
                         quality_value=quality_value, subtitles_value=subtitles_value, scene_value=scene_value, sports_value=sports_value,
                         air_by_date_value=air_by_date_value, root_dir_list=root_dir_list, title='Mass Edit', header='Mass Edit', topmenu='manage')
 
-    def massEditSubmit(self, archive_firstmatch=None, paused=None, default_ep_status=None,
+    def massEditSubmit(self, paused=None, default_ep_status=None,
                        anime=None, sports=None, scene=None, flatten_folders=None, quality_preset=None,
                        subtitles=None, air_by_date=None, anyQualities=[], bestQualities=[], toEdit=None, *args,
                        **kwargs):
@@ -3278,12 +3275,6 @@ class Manage(Home, WebRoot):
             else:
                 new_show_dir = showObj._location  # pylint: disable=protected-access
 
-            if archive_firstmatch == 'keep':
-                new_archive_firstmatch = showObj.archive_firstmatch
-            else:
-                new_archive_firstmatch = True if archive_firstmatch == 'enable' else False
-            new_archive_firstmatch = 'on' if new_archive_firstmatch else 'off'
-
             if paused == 'keep':
                 new_paused = showObj.paused
             else:
@@ -3342,7 +3333,6 @@ class Manage(Home, WebRoot):
             curErrors += self.editShow(curShow, new_show_dir, anyQualities,
                                        bestQualities, exceptions_list,
                                        defaultEpStatus=new_default_ep_status,
-                                       archive_firstmatch=new_archive_firstmatch,
                                        flatten_folders=new_flatten_folders,
                                        paused=new_paused, sports=new_sports,
                                        subtitles=new_subtitles, anime=new_anime,
@@ -3722,7 +3712,7 @@ class ConfigGeneral(Config):
 
     @staticmethod
     def saveAddShowDefaults(defaultStatus, anyQualities, bestQualities, defaultFlattenFolders, subtitles=False,
-                            anime=False, scene=False, defaultStatusAfter=WANTED, archive=False):
+                            anime=False, scene=False, defaultStatusAfter=WANTED):
 
         if anyQualities:
             anyQualities = anyQualities.split(',')
@@ -3744,12 +3734,11 @@ class ConfigGeneral(Config):
         sickbeard.SUBTITLES_DEFAULT = config.checkbox_to_value(subtitles)
 
         sickbeard.ANIME_DEFAULT = config.checkbox_to_value(anime)
-        sickbeard.SCENE_DEFAULT = config.checkbox_to_value(scene)
-        sickbeard.ARCHIVE_DEFAULT = config.checkbox_to_value(archive)
 
+        sickbeard.SCENE_DEFAULT = config.checkbox_to_value(scene)
         sickbeard.save_config()
 
-    def saveGeneral(self, log_dir=None, log_nr=5, log_size=1048576, web_port=None, notify_on_login=None, web_log=None, encryption_version=None, web_ipv6=None,
+    def saveGeneral(self, log_dir=None, log_nr=5, log_size=1, web_port=None, notify_on_login=None, web_log=None, encryption_version=None, web_ipv6=None,
                     trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, skip_removed_files=None,
                     indexerDefaultLang='en', ep_default_deleted_status=None, launch_browser=None, showupdate_hour=3, web_username=None,
                     api_key=None, indexer_default=None, timezone_display=None, cpu_preset='NORMAL',
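
The massEditSubmit handler resolves each tri-state form field against the show's current setting: 'keep' preserves the existing value, while 'enable'/'disable' map to True/False (the deleted archive_firstmatch block followed the same pattern and additionally converted the result to 'on'/'off' before calling editShow). A minimal sketch of that pattern, using a hypothetical helper name that is not part of SickRage:

    # Hypothetical helper illustrating the 'keep'/'enable'/'disable' resolution
    # still used for paused, anime, scene, etc. in massEditSubmit.
    def resolve_tristate(form_value, current_value):
        if form_value == 'keep':
            return current_value
        return form_value == 'enable'

    # e.g. new_paused = resolve_tristate(paused, showObj.paused)
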
diff --git a/sickbeard/webserveInit.py b/sickbeard/webserveInit.py
index dd709e2e3bd87f7b7769725ae9691b138da33d4b..fcfa1ff7ff515c209c13cab3793e54dda54b2aeb 100644
--- a/sickbeard/webserveInit.py
+++ b/sickbeard/webserveInit.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 import os
 import threading
 import sickbeard
@@ -13,6 +14,7 @@ from tornado.httpserver import HTTPServer
 from tornado.ioloop import IOLoop
 from tornado.routes import route
 
+
 class SRWebServer(threading.Thread):
     def __init__(self, options={}, io_loop=None):
         threading.Thread.__init__(self)
@@ -55,7 +57,7 @@ class SRWebServer(threading.Thread):
         if self.enable_https:
             # If either the HTTPS certificate or key do not exist, make some self-signed ones.
             if not (self.https_cert and ek(os.path.exists, self.https_cert)) or not (
-                        self.https_key and ek(os.path.exists, self.https_key)):
+                    self.https_key and ek(os.path.exists, self.https_key)):
                 if not create_https_certificates(self.https_cert, self.https_key):
                     logger.log(u"Unable to create CERT/KEY files, disabling HTTPS")
                     sickbeard.ENABLE_HTTPS = False
@@ -67,13 +69,14 @@ class SRWebServer(threading.Thread):
                 self.enable_https = False
 
         # Load the app
-        self.app = Application([],
-                                 debug=True,
-                                 autoreload=False,
-                                 gzip=sickbeard.WEB_USE_GZIP,
-                                 xheaders=sickbeard.HANDLE_REVERSE_PROXY,
-                                 cookie_secret=sickbeard.WEB_COOKIE_SECRET,
-                                 login_url='%s/login/' % self.options['web_root'],
+        self.app = Application(
+            [],
+            debug=True,
+            autoreload=False,
+            gzip=sickbeard.WEB_USE_GZIP,
+            xheaders=sickbeard.HANDLE_REVERSE_PROXY,
+            cookie_secret=sickbeard.WEB_COOKIE_SECRET,
+            login_url='%s/login/' % self.options['web_root'],
         )
 
         # Main Handlers
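
The Application(...) call above is only reflowed to one keyword per line; the settings themselves are unchanged. A minimal sketch of constructing a tornado.web.Application in the same style, with placeholder values rather than SickRage's real configuration:

    from tornado.web import Application

    # Placeholder settings only; SickRage pulls these from its own config.
    app = Application(
        [],                        # handlers are registered separately
        debug=True,
        autoreload=False,
        gzip=True,                 # newer Tornado releases prefer compress_response
        xheaders=False,
        cookie_secret='change-me',
        login_url='/login/',
    )
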
diff --git a/sickrage/__init__.py b/sickrage/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..9bad5790a5799b96f2e164d825c0b1f8ec0c2dfb 100644
--- a/sickrage/__init__.py
+++ b/sickrage/__init__.py
@@ -0,0 +1 @@
+# coding=utf-8
diff --git a/sickrage/helper/__init__.py b/sickrage/helper/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..9bad5790a5799b96f2e164d825c0b1f8ec0c2dfb 100644
--- a/sickrage/helper/__init__.py
+++ b/sickrage/helper/__init__.py
@@ -0,0 +1 @@
+# coding=utf-8
diff --git a/sickrage/helper/common.py b/sickrage/helper/common.py
index 5d99733f28da1e536a63aeb7c03a1268c911ad85..5fad42bfd4a434c039076ea04a5d31e225c1d0a0 100644
--- a/sickrage/helper/common.py
+++ b/sickrage/helper/common.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -162,19 +163,16 @@ def pretty_file_size(size):
     :param size: The size to convert
     :return: The converted size
     """
-    if isinstance(size, (str, unicode)) and size.isdigit():
-        size = float(size)
-    elif not isinstance(size, (int, long, float)):
-        return ''
+    try:
+        size = max(float(size), 0.)
+    except (ValueError, TypeError):
+        size = 0.
 
     remaining_size = size
-
     for unit in ['B', 'KB', 'MB', 'GB', 'TB', 'PB']:
         if remaining_size < 1024.:
             return '%3.2f %s' % (remaining_size, unit)
-
         remaining_size /= 1024.
-
     return size
 
 
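The rewritten pretty_file_size clamps anything that cannot be parsed as a non-negative number to zero and then walks the unit list; values of 1024 PB or more still fall through and are returned unformatted. A standalone sketch of the new behaviour (the real helper lives in sickrage/helper/common.py):

    def pretty_file_size(size):
        try:
            size = max(float(size), 0.)
        except (ValueError, TypeError):
            size = 0.
        remaining_size = size
        for unit in ['B', 'KB', 'MB', 'GB', 'TB', 'PB']:
            if remaining_size < 1024.:
                return '%3.2f %s' % (remaining_size, unit)
            remaining_size /= 1024.
        return size  # >= 1024 PB: returned as a bare number, matching the code above

    # pretty_file_size(None) == '0.00 B'; pretty_file_size('1024') == '1.00 KB'

The updated expectations in tests/sickrage_tests/helper/common_tests.py further down reflect this: '' and None now give '0.00 B', and negative values are clamped to zero.
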
diff --git a/sickrage/helper/encoding.py b/sickrage/helper/encoding.py
index 5c0908901a7db2def9ea8273f21286ae4e98feb3..e199b3d61157ff9873c4e5b77643c73bad34ee52 100644
--- a/sickrage/helper/encoding.py
+++ b/sickrage/helper/encoding.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: https://sickrage.github.io
 # Git: https://github.com/SickRage/SickRage.git
diff --git a/sickrage/helper/exceptions.py b/sickrage/helper/exceptions.py
index 746b90a87c8842494128935eefeb09ddf8fd5e04..560e27af7ad1c8a4e425bcf7d2e559bf3e08928b 100644
--- a/sickrage/helper/exceptions.py
+++ b/sickrage/helper/exceptions.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
diff --git a/sickrage/helper/quality.py b/sickrage/helper/quality.py
index 1834c37f09cd068caa71cefbc51ff4cb14157a93..85002db566f4370486d61ab821da6faa6c4bac08 100644
--- a/sickrage/helper/quality.py
+++ b/sickrage/helper/quality.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
diff --git a/sickrage/media/GenericMedia.py b/sickrage/media/GenericMedia.py
index 968aac52cb74997069d78048ec7e9e5bf1d85867..b3d56e2ac7fb11a70ae4a8c0563f59843c74dbf1 100644
--- a/sickrage/media/GenericMedia.py
+++ b/sickrage/media/GenericMedia.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -27,7 +28,7 @@ from sickrage.helper.exceptions import MultipleShowObjectsException
 from sickrage.show.Show import Show
 
 
-class GenericMedia:
+class GenericMedia(object):
     def __init__(self, indexer_id, media_format='normal'):
         """
         :param indexer_id: The indexer id of the show
diff --git a/sickrage/media/ShowBanner.py b/sickrage/media/ShowBanner.py
index 5ecbef9ad26e9ac6b41f65b140f57934c67864ef..ac0427514819bc73046a1d8b3cb281065f91a80e 100644
--- a/sickrage/media/ShowBanner.py
+++ b/sickrage/media/ShowBanner.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
diff --git a/sickrage/media/ShowFanArt.py b/sickrage/media/ShowFanArt.py
index 62a588173480fc232aeaf123ec2ea03ebd63c6c4..36e0b1cc5ab68db5304b42f1b9850e4908c3ca17 100644
--- a/sickrage/media/ShowFanArt.py
+++ b/sickrage/media/ShowFanArt.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
diff --git a/sickrage/media/ShowNetworkLogo.py b/sickrage/media/ShowNetworkLogo.py
index 1a4f4c2d3dbfbf1bdf4118a832b27e54ad616a18..59910b32051f5ec8a02ee24e686a4e460fd7d7ac 100644
--- a/sickrage/media/ShowNetworkLogo.py
+++ b/sickrage/media/ShowNetworkLogo.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
diff --git a/sickrage/media/ShowPoster.py b/sickrage/media/ShowPoster.py
index 6039fbb4794c543fb55d5e73be41d93210c132d0..86629649140b0760616a47e83577cecffc61da07 100644
--- a/sickrage/media/ShowPoster.py
+++ b/sickrage/media/ShowPoster.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
diff --git a/sickrage/media/__init__.py b/sickrage/media/__init__.py
index cb64e75194e3d334bf711bbfd3fd3cc3dc5b6871..4ca69730f8248471685f1f6fc8a5eaeedff45088 100644
--- a/sickrage/media/__init__.py
+++ b/sickrage/media/__init__.py
@@ -1 +1,2 @@
+# coding=utf-8
 __all__ = ['ShowBanner', 'ShowFanArt', 'ShowNetworkLogo', 'ShowPoster']
diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py
index 1fed336152ef2c85a92f8f90582b2593ff92ac45..9ae4ee51296deaac7eded60babb5469f80611a85 100644
--- a/sickrage/providers/GenericProvider.py
+++ b/sickrage/providers/GenericProvider.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -27,7 +28,7 @@ from random import shuffle
 from requests import Session
 from sickbeard import logger
 from sickbeard.classes import Proper, SearchResult
-from sickbeard.common import MULTI_EP_RESULT, Quality, SEASON_RESULT, user_agents
+from sickbeard.common import MULTI_EP_RESULT, Quality, SEASON_RESULT, UA_POOL
 from sickbeard.db import DBConnection
 from sickbeard.helpers import download_file, getURL, remove_file_failed
 from sickbeard.name_parser.parser import InvalidNameException, InvalidShowException, NameParser
@@ -43,8 +44,6 @@ class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
     TORRENT = 'torrent'
 
     def __init__(self, name):
-        shuffle(user_agents)
-
         self.name = name
 
         self.anime_only = False
@@ -59,9 +58,7 @@ class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
         self.enable_backlog = False
         self.enable_daily = False
         self.enabled = False
-        self.headers = {
-            'User-Agent': user_agents[0]
-        }
+        self.headers = {'User-Agent': UA_POOL.random}
         self.proper_strings = ['PROPER|REPACK|REAL']
         self.provider_type = None
         self.public = False
@@ -213,9 +210,9 @@ class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
                         )
                         add_cache_entry = True
 
-                    if len(parse_result.episode_numbers) and \
-                       (parse_result.season_number not in set([ep.season for ep in episodes]) or
-                        not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
+                    if len(parse_result.episode_numbers) and (
+                            parse_result.season_number not in set([ep.season for ep in episodes]) or
+                            not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                         logger.log(
                             u'The result %s doesn\'t seem to be a valid episode that we are trying to snatch, ignoring' % title,
                             logger.DEBUG)
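
Instead of shuffling a static user_agents list once per provider, each GenericProvider now asks UA_POOL (defined in sickbeard.common, not shown in this diff) for a random User-Agent via its random attribute. A hedged stand-in that only mimics the .random property used above; it is not SickRage's implementation:

    import random

    class _FakeUAPool(object):
        """Stand-in for sickbeard.common.UA_POOL; only the .random property matters here."""
        _agents = [
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64)',
            'Mozilla/5.0 (X11; Linux x86_64)',
        ]

        @property
        def random(self):
            return random.choice(self._agents)

    headers = {'User-Agent': _FakeUAPool().random}
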
diff --git a/sickrage/providers/__init__.py b/sickrage/providers/__init__.py
index a9a2c5b3bb437bff74e283b62c894075e8c15331..4eb43219ca97e9c23699aa31f643f27c7f58986e 100644
--- a/sickrage/providers/__init__.py
+++ b/sickrage/providers/__init__.py
@@ -1 +1,2 @@
+# coding=utf-8
 __all__ = []
diff --git a/sickrage/providers/NZBProvider.py b/sickrage/providers/nzb/NZBProvider.py
similarity index 99%
rename from sickrage/providers/NZBProvider.py
rename to sickrage/providers/nzb/NZBProvider.py
index 3461e03bbe3f310ae0a60b65ce05dfc5734ba3e8..89d3f15d0942bd08df69eab1345f4d9300428290 100644
--- a/sickrage/providers/NZBProvider.py
+++ b/sickrage/providers/nzb/NZBProvider.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
diff --git a/sickrage/providers/nzb/__init__.py b/sickrage/providers/nzb/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sickrage/providers/TorrentProvider.py b/sickrage/providers/torrent/TorrentProvider.py
similarity index 88%
rename from sickrage/providers/TorrentProvider.py
rename to sickrage/providers/torrent/TorrentProvider.py
index 12fd59fe932ea40aa1729a1f70264ad846f23f35..85053687253e5a6c6e3bbe7d9dbc244dcd543b1a 100644
--- a/sickrage/providers/TorrentProvider.py
+++ b/sickrage/providers/torrent/TorrentProvider.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -42,11 +43,11 @@ class TorrentProvider(GenericProvider):
         db = DBConnection()
         placeholder = ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST])
         sql_results = db.select(
-                'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
-                ' FROM tv_episodes AS e'
-                ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
-                ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
-                ' AND e.status IN (' + placeholder + ')'
+            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
+            ' FROM tv_episodes AS e'
+            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
+            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+            ' AND e.status IN (' + placeholder + ')'
         )
 
         for result in sql_results or []:
@@ -70,13 +71,10 @@ class TorrentProvider(GenericProvider):
 
     @property
     def _custom_trackers(self):
-        if sickbeard.TRACKERS_LIST:
-            if not self.public:
-                return ''
+        if not (sickbeard.TRACKERS_LIST and self.public):
+            return ''
 
-            return '&tr=' + '&tr='.join(set([x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()]))
-
-        return ''
+        return '&tr=' + '&tr='.join({x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()})
 
     def _get_result(self, episodes):
         return TorrentSearchResult(episodes)
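
_custom_trackers now returns an empty string unless both TRACKERS_LIST and the provider's public flag are set, and de-duplicates trackers with a set comprehension. A small illustration with made-up tracker URLs:

    # Mirrors the join in _custom_trackers; tracker URLs here are examples only.
    trackers_list = 'udp://a.example:80, udp://a.example:80 , udp://b.example:1337'
    custom = '&tr=' + '&tr='.join({x.strip() for x in trackers_list.split(',') if x.strip()})
    # e.g. '&tr=udp://a.example:80&tr=udp://b.example:1337' (set iteration order is not guaranteed)
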
diff --git a/sickrage/providers/torrent/__init__.py b/sickrage/providers/torrent/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sickrage/show/ComingEpisodes.py b/sickrage/show/ComingEpisodes.py
index 55a5f6292445ed168c3598c3ae165a2d0b96c4ec..976fc780467324b361c9d79230b4ef89213546f6 100644
--- a/sickrage/show/ComingEpisodes.py
+++ b/sickrage/show/ComingEpisodes.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -27,7 +28,7 @@ from sickrage.helper.common import dateFormat, timeFormat
 from sickrage.helper.quality import get_quality_string
 
 
-class ComingEpisodes:
+class ComingEpisodes(object):
     """
     Missed:   yesterday...(less than 1 week)
     Today:    today
diff --git a/sickrage/show/History.py b/sickrage/show/History.py
index b46d0024e10fc0a617af1a078d49cb7d82557bf2..35a372f848dc3fecd27d77ba8d54eb9f597b0264 100644
--- a/sickrage/show/History.py
+++ b/sickrage/show/History.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -23,7 +24,7 @@ from sickbeard.db import DBConnection
 from sickrage.helper.common import try_int
 
 
-class History:
+class History(object):
     date_format = '%Y%m%d%H%M%S'
 
     def __init__(self):
diff --git a/sickrage/show/Show.py b/sickrage/show/Show.py
index 441695a405cb95886b22a83ca58efb75d14b2e79..034269475e28baad0a74c26e098bca4c1d74ca22 100644
--- a/sickrage/show/Show.py
+++ b/sickrage/show/Show.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -25,7 +26,7 @@ from sickrage.helper.exceptions import CantRefreshShowException, CantRemoveShowE
 from sickrage.helper.exceptions import MultipleShowObjectsException
 
 
-class Show:
+class Show(object):
     def __init__(self):
         pass
 
@@ -45,10 +46,11 @@ class Show:
         if error is not None:
             return error, show
 
-        try:
-            sickbeard.showQueueScheduler.action.removeShow(show, bool(remove_files))
-        except CantRemoveShowException as exception:
-            return ex(exception), show
+        if show:
+            try:
+                sickbeard.showQueueScheduler.action.removeShow(show, bool(remove_files))
+            except CantRemoveShowException as exception:
+                return ex(exception), show
 
         return None, show
 
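Several sickrage classes (GenericMedia, ComingEpisodes, History, Show, Restart, Shutdown) now inherit from object, making them new-style classes under Python 2 so that descriptors, properties and super() behave consistently. A tiny illustration of the distinction this removes:

    # Python 2 only: old-style vs. new-style classes.
    class OldStyle:            # old-style, what these classes used to be
        pass

    class NewStyle(object):    # new-style, what they are now
        pass

    print type(OldStyle())     # <type 'instance'>
    print type(NewStyle())     # <class '__main__.NewStyle'>
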
diff --git a/sickrage/show/__init__.py b/sickrage/show/__init__.py
index 48b7f8852e4976f82e435201301eb9fc999d77d0..d153353143bf006044f9ca3bf001a742a040fe35 100644
--- a/sickrage/show/__init__.py
+++ b/sickrage/show/__init__.py
@@ -1 +1,2 @@
+# coding=utf-8
 __all__ = ['ComingEpisodes', 'History', 'Show']
diff --git a/sickrage/system/Restart.py b/sickrage/system/Restart.py
index dd35c4c1a5e54e9d869e36af49c8a10a4da731ea..48056638b228c68dc1f696590accf150afff7e13 100644
--- a/sickrage/system/Restart.py
+++ b/sickrage/system/Restart.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -21,7 +22,7 @@ import sickbeard
 from sickbeard.event_queue import Events
 
 
-class Restart:
+class Restart(object):
     def __init__(self):
         pass
 
diff --git a/sickrage/system/Shutdown.py b/sickrage/system/Shutdown.py
index 686c02c63677fe0c45754c1a7c1149ece22a93a1..ca4501eb3d244e6cfe6a75ab30a2e8cdddf3bcb1 100644
--- a/sickrage/system/Shutdown.py
+++ b/sickrage/system/Shutdown.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # This file is part of SickRage.
 #
 # URL: https://sickrage.github.io
@@ -21,7 +22,7 @@ import sickbeard
 from sickbeard.event_queue import Events
 
 
-class Shutdown:
+class Shutdown(object):
     def __init__(self):
         pass
 
diff --git a/sickrage/system/__init__.py b/sickrage/system/__init__.py
index 55314582fc22f2d59c5261c56c83f8844e68f174..f9ae594a304d951c68b115d6c8623cbb7421e80c 100644
--- a/sickrage/system/__init__.py
+++ b/sickrage/system/__init__.py
@@ -1 +1,2 @@
+# coding=utf-8
 __all__ = ['Restart', 'Shutdown']
diff --git a/tests/name_parser_tests.py b/tests/name_parser_tests.py
index c71a904e2f680cb00e4af8f686303860f62dfb35..cddd8d326dd0867a1f25597aedcbd2a910456d92 100644
--- a/tests/name_parser_tests.py
+++ b/tests/name_parser_tests.py
@@ -37,7 +37,8 @@ SIMPLE_TEST_CASES = {
         'Show-Name-S06E01-720p': parser.ParseResult(None, 'Show-Name', 6, [1], '720p'),
         'Show-Name-S06E01-1080i': parser.ParseResult(None, 'Show-Name', 6, [1], '1080i'),
         'Show.Name.S06E01.Other.WEB-DL': parser.ParseResult(None, 'Show Name', 6, [1], 'Other.WEB-DL'),
-        'Show.Name.S06E01 Some-Stuff Here': parser.ParseResult(None, 'Show Name', 6, [1], 'Some-Stuff Here')
+        'Show.Name.S06E01 Some-Stuff Here': parser.ParseResult(None, 'Show Name', 6, [1], 'Some-Stuff Here'),
+        'Show Name - S03E14-36! 24! 36! Hut! (1)': parser.ParseResult(None, 'Show Name', 3, [14], '36! 24! 36! Hut! (1)'),
     },
 
     'fov': {
@@ -321,8 +322,8 @@ class BasicTests(test.SickbeardTestDBCase):
                 print 'anime:', test_result.is_anime, 'ab_episode_numbers:', test_result.ab_episode_numbers
                 print test_result
                 print result
-            self.assertEqual(test_result.which_regex, [section])
-            self.assertEqual(str(test_result), str(result))
+            self.assertEqual(test_result.which_regex, [section], '%s : %s != %s' % (cur_test, test_result.which_regex, [section]))
+            self.assertEqual(str(test_result), str(result), '%s : %s != %s' % (cur_test, str(test_result), str(result)))
 
     def test_standard_names(self):
         """
@@ -485,10 +486,9 @@ class BasicFailedTests(test.SickbeardTestDBCase):
                 print 'anime:', test_result.is_anime, 'ab_episode_numbers:', test_result.ab_episode_numbers
                 print test_result
                 print result
-            self.assertEqual(test_result.which_regex, [section])
-            self.assertEqual(str(test_result), str(result))
+            self.assertEqual(test_result.which_regex, [section], '%s : %s != %s' % (cur_test, test_result.which_regex, [section]))
+            self.assertEqual(str(test_result), str(result), '%s : %s != %s' % (cur_test, str(test_result), str(result)))
 
-    @unittest.expectedFailure
     def test_no_s_names(self):
         """
         Test no season names
@@ -496,7 +496,6 @@ class BasicFailedTests(test.SickbeardTestDBCase):
         name_parser = parser.NameParser(False)
         self._test_names(name_parser, 'no_season')
 
-    @unittest.expectedFailure
     def test_no_s_file_names(self):
         """
         Test no season file names
@@ -546,7 +545,7 @@ class BasicFailedTests(test.SickbeardTestDBCase):
 
 if __name__ == '__main__':
     if len(sys.argv) > 1:
-        SUITE = unittest.TestLoader().loadTestsFromName('name_parser_tests.BasicTests.test_'+sys.argv[1])
+        SUITE = unittest.TestLoader().loadTestsFromName('name_parser_tests.BasicTests.test_' + sys.argv[1])
         unittest.TextTestRunner(verbosity=2).run(SUITE)
     else:
         SUITE = unittest.TestLoader().loadTestsFromTestCase(BasicTests)
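
With the expectedFailure decorators removed, the no-season cases are expected to pass outright. The __main__ block just above also lets a single BasicTests method be selected by name, for example (run from the tests directory):

    # python name_parser_tests.py standard_names
    # runs only name_parser_tests.BasicTests.test_standard_names
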
diff --git a/tests/notifier_tests.py b/tests/notifier_tests.py
index 9d72f62f30523a2ca355bd5640deb3be5d2853bd..6b4decb3b3f80340575fe39e8d8125f83d94cc66 100644
--- a/tests/notifier_tests.py
+++ b/tests/notifier_tests.py
@@ -64,25 +64,25 @@ class NotifierTests(test.SickbeardTestDBCase):  # pylint: disable=too-many-publi
         # one of the DB columns.  Therefore, to test properly, we must create some shows that
         # store emails in the old method (legacy method) and then other shows that will use
         # the new method.
-        for show_counter in range(100, 100+num_legacy_shows):
+        for show_counter in range(100, 100 + num_legacy_shows):
             show = TVShow(1, show_counter)
-            show.name = "Show "+str(show_counter)
+            show.name = "Show " + str(show_counter)
             show.episodes = []
             for episode_counter in range(0, num_episodes_per_show):
                 episode = TVEpisode(show, test.SEASON, episode_counter)
-                episode.name = "Episode "+str(episode_counter+1)
+                episode.name = "Episode " + str(episode_counter + 1)
                 episode.quality = "SDTV"
                 show.episodes.append(episode)
             show.saveToDB()
             cls.legacy_shows.append(show)
 
-        for show_counter in range(200, 200+num_shows):
+        for show_counter in range(200, 200 + num_shows):
             show = TVShow(1, show_counter)
-            show.name = "Show "+str(show_counter)
+            show.name = "Show " + str(show_counter)
             show.episodes = []
             for episode_counter in range(0, num_episodes_per_show):
                 episode = TVEpisode(show, test.SEASON, episode_counter)
-                episode.name = "Episode "+str(episode_counter+1)
+                episode.name = "Episode " + str(episode_counter + 1)
                 episode.quality = "SDTV"
                 show.episodes.append(episode)
             show.saveToDB()
@@ -122,15 +122,15 @@ class NotifierTests(test.SickbeardTestDBCase):  # pylint: disable=too-many-publi
             Home.saveShowNotifyList(show=showid, emails=test_emails)
 
         # Now, iterate through all shows using the email list generation routines that are used in the notifier proper
-        shows = self.legacy_shows+self.shows
+        shows = self.legacy_shows + self.shows
         for show in shows:
             for episode in show.episodes:
-                ep_name = ss(episode._format_pattern('%SN - %Sx%0E - %EN - ')+episode.quality)  # pylint: disable=protected-access
+                ep_name = ss(episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality)  # pylint: disable=protected-access
                 show_name = email_notifier._parseEp(ep_name)  # pylint: disable=protected-access
                 recipients = email_notifier._generate_recipients(show_name)  # pylint: disable=protected-access
-                self._debug_spew("- Email Notifications for "+show.name+" (episode: "+episode.name+") will be sent to:")
+                self._debug_spew("- Email Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
                 for email in recipients:
-                    self._debug_spew("-- "+email.strip())
+                    self._debug_spew("-- " + email.strip())
                 self._debug_spew("\n\r")
 
         return True
@@ -215,12 +215,12 @@ class NotifierTests(test.SickbeardTestDBCase):  # pylint: disable=too-many-publi
         # Now, iterate through all shows using the Prowl API generation routines that are used in the notifier proper
         for show in self.shows:
             for episode in show.episodes:
-                ep_name = ss(episode._format_pattern('%SN - %Sx%0E - %EN - ')+episode.quality)  # pylint: disable=protected-access
+                ep_name = ss(episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality)  # pylint: disable=protected-access
                 show_name = prowl_notifier._parse_episode(ep_name)  # pylint: disable=protected-access
                 recipients = prowl_notifier._generate_recipients(show_name)  # pylint: disable=protected-access
-                self._debug_spew("- Prowl Notifications for "+show.name+" (episode: "+episode.name+") will be sent to:")
+                self._debug_spew("- Prowl Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
                 for api in recipients:
-                    self._debug_spew("-- "+api.strip())
+                    self._debug_spew("-- " + api.strip())
                 self._debug_spew("\n\r")
 
         return True
diff --git a/tests/numdict_tests.py b/tests/numdict_tests.py
index 54f752a20288fb6082fd60abb32d2044a400b8d4..e43eed8ce6661562f2969f245f732985e1622dd2 100644
--- a/tests/numdict_tests.py
+++ b/tests/numdict_tests.py
@@ -493,7 +493,7 @@ class NumDictTest(unittest.TestCase):
                 return super(NumDictH, self).__missing__(key) + 1
 
         num_dict_h = NumDictH()
-        self.assertEqual(num_dict_h[None], num_dict_d[None]+1)
+        self.assertEqual(num_dict_h[None], num_dict_d[None] + 1)
 
 
 def test_main():
diff --git a/tests/sickrage_tests/helper/common_tests.py b/tests/sickrage_tests/helper/common_tests.py
index 2f4eb47cc193cc96b53e82157c0c52ff0bfae49d..766084ea5404d297b8d1bd9a4ca5d30112cdb0db 100644
--- a/tests/sickrage_tests/helper/common_tests.py
+++ b/tests/sickrage_tests/helper/common_tests.py
@@ -159,12 +159,12 @@ class CommonTests(unittest.TestCase):
         Test pretty file size
         """
         test_cases = {
-            None: '',
-            '': '',
+            None: '0.00 B',
+            '': '0.00 B',
             '1024': '1.00 KB',
-            '1024.5': '',
-            -42.5: '-42.50 B',
-            -42: '-42.00 B',
+            '1024.5': '1.00 KB',
+            -42.5: '0.00 B',
+            -42: '0.00 B',
             0: '0.00 B',
             25: '25.00 B',
             25.5: '25.50 B',
@@ -182,9 +182,9 @@ class CommonTests(unittest.TestCase):
         }
 
         unicode_test_cases = {
-            u'': '',
+            u'': '0.00 B',
             u'1024': '1.00 KB',
-            u'1024.5': '',
+            u'1024.5': '1.00 KB',
         }
 
         for tests in test_cases, unicode_test_cases:
diff --git a/tests/sickrage_tests/providers/generic_provider_tests.py b/tests/sickrage_tests/providers/generic_provider_tests.py
index 28e9072e82e085610b737807e6c38dc331fe4024..335491dad6e396b753e2c53868921124318dac42 100644
--- a/tests/sickrage_tests/providers/generic_provider_tests.py
+++ b/tests/sickrage_tests/providers/generic_provider_tests.py
@@ -316,14 +316,14 @@ class GenericProviderTests(unittest.TestCase):
         ]
 
         self.assertEqual(
-                len(items_list), len(results_list),
-                'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
+            len(items_list), len(results_list),
+            'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
         )
 
         self.assertEqual(
-                len(unicode_items_list), len(unicode_results_list),
-                'Number of parameters (%d) and results (%d) does not match' % (
-                    len(unicode_items_list), len(unicode_results_list))
+            len(unicode_items_list), len(unicode_results_list),
+            'Number of parameters (%d) and results (%d) does not match' % (
+                len(unicode_items_list), len(unicode_results_list))
         )
 
         for (index, item) in enumerate(items_list):
diff --git a/tests/sickrage_tests/providers/nzb_provider_tests.py b/tests/sickrage_tests/providers/nzb_provider_tests.py
index 2983dc9cb40cc4cfd471e131a7473283a6a5e98d..2cafcdb54e953595d2aade8dba02633fdc3a31cc 100644
--- a/tests/sickrage_tests/providers/nzb_provider_tests.py
+++ b/tests/sickrage_tests/providers/nzb_provider_tests.py
@@ -34,7 +34,7 @@ import sickbeard
 
 from generic_provider_tests import GenericProviderTests
 from sickrage.providers.GenericProvider import GenericProvider
-from sickrage.providers.NZBProvider import NZBProvider
+from sickrage.providers.nzb.NZBProvider import NZBProvider
 
 
 class NZBProviderTests(GenericProviderTests):
@@ -115,13 +115,13 @@ class NZBProviderTests(GenericProviderTests):
         ]
 
         self.assertEqual(
-                len(items_list), len(results_list),
-                'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
+            len(items_list), len(results_list),
+            'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
         )
 
         self.assertEqual(
-                len(unicode_items_list), len(unicode_results_list),
-                'Number of parameters (%d) and results (%d) does not match' % (
+            len(unicode_items_list), len(unicode_results_list),
+            'Number of parameters (%d) and results (%d) does not match' % (
                 len(unicode_items_list), len(unicode_results_list))
         )
 
diff --git a/tests/sickrage_tests/providers/torrent_provider_tests.py b/tests/sickrage_tests/providers/torrent_provider_tests.py
index ac1973b646be3b144527656a7992923be35c213e..48e772144d168a288cf9f2e7e3ea4eb543845bf1 100644
--- a/tests/sickrage_tests/providers/torrent_provider_tests.py
+++ b/tests/sickrage_tests/providers/torrent_provider_tests.py
@@ -34,7 +34,7 @@ import sickbeard
 
 from generic_provider_tests import GenericProviderTests
 from sickrage.providers.GenericProvider import GenericProvider
-from sickrage.providers.TorrentProvider import TorrentProvider
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
 class TorrentProviderTests(GenericProviderTests):
@@ -107,14 +107,14 @@ class TorrentProviderTests(GenericProviderTests):
         ]
 
         self.assertEqual(
-                len(items_list), len(results_list),
-                'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
+            len(items_list), len(results_list),
+            'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
         )
 
         self.assertEqual(
-                len(unicode_items_list), len(unicode_results_list),
-                'Number of parameters (%d) and results (%d) does not match' % (
-                    len(unicode_items_list), len(unicode_results_list))
+            len(unicode_items_list), len(unicode_results_list),
+            'Number of parameters (%d) and results (%d) does not match' % (
+                len(unicode_items_list), len(unicode_results_list))
         )
 
         for (index, item) in enumerate(items_list):
diff --git a/tests/ssl_sni_tests.py b/tests/ssl_sni_tests.py
index add4e704002c873d53c440416bd4c4e3a8138e53..f69618501ac8d39b9ade1ba47a5e8b34e879be11 100644
--- a/tests/ssl_sni_tests.py
+++ b/tests/ssl_sni_tests.py
@@ -66,6 +66,7 @@ def test_generator(_provider):
 
     return _connectivity_test
 
+
 class SniTests(unittest.TestCase):
     pass
 
diff --git a/tests/test_lib.py b/tests/test_lib.py
index e4bc9ed49525bb88c3c9e1e66a00645c0004a856..a3381ae84e23f85e5f5b1767d8df1868d8f4b49d 100644
--- a/tests/test_lib.py
+++ b/tests/test_lib.py
@@ -50,6 +50,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')
 from configobj import ConfigObj
 from sickbeard import db, providers
 from sickbeard.databases import cache_db, failed_db, mainDB
+from sickbeard.providers.newznab import NewznabProvider
 from sickbeard.tv import TVEpisode
 import shutil_custom  # pylint: disable=import-error
 import sickbeard
@@ -111,7 +112,7 @@ sickbeard.NAMING_MULTI_EP = 1
 
 
 sickbeard.PROVIDER_ORDER = ["sick_beard_index"]
-sickbeard.newznabProviderList = providers.getNewznabProviderList("'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/||5030,5040,5060,5070,5090|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0|eponly|0|0|0'")
+sickbeard.newznabProviderList = NewznabProvider.get_providers_list("'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/||5030,5040,5060,5070,5090|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0|eponly|0|0|0'")
 sickbeard.providerList = providers.makeProviderList()
 
 sickbeard.PROG_DIR = os.path.abspath(os.path.join(TEST_DIR, '..'))
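
test_lib.py now builds its newznab providers through NewznabProvider.get_providers_list; the argument keeps the old format of '!!!'-separated provider entries whose fields are separated by '|' (name, URL, API key, categories, then several flags whose exact meaning is not spelled out in this diff). A rough, hedged sketch of splitting such a string, not the parser SickRage actually uses:

    # Illustrative only; real parsing happens inside NewznabProvider.get_providers_list.
    data = "'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0'"
    for entry in data.strip("'").split('!!!'):
        fields = entry.split('|')
        print fields[0], fields[1]   # provider name and URL
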