diff --git a/autoProcessTV/autoProcessTV.py b/autoProcessTV/autoProcessTV.py
index 09532de6d988a4dd638cb0b48f8ec834e34899d9..77d1f795004c3d68328661325c2840f0f083abf7 100644
--- a/autoProcessTV/autoProcessTV.py
+++ b/autoProcessTV/autoProcessTV.py
@@ -51,7 +51,13 @@ def processEpisode(dirName, nzbName=None):
         print "ERROR: You need an autoProcessTV.cfg file - did you rename and edit the .sample?"
         sys.exit(-1)
     
-    config.read(configFilename)
+    try:
+        fp = open(configFilename, "r")
+        config.readfp(fp)
+        fp.close()
+    except IOError, e:
+        print "Could not read configuration file: ", str(e)
+        sys.exit(1)
     
     host = config.get("SickBeard", "host")
     port = config.get("SickBeard", "port")
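Note on the autoProcessTV.py change above: `ConfigParser.read()` silently skips files it cannot open, while `readfp()` surfaces the `IOError`, which is what the new error message relies on. A minimal Python 2 sketch of the difference (standard-library behaviour, not part of this change):

```python
import ConfigParser

config = ConfigParser.ConfigParser()

# read() returns the list of files it successfully parsed and silently
# skips any it could not open, so a missing autoProcessTV.cfg would only
# surface later as a confusing NoSectionError.
parsed = config.read("autoProcessTV.cfg")
print parsed  # [] when the file could not be opened

# readfp() takes an already-open file object, so the IOError from open()
# is raised up front and can be reported cleanly, as the change above does.
try:
    fp = open("autoProcessTV.cfg", "r")
    config.readfp(fp)
    fp.close()
except IOError, e:
    print "Could not read configuration file:", str(e)
```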
diff --git a/data/css/formwizard.css b/data/css/lib/formwizard.css
similarity index 100%
rename from data/css/formwizard.css
rename to data/css/lib/formwizard.css
diff --git a/data/css/jquery.pnotify.default.css b/data/css/lib/jquery.pnotify.default.css
similarity index 100%
rename from data/css/jquery.pnotify.default.css
rename to data/css/lib/jquery.pnotify.default.css
diff --git a/data/css/jquery.qtip2.css b/data/css/lib/jquery.qtip2.css
similarity index 100%
rename from data/css/jquery.qtip2.css
rename to data/css/lib/jquery.qtip2.css
diff --git a/data/css/smooth-grinder/images/ui-bg_fine-grain_10_eceadf_60x60.png b/data/css/lib/smooth-grinder/images/ui-bg_fine-grain_10_eceadf_60x60.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_fine-grain_10_eceadf_60x60.png
rename to data/css/lib/smooth-grinder/images/ui-bg_fine-grain_10_eceadf_60x60.png
diff --git a/data/css/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png b/data/css/lib/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png
rename to data/css/lib/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png
diff --git a/data/css/smooth-grinder/images/ui-bg_flat_0_6e4f1c_40x100.png b/data/css/lib/smooth-grinder/images/ui-bg_flat_0_6e4f1c_40x100.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_flat_0_6e4f1c_40x100.png
rename to data/css/lib/smooth-grinder/images/ui-bg_flat_0_6e4f1c_40x100.png
diff --git a/data/css/smooth-grinder/images/ui-bg_flat_0_ffffff_40x100.png b/data/css/lib/smooth-grinder/images/ui-bg_flat_0_ffffff_40x100.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_flat_0_ffffff_40x100.png
rename to data/css/lib/smooth-grinder/images/ui-bg_flat_0_ffffff_40x100.png
diff --git a/data/css/smooth-grinder/images/ui-bg_glass_55_fbf9ee_1x400.png b/data/css/lib/smooth-grinder/images/ui-bg_glass_55_fbf9ee_1x400.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_glass_55_fbf9ee_1x400.png
rename to data/css/lib/smooth-grinder/images/ui-bg_glass_55_fbf9ee_1x400.png
diff --git a/data/css/smooth-grinder/images/ui-bg_glass_95_fef1ec_1x400.png b/data/css/lib/smooth-grinder/images/ui-bg_glass_95_fef1ec_1x400.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_glass_95_fef1ec_1x400.png
rename to data/css/lib/smooth-grinder/images/ui-bg_glass_95_fef1ec_1x400.png
diff --git a/data/css/smooth-grinder/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png b/data/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png
rename to data/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png
diff --git a/data/css/smooth-grinder/images/ui-bg_highlight-soft_75_dddddd_1x100.png b/data/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_dddddd_1x100.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_highlight-soft_75_dddddd_1x100.png
rename to data/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_dddddd_1x100.png
diff --git a/data/css/smooth-grinder/images/ui-bg_highlight-soft_75_efefef_1x100.png b/data/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_efefef_1x100.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_highlight-soft_75_efefef_1x100.png
rename to data/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_efefef_1x100.png
diff --git a/data/css/smooth-grinder/images/ui-bg_inset-soft_75_dfdfdf_1x100.png b/data/css/lib/smooth-grinder/images/ui-bg_inset-soft_75_dfdfdf_1x100.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-bg_inset-soft_75_dfdfdf_1x100.png
rename to data/css/lib/smooth-grinder/images/ui-bg_inset-soft_75_dfdfdf_1x100.png
diff --git a/data/css/smooth-grinder/images/ui-icons_222222_256x240.png b/data/css/lib/smooth-grinder/images/ui-icons_222222_256x240.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-icons_222222_256x240.png
rename to data/css/lib/smooth-grinder/images/ui-icons_222222_256x240.png
diff --git a/data/css/smooth-grinder/images/ui-icons_2e83ff_256x240.png b/data/css/lib/smooth-grinder/images/ui-icons_2e83ff_256x240.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-icons_2e83ff_256x240.png
rename to data/css/lib/smooth-grinder/images/ui-icons_2e83ff_256x240.png
diff --git a/data/css/smooth-grinder/images/ui-icons_8c291d_256x240.png b/data/css/lib/smooth-grinder/images/ui-icons_8c291d_256x240.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-icons_8c291d_256x240.png
rename to data/css/lib/smooth-grinder/images/ui-icons_8c291d_256x240.png
diff --git a/data/css/smooth-grinder/images/ui-icons_cd0a0a_256x240.png b/data/css/lib/smooth-grinder/images/ui-icons_cd0a0a_256x240.png
similarity index 100%
rename from data/css/smooth-grinder/images/ui-icons_cd0a0a_256x240.png
rename to data/css/lib/smooth-grinder/images/ui-icons_cd0a0a_256x240.png
diff --git a/data/css/smooth-grinder/jquery-ui-1.8.17.custom.css b/data/css/lib/smooth-grinder/jquery-ui-1.8.17.custom.css
similarity index 100%
rename from data/css/smooth-grinder/jquery-ui-1.8.17.custom.css
rename to data/css/lib/smooth-grinder/jquery-ui-1.8.17.custom.css
diff --git a/data/css/superfish.css b/data/css/lib/superfish.css
similarity index 100%
rename from data/css/superfish.css
rename to data/css/lib/superfish.css
diff --git a/data/interfaces/default/apiBuilder.tmpl b/data/interfaces/default/apiBuilder.tmpl
index b948b0097d5435014264c0d1ef0ddc8f703864e6..f6a1dd5570d9bc10f8ad170648497af7e0fe9fb2 100644
--- a/data/interfaces/default/apiBuilder.tmpl
+++ b/data/interfaces/default/apiBuilder.tmpl
@@ -8,7 +8,7 @@
 sbRoot = "$sbRoot";
 //-->
 </script>
-<script type="text/javascript" src="$sbRoot/js/jquery-1.7.1.min.js"></script>
+<script type="text/javascript" src="$sbRoot/js/lib/jquery-1.7.1.min.js"></script>
 <script type="text/javascript" src="$sbRoot/js/apibuilder.js"></script>
 
 <style type="text/css">
diff --git a/data/interfaces/default/config_providers.tmpl b/data/interfaces/default/config_providers.tmpl
index 50b810a661bbf2f6cf71a2f077968bc3b28043c3..762a32e163101d285c1cb6a566147a91818e1b94 100755
--- a/data/interfaces/default/config_providers.tmpl
+++ b/data/interfaces/default/config_providers.tmpl
@@ -198,26 +198,8 @@ var show_nzb_providers = #if $sickbeard.USE_NZBS then "true" else "false"#;
 <div class="providerDiv" id="btnDiv">
                         <div class="field-pair">
                             <label class="clearfix">
-                                <span class="component-title">BTN User ID:</span>
-                                <input class="component-desc" type="text" name="btn_user_id" value="$sickbeard.BTN_USER_ID" />
-                            </label>
-                        </div>
-                        <div class="field-pair">
-                            <label class="clearfix">
-                                <span class="component-title">BTN Auth Token:</span>
-                                <input class="component-desc" type="text" name="btn_auth_token" value="$sickbeard.BTN_AUTH_TOKEN" size="32" />
-                            </label>
-                        </div>
-						<div class="field-pair">
-                            <label class="clearfix">
-                                <span class="component-title">BTN Passkey:</span>
-                                <input class="component-desc" type="text" name="btn_passkey" value="$sickbeard.BTN_PASSKEY" size="32" />
-                            </label>
-                        </div>
-						<div class="field-pair">
-                            <label class="clearfix">
-                                <span class="component-title">BTN Authkey:</span>
-                                <input class="component-desc" type="text" name="btn_authkey" value="$sickbeard.BTN_AUTHKEY" size="32" />
+                                <span class="component-title">BTN API KEY:</span>
+                                <input class="component-desc" type="text" name="btn_api_key" value="$sickbeard.BTN_API_KEY" size="40" />
                             </label>
                         </div>
 </div>
diff --git a/data/interfaces/default/displayShow.tmpl b/data/interfaces/default/displayShow.tmpl
index a1b5649076bf2188c609bbe0f1e5e6a9f9de56d9..92dc5e5c230d0e7ce4b7b999f3d73fbc09793012 100644
--- a/data/interfaces/default/displayShow.tmpl
+++ b/data/interfaces/default/displayShow.tmpl
@@ -9,7 +9,7 @@
 #set global $topmenu="manageShows"#
 #include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
 
-<script type="text/javascript" src="$sbRoot/js/jquery.bookmarkscroll.js"></script>
+<script type="text/javascript" src="$sbRoot/js/lib/jquery.bookmarkscroll.js"></script>
 
 
 <div class="h2footer align-right">
diff --git a/data/interfaces/default/home_newShow.tmpl b/data/interfaces/default/home_newShow.tmpl
index dbb930271f8da1032d0dcbd8dd34fe2b6e88b984..442d9ca6b25c1fd9d7d3f814f135fb3d8cf6ce00 100644
--- a/data/interfaces/default/home_newShow.tmpl
+++ b/data/interfaces/default/home_newShow.tmpl
@@ -11,8 +11,8 @@
 
 #include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
 
-<link rel="stylesheet" type="text/css" href="$sbRoot/css/formwizard.css" />
-<script type="text/javascript" src="$sbRoot/js/formwizard.js"></script>
+<link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/formwizard.css" />
+<script type="text/javascript" src="$sbRoot/js/lib/formwizard.js"></script>
 <script type="text/javascript" src="$sbRoot/js/qualityChooser.js"></script>
 <script type="text/javascript" src="$sbRoot/js/newShow.js"></script>
 <script type="text/javascript" src="$sbRoot/js/addShowOptions.js"></script> 
diff --git a/data/interfaces/default/inc_top.tmpl b/data/interfaces/default/inc_top.tmpl
index 51ae9fb95d5014dbd71799de367a1469bbef7248..1f9ce911c18bdfd2b5784640b19bd922dc799ab2 100644
--- a/data/interfaces/default/inc_top.tmpl
+++ b/data/interfaces/default/inc_top.tmpl
@@ -14,11 +14,11 @@
     <link rel="stylesheet" type="text/css" href="$sbRoot/css/browser.css" />
     <link rel="stylesheet" type="text/css" href="$sbRoot/css/comingEpisodes.css" />
     <link rel="stylesheet" type="text/css" href="$sbRoot/css/config.css" />
-    <link rel="stylesheet" type="text/css" href="$sbRoot/css/jquery.pnotify.default.css" />
-    <link rel="stylesheet" type="text/css" href="$sbRoot/css/smooth-grinder/jquery-ui-1.8.17.custom.css" />
-    <link rel="stylesheet" type="text/css" href="$sbRoot/css/superfish.css" />
+    <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery.pnotify.default.css" />
+    <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/smooth-grinder/jquery-ui-1.8.17.custom.css" />
+    <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/superfish.css" />
     <link rel="stylesheet" type="text/css" href="$sbRoot/css/tablesorter.css"/>
-    <link rel="stylesheet" type="text/css" href="$sbRoot/css/jquery.qtip2.css"/>
+    <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery.qtip2.css"/>
     <link rel="stylesheet" type="text/css" media="only screen and (max-device-width: 480px)" href="$sbRoot/css/iphone.css" />
 
 <style type="text/css">
@@ -33,30 +33,30 @@ th.tablesorter-headerSortDown { background-image: url("$sbRoot/images/tablesorte
 
 .ui-autocomplete-loading { background: white url("$sbRoot/images/loading16.gif") right center no-repeat; }
 .browserDialog.busy .ui-dialog-buttonpane { background: url("$sbRoot/images/loading.gif") 10px 50% no-repeat !important; }
-.ui-dialog, .ui-dialog-buttonpane { background: #eceadf url("$sbRoot/css/smooth-grinder/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; }
+.ui-dialog, .ui-dialog-buttonpane { background: #eceadf url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; }
 .ui-accordion-content, .ui-tabs-panel { background: #ededed !important; background-image: none !important; }
 
-.ui-widget-content { border: 1px solid #aaaaaa; background: #dcdcdc url("$sbRoot/css/smooth-grinder/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; color: #222222; }
-.ui-widget-header { border: 1px solid #aaaaaa; background: #ffffff url("$sbRoot/css/smooth-grinder/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; color: #222222; font-weight: bold; }
+.ui-widget-content { border: 1px solid #aaaaaa; background: #dcdcdc url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; color: #222222; }
+.ui-widget-header { border: 1px solid #aaaaaa; background: #ffffff url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; color: #222222; font-weight: bold; }
 
-.ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { border: 1px solid #aaaaaa; background: #efefef url("$sbRoot/css/smooth-grinder/images/ui-bg_highlight-soft_75_efefef_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
-.ui-state-hover, .ui-widget-content .ui-state-hover, .ui-widget-header .ui-state-hover, .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { border: 1px solid #999999; background: #dddddd url("$sbRoot/css/smooth-grinder/images/ui-bg_highlight-soft_75_dddddd_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
-.ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { border: 1px solid #aaaaaa; background: #dfdfdf url("$sbRoot/css/smooth-grinder/images/ui-bg_inset-soft_75_dfdfdf_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #140f06; }
+.ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { border: 1px solid #aaaaaa; background: #efefef url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_efefef_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
+.ui-state-hover, .ui-widget-content .ui-state-hover, .ui-widget-header .ui-state-hover, .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { border: 1px solid #999999; background: #dddddd url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_highlight-soft_75_dddddd_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
+.ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { border: 1px solid #aaaaaa; background: #dfdfdf url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_inset-soft_75_dfdfdf_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #140f06; }
 
-.ui-state-highlight, .ui-widget-content .ui-state-highlight, .ui-widget-header .ui-state-highlight {border: 1px solid #aaaaaa; background: #fbf9ee url("$sbRoot/css/smooth-grinder/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; color: #363636; }
-.ui-state-error, .ui-widget-content .ui-state-error, .ui-widget-header .ui-state-error {border: 1px solid #aaaaaa; background: #fef1ec url("$sbRoot/css/smooth-grinder/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; color: #8c291d; }
+.ui-state-highlight, .ui-widget-content .ui-state-highlight, .ui-widget-header .ui-state-highlight {border: 1px solid #aaaaaa; background: #fbf9ee url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; color: #363636; }
+.ui-state-error, .ui-widget-content .ui-state-error, .ui-widget-header .ui-state-error {border: 1px solid #aaaaaa; background: #fef1ec url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; color: #8c291d; }
 
-.ui-icon { width: 16px; height: 16px; background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_222222_256x240.png"); }
-.ui-widget-content .ui-icon {background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_222222_256x240.png"); }
-.ui-widget-header .ui-icon {background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_222222_256x240.png"); }
-.ui-state-default .ui-icon { background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_8c291d_256x240.png"); }
-.ui-state-hover .ui-icon, .ui-state-focus .ui-icon {background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_222222_256x240.png"); }
-.ui-state-active .ui-icon {background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_8c291d_256x240.png"); }
-.ui-state-highlight .ui-icon {background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_2e83ff_256x240.png"); }
-.ui-state-error .ui-icon, .ui-state-error-text .ui-icon {background-image: url("$sbRoot/css/smooth-grinder/images/ui-icons_cd0a0a_256x240.png"); }
+.ui-icon { width: 16px; height: 16px; background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_222222_256x240.png"); }
+.ui-widget-content .ui-icon {background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_222222_256x240.png"); }
+.ui-widget-header .ui-icon {background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_222222_256x240.png"); }
+.ui-state-default .ui-icon { background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_8c291d_256x240.png"); }
+.ui-state-hover .ui-icon, .ui-state-focus .ui-icon {background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_222222_256x240.png"); }
+.ui-state-active .ui-icon {background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_8c291d_256x240.png"); }
+.ui-state-highlight .ui-icon {background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_2e83ff_256x240.png"); }
+.ui-state-error .ui-icon, .ui-state-error-text .ui-icon {background-image: url("$sbRoot/css/lib/smooth-grinder/images/ui-icons_cd0a0a_256x240.png"); }
 
-.ui-widget-overlay { background: #aaaaaa url("$sbRoot/css/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat; opacity: .35;filter:Alpha(Opacity=35); }
-.ui-widget-shadow { margin: -8px 0 0 -8px; padding: 8px; background: #000000 url("$sbRoot/css/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; opacity: .35;filter:Alpha(Opacity=35); -moz-border-radius: 8px; -khtml-border-radius: 8px; -webkit-border-radius: 8px; border-radius: 8px; }
+.ui-widget-overlay { background: #aaaaaa url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat; opacity: .35;filter:Alpha(Opacity=35); }
+.ui-widget-shadow { margin: -8px 0 0 -8px; padding: 8px; background: #000000 url("$sbRoot/css/lib/smooth-grinder/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; opacity: .35;filter:Alpha(Opacity=35); -moz-border-radius: 8px; -khtml-border-radius: 8px; -webkit-border-radius: 8px; border-radius: 8px; }
 
 #if $sickbeard.NEWEST_VERSION_STRING:
 .ui-pnotify { top: 30px !important; }
@@ -64,20 +64,20 @@ th.tablesorter-headerSortDown { background-image: url("$sbRoot/images/tablesorte
 //--> 
 </style>
 
-    <script type="text/javascript" src="$sbRoot/js/jquery-1.7.1.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery-ui-1.8.17.custom.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/superfish-1.4.8.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/supersubs-0.2b.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.cookie.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.cookiejar.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.json-2.2.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.selectboxes.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.tablesorter-2.1.10.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.tablesorter.widgets.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.qtip-2011-11-14.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.pnotify-1.0.2.min.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.expand-1.3.8.js"></script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.form-2.92.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery-1.7.1.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery-ui-1.8.17.custom.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/superfish-1.4.8.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/supersubs-0.2b.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.cookie.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.cookiejar.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.json-2.2.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.selectboxes.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.tablesorter-2.1.10.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.tablesorter.widgets.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.qtip-2011-11-14.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.pnotify-1.0.2.min.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.expand-1.3.8.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.form-2.92.js"></script>
 
     <script type="text/javascript" charset="utf-8">
     <!--
@@ -86,7 +86,7 @@ th.tablesorter-headerSortDown { background-image: url("$sbRoot/images/tablesorte
         top_image_html = '<img src="$sbRoot/images/top.gif" style="width:31px; height:11px" alt="Jump to top" />'; 
     //-->
     </script>
-    <script type="text/javascript" src="$sbRoot/js/jquery.scrolltopcontrol-1.1.js"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.scrolltopcontrol-1.1.js"></script>
     <script type="text/javascript" src="$sbRoot/js/browser.js"></script>
     <script type="text/javascript" src="$sbRoot/js/ajaxNotifications.js"></script>
     
diff --git a/data/interfaces/default/restart_bare.tmpl b/data/interfaces/default/restart_bare.tmpl
index 150c3281f098014e1bee8f4c90eb01ad61f30814..bb2e01dbd0cfe3df89bb2e6a577a89ea0c0ea5b1 100644
--- a/data/interfaces/default/restart_bare.tmpl
+++ b/data/interfaces/default/restart_bare.tmpl
@@ -16,7 +16,7 @@ sbHost = "$curSBHost";
 //-->
 </script>
 
-<script type="text/javascript" src="$sbRoot/js/jquery-1.7.1.min.js"></script>
+<script type="text/javascript" src="$sbRoot/js/lib/jquery-1.7.1.min.js"></script>
 <script type="text/javascript" src="$sbRoot/js/restart.js"></script>
 
 <h2>Performing Restart</h2>
diff --git a/data/js/formwizard.js b/data/js/lib/formwizard.js
similarity index 100%
rename from data/js/formwizard.js
rename to data/js/lib/formwizard.js
diff --git a/data/js/jquery-1.7.1.min.js b/data/js/lib/jquery-1.7.1.min.js
similarity index 100%
rename from data/js/jquery-1.7.1.min.js
rename to data/js/lib/jquery-1.7.1.min.js
diff --git a/data/js/jquery-ui-1.8.17.custom.min.js b/data/js/lib/jquery-ui-1.8.17.custom.min.js
similarity index 100%
rename from data/js/jquery-ui-1.8.17.custom.min.js
rename to data/js/lib/jquery-ui-1.8.17.custom.min.js
diff --git a/data/js/jquery.bookmarkscroll.js b/data/js/lib/jquery.bookmarkscroll.js
similarity index 100%
rename from data/js/jquery.bookmarkscroll.js
rename to data/js/lib/jquery.bookmarkscroll.js
diff --git a/data/js/jquery.cookie.js b/data/js/lib/jquery.cookie.js
similarity index 100%
rename from data/js/jquery.cookie.js
rename to data/js/lib/jquery.cookie.js
diff --git a/data/js/jquery.cookiejar.js b/data/js/lib/jquery.cookiejar.js
similarity index 100%
rename from data/js/jquery.cookiejar.js
rename to data/js/lib/jquery.cookiejar.js
diff --git a/data/js/jquery.expand-1.3.8.js b/data/js/lib/jquery.expand-1.3.8.js
similarity index 100%
rename from data/js/jquery.expand-1.3.8.js
rename to data/js/lib/jquery.expand-1.3.8.js
diff --git a/data/js/jquery.form-2.92.js b/data/js/lib/jquery.form-2.92.js
similarity index 100%
rename from data/js/jquery.form-2.92.js
rename to data/js/lib/jquery.form-2.92.js
diff --git a/data/js/jquery.json-2.2.min.js b/data/js/lib/jquery.json-2.2.min.js
similarity index 100%
rename from data/js/jquery.json-2.2.min.js
rename to data/js/lib/jquery.json-2.2.min.js
diff --git a/data/js/jquery.pnotify-1.0.2.min.js b/data/js/lib/jquery.pnotify-1.0.2.min.js
similarity index 100%
rename from data/js/jquery.pnotify-1.0.2.min.js
rename to data/js/lib/jquery.pnotify-1.0.2.min.js
diff --git a/data/js/jquery.qtip-2011-11-14.min.js b/data/js/lib/jquery.qtip-2011-11-14.min.js
similarity index 100%
rename from data/js/jquery.qtip-2011-11-14.min.js
rename to data/js/lib/jquery.qtip-2011-11-14.min.js
diff --git a/data/js/jquery.scrolltopcontrol-1.1.js b/data/js/lib/jquery.scrolltopcontrol-1.1.js
similarity index 100%
rename from data/js/jquery.scrolltopcontrol-1.1.js
rename to data/js/lib/jquery.scrolltopcontrol-1.1.js
diff --git a/data/js/jquery.selectboxes.min.js b/data/js/lib/jquery.selectboxes.min.js
similarity index 100%
rename from data/js/jquery.selectboxes.min.js
rename to data/js/lib/jquery.selectboxes.min.js
diff --git a/data/js/jquery.tablesorter-2.1.10.min.js b/data/js/lib/jquery.tablesorter-2.1.10.min.js
similarity index 100%
rename from data/js/jquery.tablesorter-2.1.10.min.js
rename to data/js/lib/jquery.tablesorter-2.1.10.min.js
diff --git a/data/js/jquery.tablesorter.widgets.min.js b/data/js/lib/jquery.tablesorter.widgets.min.js
similarity index 100%
rename from data/js/jquery.tablesorter.widgets.min.js
rename to data/js/lib/jquery.tablesorter.widgets.min.js
diff --git a/data/js/superfish-1.4.8.js b/data/js/lib/superfish-1.4.8.js
similarity index 100%
rename from data/js/superfish-1.4.8.js
rename to data/js/lib/superfish-1.4.8.js
diff --git a/data/js/supersubs-0.2b.js b/data/js/lib/supersubs-0.2b.js
similarity index 100%
rename from data/js/supersubs-0.2b.js
rename to data/js/lib/supersubs-0.2b.js
diff --git a/lib/jsonrpclib/SimpleJSONRPCServer.py b/lib/jsonrpclib/SimpleJSONRPCServer.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4b20afcb1c586895281d6c140c3f93bf969535c
--- /dev/null
+++ b/lib/jsonrpclib/SimpleJSONRPCServer.py
@@ -0,0 +1,229 @@
+import lib.jsonrpclib as jsonrpclib
+from lib.jsonrpclib import Fault
+from lib.jsonrpclib.jsonrpc import USE_UNIX_SOCKETS
+import SimpleXMLRPCServer
+import SocketServer
+import socket
+import logging
+import os
+import types
+import traceback
+import sys
+try:
+    import fcntl
+except ImportError:
+    # For Windows
+    fcntl = None
+
+def get_version(request):
+    # must be a dict
+    if 'jsonrpc' in request.keys():
+        return 2.0
+    if 'id' in request.keys():
+        return 1.0
+    return None
+    
+def validate_request(request):
+    if type(request) is not types.DictType:
+        fault = Fault(
+            -32600, 'Request must be {}, not %s.' % type(request)
+        )
+        return fault
+    rpcid = request.get('id', None)
+    version = get_version(request)
+    if not version:
+        fault = Fault(-32600, 'Request %s invalid.' % request, rpcid=rpcid)
+        return fault        
+    request.setdefault('params', [])
+    method = request.get('method', None)
+    params = request.get('params')
+    param_types = (types.ListType, types.DictType, types.TupleType)
+    if not method or type(method) not in types.StringTypes or \
+        type(params) not in param_types:
+        fault = Fault(
+            -32600, 'Invalid request parameters or method.', rpcid=rpcid
+        )
+        return fault
+    return True
+
+class SimpleJSONRPCDispatcher(SimpleXMLRPCServer.SimpleXMLRPCDispatcher):
+
+    def __init__(self, encoding=None):
+        SimpleXMLRPCServer.SimpleXMLRPCDispatcher.__init__(self,
+                                        allow_none=True,
+                                        encoding=encoding)
+
+    def _marshaled_dispatch(self, data, dispatch_method = None):
+        response = None
+        try:
+            request = jsonrpclib.loads(data)
+        except Exception, e:
+            fault = Fault(-32700, 'Request %s invalid. (%s)' % (data, e))
+            response = fault.response()
+            return response
+        if not request:
+            fault = Fault(-32600, 'Request invalid -- no request data.')
+            return fault.response()
+        if type(request) is types.ListType:
+            # This SHOULD be a batch, by spec
+            responses = []
+            for req_entry in request:
+                result = validate_request(req_entry)
+                if type(result) is Fault:
+                    responses.append(result.response())
+                    continue
+                resp_entry = self._marshaled_single_dispatch(req_entry)
+                if resp_entry is not None:
+                    responses.append(resp_entry)
+            if len(responses) > 0:
+                response = '[%s]' % ','.join(responses)
+            else:
+                response = ''
+        else:    
+            result = validate_request(request)
+            if type(result) is Fault:
+                return result.response()
+            response = self._marshaled_single_dispatch(request)
+        return response
+
+    def _marshaled_single_dispatch(self, request):
+        # TODO - Use the multiprocessing and skip the response if
+        # it is a notification
+        # Put in support for custom dispatcher here
+        # (See SimpleXMLRPCServer._marshaled_dispatch)
+        method = request.get('method')
+        params = request.get('params')
+        try:
+            response = self._dispatch(method, params)
+        except:
+            exc_type, exc_value, exc_tb = sys.exc_info()
+            fault = Fault(-32603, '%s:%s' % (exc_type, exc_value))
+            return fault.response()
+        if 'id' not in request.keys() or request['id'] == None:
+            # It's a notification
+            return None
+        try:
+            response = jsonrpclib.dumps(response,
+                                        methodresponse=True,
+                                        rpcid=request['id']
+                                        )
+            return response
+        except:
+            exc_type, exc_value, exc_tb = sys.exc_info()
+            fault = Fault(-32603, '%s:%s' % (exc_type, exc_value))
+            return fault.response()
+
+    def _dispatch(self, method, params):
+        func = None
+        try:
+            func = self.funcs[method]
+        except KeyError:
+            if self.instance is not None:
+                if hasattr(self.instance, '_dispatch'):
+                    return self.instance._dispatch(method, params)
+                else:
+                    try:
+                        func = SimpleXMLRPCServer.resolve_dotted_attribute(
+                            self.instance,
+                            method,
+                            True
+                            )
+                    except AttributeError:
+                        pass
+        if func is not None:
+            try:
+                if type(params) is types.ListType:
+                    response = func(*params)
+                else:
+                    response = func(**params)
+                return response
+            except TypeError:
+                return Fault(-32602, 'Invalid parameters.')
+            except:
+                err_lines = traceback.format_exc().splitlines()
+                trace_string = '%s | %s' % (err_lines[-3], err_lines[-1])
+                fault = jsonrpclib.Fault(-32603, 'Server error: %s' % 
+                                         trace_string)
+                return fault
+        else:
+            return Fault(-32601, 'Method %s not supported.' % method)
+
+class SimpleJSONRPCRequestHandler(
+        SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
+    
+    def do_POST(self):
+        if not self.is_rpc_path_valid():
+            self.report_404()
+            return
+        try:
+            max_chunk_size = 10*1024*1024
+            size_remaining = int(self.headers["content-length"])
+            L = []
+            while size_remaining:
+                chunk_size = min(size_remaining, max_chunk_size)
+                L.append(self.rfile.read(chunk_size))
+                size_remaining -= len(L[-1])
+            data = ''.join(L)
+            response = self.server._marshaled_dispatch(data)
+            self.send_response(200)
+        except Exception, e:
+            self.send_response(500)
+            err_lines = traceback.format_exc().splitlines()
+            trace_string = '%s | %s' % (err_lines[-3], err_lines[-1])
+            fault = jsonrpclib.Fault(-32603, 'Server error: %s' % trace_string)
+            response = fault.response()
+        if response == None:
+            response = ''
+        self.send_header("Content-type", "application/json-rpc")
+        self.send_header("Content-length", str(len(response)))
+        self.end_headers()
+        self.wfile.write(response)
+        self.wfile.flush()
+        self.connection.shutdown(1)
+
+class SimpleJSONRPCServer(SocketServer.TCPServer, SimpleJSONRPCDispatcher):
+
+    allow_reuse_address = True
+
+    def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler,
+                 logRequests=True, encoding=None, bind_and_activate=True,
+                 address_family=socket.AF_INET):
+        self.logRequests = logRequests
+        SimpleJSONRPCDispatcher.__init__(self, encoding)
+        # TCPServer.__init__ has an extra parameter on 2.6+, so
+        # check Python version and decide on how to call it
+        vi = sys.version_info
+        self.address_family = address_family
+        if USE_UNIX_SOCKETS and address_family == socket.AF_UNIX:
+            # Unix sockets can't be bound if they already exist in the
+            # filesystem. The convention of e.g. X11 is to unlink
+            # before binding again.
+            if os.path.exists(addr): 
+                try:
+                    os.unlink(addr)
+                except OSError:
+                    logging.warning("Could not unlink socket %s", addr)
+        # if python 2.5 and lower
+        if vi[0] < 3 and vi[1] < 6:
+            SocketServer.TCPServer.__init__(self, addr, requestHandler)
+        else:
+            SocketServer.TCPServer.__init__(self, addr, requestHandler,
+                bind_and_activate)
+        if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
+            flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
+            flags |= fcntl.FD_CLOEXEC
+            fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
+
+class CGIJSONRPCRequestHandler(SimpleJSONRPCDispatcher):
+
+    def __init__(self, encoding=None):
+        SimpleJSONRPCDispatcher.__init__(self, encoding)
+
+    def handle_jsonrpc(self, request_text):
+        response = self._marshaled_dispatch(request_text)
+        print 'Content-Type: application/json-rpc'
+        print 'Content-Length: %d' % len(response)
+        print
+        sys.stdout.write(response)
+
+    handle_xmlrpc = handle_jsonrpc
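A minimal usage sketch for the new SimpleJSONRPCServer (illustrative, not part of this change, and assuming the SickBeard lib/ package is importable): because SimpleJSONRPCDispatcher subclasses SimpleXMLRPCDispatcher, functions are registered exactly as with SimpleXMLRPCServer. The host, port and `add()` function below are invented for the example.

```python
from lib.jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer

def add(a, b):
    return a + b

# register_function()/register_instance() are inherited from
# SimpleXMLRPCDispatcher; serve_forever() comes from SocketServer.TCPServer.
server = SimpleJSONRPCServer(('localhost', 8181))
server.register_function(add)
server.serve_forever()
```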
diff --git a/lib/jsonrpclib/__init__.py b/lib/jsonrpclib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..33f36604f9e4d6bdde227637a4d02307e1d699fb
--- /dev/null
+++ b/lib/jsonrpclib/__init__.py
@@ -0,0 +1,6 @@
+from config import Config
+config = Config.instance()
+from history import History
+history = History.instance()
+from jsonrpc import Server, MultiCall, Fault
+from jsonrpc import ProtocolError, loads, dumps
diff --git a/lib/jsonrpclib/config.py b/lib/jsonrpclib/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d28f1b1fa94b2f6585b007555c62d5867d4c597
--- /dev/null
+++ b/lib/jsonrpclib/config.py
@@ -0,0 +1,38 @@
+import sys
+
+class LocalClasses(dict):
+    def add(self, cls):
+        self[cls.__name__] = cls
+
+class Config(object):
+    """
+    This is pretty much used exclusively for the 'jsonclass' 
+    functionality... set use_jsonclass to False to turn it off.
+    You can change serialize_method and ignore_attribute, or use
+    the local_classes.add(class) to include "local" classes.
+    """
+    use_jsonclass = True
+    # Change to False to keep __jsonclass__ entries raw.
+    serialize_method = '_serialize'
+    # The serialize_method should be a string that references the
+    # method on a custom class object which is responsible for 
+    # returning a tuple of the constructor arguments and a dict of
+    # attributes.
+    ignore_attribute = '_ignore'
+    # The ignore attribute should be a string that references the
+    # attribute on a custom class object which holds strings and / or
+    # references of the attributes the class translator should ignore.
+    classes = LocalClasses()
+    # The list of classes to use for jsonclass translation.
+    version = 2.0
+    # Version of the JSON-RPC spec to support
+    user_agent = 'jsonrpclib/0.1 (Python %s)' % \
+        '.'.join([str(ver) for ver in sys.version_info[0:3]])
+    # User agent to use for calls.
+    _instance = None
+    
+    @classmethod
+    def instance(cls):
+        if not cls._instance:
+            cls._instance = cls()
+        return cls._instance
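Illustrative sketch of the hooks the Config docstring describes (the `Bed` class is invented for the example): a custom class can expose a serialize method and an ignore list under the configured attribute names, and is registered through `config.classes` so the class translator can reconstruct it.

```python
from lib.jsonrpclib import config

class Bed(object):
    # attribute names listed here are skipped by the class translator
    # (config.ignore_attribute defaults to '_ignore')
    _ignore = ['_warranty_card']

    def __init__(self, width=0):
        self.width = width
        self._warranty_card = None

    def _serialize(self):
        # config.serialize_method defaults to '_serialize'; it must return
        # a tuple of (constructor args, dict of extra attributes)
        return ([self.width], {})

config.classes.add(Bed)       # make the class known to jsonclass.load()
# config.use_jsonclass = False  # or disable the feature entirely
```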
diff --git a/lib/jsonrpclib/history.py b/lib/jsonrpclib/history.py
new file mode 100644
index 0000000000000000000000000000000000000000..d11863dcda94b9e38bcadc00e552910d8e6ebddb
--- /dev/null
+++ b/lib/jsonrpclib/history.py
@@ -0,0 +1,40 @@
+class History(object):
+    """
+    This holds all the response and request objects for a
+    session. A server using this should call "clear" after
+    each request cycle in order to keep it from clogging 
+    memory.
+    """
+    requests = []
+    responses = []
+    _instance = None
+    
+    @classmethod
+    def instance(cls):
+        if not cls._instance:
+            cls._instance = cls()
+        return cls._instance
+
+    def add_response(self, response_obj):
+        self.responses.append(response_obj)
+    
+    def add_request(self, request_obj):
+        self.requests.append(request_obj)
+
+    @property
+    def request(self):
+        if len(self.requests) == 0:
+            return None
+        else:
+            return self.requests[-1]
+
+    @property
+    def response(self):
+        if len(self.responses) == 0:
+            return None
+        else:
+            return self.responses[-1]
+
+    def clear(self):
+        del self.requests[:]
+        del self.responses[:]
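Sketch of the bookkeeping History provides (assumes a JSON-RPC server is already listening at the illustrative URL): `ServerProxy._run_request` records every raw request and response in the shared singleton, and `clear()` keeps those class-level lists from growing across calls.

```python
from lib import jsonrpclib
from lib.jsonrpclib import history

server = jsonrpclib.Server('http://localhost:8181')  # illustrative URL
server.add(5, 6)

print history.request    # raw JSON string of the last request sent
print history.response   # raw body of the last response received
history.clear()          # call after each cycle to avoid clogging memory
```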
diff --git a/lib/jsonrpclib/jsonclass.py b/lib/jsonrpclib/jsonclass.py
new file mode 100644
index 0000000000000000000000000000000000000000..519bdec7cc098803ed60bf2ce7459dafbe6a4e0a
--- /dev/null
+++ b/lib/jsonrpclib/jsonclass.py
@@ -0,0 +1,145 @@
+import types
+import inspect
+import re
+import traceback
+
+from lib.jsonrpclib import config
+
+iter_types = [
+    types.DictType,
+    types.ListType,
+    types.TupleType
+]
+
+string_types = [
+    types.StringType,
+    types.UnicodeType
+]
+
+numeric_types = [
+    types.IntType,
+    types.LongType,
+    types.FloatType
+]
+
+value_types = [
+    types.BooleanType,
+    types.NoneType
+]
+
+supported_types = iter_types+string_types+numeric_types+value_types
+invalid_module_chars = r'[^a-zA-Z0-9\_\.]'
+
+class TranslationError(Exception):
+    pass
+
+def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]):
+    if not serialize_method:
+        serialize_method = config.serialize_method
+    if not ignore_attribute:
+        ignore_attribute = config.ignore_attribute
+    obj_type = type(obj)
+    # Parse / return default "types"...
+    if obj_type in numeric_types+string_types+value_types:
+        return obj
+    if obj_type in iter_types:
+        if obj_type in (types.ListType, types.TupleType):
+            new_obj = []
+            for item in obj:
+                new_obj.append(dump(item, serialize_method,
+                                     ignore_attribute, ignore))
+            if obj_type is types.TupleType:
+                new_obj = tuple(new_obj)
+            return new_obj
+        # It's a dict...
+        else:
+            new_obj = {}
+            for key, value in obj.iteritems():
+                new_obj[key] = dump(value, serialize_method,
+                                     ignore_attribute, ignore)
+            return new_obj
+    # It's not a standard type, so it needs __jsonclass__
+    module_name = inspect.getmodule(obj).__name__
+    class_name = obj.__class__.__name__
+    json_class = class_name
+    if module_name not in ['', '__main__']:
+        json_class = '%s.%s' % (module_name, json_class)
+    return_obj = {"__jsonclass__":[json_class,]}
+    # If a serialization method is defined..
+    if serialize_method in dir(obj):
+        # Params can be a dict (keyword) or list (positional)
+        # Attrs MUST be a dict.
+        serialize = getattr(obj, serialize_method)
+        params, attrs = serialize()
+        return_obj['__jsonclass__'].append(params)
+        return_obj.update(attrs)
+        return return_obj
+    # Otherwise, try to figure it out
+    # Obviously, we can't assume to know anything about the
+    # parameters passed to __init__
+    return_obj['__jsonclass__'].append([])
+    attrs = {}
+    ignore_list = getattr(obj, ignore_attribute, [])+ignore
+    for attr_name, attr_value in obj.__dict__.iteritems():
+        if type(attr_value) in supported_types and \
+                attr_name not in ignore_list and \
+                attr_value not in ignore_list:
+            attrs[attr_name] = dump(attr_value, serialize_method,
+                                     ignore_attribute, ignore)
+    return_obj.update(attrs)
+    return return_obj
+
+def load(obj):
+    if type(obj) in string_types+numeric_types+value_types:
+        return obj
+    if type(obj) is types.ListType:
+        return_list = []
+        for entry in obj:
+            return_list.append(load(entry))
+        return return_list
+    # Otherwise, it's a dict type
+    if '__jsonclass__' not in obj.keys():
+        return_dict = {}
+        for key, value in obj.iteritems():
+            new_value = load(value)
+            return_dict[key] = new_value
+        return return_dict
+    # It's a dict, and it's a __jsonclass__
+    orig_module_name = obj['__jsonclass__'][0]
+    params = obj['__jsonclass__'][1]
+    if orig_module_name == '':
+        raise TranslationError('Module name empty.')
+    json_module_clean = re.sub(invalid_module_chars, '', orig_module_name)
+    if json_module_clean != orig_module_name:
+        raise TranslationError('Module name %s has invalid characters.' %
+                               orig_module_name)
+    json_module_parts = json_module_clean.split('.')
+    json_class = None
+    if len(json_module_parts) == 1:
+        # Local class name -- probably means it won't work
+        if json_module_parts[0] not in config.classes.keys():
+            raise TranslationError('Unknown class or module %s.' %
+                                   json_module_parts[0])
+        json_class = config.classes[json_module_parts[0]]
+    else:
+        json_class_name = json_module_parts.pop()
+        json_module_tree = '.'.join(json_module_parts)
+        try:
+            temp_module = __import__(json_module_tree)
+        except ImportError:
+            raise TranslationError('Could not import %s from module %s.' %
+                                   (json_class_name, json_module_tree))
+        json_class = getattr(temp_module, json_class_name)
+    # Creating the object...
+    new_obj = None
+    if type(params) is types.ListType:
+        new_obj = json_class(*params)
+    elif type(params) is types.DictType:
+        new_obj = json_class(**params)
+    else:
+        raise TranslationError('Constructor args must be a dict or list.')
+    for key, value in obj.iteritems():
+        if key == '__jsonclass__':
+            continue
+        setattr(new_obj, key, value)
+    return new_obj
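A round-trip sketch for the `__jsonclass__` convention implemented above (the `Point` class is invented for the example). Classes defined in `__main__` are encoded under their bare name, so they must be registered in `config.classes` for `load()` to resolve them.

```python
from lib.jsonrpclib import config
from lib.jsonrpclib import jsonclass

class Point(object):
    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

config.classes.add(Point)  # lets load() resolve the bare class name

encoded = jsonclass.dump(Point(1, 2))
# -> {'__jsonclass__': ['Point', []], 'x': 1, 'y': 2}
decoded = jsonclass.load(encoded)
print decoded.x, decoded.y   # 1 2
```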
diff --git a/lib/jsonrpclib/jsonrpc.py b/lib/jsonrpclib/jsonrpc.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4d4c33adebfb1ca15de2233b37b11e8353f48bf
--- /dev/null
+++ b/lib/jsonrpclib/jsonrpc.py
@@ -0,0 +1,556 @@
+"""
+Licensed under the Apache License, Version 2.0 (the "License"); 
+you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at 
+
+   http://www.apache.org/licenses/LICENSE-2.0 
+
+Unless required by applicable law or agreed to in writing, software 
+distributed under the License is distributed on an "AS IS" BASIS, 
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and 
+limitations under the License. 
+
+============================
+JSONRPC Library (jsonrpclib)
+============================
+
+This library is a JSON-RPC v.2 (proposed) implementation which
+follows the xmlrpclib API for portability between clients. It
+uses the same Server / ServerProxy, loads, dumps, etc. syntax,
+while providing features not present in XML-RPC like:
+
+* Keyword arguments
+* Notifications
+* Versioning
+* Batches and batch notifications
+
+Eventually, I'll add a SimpleXMLRPCServer compatible library,
+and other things to tie the thing off nicely. :)
+
+For a quick-start, just open a console and type the following,
+replacing the server address, method, and parameters 
+appropriately.
+>>> import jsonrpclib
+>>> server = jsonrpclib.Server('http://localhost:8181')
+>>> server.add(5, 6)
+11
+>>> server._notify.add(5, 6)
+>>> batch = jsonrpclib.MultiCall(server)
+>>> batch.add(3, 50)
+>>> batch.add(2, 3)
+>>> batch._notify.add(3, 5)
+>>> batch()
+[53, 5]
+
+See http://code.google.com/p/jsonrpclib/ for more info.
+"""
+
+import types
+import sys
+from xmlrpclib import Transport as XMLTransport
+from xmlrpclib import SafeTransport as XMLSafeTransport
+from xmlrpclib import ServerProxy as XMLServerProxy
+from xmlrpclib import _Method as XML_Method
+import time
+import string
+import random
+
+# Library includes
+import lib.jsonrpclib
+from lib.jsonrpclib import config
+from lib.jsonrpclib import history
+
+# JSON library importing
+cjson = None
+json = None
+try:
+    import cjson
+except ImportError:
+    try:
+        import json
+    except ImportError:
+        try:
+            import lib.simplejson as json
+        except ImportError:
+            raise ImportError(
+                'You must have the cjson, json, or simplejson ' +
+                'module(s) available.'
+            )
+
+IDCHARS = string.ascii_lowercase+string.digits
+
+class UnixSocketMissing(Exception):
+    """ 
+    Just a properly named Exception if Unix Sockets usage is 
+    attempted on a platform that doesn't support them (Windows)
+    """
+    pass
+
+#JSON Abstractions
+
+def jdumps(obj, encoding='utf-8'):
+    # Do 'serialize' test at some point for other classes
+    global cjson
+    if cjson:
+        return cjson.encode(obj)
+    else:
+        return json.dumps(obj, encoding=encoding)
+
+def jloads(json_string):
+    global cjson
+    if cjson:
+        return cjson.decode(json_string)
+    else:
+        return json.loads(json_string)
+
+
+# XMLRPClib re-implementations
+
+class ProtocolError(Exception):
+    pass
+
+class TransportMixIn(object):
+    """ Just extends the XMLRPC transport where necessary. """
+    user_agent = config.user_agent
+    # for Python 2.7 support
+    _connection = None
+
+    def send_content(self, connection, request_body):
+        connection.putheader("Content-Type", "application/json-rpc")
+        connection.putheader("Content-Length", str(len(request_body)))
+        connection.endheaders()
+        if request_body:
+            connection.send(request_body)
+
+    def getparser(self):
+        target = JSONTarget()
+        return JSONParser(target), target
+
+class JSONParser(object):
+    def __init__(self, target):
+        self.target = target
+
+    def feed(self, data):
+        self.target.feed(data)
+
+    def close(self):
+        pass
+
+class JSONTarget(object):
+    def __init__(self):
+        self.data = []
+
+    def feed(self, data):
+        self.data.append(data)
+
+    def close(self):
+        return ''.join(self.data)
+
+class Transport(TransportMixIn, XMLTransport):
+    pass
+
+class SafeTransport(TransportMixIn, XMLSafeTransport):
+    pass
+from httplib import HTTP, HTTPConnection
+from socket import socket
+
+USE_UNIX_SOCKETS = False
+
+try: 
+    from socket import AF_UNIX, SOCK_STREAM
+    USE_UNIX_SOCKETS = True
+except ImportError:
+    pass
+    
+if (USE_UNIX_SOCKETS):
+    
+    class UnixHTTPConnection(HTTPConnection):
+        def connect(self):
+            self.sock = socket(AF_UNIX, SOCK_STREAM)
+            self.sock.connect(self.host)
+
+    class UnixHTTP(HTTP):
+        _connection_class = UnixHTTPConnection
+
+    class UnixTransport(TransportMixIn, XMLTransport):
+        def make_connection(self, host):
+            import httplib
+            host, extra_headers, x509 = self.get_host_info(host)
+            return UnixHTTP(host)
+
+    
+class ServerProxy(XMLServerProxy):
+    """
+    Unfortunately, much more of this class has to be copied since
+    so much of it does the serialization.
+    """
+
+    def __init__(self, uri, transport=None, encoding=None, 
+                 verbose=0, version=None):
+        import urllib
+        if not version:
+            version = config.version
+        self.__version = version
+        schema, uri = urllib.splittype(uri)
+        if schema not in ('http', 'https', 'unix'):
+            raise IOError('Unsupported JSON-RPC protocol.')
+        if schema == 'unix':
+            if not USE_UNIX_SOCKETS:
+                # Don't like the "generic" Exception...
+                raise UnixSocketMissing("Unix sockets not available.")
+            self.__host = uri
+            self.__handler = '/'
+        else:
+            self.__host, self.__handler = urllib.splithost(uri)
+            if not self.__handler:
+                # Not sure if this is in the JSON spec?
+                #self.__handler = '/'
+                self.__handler = '/'
+        if transport is None:
+            if schema == 'unix':
+                transport = UnixTransport()
+            elif schema == 'https':
+                transport = SafeTransport()
+            else:
+                transport = Transport()
+        self.__transport = transport
+        self.__encoding = encoding
+        self.__verbose = verbose
+
+    def _request(self, methodname, params, rpcid=None):
+        request = dumps(params, methodname, encoding=self.__encoding,
+                        rpcid=rpcid, version=self.__version)
+        response = self._run_request(request)
+        check_for_errors(response)
+        return response['result']
+
+    def _request_notify(self, methodname, params, rpcid=None):
+        request = dumps(params, methodname, encoding=self.__encoding,
+                        rpcid=rpcid, version=self.__version, notify=True)
+        response = self._run_request(request, notify=True)
+        check_for_errors(response)
+        return
+
+    def _run_request(self, request, notify=None):
+        history.add_request(request)
+
+        response = self.__transport.request(
+            self.__host,
+            self.__handler,
+            request,
+            verbose=self.__verbose
+        )
+        
+        # Here, the XMLRPC library translates a single list
+        # response to the single value -- should we do the
+        # same, and require a tuple / list to be passed to
+        # the response object, or expect the Server to be 
+        # outputting the response appropriately?
+        
+        history.add_response(response)
+        if not response:
+            return None
+        return_obj = loads(response)
+        return return_obj
+
+    def __getattr__(self, name):
+        # Same as original, just with new _Method reference
+        return _Method(self._request, name)
+
+    @property
+    def _notify(self):
+        # Just like __getattr__, but with notify namespace.
+        return _Notify(self._request_notify)
+
+
+class _Method(XML_Method):
+    
+    def __call__(self, *args, **kwargs):
+        if len(args) > 0 and len(kwargs) > 0:
+            raise ProtocolError('Cannot use both positional ' +
+                'and keyword arguments (according to JSON-RPC spec.)')
+        if len(args) > 0:
+            return self.__send(self.__name, args)
+        else:
+            return self.__send(self.__name, kwargs)
+
+    def __getattr__(self, name):
+        self.__name = '%s.%s' % (self.__name, name)
+        return self
+        # The old method returned a new instance, but this seemed wasteful.
+        # The only thing that changes is the name.
+        #return _Method(self.__send, "%s.%s" % (self.__name, name))
+
+class _Notify(object):
+    def __init__(self, request):
+        self._request = request
+
+    def __getattr__(self, name):
+        return _Method(self._request, name)
+        
+# Batch implementation
+
+class MultiCallMethod(object):
+    
+    def __init__(self, method, notify=False):
+        self.method = method
+        self.params = []
+        self.notify = notify
+
+    def __call__(self, *args, **kwargs):
+        if len(kwargs) > 0 and len(args) > 0:
+            raise ProtocolError('JSON-RPC does not support both ' +
+                                'positional and keyword arguments.')
+        if len(kwargs) > 0:
+            self.params = kwargs
+        else:
+            self.params = args
+
+    def request(self, encoding=None, rpcid=None):
+        return dumps(self.params, self.method, version=2.0,
+                     encoding=encoding, rpcid=rpcid, notify=self.notify)
+
+    def __repr__(self):
+        return '%s' % self.request()
+        
+    def __getattr__(self, method):
+        new_method = '%s.%s' % (self.method, method)
+        self.method = new_method
+        return self
+
+class MultiCallNotify(object):
+    
+    def __init__(self, multicall):
+        self.multicall = multicall
+
+    def __getattr__(self, name):
+        new_job = MultiCallMethod(name, notify=True)
+        self.multicall._job_list.append(new_job)
+        return new_job
+
+class MultiCallIterator(object):
+    
+    def __init__(self, results):
+        self.results = results
+
+    def __iter__(self):
+        for i in range(0, len(self.results)):
+            yield self[i]
+        raise StopIteration
+
+    def __getitem__(self, i):
+        item = self.results[i]
+        check_for_errors(item)
+        return item['result']
+
+    def __len__(self):
+        return len(self.results)
+
+class MultiCall(object):
+    
+    def __init__(self, server):
+        self._server = server
+        self._job_list = []
+
+    def _request(self):
+        if len(self._job_list) < 1:
+            # Should we alert? This /is/ pretty obvious.
+            return
+        request_body = '[ %s ]' % ','.join([job.request() for
+                                          job in self._job_list])
+        responses = self._server._run_request(request_body)
+        del self._job_list[:]
+        if not responses:
+            responses = []
+        return MultiCallIterator(responses)
+
+    @property
+    def _notify(self):
+        return MultiCallNotify(self)
+
+    def __getattr__(self, name):
+        new_job = MultiCallMethod(name)
+        self._job_list.append(new_job)
+        return new_job
+
+    __call__ = _request
+
+# These lines conform to xmlrpclib's "compatibility" line. 
+# Not really sure if we should include these, but oh well.
+Server = ServerProxy
+
+class Fault(object):
+    # JSON-RPC error class
+    def __init__(self, code=-32000, message='Server error', rpcid=None):
+        self.faultCode = code
+        self.faultString = message
+        self.rpcid = rpcid
+
+    def error(self):
+        return {'code':self.faultCode, 'message':self.faultString}
+
+    def response(self, rpcid=None, version=None):
+        if not version:
+            version = config.version
+        if rpcid:
+            self.rpcid = rpcid
+        return dumps(
+            self, methodresponse=True, rpcid=self.rpcid, version=version
+        )
+
+    def __repr__(self):
+        return '<Fault %s: %s>' % (self.faultCode, self.faultString)
+
+def random_id(length=8):
+    return_id = ''
+    for i in range(length):
+        return_id += random.choice(IDCHARS)
+    return return_id
+
+class Payload(dict):
+    def __init__(self, rpcid=None, version=None):
+        if not version:
+            version = config.version
+        self.id = rpcid
+        self.version = float(version)
+    
+    def request(self, method, params=[]):
+        if type(method) not in types.StringTypes:
+            raise ValueError('Method name must be a string.')
+        if not self.id:
+            self.id = random_id()
+        request = { 'id':self.id, 'method':method }
+        if params:
+            request['params'] = params
+        if self.version >= 2:
+            request['jsonrpc'] = str(self.version)
+        return request
+
+    def notify(self, method, params=[]):
+        request = self.request(method, params)
+        if self.version >= 2:
+            del request['id']
+        else:
+            request['id'] = None
+        return request
+
+    def response(self, result=None):
+        response = {'result':result, 'id':self.id}
+        if self.version >= 2:
+            response['jsonrpc'] = str(self.version)
+        else:
+            response['error'] = None
+        return response
+
+    def error(self, code=-32000, message='Server error.'):
+        error = self.response()
+        if self.version >= 2:
+            del error['result']
+        else:
+            error['result'] = None
+        error['error'] = {'code':code, 'message':message}
+        return error
+
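+# For reference, a sketch of the shapes Payload produces for a version 2.0
+# payload (the id and method values below are illustrative):
+#
+#   request('add', [1, 2])  -> {'id': 'a1b2c3d4', 'method': 'add',
+#                               'params': [1, 2], 'jsonrpc': '2.0'}
+#   notify('add', [1, 2])   -> the same dict without the 'id' key
+#   response(3)             -> {'id': 'a1b2c3d4', 'result': 3, 'jsonrpc': '2.0'}
+#   error(-32601, 'Method not found')
+#                           -> {'id': 'a1b2c3d4', 'jsonrpc': '2.0',
+#                               'error': {'code': -32601, 'message': 'Method not found'}}
+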
+def dumps(params=[], methodname=None, methodresponse=None, 
+        encoding=None, rpcid=None, version=None, notify=None):
+    """
+    This differs from the Python implementation in that it implements 
+    the rpcid argument since the 2.0 spec requires it for responses.
+    """
+    if not version:
+        version = config.version
+    valid_params = (types.TupleType, types.ListType, types.DictType)
+    if type(methodname) in types.StringTypes and \
+            type(params) not in valid_params and \
+            not isinstance(params, Fault):
+        # If we have a method name but params is neither list-like nor a
+        # Fault instance, error out.
+        raise TypeError('Params must be a dict, list, tuple or Fault ' +
+                        'instance.')
+    # Begin parsing object
+    payload = Payload(rpcid=rpcid, version=version)
+    if not encoding:
+        encoding = 'utf-8'
+    if type(params) is Fault:
+        response = payload.error(params.faultCode, params.faultString)
+        return jdumps(response, encoding=encoding)
+    if type(methodname) not in types.StringTypes and methodresponse != True:
+        raise ValueError('Method name must be a string, or methodresponse '+
+                         'must be set to True.')
+    if config.use_jsonclass == True:
+        from lib.jsonrpclib import jsonclass
+        params = jsonclass.dump(params)
+    if methodresponse is True:
+        if rpcid is None:
+            raise ValueError('A method response must have an rpcid.')
+        response = payload.response(params)
+        return jdumps(response, encoding=encoding)
+    request = None
+    if notify == True:
+        request = payload.notify(methodname, params)
+    else:
+        request = payload.request(methodname, params)
+    return jdumps(request, encoding=encoding)
+
+def loads(data):
+    """
+    This differs from the Python implementation, in that it returns
+    the request structure in Dict format instead of the method, params.
+    It will return a list in the case of a batch request / response.
+    """
+    if data == '':
+        # notification
+        return None
+    result = jloads(data)
+    # if the above raises an error, the implementing server code 
+    # should return something like the following:
+    # { 'jsonrpc':'2.0', 'error': fault.error(), id: None }
+    if config.use_jsonclass == True:
+        from lib.jsonrpclib import jsonclass
+        result = jsonclass.load(result)
+    return result
+
+def check_for_errors(result):
+    if not result:
+        # Notification
+        return result
+    if type(result) is not types.DictType:
+        raise TypeError('Response is not a dict.')
+    if 'jsonrpc' in result.keys() and float(result['jsonrpc']) > 2.0:
+        raise NotImplementedError('JSON-RPC version not yet supported.')
+    if 'result' not in result.keys() and 'error' not in result.keys():
+        raise ValueError('Response does not have a result or error key.')
+    if 'error' in result.keys() and result['error'] != None:
+        code = result['error']['code']
+        message = result['error']['message']
+        raise ProtocolError((code, message))
+    return result
+
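+# e.g. an error response such as
+#   {'jsonrpc': '2.0', 'id': 'a1b2c3d4',
+#    'error': {'code': -32601, 'message': 'Method not found'}}
+# makes check_for_errors above raise ProtocolError((-32601, 'Method not found')).
+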
+def isbatch(result):
+    if type(result) not in (types.ListType, types.TupleType):
+        return False
+    if len(result) < 1:
+        return False
+    if type(result[0]) is not types.DictType:
+        return False
+    if 'jsonrpc' not in result[0].keys():
+        return False
+    try:
+        version = float(result[0]['jsonrpc'])
+    except ValueError:
+        raise ProtocolError('"jsonrpc" key must be a float(able) value.')
+    if version < 2:
+        return False
+    return True
+
+def isnotification(request):
+    if 'id' not in request.keys():
+        # 2.0 notification
+        return True
+    if request['id'] == None:
+        # 1.0 notification
+        return True
+    return False
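+# Quick round-trip sketch (illustrative; the method name and params are made
+# up, and the rpcid is fixed only to make the example deterministic):
+#
+#   >>> body = dumps([1, 2], 'sample.add', version=2.0, rpcid='abc')
+#   >>> request = loads(body)
+#   >>> isnotification(request)
+#   False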
diff --git a/readme.md b/readme.md
index 0fae42e6e0317e48f19e989d6f6627f7b62c54f0..2865d44e8cb96e7c57d99cd73773502cdb7d460d 100644
--- a/readme.md
+++ b/readme.md
@@ -30,6 +30,7 @@ Sick Beard makes use of the following projects:
 * [Python GNTP][pythongntp]
 * [SocksiPy][socks]
 * [python-dateutil][dateutil]
+* [jsonrpclib][jsonrpclib]
 
 ## Dependencies
 
@@ -51,4 +52,5 @@ If you find a bug please report it or it'll never get fixed. Verify that it hasn
 [dateutil]: http://labix.org/python-dateutil
 [googledownloads]: http://code.google.com/p/sickbeard/downloads/list
 [googleissues]: http://code.google.com/p/sickbeard/issues/list
-[googlenewissue]: http://code.google.com/p/sickbeard/issues/entry
\ No newline at end of file
+[googlenewissue]: http://code.google.com/p/sickbeard/issues/entry
+[jsonrpclib]: https://github.com/joshmarshall/jsonrpclib
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
old mode 100755
new mode 100644
index 324a66ada36ad22312977cf488b9540143f7c5bb..533983356cfd7b2d76799098386786c85f6ae878
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -157,13 +157,12 @@ TVTORRENTS_DIGEST = None
 TVTORRENTS_HASH = None
 
 BTN = False
-BTN_USER_ID = None
-BTN_AUTH_TOKEN = None
-BTN_PASSKEY = None
-BTN_AUTHKEY = None
+BTN_API_KEY = None
 
 TORRENT_DIR = None
 
+ADD_SHOWS_WO_DIR = None
+CREATE_MISSING_SHOW_DIRS = None
 RENAME_EPISODES = False
 PROCESS_AUTOMATICALLY = False
 KEEP_PROCESSED_DIR = False
@@ -393,7 +392,7 @@ def initialize(consoleLogging=True):
                 USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_UPDATE_LIBRARY, \
                 PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \
                 showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, showList, loadingShowList, \
-                NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, BTN, BTN_USER_ID, BTN_AUTH_TOKEN, BTN_PASSKEY, BTN_AUTHKEY, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, \
+                NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, BTN, BTN_API_KEY, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, \
                 SEARCH_FREQUENCY, DEFAULT_SEARCH_FREQUENCY, BACKLOG_SEARCH_FREQUENCY, \
                 QUALITY_DEFAULT, SEASON_FOLDERS_FORMAT, SEASON_FOLDERS_DEFAULT, STATUS_DEFAULT, \
                 GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, \
@@ -414,7 +413,8 @@ def initialize(consoleLogging=True):
                 USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_SYNOINDEX, \
                 USE_BANNER, USE_LISTVIEW, METADATA_XBMC, METADATA_MEDIABROWSER, METADATA_PS3, METADATA_SYNOLOGY, metadata_provider_dict, \
                 NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \
-                COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS
+                COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CREATE_MISSING_SHOW_DIRS, \
+                ADD_SHOWS_WO_DIR
 
         if __INITIALIZED__:
             return False
@@ -543,7 +543,9 @@ def initialize(consoleLogging=True):
         RENAME_EPISODES = check_setting_int(CFG, 'General', 'rename_episodes', 1)
         KEEP_PROCESSED_DIR = check_setting_int(CFG, 'General', 'keep_processed_dir', 1)
         MOVE_ASSOCIATED_FILES = check_setting_int(CFG, 'General', 'move_associated_files', 0)
-
+        CREATE_MISSING_SHOW_DIRS = check_setting_int(CFG, 'General', 'create_missing_show_dirs', 0)
+        ADD_SHOWS_WO_DIR = check_setting_int(CFG, 'General', 'add_shows_wo_dir', 0)
+        
         EZRSS = bool(check_setting_int(CFG, 'General', 'use_torrent', 0))
         if not EZRSS:
             EZRSS = bool(check_setting_int(CFG, 'EZRSS', 'ezrss', 0))
@@ -553,10 +555,7 @@ def initialize(consoleLogging=True):
         TVTORRENTS_HASH = check_setting_str(CFG, 'TVTORRENTS', 'tvtorrents_hash', '')
 
         BTN = bool(check_setting_int(CFG, 'BTN', 'btn', 0))    
-        BTN_USER_ID = check_setting_str(CFG, 'BTN', 'btn_user_id', '')
-        BTN_AUTH_TOKEN = check_setting_str(CFG, 'BTN', 'btn_auth_token', '')    
-        BTN_AUTHKEY = check_setting_str(CFG, 'BTN', 'btn_authkey', '')
-        BTN_PASSKEY = check_setting_str(CFG, 'BTN', 'btn_passkey', '')
+        BTN_API_KEY = check_setting_str(CFG, 'BTN', 'btn_api_key', '')
 
         NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0))
         NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '')
@@ -1060,6 +1059,8 @@ def save_config():
     new_config['General']['move_associated_files'] = int(MOVE_ASSOCIATED_FILES)
     new_config['General']['process_automatically'] = int(PROCESS_AUTOMATICALLY)
     new_config['General']['rename_episodes'] = int(RENAME_EPISODES)
+    new_config['General']['create_missing_show_dirs'] = CREATE_MISSING_SHOW_DIRS
+    new_config['General']['add_shows_wo_dir'] = ADD_SHOWS_WO_DIR
     
     new_config['General']['extra_scripts'] = '|'.join(EXTRA_SCRIPTS)
     new_config['General']['git_path'] = GIT_PATH
@@ -1079,10 +1080,7 @@ def save_config():
 
     new_config['BTN'] = {}
     new_config['BTN']['btn'] = int(BTN)
-    new_config['BTN']['btn_user_id'] = BTN_USER_ID
-    new_config['BTN']['btn_auth_token'] = BTN_AUTH_TOKEN
-    new_config['BTN']['btn_authkey'] = BTN_AUTHKEY
-    new_config['BTN']['btn_passkey'] = BTN_PASSKEY
+    new_config['BTN']['btn_api_key'] = BTN_API_KEY
 
     new_config['NZBs'] = {}
     new_config['NZBs']['nzbs'] = int(NZBS)
diff --git a/sickbeard/common.py b/sickbeard/common.py
index 4cab684f7b334c50ef6a75fe913d5666bf539aa9..a7fab222a64e6633de8ad5b3851e38c438493596 100644
--- a/sickbeard/common.py
+++ b/sickbeard/common.py
@@ -124,7 +124,7 @@ class Quality:
 
         checkName = lambda list, func: func([re.search(x, name, re.I) for x in list])
 
-        if checkName(["(pdtv|hdtv|dsr).(xvid|x264)"], all) and not checkName(["(720|1080)[pi]"], all):
+        if checkName(["(pdtv|hdtv|dsr|tvrip).(xvid|x264)"], all) and not checkName(["(720|1080)[pi]"], all):
             return Quality.SDTV
         elif checkName(["(dvdrip|bdrip)(.ws)?.(xvid|divx|x264)"], any) and not checkName(["(720|1080)[pi]"], all):
             return Quality.SDDVD
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 62d5046b1c34bd5106c5c2ae1f4b99cf2e8bd14f..18c5016277ae7eee90dd03aa5bdefd0186212a83 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -114,8 +114,8 @@ def sanitizeFileName (name):
     name = re.sub(r'[\\/\*]', '-', name)
     name = re.sub(r'[:"<>|?]', '', name)
     
-    # remove leading/trailing periods
-    name = re.sub(r'(^\.+|\.+$)', '', name)
+    # remove leading/trailing periods and spaces
+    name = name.strip(' .')
     
     return name
 
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
old mode 100755
new mode 100644
index f8070510281a4df5e85c74f4c397b19328a1031a..db081874719350cfb15f796684ea158fa5a4742f
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -756,6 +756,18 @@ class PostProcessor(object):
             except OSError, IOError:
                 raise exceptions.PostProcessingFailed("Unable to delete the existing files")
         
+        # if the show directory doesn't exist then make it if allowed
+        if not ek.ek(os.path.isdir, ep_obj.show.location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
+            self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
+            try:
+                ek.ek(os.mkdir, ep_obj.show.location)
+
+            except (OSError, IOError):
+                raise exceptions.PostProcessingFailed("Unable to create the show directory: "+ep_obj.show.location)
+        
+            # get metadata for the show (but not episode because it hasn't been fully processed)
+            ep_obj.show.writeMetadata(True)
+            
         # find the destination folder
         try:
             dest_path = self._find_ep_destination_folder(ep_obj)
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index c332506536c1526b3b19521686e8d07448559674..ea854ac3da06297a3d222592ecb82f0a96ec1203 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -1,72 +1,333 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>. 
-
-import sickbeard
-import generic
-
-from sickbeard import logger
-from sickbeard import tvcache
-
-class BTNProvider(generic.TorrentProvider):
-
-    def __init__(self):
-
-        generic.TorrentProvider.__init__(self, "BTN")
-        
-        self.supportsBacklog = False
-
-        self.cache = BTNCache(self)
-
-        self.url = 'http://broadcasthe.net/'
-
-    def isEnabled(self):
-        return sickbeard.BTN
-        
-    def imageName(self):
-        return 'btn.gif'
-
-class BTNCache(tvcache.TVCache):
-
-    def __init__(self, provider):
-
-        tvcache.TVCache.__init__(self, provider)
-
-        # only poll BTN every 15 minutes max
-        self.minTime = 15
-
-    def _getRSSData(self):
-        url = 'https://broadcasthe.net/feeds.php?feed=torrents_all&user='+ sickbeard.BTN_USER_ID +'&auth='+ sickbeard.BTN_AUTH_TOKEN +'&passkey='+ sickbeard.BTN_PASSKEY +'&authkey='+ sickbeard.BTN_AUTHKEY
-        logger.log(u"BTN cache update URL: "+ url, logger.DEBUG)
-
-        data = self.provider.getURL(url)
-
-        return data
-
-    def _parseItem(self, item):
-
-        (title, url) = self.provider._get_title_and_url(item)
-
-        if not title or not url:
-            logger.log(u"The XML returned from the BTN RSS feed is incomplete, this result is unusable", logger.ERROR)
-            return
-
-        logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG)
-
-        self._addCacheEntry(title, url)
-
-provider = BTNProvider()
\ No newline at end of file
+# coding=utf-8
+# Author: Daniël Heimans
+# URL: http://code.google.com/p/sickbeard
+#
+# This file is part of Sick Beard.
+# 
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>. 
+
+import sickbeard
+import generic
+
+from sickbeard import scene_exceptions
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard.helpers import sanitizeSceneName
+from sickbeard.common import Quality
+from sickbeard.exceptions import ex, AuthException
+
+from lib import jsonrpclib
+import datetime
+import time
+import socket
+import math
+import pprint
+
+class BTNProvider(generic.TorrentProvider):
+    
+    def __init__(self):
+        
+        generic.TorrentProvider.__init__(self, "BTN")
+        
+        self.supportsBacklog = True
+        self.cache = BTNCache(self)
+
+        self.url = "http://broadcasthe.net"
+    
+    def isEnabled(self):
+        return sickbeard.BTN
+    
+    def imageName(self):
+        return 'btn.gif'
+
+    def checkAuthFromData(self, data):
+        result = True
+        if 'api-error' in data:
+            logger.log("Error in sickbeard data retrieval: " + data['api-error'], logger.ERROR)
+            result = False
+
+        return result
+
+    def _doSearch(self, search_params, show=None):
+        params = {}
+        apikey = sickbeard.BTN_API_KEY
+
+        if search_params:
+            params.update(search_params)
+
+        search_results = self._api_call(apikey, params)
+        
+        if not search_results:
+            return []
+
+        if 'torrents' in search_results:
+            found_torrents = search_results['torrents']
+        else:
+            found_torrents = {}
+
+        # We got something, and we know the API sends back at most 1000
+        # results per request. If there are more than 1000 results for our
+        # query, keep requesting until we've got everything.
+        # The API allows at most 150 requests per minute, so limit to that.
+        max_pages = 150
+        results_per_page = 1000.0
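+        # e.g. if the API reports 2500 matches, ceil(2500 / 1000.0) = 3
+        # follow-up requests are made below, at offsets 1000, 2000 and 3000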
+
+        if 'results' in search_results and search_results['results'] >= results_per_page:
+            pages_needed = int(math.ceil(int(search_results['results']) / results_per_page))
+            if pages_needed > max_pages:
+                pages_needed = max_pages
+            
+            # +1 because range(1,4) = 1, 2, 3
+            for page in range(1,pages_needed+1):
+                search_results = self._api_call(apikey, params, results_per_page, page * results_per_page)
+                # Note that these are individual requests and might time out individually,
+                # which would result in 'gaps' in the results. There is no way around that.
+                if 'torrents' in search_results:
+                    found_torrents.update(search_results['torrents'])
+
+        results = []
+
+        for torrentid, torrent_info in found_torrents.iteritems():
+            (title, url) = self._get_title_and_url(torrent_info)
+
+            if not title or not url:
+                logger.log(u"The BTN provider did not return both a valid title and URL for search parameters: " + str(params) + " but returned " + str(torrent_info) + ", skipping it", logger.WARNING)
+                continue
+            results.append(torrent_info)
+
+#        Disabled this because it overspammed the debug log a bit too much
+#        logger.log(u'BTN provider returning the following results for search parameters: ' + str(params), logger.DEBUG)
+#        for result in results:
+#            (title, result) = self._get_title_and_url(result)
+#            logger.log(title, logger.DEBUG)
+            
+        return results
+
+    def _api_call(self, apikey, params={}, results_per_page=1000, offset=0):
+        server = jsonrpclib.Server('http://api.btnapps.net')
+        
+        search_results = {}
+        try:
+            search_results = server.getTorrentsSearch(apikey, params, int(results_per_page), int(offset))
+        except jsonrpclib.jsonrpc.ProtocolError, error:
+            logger.log(u"JSON-RPC protocol error while accessing BTN API: " + ex(error), logger.ERROR)
+            search_results = {'api-error': ex(error)}
+            return search_results
+        except socket.timeout:
+            logger.log(u"Timeout while accessing BTN API", logger.WARNING)
+        except socket.error, error:
+            # Note that sometimes timeouts are thrown as socket errors
+            logger.log(u"Socket error while accessing BTN API: " + error[1], logger.ERROR)
+        except Exception, error:
+            errorstring = str(error)
+            if errorstring.startswith('<') and errorstring.endswith('>'):
+                errorstring = errorstring[1:-1]
+            logger.log(u"Unknown error while accessing BTN API: " + errorstring, logger.ERROR)
+
+        return search_results
+
+    def _get_title_and_url(self, search_result):
+        
+        # The BTN API gives a lot of information in response, 
+        # however Sick Beard is built mostly around Scene or 
+        # release names, which is why we are using them here. 
+        if 'ReleaseName' in search_result and search_result['ReleaseName']:
+            title = search_result['ReleaseName']
+        else:
+            # If we don't have a release name we need to get creative
+            title = u''
+            if 'Series' in search_result:
+                title += search_result['Series'] 
+            if 'GroupName' in search_result:
+                title += '.' + search_result['GroupName'] if title else search_result['GroupName']
+            if 'Resolution' in search_result:
+                title += '.' + search_result['Resolution'] if title else search_result['Resolution']
+            if 'Source' in search_result:
+                title += '.' + search_result['Source'] if title else search_result['Source']
+            if 'Codec' in search_result:
+                title += '.' + search_result['Codec'] if title else search_result['Codec']
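+            # e.g. this fallback can yield a title like
+            # "Some Show.GROUP.720p.HDTV.x264" (field values are illustrative)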
+        
+        if 'DownloadURL' in search_result:
+            url = search_result['DownloadURL']
+        else:
+            url = None
+
+        return (title, url)
+
+    def _get_season_search_strings(self, show, season=None):
+        if not show:
+            return [{}]
+
+        search_params = []
+
+        name_exceptions = scene_exceptions.get_scene_exceptions(show.tvdbid) + [show.name]
+        for name in name_exceptions:
+
+            current_params = {}
+
+            if show.tvdbid:
+                current_params['tvdb'] = show.tvdbid
+            elif show.tvrid:
+                current_params['tvrage'] = show.tvrid
+            else:
+                # Search by name if we don't have tvdb or tvrage id
+                current_params['series'] = sanitizeSceneName(name)
+
+            if season != None:
+                whole_season_params = current_params.copy()
+                partial_season_params = current_params.copy()
+                # Search for entire seasons: no need to do special things for air by date shows
+                whole_season_params['category'] = 'Season'
+                whole_season_params['name'] = 'Season ' + str(season)
+
+                search_params.append(whole_season_params)
+
+                # Search for episodes in the season
+                partial_season_params['category'] = 'Episode'
+                
+                if show.air_by_date:
+                    # Search for the year of the air by date show
+                    partial_season_params['name'] = str(season.split('-')[0])
+                else:
+                    # Search for any result which has Sxx in the name
+                    partial_season_params['name'] = 'S%02d' % int(season)
+
+                search_params.append(partial_season_params)
+
+            else:
+                search_params.append(current_params)
+        
+        return search_params
+
+    def _get_episode_search_strings(self, ep_obj):
+        
+        if not ep_obj:
+            return [{}]
+
+        search_params = {'category':'Episode'}
+
+        if ep_obj.show.tvdbid:
+            search_params['tvdb'] = ep_obj.show.tvdbid
+        elif ep_obj.show.tvrid:
+            search_params['tvrage'] = ep_obj.show.tvrid
+        else:
+            search_params['series'] = sanitizeSceneName(ep_obj.show.name)
+
+        if ep_obj.show.air_by_date:
+            date_str = str(ep_obj.airdate)
+            
+            # BTN uses dots in dates, we just search for the date since that 
+            # combined with the series identifier should result in just one episode
+            search_params['name'] = date_str.replace('-','.')
+
+        else:
+            # Do a general name search for the episode, formatted like SXXEYY
+            search_params['name'] = "S%02dE%02d" % (ep_obj.season,ep_obj.episode)
+
+        to_return = [search_params]
+
+        # only do scene exceptions if we are searching by name
+        if 'series' in search_params:
+            
+            # add new query string for every exception
+            name_exceptions = scene_exceptions.get_scene_exceptions(ep_obj.show.tvdbid)
+            for cur_exception in name_exceptions:
+                
+                # don't add duplicates
+                if cur_exception == ep_obj.show.name:
+                    continue
+
+                # copy all other parameters before setting the show name for this exception
+                cur_return = search_params.copy()
+                cur_return['series'] = sanitizeSceneName(cur_exception)
+                to_return.append(cur_return)
+
+        return to_return
+
+    def getQuality(self, item):
+        quality = None 
+        (title,url) = self._get_title_and_url(item)
+        quality = Quality.nameQuality(title)
+
+        return quality
+
+    def _doGeneralSearch(self, search_string):
+        # 'search' matches as broadly as it can: it can hit the episode overview
+        # and title, for example, so use with caution!
+        return self._doSearch({'search': search_string})
+
+class BTNCache(tvcache.TVCache):
+    
+    def __init__(self, provider):
+        tvcache.TVCache.__init__(self, provider)
+        
+        # At least 15 minutes between queries
+        self.minTime = 15
+
+    def updateCache(self):
+        if not self.shouldUpdate():
+            return
+        
+        data = self._getRSSData()
+
+        # As long as we got something from the provider we count it as an update 
+        if data:
+            self.setLastUpdate()
+        else:
+            return []
+        
+        logger.log(u"Clearing "+self.provider.name+" cache and updating with new information")
+        self._clearCache()
+
+        if not self._checkAuth(data):
+            raise AuthException("Your authentication info for "+self.provider.name+" is incorrect, check your config")
+
+        # By now we know we've got data and no auth errors, all we need to do is put it in the database
+        for item in data:
+            self._parseItem(item)
+
+    def _getRSSData(self):
+        # Get the torrents uploaded since last check.
+        seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple()))
+
+        # never ask for less than 15 minutes' worth of data
+        if seconds_since_last_update < 15*60:
+            seconds_since_last_update = 15*60
+
+        # Set maximum to 24 hours of "RSS" data search, older things will need to be done through backlog
+        if seconds_since_last_update > 24*60*60:
+            logger.log(u"The last known successful \"RSS\" update on the BTN API was more than 24 hours ago (%i hours to be precise), only trying to fetch the last 24 hours!" %(int(seconds_since_last_update)//(60*60)), logger.WARNING)
+            seconds_since_last_update = 24*60*60
+
+        age_string = "<=%i" % seconds_since_last_update  
+        search_params={'age': age_string}
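+        # e.g. search_params ends up as {'age': '<=900'} when only the
+        # minimum of 15 minutes (900 seconds) has elapsed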
+
+        data = self.provider._doSearch(search_params)
+       
+        return data
+
+    def _parseItem(self, item):
+        (title, url) = self.provider._get_title_and_url(item)
+        
+        if not title or not url:
+            logger.log(u"The result returned from the BTN regular search is incomplete, this result is unusable", logger.ERROR)
+            return
+        logger.log(u"Adding item from regular BTN search to cache: " + title, logger.DEBUG)
+
+        self._addCacheEntry(title, url)
+
+    def _checkAuth(self, data):
+        return self.provider.checkAuthFromData(data)
+
+provider = BTNProvider()
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
index c93ed52900a6bc203f5505a0d658508fb223875c..34b833fd0052aee60e049aaf80e5f998a1af5033 100644
--- a/sickbeard/providers/newzbin.py
+++ b/sickbeard/providers/newzbin.py
@@ -115,8 +115,8 @@ class NewzbinProvider(generic.NZBProvider):
 
     def _is_SDTV(self, attrs):
 
-        # Video Fmt: (XviD, DivX, x264 or H.264), NOT 720p, NOT 1080p, NOT 1080i
-        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'x264' in attrs['Video Fmt'] or 'H.264' in attrs['Video Fmt']) \
+        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
+        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
                             and ('720p' not in attrs['Video Fmt']) \
                             and ('1080p' not in attrs['Video Fmt']) \
                             and ('1080i' not in attrs['Video Fmt'])
@@ -131,8 +131,8 @@ class NewzbinProvider(generic.NZBProvider):
 
     def _is_SDDVD(self, attrs):
 
-        # Video Fmt: (XviD, DivX, x264 or H.264), NOT 720p, NOT 1080p, NOT 1080i
-        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'x264' in attrs['Video Fmt'] or 'H.264' in attrs['Video Fmt']) \
+        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
+        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
                             and ('720p' not in attrs['Video Fmt']) \
                             and ('1080p' not in attrs['Video Fmt']) \
                             and ('1080i' not in attrs['Video Fmt'])
@@ -146,8 +146,8 @@ class NewzbinProvider(generic.NZBProvider):
         return video_fmt and source and subs
 
     def _is_HDTV(self, attrs):
-        # Video Fmt: x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('x264' in attrs['Video Fmt']) \
+        # Video Fmt: H.264/x264, 720p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
                             and ('720p' in attrs['Video Fmt'])
 
         # Source: TV Cap or HDTV or (None)
@@ -160,12 +160,12 @@ class NewzbinProvider(generic.NZBProvider):
 
     def _is_WEBDL(self, attrs):
 
-        # Video Fmt: x264, H.264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('x264' in attrs['Video Fmt'] or 'H.264' in attrs['Video Fmt']) \
+        # Video Fmt: H.264/x264, 720p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
                             and ('720p' in attrs['Video Fmt'])
 
-        # Source: Web-DL
-        source = 'Source' in attrs and 'Web-dl' in attrs['Source']
+        # Source: WEB-DL
+        source = 'Source' in attrs and 'WEB-DL' in attrs['Source']
 
         # Subtitles: (None)
         subs = 'Subtitles' not in attrs
@@ -174,8 +174,8 @@ class NewzbinProvider(generic.NZBProvider):
 
     def _is_720pBluRay(self, attrs):
 
-        # Video Fmt: x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('x264' in attrs['Video Fmt']) \
+        # Video Fmt: H.264/x264, 720p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
                             and ('720p' in attrs['Video Fmt'])
 
         # Source: Blu-ray or HD-DVD
@@ -185,8 +185,8 @@ class NewzbinProvider(generic.NZBProvider):
 
     def _is_1080pBluRay(self, attrs):
 
-        # Video Fmt: x264, 1080p
-        video_fmt = 'Video Fmt' in attrs and ('x264' in attrs['Video Fmt']) \
+        # Video Fmt: H.264/x264, 1080p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
                             and ('1080p' in attrs['Video Fmt'])
 
         # Source: Blu-ray or HD-DVD
diff --git a/sickbeard/search.py b/sickbeard/search.py
index 52b1e12352a27ab00baf5dac25b8cf2c44f101f5..6f50ec39c44237b8fdede09bfba6bb4b9392a251 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -33,9 +33,11 @@ from sickbeard import notifiers
 from sickbeard import nzbSplitter
 from sickbeard import ui
 from sickbeard import encodingKludge as ek
-from sickbeard.exceptions import ex
 from sickbeard import providers
 
+from sickbeard.exceptions import ex
+from sickbeard.providers.generic import GenericProvider
+
 def _downloadResult(result):
     """
     Downloads a result to the appropriate black hole folder.
@@ -394,25 +396,41 @@ def findSeason(show, season):
             logger.log(u"No eps from this season are wanted at this quality, ignoring the result of "+bestSeasonNZB.name, logger.DEBUG)
 
         else:
-
-            logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
-
-            # if not, break it apart and add them as the lowest priority results
-            individualResults = nzbSplitter.splitResult(bestSeasonNZB)
-
-            individualResults = filter(lambda x:  show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show), individualResults)
-
-            for curResult in individualResults:
-                if len(curResult.episodes) == 1:
-                    epNum = curResult.episodes[0].episode
-                elif len(curResult.episodes) > 1:
-                    epNum = MULTI_EP_RESULT
-
+            
+            if bestSeasonNZB.provider.providerType == GenericProvider.NZB:
+                logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
+                
+                # if not, break it apart and add them as the lowest priority results
+                individualResults = nzbSplitter.splitResult(bestSeasonNZB)
+
+                individualResults = filter(lambda x:  show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show), individualResults)
+
+                for curResult in individualResults:
+                    if len(curResult.episodes) == 1:
+                        epNum = curResult.episodes[0].episode
+                    elif len(curResult.episodes) > 1:
+                        epNum = MULTI_EP_RESULT
+
+                    if epNum in foundResults:
+                        foundResults[epNum].append(curResult)
+                    else:
+                        foundResults[epNum] = [curResult]
+
+            # If this is a torrent, all we can do is leech the entire torrent; the user will have to select which eps not to download in their torrent client
+            else:
+                
+                # Season result from BTN must be a full-season torrent, creating multi-ep result for it.
+                logger.log(u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
+                epObjs = []
+                for curEpNum in allEps:
+                    epObjs.append(show.getEpisode(season, curEpNum))
+                bestSeasonNZB.episodes = epObjs
+
+                epNum = MULTI_EP_RESULT
                 if epNum in foundResults:
-                    foundResults[epNum].append(curResult)
+                    foundResults[epNum].append(bestSeasonNZB)
                 else:
-                    foundResults[epNum] = [curResult]
-
+                    foundResults[epNum] = [bestSeasonNZB]
 
     # go through multi-ep results and see if we really want them or not, get rid of the rest
     multiResults = {}
@@ -455,7 +473,7 @@ def findSeason(show, season):
 
             logger.log(u"Multi-ep check result is multiNeededEps: "+str(multiNeededEps)+", multiNotNeededEps: "+str(multiNotNeededEps), logger.DEBUG)
 
-            if not neededEps:
+            if not multiNeededEps:
                 logger.log(u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result", logger.DEBUG)
                 continue
 
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 3d43a3d3bf810fdf37d81027476981714257cab8..81ef61a9b87ef0e6238e02c061a6c1221e67e9a8 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -227,7 +227,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
                 highestBestQuality = 0
 
             # if we need a better one then say yes
-            if (curStatus in (common.DOWNLOADED, common.SNATCHED) and curQuality < highestBestQuality) or curStatus == common.WANTED:
+            if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER) and curQuality < highestBestQuality) or curStatus == common.WANTED:
                 wantSeason = True
                 break
 
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index bcf2b45e1c272c37493978c6fbb182a3c0f5d30f..af0875390a33f01edebadf33618063cc86d84c49 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -82,6 +82,10 @@ class TVShow(object):
         self.saveToDB()
 
     def _getLocation(self):
+        # no dir check needed if missing show dirs are created during post-processing
+        if sickbeard.CREATE_MISSING_SHOW_DIRS:
+            return self._location
+        
         if ek.ek(os.path.isdir, self._location):
             return self._location
         else:
@@ -94,7 +98,8 @@ class TVShow(object):
 
     def _setLocation(self, newLocation):
         logger.log(u"Setter sets location to " + newLocation, logger.DEBUG)
-        if ek.ek(os.path.isdir, newLocation):
+        # Don't validate dir if user wants to add shows without creating a dir
+        if sickbeard.ADD_SHOWS_WO_DIR or ek.ek(os.path.isdir, newLocation):
             self._location = newLocation
             self._isDirGood = True
         else:
@@ -150,7 +155,7 @@ class TVShow(object):
 
         return result
 
-    def writeMetadata(self):
+    def writeMetadata(self, show_only=False):
 
         if not ek.ek(os.path.isdir, self._location):
             logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation")
@@ -159,8 +164,9 @@ class TVShow(object):
         self.getImages()
 
         self.writeShowNFO()
-        self.writeEpisodeNFOs()
-
+        
+        if not show_only:
+            self.writeEpisodeNFOs()
 
     def writeEpisodeNFOs (self):
 
@@ -688,8 +694,8 @@ class TVShow(object):
 
     def refreshDir(self):
 
-        # make sure the show dir is where we think it is
-        if not ek.ek(os.path.isdir, self._location):
+        # make sure the show dir is where we think it is unless dirs are created on the fly
+        if not ek.ek(os.path.isdir, self._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
             return False
 
         # load from dir
@@ -739,7 +745,7 @@ class TVShow(object):
         logger.log(str(self.tvdbid) + ": Loading all episodes with a location from the database")
 
         myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
+        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != '' ORDER BY episode ASC", [self.tvdbid])
 
         # build list of locations
         fileLocations = {}
@@ -886,7 +892,7 @@ class TVShow(object):
         curStatus, curQuality = Quality.splitCompositeStatus(epStatus)
 
         # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have
-        if curStatus in Quality.SNATCHED + Quality.DOWNLOADED and quality in bestQualities and quality > curQuality:
+        if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER and quality in bestQualities and quality > curQuality:
             logger.log(u"We already have this ep but the new one is better quality, saying yes", logger.DEBUG)
             return True
 
@@ -1142,7 +1148,8 @@ class TVEpisode(object):
         #early conversion to int so that episode doesn't get marked dirty
         self.tvdbid = int(myEp["id"])
         
-        if not ek.ek(os.path.isdir, self.show._location):
+        #don't update show status if show dir is missing, unless missing show dirs are created during post-processing
+        if not ek.ek(os.path.isdir, self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
             logger.log(u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid")
             return
 
diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py
index 6cfdbe537311dde64ad9eafaca601e272fdc7165..f9edff143763729aabdb3e2794dd3c4ac79c531b 100644
--- a/sickbeard/versionChecker.py
+++ b/sickbeard/versionChecker.py
@@ -330,7 +330,9 @@ class GitUpdateManager(UpdateManager):
             message = "or else you're ahead of master"
 
         elif self._num_commits_behind > 0:
-            message = "you're "+str(self._num_commits_behind)+' commits behind'
+            message = "you're %d commit" % self._num_commits_behind
+            if self._num_commits_behind > 1: message += 's'
+            message += ' behind'
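+            # e.g. "you're 1 commit behind" or "you're 3 commits behind"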
 
         else:
             return
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index 6fda8f10393329b0267a36d157655aaa6104b25e..77b0f8bcc7bc6b43a8f4896853f9a5c21080ad1b 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -104,6 +104,8 @@ class Api:
         else:# if debug was not set we wrap the "call_dispatcher" in a try block to assure a json output
             try:
                 outDict = _call_dispatcher(args, kwargs)
+            except cherrypy.HTTPRedirect: # cherrypy uses exceptions for redirecting; this can happen when requesting images, but it's fine, so re-raise it
+                raise
             except Exception, e: # real internal error oohhh nooo :(
                 logger.log(u"API :: " + ex(e), logger.ERROR)
                 errorData = {"error_msg": ex(e),
@@ -158,9 +160,13 @@ class Api:
     def _out_as_json(self, dict):
         """ set cherrypy response to json """
         response = cherrypy.response
+        request = cherrypy.request
         response.headers['Content-Type'] = 'application/json;charset=UTF-8'
         try:
             out = json.dumps(dict, indent=self.intent, sort_keys=True)
+            callback = request.params.get('callback') or request.params.get('jsonp')
+            if callback != None:
+                out = callback + '(' + out + ');' # wrap with JSONP call if requested
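+                # e.g. a request with ?callback=handleData yields
+                # "handleData({...});" (the wrapper name is whatever the caller sent)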
         except Exception, e: # if we fail to generate the output fake a error
             logger.log(u"API :: " + traceback.format_exc(), logger.DEBUG)
             out = '{"result":"' + result_type_map[RESULT_ERROR] + '", "message": "error while composing output: "' + ex(e) + '"}'
@@ -1793,12 +1799,17 @@ class CMD_ShowAddNew(ApiCall):
 
         # moved the logic check to the end in an attempt to eliminate empty directory being created from previous errors
         showPath = ek.ek(os.path.join, self.location, helpers.sanitizeFileName(tvdbName))
-        dir_exists = helpers.makeDir(showPath)
-        if not dir_exists:
-            logger.log(u"API :: Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
-            return _responds(RESULT_FAILURE, {"path": showPath}, "Unable to create the folder " + showPath + ", can't add the show")
+
+        # don't create show dir if config says not to
+        if sickbeard.ADD_SHOWS_WO_DIR:
+            logger.log(u"Skipping initial creation of " + showPath + " due to config.ini setting")
         else:
-            helpers.chmodAsParent(showPath)
+            dir_exists = helpers.makeDir(showPath)
+            if not dir_exists:
+                logger.log(u"API :: Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
+                return _responds(RESULT_FAILURE, {"path": showPath}, "Unable to create the folder " + showPath + ", can't add the show")
+            else:
+                helpers.chmodAsParent(showPath)
 
         sickbeard.showQueueScheduler.action.addShow(int(self.tvdbid), showPath, newStatus, newQuality, int(self.season_folder), self.lang) #@UndefinedVariable
         return _responds(RESULT_SUCCESS, {"name": tvdbName}, tvdbName + " has been queued to be added")
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
old mode 100755
new mode 100644
index 6d39b1e81008274341c555663fd8f218225bcaf0..6ee8ae88ddff5c87276d47a366f932bb6889dfe5
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -1092,7 +1092,7 @@ class ConfigProviders:
     def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None,
                       nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string=None,
                       tvtorrents_digest=None, tvtorrents_hash=None,
- 					  btn_user_id=None, btn_auth_token=None, btn_passkey=None, btn_authkey=None,
+ 					  btn_api_key=None,
                       newzbin_username=None, newzbin_password=None,
                       provider_order=None):
 
@@ -1165,10 +1165,7 @@ class ConfigProviders:
         sickbeard.TVTORRENTS_DIGEST = tvtorrents_digest.strip()
         sickbeard.TVTORRENTS_HASH = tvtorrents_hash.strip()
 
-        sickbeard.BTN_USER_ID = btn_user_id.strip()
-        sickbeard.BTN_AUTH_TOKEN = btn_auth_token.strip()
-        sickbeard.BTN_PASSKEY = btn_passkey.strip()
-        sickbeard.BTN_AUTHKEY = btn_authkey.strip()
+        sickbeard.BTN_API_KEY = btn_api_key.strip()
 
         sickbeard.NZBSRUS_UID = nzbs_r_us_uid.strip()
         sickbeard.NZBSRUS_HASH = nzbs_r_us_hash.strip()
@@ -1787,16 +1784,19 @@ class NewHomeAddShows:
         # blanket policy - if the dir exists you should have used "add existing show" numbnuts
         if ek.ek(os.path.isdir, show_dir) and not fullShowPath:
             ui.notifications.error("Unable to add show", "Folder "+show_dir+" exists already")
-            redirect('/home')
+            redirect('/home/addShows/existingShows')
         
-        # create the dir and make sure it worked
-        dir_exists = helpers.makeDir(show_dir)
-        if not dir_exists:
-            logger.log(u"Unable to create the folder "+show_dir+", can't add the show", logger.ERROR)
-            ui.notifications.error("Unable to add show", "Unable to create the folder "+show_dir+", can't add the show")
-            redirect("/home")
-        else:
-            helpers.chmodAsParent(show_dir)
+        # don't create show dir if config says not to
+        if sickbeard.ADD_SHOWS_WO_DIR:
+            logger.log(u"Skipping initial creation of "+show_dir+" due to config.ini setting")
+        else:
+            dir_exists = helpers.makeDir(show_dir)
+            if not dir_exists:
+                logger.log(u"Unable to create the folder "+show_dir+", can't add the show", logger.ERROR)
+                ui.notifications.error("Unable to add show", "Unable to create the folder "+show_dir+", can't add the show")
+                redirect("/home")
+            else:
+                helpers.chmodAsParent(show_dir)
 
         # prepare the inputs for passing along
         if seasonFolders == "on":
@@ -2234,7 +2234,7 @@ class Home:
         )
 
         sqlResults = myDB.select(
-            "SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season*1000+episode DESC",
+            "SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC",
             [showObj.tvdbid]
         )