diff --git a/.build/bower.json b/.build/bower.json
index 940c5f26fcfb19964f218d1765f772b598dd4261..bb396f088855eba598330d50a98c2d81cb4b0e51 100644
--- a/.build/bower.json
+++ b/.build/bower.json
@@ -18,6 +18,7 @@
     "jquery-form": "~3.46.0",
     "jquery-timeago": "~1.4.3",
     "jquery-tokeninput": "~1.7.0",
-    "bootstrap3-typeahead": "~3.1.1"
+    "bootstrap3-typeahead": "~3.1.1",
+    "underscore": "~1.8.3"
   }
 }
diff --git a/gui/slick/css/dark.css b/gui/slick/css/dark.css
index 4bf84ccd07e1438435010351179cc1c4ce13a69a..fe24eafcb4f4ab6bee444f3f6736bdee63af8fc8 100644
--- a/gui/slick/css/dark.css
+++ b/gui/slick/css/dark.css
@@ -490,7 +490,7 @@ td.col-checkbox {
 }
 
 /* =======================================================================
-comingEpisodes.mako
+schedule.mako
 ========================================================================== */
 
 h2.day, h2.network {
diff --git a/gui/slick/css/light.css b/gui/slick/css/light.css
index 9422ab4a7fac6c647c0dce49dd837a71244281d0..1fbaed69c00b76364e82ce543dfd0dc5b312dca9 100644
--- a/gui/slick/css/light.css
+++ b/gui/slick/css/light.css
@@ -73,7 +73,7 @@ tr.seasonheader {
 }
 
 /* =======================================================================
-comingEpisodes.mako
+schedule.mako
 ========================================================================== */
 
 table.cal-odd {
diff --git a/gui/slick/css/style.css b/gui/slick/css/style.css
index 6432b901a76acc77cf16ebec024415f45fe6246a..7f7bef1f1e5663a06e39ed3e769e61ab41102ac3 100644
--- a/gui/slick/css/style.css
+++ b/gui/slick/css/style.css
@@ -1342,7 +1342,7 @@ td.col-search {
 }
 
 /* =======================================================================
-comingEpisodes.mako
+schedule.mako
 ========================================================================== */
 
 .key {
@@ -1907,6 +1907,7 @@ span.quality {
     -webkit-border-radius: 4px;
     -moz-border-radius: 4px;
     border-radius: 4px;
+    white-space: nowrap;
 }
 
 span.any-hd {
@@ -2143,9 +2144,6 @@ input, textarea, select, .uneditable-input {
 }
 
 .navbar-default .navbar-nav > li.navbar-split > a:first-child {
-    border-width: 0 1px 0 0;
-    border-style: solid;
-    border-image: linear-gradient(to bottom, rgba(0, 0, 0, 0) 20%, #ddd, rgba(0, 0, 0, 0) 80%) 1 100%;
     padding-right: 8px;
 }
 
diff --git a/gui/slick/images/network/13th street.png b/gui/slick/images/network/13th street.png
deleted file mode 100644
index 300b9cca442369b59f7b7b7f804cd22fed89ec0b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/13th street.png and /dev/null differ
diff --git a/gui/slick/images/network/1live.png b/gui/slick/images/network/1live.png
deleted file mode 100644
index bc743facc70a9b0661c2368013cfb99e42b94651..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/1live.png and /dev/null differ
diff --git a/gui/slick/images/network/2be.png b/gui/slick/images/network/2be.png
deleted file mode 100644
index aad036a3413632220cd23d460596ddcce6e2246f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/2be.png and /dev/null differ
diff --git a/gui/slick/images/network/33.png b/gui/slick/images/network/33.png
deleted file mode 100644
index d205e754e0e2b89f0a43dffeda6887690ca5c864..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/33.png and /dev/null differ
diff --git a/gui/slick/images/network/3fm.png b/gui/slick/images/network/3fm.png
deleted file mode 100644
index fc6a9c7c099be470a49d721061f0e3028c6f6641..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/3fm.png and /dev/null differ
diff --git a/gui/slick/images/network/3sat hd.png b/gui/slick/images/network/3sat hd.png
deleted file mode 100644
index 026fbf8f0774529ef4d0a2f29f1d5614c050397d..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/3sat hd.png and /dev/null differ
diff --git a/gui/slick/images/network/4kidstv.png b/gui/slick/images/network/4kidstv.png
deleted file mode 100644
index 631c23e050af93b11f52e9145324f6deb705a73e..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/4kidstv.png and /dev/null differ
diff --git a/gui/slick/images/network/4sd.png b/gui/slick/images/network/4sd.png
deleted file mode 100644
index bd920c58d5c2e2e66639ab9556f687daff2205a6..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/4sd.png and /dev/null differ
diff --git a/gui/slick/images/network/7mate.png b/gui/slick/images/network/7mate.png
deleted file mode 100644
index c53a31a1289619121ecec44683b3097cb3563040..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/7mate.png and /dev/null differ
diff --git a/gui/slick/images/network/abc (australia).png b/gui/slick/images/network/abc (australia).png
deleted file mode 100644
index ed50dcaf717c1c2b9b4ed87678d499e9d37edfeb..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/abc (australia).png and /dev/null differ
diff --git a/gui/slick/images/network/abc news.png b/gui/slick/images/network/abc (ph).png
similarity index 100%
rename from gui/slick/images/network/abc news.png
rename to gui/slick/images/network/abc (ph).png
diff --git a/gui/slick/images/network/abc australia.png b/gui/slick/images/network/abc australia.png
deleted file mode 100644
index ed50dcaf717c1c2b9b4ed87678d499e9d37edfeb..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/abc australia.png and /dev/null differ
diff --git a/gui/slick/images/network/abc.png b/gui/slick/images/network/abc news 24.png
similarity index 100%
rename from gui/slick/images/network/abc.png
rename to gui/slick/images/network/abc news 24.png
diff --git a/gui/slick/images/network/abc tv australia.png b/gui/slick/images/network/abc tv australia.png
deleted file mode 100644
index ed50dcaf717c1c2b9b4ed87678d499e9d37edfeb..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/abc tv australia.png and /dev/null differ
diff --git a/gui/slick/images/network/abc tv.png b/gui/slick/images/network/abc tv.png
deleted file mode 100644
index 70a1281abb8b0cff9b27410cbccaf0e5dbb6c02a..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/abc tv.png and /dev/null differ
diff --git a/gui/slick/images/network/adult-swim.png b/gui/slick/images/network/adult-swim.png
deleted file mode 100644
index 1ae908396b74f0d2014a2ff399ec7b754b2be0d3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/adult-swim.png and /dev/null differ
diff --git a/gui/slick/images/network/adultswim.png b/gui/slick/images/network/adultswim.png
deleted file mode 100644
index 1ae908396b74f0d2014a2ff399ec7b754b2be0d3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/adultswim.png and /dev/null differ
diff --git a/gui/slick/images/network/allocine.png b/gui/slick/images/network/allocine.png
deleted file mode 100644
index f03bfe134a41405a3c8b050c7ec66ef5621a1d0f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/allocine.png and /dev/null differ
diff --git a/gui/slick/images/network/amazon prime instant video.png b/gui/slick/images/network/amazon prime instant video.png
deleted file mode 100644
index ffa4f382943802f311c6ee6f5f077289eee953a5..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/amazon prime instant video.png and /dev/null differ
diff --git a/gui/slick/images/network/america one.png b/gui/slick/images/network/america one.png
deleted file mode 100644
index c317e2cf96cab0f5e80cac7769b9d96a6d40afd2..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/america one.png and /dev/null differ
diff --git a/gui/slick/images/network/american heroes channel.png b/gui/slick/images/network/american heroes channel.png
deleted file mode 100644
index 9c95a0685f5f3c1564f4ece6a512d35c73824372..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/american heroes channel.png and /dev/null differ
diff --git a/gui/slick/images/network/animalplanet.png b/gui/slick/images/network/animalplanet.png
deleted file mode 100644
index 22cf4f3558dd60cdcd0f1b3562e8ba6959873780..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/animalplanet.png and /dev/null differ
diff --git a/gui/slick/images/network/anixe hd.png b/gui/slick/images/network/anixe hd.png
deleted file mode 100644
index 0eac84eaf14837f4cfa8b48096d66ef6473f820e..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/anixe hd.png and /dev/null differ
diff --git a/gui/slick/images/network/anixe sd.png b/gui/slick/images/network/anixe sd.png
deleted file mode 100644
index 5f3b0e2d39db56e73c263de998c041d21731a0c1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/anixe sd.png and /dev/null differ
diff --git a/gui/slick/images/network/aol.png b/gui/slick/images/network/aol.png
new file mode 100644
index 0000000000000000000000000000000000000000..df5709bc1b5a15fdbc9d97be9063524df64b7f4f
Binary files /dev/null and b/gui/slick/images/network/aol.png differ
diff --git a/gui/slick/images/network/ard.png b/gui/slick/images/network/ard.png
deleted file mode 100644
index 6b39a71435a8e6542778ec54ec3d8562e4d3ccb9..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/ard.png and /dev/null differ
diff --git a/gui/slick/images/network/arte hd.png b/gui/slick/images/network/arte hd.png
deleted file mode 100644
index 07115055e9cc42fcf11b39b2c5bd54212c2dd711..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/arte hd.png and /dev/null differ
diff --git a/gui/slick/images/network/at-x.png b/gui/slick/images/network/at-x.png
deleted file mode 100644
index ea2729882c475d0a62d86770c05babb70ebfddfc..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/at-x.png and /dev/null differ
diff --git a/gui/slick/images/network/azteca.png b/gui/slick/images/network/azteca.png
deleted file mode 100644
index 03f74b2d6fc609eb7ac4478f43fb7d9dc99bd6ca..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/azteca.png and /dev/null differ
diff --git a/gui/slick/images/network/bandai.png b/gui/slick/images/network/bandai channel.png
similarity index 100%
rename from gui/slick/images/network/bandai.png
rename to gui/slick/images/network/bandai channel.png
diff --git a/gui/slick/images/network/bandai visual.png b/gui/slick/images/network/bandai visual.png
deleted file mode 100644
index 55076320cd9d159212c23815b4f5e52674bd7df8..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/bandai visual.png and /dev/null differ
diff --git a/gui/slick/images/network/bbc entertainment.png b/gui/slick/images/network/bbc entertainment.png
deleted file mode 100644
index 28b28c584a984f7e9d31da96671f5b979ba91c70..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/bbc entertainment.png and /dev/null differ
diff --git a/gui/slick/images/network/bbc.png b/gui/slick/images/network/bbc.png
deleted file mode 100644
index 28b28c584a984f7e9d31da96671f5b979ba91c70..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/bbc.png and /dev/null differ
diff --git a/gui/slick/images/network/bio..png b/gui/slick/images/network/bio..png
deleted file mode 100644
index 23072d4feff4d7d528930a449fb8f0ccc0d04fad..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/bio..png and /dev/null differ
diff --git a/gui/slick/images/network/bio.png b/gui/slick/images/network/bio.png
deleted file mode 100644
index 23072d4feff4d7d528930a449fb8f0ccc0d04fad..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/bio.png and /dev/null differ
diff --git a/gui/slick/images/network/biography channel.png b/gui/slick/images/network/biography channel.png
deleted file mode 100644
index 23072d4feff4d7d528930a449fb8f0ccc0d04fad..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/biography channel.png and /dev/null differ
diff --git a/gui/slick/images/network/blip.png b/gui/slick/images/network/blip.png
new file mode 100644
index 0000000000000000000000000000000000000000..d1007ee21430965f9e5cb63e969ef627aa6cafed
Binary files /dev/null and b/gui/slick/images/network/blip.png differ
diff --git a/gui/slick/images/network/bloomberg.png b/gui/slick/images/network/bloomberg.png
deleted file mode 100644
index b91248a93b402f64b76318d9c7d0481089b975cb..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/bloomberg.png and /dev/null differ
diff --git a/gui/slick/images/network/bnn.png b/gui/slick/images/network/bnn (nl).png
similarity index 100%
rename from gui/slick/images/network/bnn.png
rename to gui/slick/images/network/bnn (nl).png
diff --git a/gui/slick/images/network/br alpha.png b/gui/slick/images/network/br alpha.png
deleted file mode 100644
index 2f7f78a469ebfd13b3130d894cbc3fb3c0b41b95..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/br alpha.png and /dev/null differ
diff --git a/gui/slick/images/network/bravo.png b/gui/slick/images/network/bravo (ca).png
similarity index 100%
rename from gui/slick/images/network/bravo.png
rename to gui/slick/images/network/bravo (ca).png
diff --git a/gui/slick/images/network/kabel eins classics.png b/gui/slick/images/network/bravo (uk).png
similarity index 66%
rename from gui/slick/images/network/kabel eins classics.png
rename to gui/slick/images/network/bravo (uk).png
index 6e366a2b7dec793ebe7a5ab39c1f90b9cf92ee60..41329084916114564fcac8d16c82d27c295108e9 100644
Binary files a/gui/slick/images/network/kabel eins classics.png and b/gui/slick/images/network/bravo (uk).png differ
diff --git a/gui/slick/images/network/c-span.png b/gui/slick/images/network/c-span.png
new file mode 100644
index 0000000000000000000000000000000000000000..d863b84031c19705e5d2274064fce6de355ef08f
Binary files /dev/null and b/gui/slick/images/network/c-span.png differ
diff --git a/gui/slick/images/network/canvas.png b/gui/slick/images/network/canvas/ketnet.png
similarity index 100%
rename from gui/slick/images/network/canvas.png
rename to gui/slick/images/network/canvas/ketnet.png
diff --git a/gui/slick/images/network/cartoonnetwork.png b/gui/slick/images/network/cartoon network australia.png
similarity index 100%
rename from gui/slick/images/network/cartoonnetwork.png
rename to gui/slick/images/network/cartoon network australia.png
diff --git a/gui/slick/images/network/cbc.png b/gui/slick/images/network/cbc (jp).png
similarity index 100%
rename from gui/slick/images/network/cbc.png
rename to gui/slick/images/network/cbc (jp).png
diff --git a/gui/slick/images/network/centric.png b/gui/slick/images/network/centric.png
new file mode 100644
index 0000000000000000000000000000000000000000..b433689be2a28f4a95d2c9d2c78e4547c786af4a
Binary files /dev/null and b/gui/slick/images/network/centric.png differ
diff --git a/gui/slick/images/network/channel 101.jpg b/gui/slick/images/network/channel 101.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..500e9d2764aa6a8e2653cfa7a60392a5272a8e4b
Binary files /dev/null and b/gui/slick/images/network/channel 101.jpg differ
diff --git a/gui/slick/images/network/channel ten.png b/gui/slick/images/network/channel ten.png
deleted file mode 100644
index 382e76f04b3bff04518db2730e1258f3bf634ca9..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/channel ten.png and /dev/null differ
diff --git a/gui/slick/images/network/chiba tv.png b/gui/slick/images/network/chiba tv.png
deleted file mode 100644
index f98e3c03a185b51530f653393e980e16fd21e328..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/chiba tv.png and /dev/null differ
diff --git a/gui/slick/images/network/classic arts showcase.png b/gui/slick/images/network/classic arts showcase.png
new file mode 100644
index 0000000000000000000000000000000000000000..76fc344a3547015091871711dab0663030823f9d
Binary files /dev/null and b/gui/slick/images/network/classic arts showcase.png differ
diff --git a/gui/slick/images/network/club rtl.png b/gui/slick/images/network/club rtl.png
new file mode 100644
index 0000000000000000000000000000000000000000..806255be93d5264c1d077f2377b6ca6781dfc244
Binary files /dev/null and b/gui/slick/images/network/club rtl.png differ
diff --git a/gui/slick/images/network/cnni.png b/gui/slick/images/network/cnni.png
new file mode 100644
index 0000000000000000000000000000000000000000..4bea5617ee02d07beb5fdb73f8421fbc6ed2ad96
Binary files /dev/null and b/gui/slick/images/network/cnni.png differ
diff --git a/gui/slick/images/network/comedy central family.png b/gui/slick/images/network/comedy central (uk).png
similarity index 100%
rename from gui/slick/images/network/comedy central family.png
rename to gui/slick/images/network/comedy central (uk).png
diff --git a/gui/slick/images/network/comedy central hd.png b/gui/slick/images/network/comedy central (us).png
similarity index 100%
rename from gui/slick/images/network/comedy central hd.png
rename to gui/slick/images/network/comedy central (us).png
diff --git a/gui/slick/images/network/comedycentral.png b/gui/slick/images/network/comedycentral.png
deleted file mode 100644
index a3903ebce6312676106f56f1403b04fcd1bcf3b8..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/comedycentral.png and /dev/null differ
diff --git a/gui/slick/images/network/cottage life.png b/gui/slick/images/network/cottage life.png
deleted file mode 100644
index ffe793dde59b9a937e3bd89b6a60fd3354c4721d..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/cottage life.png and /dev/null differ
diff --git a/gui/slick/images/network/crime and investigation network.png b/gui/slick/images/network/crime and investigation network.png
deleted file mode 100644
index b7394a592caaf2b62311f491d6c55af8a6bd410c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/crime and investigation network.png and /dev/null differ
diff --git a/gui/slick/images/network/cw.png b/gui/slick/images/network/cw seed.png
similarity index 100%
rename from gui/slick/images/network/cw.png
rename to gui/slick/images/network/cw seed.png
diff --git a/gui/slick/images/network/dailymotion.jpg b/gui/slick/images/network/dailymotion.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0475f764da8809e535d328f42fb2b32819d5a76a
Binary files /dev/null and b/gui/slick/images/network/dailymotion.jpg differ
diff --git a/gui/slick/images/network/das erste hd.png b/gui/slick/images/network/das erste hd.png
deleted file mode 100644
index a55c2e822269b6c1d0bb394902e53e58c4ac2f38..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/das erste hd.png and /dev/null differ
diff --git a/gui/slick/images/network/das vierte.png b/gui/slick/images/network/das vierte.png
deleted file mode 100644
index 1c007fef7bcfb1ff3a0f6bc3b1cca3ed8b9b8bb5..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/das vierte.png and /dev/null differ
diff --git a/gui/slick/images/network/deluxe music.png b/gui/slick/images/network/deluxe music.png
deleted file mode 100644
index def9e4a5aa1de81371ad9f7f39c15da6d3817994..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/deluxe music.png and /dev/null differ
diff --git a/gui/slick/images/network/discoverychannel.png b/gui/slick/images/network/discovery channel (asia).png
similarity index 100%
rename from gui/slick/images/network/discoverychannel.png
rename to gui/slick/images/network/discovery channel (asia).png
diff --git a/gui/slick/images/network/discovery channel (australia).png b/gui/slick/images/network/discovery channel (australia).png
new file mode 100644
index 0000000000000000000000000000000000000000..dc9b22d8fb84c08a438b29ed319e4bde16b93a94
Binary files /dev/null and b/gui/slick/images/network/discovery channel (australia).png differ
diff --git a/gui/slick/images/network/discovery real time.png b/gui/slick/images/network/discovery real time.png
deleted file mode 100644
index 35ec2bd60a951ed9f211c862a9a88a6bfd4c1ad6..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/discovery real time.png and /dev/null differ
diff --git a/gui/slick/images/network/discovery turbo uk.png b/gui/slick/images/network/discovery turbo uk.png
new file mode 100644
index 0000000000000000000000000000000000000000..944a5a6fe6aaecebd7ab37652d8763b30d34508d
Binary files /dev/null and b/gui/slick/images/network/discovery turbo uk.png differ
diff --git a/gui/slick/images/network/discovery turbo.png b/gui/slick/images/network/discovery turbo.png
new file mode 100644
index 0000000000000000000000000000000000000000..944a5a6fe6aaecebd7ab37652d8763b30d34508d
Binary files /dev/null and b/gui/slick/images/network/discovery turbo.png differ
diff --git a/gui/slick/images/network/disney channel.png b/gui/slick/images/network/disney channel (germany).png
similarity index 100%
rename from gui/slick/images/network/disney channel.png
rename to gui/slick/images/network/disney channel (germany).png
diff --git a/gui/slick/images/network/disney.png b/gui/slick/images/network/disney channel (us).png
similarity index 100%
rename from gui/slick/images/network/disney.png
rename to gui/slick/images/network/disney channel (us).png
diff --git a/gui/slick/images/network/disney junior (uk).png b/gui/slick/images/network/disney junior (uk).png
new file mode 100644
index 0000000000000000000000000000000000000000..4aea6976f4acbf94c235bb0f8edaeb52c7fdcff6
Binary files /dev/null and b/gui/slick/images/network/disney junior (uk).png differ
diff --git a/gui/slick/images/network/disneychannel.png b/gui/slick/images/network/disneychannel.png
deleted file mode 100644
index df415d27b8fe50c05f2bc5d28b83d71ead5c5c4c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/disneychannel.png and /dev/null differ
diff --git a/gui/slick/images/network/diy network canada.png b/gui/slick/images/network/diy network canada.png
new file mode 100644
index 0000000000000000000000000000000000000000..dbb7fcb5d252658190d38d7ade130bc0aafdea2c
Binary files /dev/null and b/gui/slick/images/network/diy network canada.png differ
diff --git a/gui/slick/images/network/dmax.png b/gui/slick/images/network/dmax (de).png
similarity index 100%
rename from gui/slick/images/network/dmax.png
rename to gui/slick/images/network/dmax (de).png
diff --git a/gui/slick/images/network/dmax hd.png b/gui/slick/images/network/dmax hd.png
deleted file mode 100644
index 69858af7b7661566a7e867588e3df9e5e74f0596..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/dmax hd.png and /dev/null differ
diff --git a/gui/slick/images/network/entertainment.png b/gui/slick/images/network/e! (ca).png
similarity index 100%
rename from gui/slick/images/network/entertainment.png
rename to gui/slick/images/network/e! (ca).png
diff --git a/gui/slick/images/network/eden.png b/gui/slick/images/network/eden.png
new file mode 100644
index 0000000000000000000000000000000000000000..16994484f7573b9f7d8377d09c044340fa377033
Binary files /dev/null and b/gui/slick/images/network/eden.png differ
diff --git a/gui/slick/images/network/een.png b/gui/slick/images/network/een.png
deleted file mode 100644
index 464c2701e85d4eb2a17d317e8f6fabb2833992a1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/een.png and /dev/null differ
diff --git a/gui/slick/images/network/einsfestival hd.png b/gui/slick/images/network/einsfestival hd.png
deleted file mode 100644
index d47a060d41bdbdfee4d477fcdf81bcf5a85ae75a..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/einsfestival hd.png and /dev/null differ
diff --git a/gui/slick/images/network/einslive.png b/gui/slick/images/network/einslive.png
deleted file mode 100644
index bc743facc70a9b0661c2368013cfb99e42b94651..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/einslive.png and /dev/null differ
diff --git a/gui/slick/images/network/endemol.png b/gui/slick/images/network/endemol.png
deleted file mode 100644
index 66e8b3143728bfb5926fcd31f01bad5f4e61e318..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/endemol.png and /dev/null differ
diff --git a/gui/slick/images/network/espn 2.png b/gui/slick/images/network/espn 2.png
deleted file mode 100644
index d91137773ecd4e83ee3f0b80d7206bb5feb5d1dc..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/espn 2.png and /dev/null differ
diff --git a/gui/slick/images/network/espn classic.png b/gui/slick/images/network/espn classic.png
deleted file mode 100644
index e450b79716ec885376a1b27b95afd13c98a93eff..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/espn classic.png and /dev/null differ
diff --git a/gui/slick/images/network/euronews.png b/gui/slick/images/network/euronews.png
deleted file mode 100644
index 2927ec36cae1ada5e94e55170789d687aed3fbd4..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/euronews.png and /dev/null differ
diff --git a/gui/slick/images/network/eurosport 1.png b/gui/slick/images/network/eurosport 1.png
deleted file mode 100644
index 0d18283b04e46191582668400a2c3684eae5b512..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/eurosport 1.png and /dev/null differ
diff --git a/gui/slick/images/network/eurosport 2.png b/gui/slick/images/network/eurosport 2.png
deleted file mode 100644
index 0d18283b04e46191582668400a2c3684eae5b512..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/eurosport 2.png and /dev/null differ
diff --git a/gui/slick/images/network/eurosport hd.png b/gui/slick/images/network/eurosport hd.png
deleted file mode 100644
index 0d18283b04e46191582668400a2c3684eae5b512..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/eurosport hd.png and /dev/null differ
diff --git a/gui/slick/images/network/fox.png b/gui/slick/images/network/fox (fi).png
similarity index 100%
rename from gui/slick/images/network/fox.png
rename to gui/slick/images/network/fox (fi).png
diff --git a/gui/slick/images/network/fox (it).png b/gui/slick/images/network/fox (it).png
new file mode 100644
index 0000000000000000000000000000000000000000..2626578519b6969b2eb3a4da2766044e98d06248
Binary files /dev/null and b/gui/slick/images/network/fox (it).png differ
diff --git a/gui/slick/images/network/fox (uk).png b/gui/slick/images/network/fox (uk).png
new file mode 100644
index 0000000000000000000000000000000000000000..2626578519b6969b2eb3a4da2766044e98d06248
Binary files /dev/null and b/gui/slick/images/network/fox (uk).png differ
diff --git a/gui/slick/images/network/fox television classics.png b/gui/slick/images/network/fox television classics.png
deleted file mode 100644
index e472d9ccc9ad69c0ffe4c9cd5a88c7c52e11d64d..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/fox television classics.png and /dev/null differ
diff --git a/gui/slick/images/network/funimation.png b/gui/slick/images/network/funimation.png
deleted file mode 100644
index c2398c91def791e7fb37328551fe34d2fd818dc6..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/funimation.png and /dev/null differ
diff --git a/gui/slick/images/network/fxnetworks.png b/gui/slick/images/network/fxnetworks.png
deleted file mode 100644
index 62f98632041d66906180e5f1cddab1bda6bd0ee1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/fxnetworks.png and /dev/null differ
diff --git a/gui/slick/images/network/g4_2.png b/gui/slick/images/network/g4 canada.png
similarity index 100%
rename from gui/slick/images/network/g4_2.png
rename to gui/slick/images/network/g4 canada.png
diff --git a/gui/slick/images/network/g4techtv canada.png b/gui/slick/images/network/g4techtv canada.png
deleted file mode 100644
index 7c5739273b12737429493565c851699633bad6d9..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/g4techtv canada.png and /dev/null differ
diff --git a/gui/slick/images/network/game show network.png b/gui/slick/images/network/game show network.png
deleted file mode 100644
index b15177119b2377665112400305744c9644d4ef09..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/game show network.png and /dev/null differ
diff --git a/gui/slick/images/network/glitz.png b/gui/slick/images/network/glitz.png
deleted file mode 100644
index d0bd1f98ede6687eaf136b7ca6e13914a63226e4..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/glitz.png and /dev/null differ
diff --git a/gui/slick/images/network/global tv.png b/gui/slick/images/network/global tv.png
deleted file mode 100644
index 24d90e2c9ba3a9ca6b1a0107be7767ad73b3ad44..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/global tv.png and /dev/null differ
diff --git a/gui/slick/images/network/hbo canada.png b/gui/slick/images/network/hbo canada.png
new file mode 100644
index 0000000000000000000000000000000000000000..271ee7cbcbacc1f4978135093feb57e0170a6bd3
Binary files /dev/null and b/gui/slick/images/network/hbo canada.png differ
diff --git a/gui/slick/images/network/hbo europe.png b/gui/slick/images/network/hbo europe.png
new file mode 100644
index 0000000000000000000000000000000000000000..271ee7cbcbacc1f4978135093feb57e0170a6bd3
Binary files /dev/null and b/gui/slick/images/network/hbo europe.png differ
diff --git a/gui/slick/images/network/hbo latin america.png b/gui/slick/images/network/hbo latin america.png
index 2d4cadda4c2022c725e882d1943432bc85e29424..271ee7cbcbacc1f4978135093feb57e0170a6bd3 100644
Binary files a/gui/slick/images/network/hbo latin america.png and b/gui/slick/images/network/hbo latin america.png differ
diff --git a/gui/slick/images/network/hbo nordic.png b/gui/slick/images/network/hbo nordic.png
new file mode 100644
index 0000000000000000000000000000000000000000..271ee7cbcbacc1f4978135093feb57e0170a6bd3
Binary files /dev/null and b/gui/slick/images/network/hbo nordic.png differ
diff --git a/gui/slick/images/network/heimatkanal.png b/gui/slick/images/network/heimatkanal.png
deleted file mode 100644
index 3b42cfe25878956fe528f7f805dfdcf25595907e..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/heimatkanal.png and /dev/null differ
diff --git a/gui/slick/images/network/history television.png b/gui/slick/images/network/history (ca).png
similarity index 100%
rename from gui/slick/images/network/history television.png
rename to gui/slick/images/network/history (ca).png
diff --git a/gui/slick/images/network/historychannel.png b/gui/slick/images/network/historychannel.png
deleted file mode 100644
index a2d4b25abb357db6a9bc4e443a155987db5c73ec..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/historychannel.png and /dev/null differ
diff --git a/gui/slick/images/network/hr.png b/gui/slick/images/network/hr.png
deleted file mode 100644
index 44e2d1d02289cf7a2f4781a9b7321b0e976deb75..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/hr.png and /dev/null differ
diff --git a/gui/slick/images/network/ikon.png b/gui/slick/images/network/ikon.png
new file mode 100644
index 0000000000000000000000000000000000000000..fc9065f567b3c2dba0488a7ffab43cac91744c7e
Binary files /dev/null and b/gui/slick/images/network/ikon.png differ
diff --git a/gui/slick/images/network/im1.png b/gui/slick/images/network/im1.png
deleted file mode 100644
index c67b9856a159cd9b9d3558b748a4705bdb9a2df3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/im1.png and /dev/null differ
diff --git a/gui/slick/images/network/internet.png b/gui/slick/images/network/internet.png
deleted file mode 100644
index 1f1a749771116820a8cae63507aa160cc04b1106..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/internet.png and /dev/null differ
diff --git a/gui/slick/images/network/joiz.png b/gui/slick/images/network/joiz.png
deleted file mode 100644
index 696d240dfe90f8384e963e08575882531a8b6802..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/joiz.png and /dev/null differ
diff --git a/gui/slick/images/network/junior.png b/gui/slick/images/network/junior.png
deleted file mode 100644
index f3e496c38255e0203519a512fdfbd9034bb2c42c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/junior.png and /dev/null differ
diff --git a/gui/slick/images/network/kabel eins hd.png b/gui/slick/images/network/kabel eins hd.png
deleted file mode 100644
index b03818c14879cd998d5cb40a2ca0458c8a72c8fb..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/kabel eins hd.png and /dev/null differ
diff --git a/gui/slick/images/network/kabel eins.png b/gui/slick/images/network/kabel eins.png
deleted file mode 100644
index f876918baa137983cac03020dcb2f2069b38b703..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/kabel eins.png and /dev/null differ
diff --git a/gui/slick/images/network/kanal5.png b/gui/slick/images/network/kanal5.png
deleted file mode 100644
index 389e6372b45738a2e52e2c85ba73746f0b0d2f96..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/kanal5.png and /dev/null differ
diff --git a/gui/slick/images/network/ketnet.png b/gui/slick/images/network/ketnet.png
deleted file mode 100644
index 2b0f206f738e4af737dc8cd4d2063cace6adf0c1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/ketnet.png and /dev/null differ
diff --git a/gui/slick/images/network/kika hd.png b/gui/slick/images/network/kika hd.png
deleted file mode 100644
index 4cfc89f93b21c8530b737e0828de52bb75ab4d5e..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/kika hd.png and /dev/null differ
diff --git a/gui/slick/images/network/la deux.png b/gui/slick/images/network/la deux.png
new file mode 100644
index 0000000000000000000000000000000000000000..ff99184cee47b76703306bead4d1faf550287aaf
Binary files /dev/null and b/gui/slick/images/network/la deux.png differ
diff --git a/gui/slick/images/network/la trois.png b/gui/slick/images/network/la trois.png
new file mode 100644
index 0000000000000000000000000000000000000000..a326311a6e250e368cb38cacca958b80e1225325
Binary files /dev/null and b/gui/slick/images/network/la trois.png differ
diff --git a/gui/slick/images/network/la une.png b/gui/slick/images/network/la une.png
new file mode 100644
index 0000000000000000000000000000000000000000..b9eb639946db7032ac8b9e5c1fce75d010be97f6
Binary files /dev/null and b/gui/slick/images/network/la une.png differ
diff --git a/gui/slick/images/network/la1.png b/gui/slick/images/network/la1.png
deleted file mode 100644
index 4afdab277683e3d39ae0896220326553c39256db..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/la1.png and /dev/null differ
diff --git a/gui/slick/images/network/la2.png b/gui/slick/images/network/la2.png
deleted file mode 100644
index a016440e43fb6b704729d61988f49084049781b7..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/la2.png and /dev/null differ
diff --git a/gui/slick/images/network/hulu presents.png b/gui/slick/images/network/lifetime uk.png
similarity index 62%
rename from gui/slick/images/network/hulu presents.png
rename to gui/slick/images/network/lifetime uk.png
index b42b43d6d55b5cd75eab5567afcd28f29af5231b..7586efb63365edf6d6097dd21c71a2f0e748eb1e 100644
Binary files a/gui/slick/images/network/hulu presents.png and b/gui/slick/images/network/lifetime uk.png differ
diff --git a/gui/slick/images/network/london weekend television (lwt).png b/gui/slick/images/network/london weekend television (lwt).png
deleted file mode 100644
index 72d73f152af799c1b867f489fda07552cfc24193..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/london weekend television (lwt).png and /dev/null differ
diff --git a/gui/slick/images/network/london weekend television.png b/gui/slick/images/network/london weekend television.png
deleted file mode 100644
index 72d73f152af799c1b867f489fda07552cfc24193..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/london weekend television.png and /dev/null differ
diff --git a/gui/slick/images/network/max.png b/gui/slick/images/network/max.png
deleted file mode 100644
index 32dc40916c31c4078752fc5134b298c82610d711..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/max.png and /dev/null differ
diff --git a/gui/slick/images/network/mgm.png b/gui/slick/images/network/mgm.png
deleted file mode 100644
index c2e63252edc22b4a1f1ffdba6d32c37ab51fbb79..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/mgm.png and /dev/null differ
diff --git a/gui/slick/images/network/motorvision tv.png b/gui/slick/images/network/motorvision tv.png
deleted file mode 100644
index 9d450741e398f0a09cd5c28a23d96f9c03865ce0..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/motorvision tv.png and /dev/null differ
diff --git a/gui/slick/images/network/motorvision.png b/gui/slick/images/network/motorvision.png
deleted file mode 100644
index 9d450741e398f0a09cd5c28a23d96f9c03865ce0..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/motorvision.png and /dev/null differ
diff --git a/gui/slick/images/network/movie network.png b/gui/slick/images/network/movie network.png
deleted file mode 100644
index 4e4f0a9f9f7ab4b728cb07af180c99f74731ac2b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/movie network.png and /dev/null differ
diff --git a/gui/slick/images/network/mtv network.png b/gui/slick/images/network/mtv network.png
deleted file mode 100644
index 30b1a899af1984fdc62673dd34e504a1367993b3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/mtv network.png and /dev/null differ
diff --git a/gui/slick/images/network/n24 hd.png b/gui/slick/images/network/n24 hd.png
deleted file mode 100644
index ed4db9c3dbd3317eddf825a09f877bd06713e007..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/n24 hd.png and /dev/null differ
diff --git a/gui/slick/images/network/n24.png b/gui/slick/images/network/n24.png
deleted file mode 100644
index 78b793286e0e1cf15859dd2c75a8247451d6e4a4..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/n24.png and /dev/null differ
diff --git a/gui/slick/images/network/nat geo.png b/gui/slick/images/network/national geographic (uk).png
similarity index 100%
rename from gui/slick/images/network/nat geo.png
rename to gui/slick/images/network/national geographic (uk).png
diff --git a/gui/slick/images/network/national geographic channel.png b/gui/slick/images/network/national geographic channel.png
deleted file mode 100644
index b4a5f277b5081829253bf9b6564293f0ebfcc820..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/national geographic channel.png and /dev/null differ
diff --git a/gui/slick/images/network/national geographic.png b/gui/slick/images/network/national geographic.png
deleted file mode 100644
index b4a5f277b5081829253bf9b6564293f0ebfcc820..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/national geographic.png and /dev/null differ
diff --git a/gui/slick/images/network/nbcu tv.png b/gui/slick/images/network/nbcu tv.png
deleted file mode 100644
index 4f812b15ce6f4e6fbacc2aa7fd96cf8b008bf71a..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nbcu tv.png and /dev/null differ
diff --git a/gui/slick/images/network/ncrv.png b/gui/slick/images/network/ncrv.png
new file mode 100644
index 0000000000000000000000000000000000000000..474a7fd10864096494d2fb4c7b62f30a973ecc0e
Binary files /dev/null and b/gui/slick/images/network/ncrv.png differ
diff --git a/gui/slick/images/network/ndr hd.png b/gui/slick/images/network/ndr hd.png
deleted file mode 100644
index ae33f45d85119257ca573e06483ebc2030b902dc..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/ndr hd.png and /dev/null differ
diff --git a/gui/slick/images/network/nederland 2.png b/gui/slick/images/network/nederland 2.png
deleted file mode 100644
index 89c7e583944925905de90f0c449aba211a7c1be4..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nederland 2.png and /dev/null differ
diff --git a/gui/slick/images/network/nederland 3.png b/gui/slick/images/network/nederland 3.png
deleted file mode 100644
index 6e86cd279ef724986b4e4c7ff8123164de613e3b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nederland 3.png and /dev/null differ
diff --git a/gui/slick/images/network/neo kika.png b/gui/slick/images/network/neo kika.png
deleted file mode 100644
index b3e336db2d73c8ba11c610f5b00503863540418f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/neo kika.png and /dev/null differ
diff --git a/gui/slick/images/network/net5.png b/gui/slick/images/network/net5.png
deleted file mode 100644
index 37e2ef5e689ffc2a629bc88738aae19abeb72021..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/net5.png and /dev/null differ
diff --git a/gui/slick/images/network/ngc.png b/gui/slick/images/network/ngc.png
deleted file mode 100644
index b4a5f277b5081829253bf9b6564293f0ebfcc820..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/ngc.png and /dev/null differ
diff --git a/gui/slick/images/network/nick comedy.png b/gui/slick/images/network/nick comedy.png
deleted file mode 100644
index 05d43cd0c6a3643873794c49a2db7e165d515b8b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nick comedy.png and /dev/null differ
diff --git a/gui/slick/images/network/nicktoons network.png b/gui/slick/images/network/nicktoons network.png
deleted file mode 100644
index 45a4d6755e1ccc14c009a89c02dc15e94dc6232b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nicktoons network.png and /dev/null differ
diff --git a/gui/slick/images/network/nippon television.png b/gui/slick/images/network/nippon television.png
deleted file mode 100644
index 95b18315b43fd2ca2e1c42f65d27f2e8332245db..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nippon television.png and /dev/null differ
diff --git a/gui/slick/images/network/no network.png b/gui/slick/images/network/no network.png
deleted file mode 100644
index 4398a439eec3f9a7b5a19f147e80a3d832e506d1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/no network.png and /dev/null differ
diff --git a/gui/slick/images/network/nonetwork.png b/gui/slick/images/network/nonetwork.png
deleted file mode 100644
index 4398a439eec3f9a7b5a19f147e80a3d832e506d1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nonetwork.png and /dev/null differ
diff --git a/gui/slick/images/network/nos.png b/gui/slick/images/network/nos.png
new file mode 100644
index 0000000000000000000000000000000000000000..60dc1f91b8af97a60316f664d9c9bb867f308272
Binary files /dev/null and b/gui/slick/images/network/nos.png differ
diff --git a/gui/slick/images/network/npo 1.png b/gui/slick/images/network/npo 1.png
deleted file mode 100644
index aa1c1545b2608767c3cb42be19c88d9f3e5424a1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/npo 1.png and /dev/null differ
diff --git a/gui/slick/images/network/npo 2.png b/gui/slick/images/network/npo 2.png
deleted file mode 100644
index 7dd8a4f94f93da289351a3eb7810e10ab2d64d1f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/npo 2.png and /dev/null differ
diff --git a/gui/slick/images/network/npo 3.png b/gui/slick/images/network/npo 3.png
deleted file mode 100644
index f30019315a747c4cf554b0459c85f6d8b5ce2d79..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/npo 3.png and /dev/null differ
diff --git a/gui/slick/images/network/nps.png b/gui/slick/images/network/nps.png
deleted file mode 100644
index 433c0b4c6719b20dba14d2afa2d47767eda439fd..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nps.png and /dev/null differ
diff --git a/gui/slick/images/network/nrk.png b/gui/slick/images/network/nrk.png
deleted file mode 100644
index a229a7b727d8b4dbea431f560b15fd13da437017..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/nrk.png and /dev/null differ
diff --git a/gui/slick/images/network/ntv.png b/gui/slick/images/network/ntv.png
deleted file mode 100644
index d6a461dfe4eaac31f1f307d4f406bd8efa9267a3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/ntv.png and /dev/null differ
diff --git a/gui/slick/images/network/omroep brabant.png b/gui/slick/images/network/omroep brabant.png
new file mode 100644
index 0000000000000000000000000000000000000000..2f0e98f04d725f783cea37027acb947b68301fe7
Binary files /dev/null and b/gui/slick/images/network/omroep brabant.png differ
diff --git a/gui/slick/images/network/orf.png b/gui/slick/images/network/orf 1.png
similarity index 100%
rename from gui/slick/images/network/orf.png
rename to gui/slick/images/network/orf 1.png
diff --git a/gui/slick/images/network/orf1 hd.png b/gui/slick/images/network/orf 2.png
similarity index 100%
rename from gui/slick/images/network/orf1 hd.png
rename to gui/slick/images/network/orf 2.png
diff --git a/gui/slick/images/network/orf eins.png b/gui/slick/images/network/orf eins.png
deleted file mode 100644
index 57e45bfe7cd4c2bd4bf7c98aaa96e3464c3c3c57..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/orf eins.png and /dev/null differ
diff --git a/gui/slick/images/network/nederland 1.png b/gui/slick/images/network/orf iii.png
similarity index 62%
rename from gui/slick/images/network/nederland 1.png
rename to gui/slick/images/network/orf iii.png
index 716025490b3422e0792fe82c569a268657b14156..2be5b6087da3269cc7a595a4704b35fb9b81e3c5 100644
Binary files a/gui/slick/images/network/nederland 1.png and b/gui/slick/images/network/orf iii.png differ
diff --git a/gui/slick/images/network/pay-per-view.png b/gui/slick/images/network/pay-per-view.png
deleted file mode 100644
index f093a7263e5763b2141ab64f5a50b03bb82f815b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/pay-per-view.png and /dev/null differ
diff --git a/gui/slick/images/network/phoenix hd.png b/gui/slick/images/network/phoenix hd.png
deleted file mode 100644
index af4502274b7183dcfc5ca499f5f5c78576226976..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/phoenix hd.png and /dev/null differ
diff --git a/gui/slick/images/network/plug rtl.png b/gui/slick/images/network/plug rtl.png
new file mode 100644
index 0000000000000000000000000000000000000000..80f886ba8983ee1ebf680c6a9d23c961781b87a8
Binary files /dev/null and b/gui/slick/images/network/plug rtl.png differ
diff --git a/gui/slick/images/network/powned.png b/gui/slick/images/network/powned.png
new file mode 100644
index 0000000000000000000000000000000000000000..41b2dafdce4384d21d88c2f98f2dd3db1b364176
Binary files /dev/null and b/gui/slick/images/network/powned.png differ
diff --git a/gui/slick/images/network/prime (be).png b/gui/slick/images/network/prime (be).png
new file mode 100644
index 0000000000000000000000000000000000000000..2ad936305b788d0e38561f44c6e98f825bcca78e
Binary files /dev/null and b/gui/slick/images/network/prime (be).png differ
diff --git a/gui/slick/images/network/private spice.png b/gui/slick/images/network/private spice.png
deleted file mode 100644
index b87783d0c25cdeffb18c37ed60bacbe8bd25d14b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/private spice.png and /dev/null differ
diff --git a/gui/slick/images/network/prosieben fun.png b/gui/slick/images/network/prosieben fun.png
deleted file mode 100644
index 2a6eea4882d27fa35e245407a12295e9cfbc134c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/prosieben fun.png and /dev/null differ
diff --git a/gui/slick/images/network/prosieben maxx.png b/gui/slick/images/network/prosieben maxx.png
deleted file mode 100644
index 2238ea9636e5e5dc7177a2556ac89b15b6bd4bcd..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/prosieben maxx.png and /dev/null differ
diff --git a/gui/slick/images/network/quest.png b/gui/slick/images/network/quest.png
deleted file mode 100644
index 99fbd093f7435cfce84a8b2461907f30a18509b6..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/quest.png and /dev/null differ
diff --git a/gui/slick/images/network/radio bremen tv.png b/gui/slick/images/network/radio bremen.png
similarity index 100%
rename from gui/slick/images/network/radio bremen tv.png
rename to gui/slick/images/network/radio bremen.png
diff --git a/gui/slick/images/network/radio west.png b/gui/slick/images/network/radio west.png
deleted file mode 100644
index 0d64d9c3c8a29afcbf384b12fba7e84acaaf4d20..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/radio west.png and /dev/null differ
diff --git a/gui/slick/images/network/radio-canada.png b/gui/slick/images/network/radio-canada.png
deleted file mode 100644
index 43602d994d238ea863229b7d210c5ce1d9267e68..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/radio-canada.png and /dev/null differ
diff --git a/gui/slick/images/network/regio tv.png b/gui/slick/images/network/regio tv.png
deleted file mode 100644
index 33c6888faee5d8f8b7ebeed6dd7406c031a62dfa..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/regio tv.png and /dev/null differ
diff --git a/gui/slick/images/network/rtbf.png b/gui/slick/images/network/rtbf.png
new file mode 100644
index 0000000000000000000000000000000000000000..99e2dcfc40eb97f2f94cce3ab72daff76a726f43
Binary files /dev/null and b/gui/slick/images/network/rtbf.png differ
diff --git a/gui/slick/images/network/rte one.png b/gui/slick/images/network/rte one.png
deleted file mode 100644
index c723ef93efbec0e22a49fbab34ef9d9ff2f9de2b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rte one.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl crime hd.png b/gui/slick/images/network/rtl crime hd.png
deleted file mode 100644
index e23126aa054ec6f28e3fbc5acd5695fa4f7b3b65..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl crime hd.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl crime.png b/gui/slick/images/network/rtl crime.png
deleted file mode 100644
index 8e269d6ca58b4d31a27978e23f140b8a033172e8..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl crime.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl hd.png b/gui/slick/images/network/rtl hd.png
deleted file mode 100644
index 97d5c33caceecb4c21415bce82eb99e04ecda026..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl hd.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl living hd.png b/gui/slick/images/network/rtl living hd.png
deleted file mode 100644
index 73a9c91a7f11d65736bd74b564a8b2f6837d83c0..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl living hd.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl living.png b/gui/slick/images/network/rtl living.png
deleted file mode 100644
index 9e2b8ffd70b42e194507b16e94314100a5f4db8b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl living.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl nitro.png b/gui/slick/images/network/rtl nitro.png
deleted file mode 100644
index 6f6e9bfa01826d26340ba85925de0b92ebaa7dd4..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl nitro.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl passion hd.png b/gui/slick/images/network/rtl passion hd.png
deleted file mode 100644
index 7ed59288c264bab2940a0e6d5cea7335233d8e23..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl passion hd.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl passion.png b/gui/slick/images/network/rtl passion.png
deleted file mode 100644
index 27a936e4f83a35cd8a9b786344ddf968f6df48eb..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl passion.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl tvi.jpg b/gui/slick/images/network/rtl tvi.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..42365a1329b25062e8265025f4c00ea3065aeb2e
Binary files /dev/null and b/gui/slick/images/network/rtl tvi.jpg differ
diff --git a/gui/slick/images/network/rtl2 hd.png b/gui/slick/images/network/rtl2 hd.png
deleted file mode 100644
index a02b0c6b4ec27385e9b595a27e81b2f0260f7df5..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl2 hd.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl2.png b/gui/slick/images/network/rtl2.png
deleted file mode 100644
index 473739785496b4e1bf238c94792944b0064e2a4a..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl2.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl4.png b/gui/slick/images/network/rtl4.png
deleted file mode 100644
index 7b2935301ef036d73aa93f70e520ee830e9affcb..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl4.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl5.png b/gui/slick/images/network/rtl5.png
deleted file mode 100644
index 174d8f81d89c2acc68cfbe8577020993ba9e45bf..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl5.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl7.png b/gui/slick/images/network/rtl7.png
deleted file mode 100644
index a53f92fad351923442e35c81a4071155f015c940..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl7.png and /dev/null differ
diff --git a/gui/slick/images/network/rtl8.png b/gui/slick/images/network/rtl8.png
deleted file mode 100644
index 6c6da6a721442f1b253574110a447ed76fa8713f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/rtl8.png and /dev/null differ
diff --git a/gui/slick/images/network/rvu.png b/gui/slick/images/network/rvu.png
new file mode 100644
index 0000000000000000000000000000000000000000..ab1dc6cb65bb04d1dc94f6789b5432cdbb3c0467
Binary files /dev/null and b/gui/slick/images/network/rvu.png differ
diff --git a/gui/slick/images/network/sat.1 emotions.png b/gui/slick/images/network/sat.1 emotions.png
deleted file mode 100644
index 29cfa92c7b39ff8df0a9348931f19ccdf0256fd5..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sat.1 emotions.png and /dev/null differ
diff --git a/gui/slick/images/network/sat.1 gold.png b/gui/slick/images/network/sat.1 gold.png
deleted file mode 100644
index 3cd28cc38f1b9a08cb9f5b8cc6141bac905e0a68..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sat.1 gold.png and /dev/null differ
diff --git a/gui/slick/images/network/sat.1 hd.png b/gui/slick/images/network/sat.1 hd.png
deleted file mode 100644
index a8f55511fc4c60e6b6dcae46e66ab5d079402275..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sat.1 hd.png and /dev/null differ
diff --git a/gui/slick/images/network/sat1.png b/gui/slick/images/network/sat1.png
deleted file mode 100644
index 5ad0fa37da9151f94b0375e582a3e5bc27a4890c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sat1.png and /dev/null differ
diff --git a/gui/slick/images/network/sbs 9.png b/gui/slick/images/network/sbs 9.png
deleted file mode 100644
index 42da25c1290703afe1d54d8ab098c60749f4cf57..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sbs 9.png and /dev/null differ
diff --git a/gui/slick/images/network/sbs australia.png b/gui/slick/images/network/sbs australia.png
deleted file mode 100644
index 2973a9cb37b3e33e102835c9297b1c049d6b61ae..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sbs australia.png and /dev/null differ
diff --git a/gui/slick/images/network/sbs6.png b/gui/slick/images/network/sbs6.png
deleted file mode 100644
index fb5a2454b44d63f30a679ed8c3e66dfeb9e6e585..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sbs6.png and /dev/null differ
diff --git a/gui/slick/images/network/series+.png b/gui/slick/images/network/series+.png
deleted file mode 100644
index 5703eb1c183bf1121dd7d6df2e7d7d54001b6468..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/series+.png and /dev/null differ
diff --git a/gui/slick/images/network/servustv.png b/gui/slick/images/network/servus tv.png
similarity index 100%
rename from gui/slick/images/network/servustv.png
rename to gui/slick/images/network/servus tv.png
diff --git a/gui/slick/images/network/servustv hd.png b/gui/slick/images/network/servustv hd.png
deleted file mode 100644
index 277d8235df8d5cce8a146eb6e7d7c950d201cd82..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/servustv hd.png and /dev/null differ
diff --git a/gui/slick/images/network/set.png b/gui/slick/images/network/set tv.png
similarity index 100%
rename from gui/slick/images/network/set.png
rename to gui/slick/images/network/set tv.png
diff --git a/gui/slick/images/network/showcase.png b/gui/slick/images/network/showcase (au).png
similarity index 100%
rename from gui/slick/images/network/showcase.png
rename to gui/slick/images/network/showcase (au).png
diff --git a/gui/slick/images/network/sixx hd.png b/gui/slick/images/network/sixx hd.png
deleted file mode 100644
index 1872ae0303ffbe5541926b69ff96bc4927b87318..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sixx hd.png and /dev/null differ
diff --git a/gui/slick/images/network/sky action.png b/gui/slick/images/network/sky action.png
deleted file mode 100644
index 3e35c9a616e89ce36fbad4c9dc5cc038059b4784..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky action.png and /dev/null differ
diff --git a/gui/slick/images/network/sky atlantic hd.png b/gui/slick/images/network/sky atlantic hd.png
deleted file mode 100644
index b6c8f06d3a427f0d41da53e992ccb87094690b0f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky atlantic hd.png and /dev/null differ
diff --git a/gui/slick/images/network/sky bundesliga.png b/gui/slick/images/network/sky bundesliga.png
deleted file mode 100644
index c0f1ec58eab5f9b8e99beaa42b24dca90022fa5f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky bundesliga.png and /dev/null differ
diff --git a/gui/slick/images/network/sky cinema.png b/gui/slick/images/network/sky cinema (it).png
similarity index 100%
rename from gui/slick/images/network/sky cinema.png
rename to gui/slick/images/network/sky cinema (it).png
diff --git a/gui/slick/images/network/sky cinema (uk).png b/gui/slick/images/network/sky cinema (uk).png
new file mode 100644
index 0000000000000000000000000000000000000000..f6309c9cc7267baa08a8b451503713a1cd953eb1
Binary files /dev/null and b/gui/slick/images/network/sky cinema (uk).png differ
diff --git a/gui/slick/images/network/sky cinema +1.png b/gui/slick/images/network/sky cinema +1.png
deleted file mode 100644
index 2b93a647cdefc4fc47e36f4e0386c1134e14a4c2..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky cinema +1.png and /dev/null differ
diff --git a/gui/slick/images/network/sky cinema +24.png b/gui/slick/images/network/sky cinema +24.png
deleted file mode 100644
index 6a7dcb965af65b5f0ccf21f4c72e0084ac5c0f04..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky cinema +24.png and /dev/null differ
diff --git a/gui/slick/images/network/sky comedy.png b/gui/slick/images/network/sky comedy.png
deleted file mode 100644
index 2648eb0261b91848e35a5da0f121f7b13a6d4daa..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky comedy.png and /dev/null differ
diff --git a/gui/slick/images/network/sky emotion.png b/gui/slick/images/network/sky emotion.png
deleted file mode 100644
index e7500e3c1727554046a9013cfdc0fffbbfbbdd1f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky emotion.png and /dev/null differ
diff --git a/gui/slick/images/network/sky hits.png b/gui/slick/images/network/sky hits.png
deleted file mode 100644
index cb74f9400641968cf8e545dd31ae17713815e5d6..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky hits.png and /dev/null differ
diff --git a/gui/slick/images/network/sky krimi.png b/gui/slick/images/network/sky krimi.png
deleted file mode 100644
index 9689c47717f2e90f1685ce8325505135499ef816..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky krimi.png and /dev/null differ
diff --git a/gui/slick/images/network/sky nostalgie.png b/gui/slick/images/network/sky nostalgie.png
deleted file mode 100644
index 867e7dda41d5b42ebb64262bd4a192c69123a9fa..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky nostalgie.png and /dev/null differ
diff --git a/gui/slick/images/network/sky select.png b/gui/slick/images/network/sky select.png
deleted file mode 100644
index 8666cc8abfaae067458f4878021c002e99bf79c1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky select.png and /dev/null differ
diff --git a/gui/slick/images/network/sky sport austria.png b/gui/slick/images/network/sky sport austria.png
deleted file mode 100644
index b05926462454987ec40948ecf8e7ee6f958177e1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky sport austria.png and /dev/null differ
diff --git a/gui/slick/images/network/sky sport news hd.png b/gui/slick/images/network/sky sport news hd.png
deleted file mode 100644
index bb3512224b05c8da5953a1d30913f76ce8325e69..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky sport news hd.png and /dev/null differ
diff --git a/gui/slick/images/network/sky sport news.png b/gui/slick/images/network/sky sport news.png
deleted file mode 100644
index 2a3e6caaa31afdbdf5adb39d6e13b9661a4a888d..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky sport news.png and /dev/null differ
diff --git a/gui/slick/images/network/sky sport1.png b/gui/slick/images/network/sky sport1.png
deleted file mode 100644
index 1e9a3abfec5822476c4e4a52308c4c40faa6f5ef..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky sport1.png and /dev/null differ
diff --git a/gui/slick/images/network/sky sport2.png b/gui/slick/images/network/sky sport2.png
deleted file mode 100644
index c7060a7ffc90e9ed6898fb5c309c250dba5e3172..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky sport2.png and /dev/null differ
diff --git a/gui/slick/images/network/sky.png b/gui/slick/images/network/sky.png
deleted file mode 100644
index f15e36d401013eb01edacbf177192b1da6b29203..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sky.png and /dev/null differ
diff --git a/gui/slick/images/network/speed channel.png b/gui/slick/images/network/speed channel.png
deleted file mode 100644
index 5b7edbd88f9d2205a625f895fd3350903ff0b12d..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/speed channel.png and /dev/null differ
diff --git a/gui/slick/images/network/spiegel geschichte.png b/gui/slick/images/network/spiegel geschichte.png
deleted file mode 100644
index de15abfc3ee99b1dc161f1a06eb748ddfc9f1217..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/spiegel geschichte.png and /dev/null differ
diff --git a/gui/slick/images/network/spike.png b/gui/slick/images/network/spike.png
deleted file mode 100644
index def751e4ae7a35d507aecb2496decc68b9d22798..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/spike.png and /dev/null differ
diff --git a/gui/slick/images/network/sport1 hd.png b/gui/slick/images/network/sport1 hd.png
deleted file mode 100644
index c82a11f3a159599f1668831f30ca0c835bfd25e7..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sport1 hd.png and /dev/null differ
diff --git a/gui/slick/images/network/sport1.png b/gui/slick/images/network/sport1.png
deleted file mode 100644
index 39f8314f315e1d622ef996b2a7273acc293f8190..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sport1.png and /dev/null differ
diff --git a/gui/slick/images/network/src.png b/gui/slick/images/network/src.png
deleted file mode 100644
index ea0fb921565f5c5676bd50dd4c4828bac4dc7d24..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/src.png and /dev/null differ
diff --git a/gui/slick/images/network/srf zwei.png b/gui/slick/images/network/srf zwei.png
deleted file mode 100644
index f2af07712cd0b05b5f248975d37a093ce38054aa..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/srf zwei.png and /dev/null differ
diff --git a/gui/slick/images/network/starz.png b/gui/slick/images/network/starz.png
deleted file mode 100644
index d1e6cd5cdde927f73356077c79e3abe32a9381fe..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/starz.png and /dev/null differ
diff --git a/gui/slick/images/network/stv.png b/gui/slick/images/network/stv (uk).png
similarity index 100%
rename from gui/slick/images/network/stv.png
rename to gui/slick/images/network/stv (uk).png
diff --git a/gui/slick/images/network/style network.png b/gui/slick/images/network/style.png
similarity index 100%
rename from gui/slick/images/network/style network.png
rename to gui/slick/images/network/style.png
diff --git a/gui/slick/images/network/subtv.png b/gui/slick/images/network/sub.png
similarity index 100%
rename from gui/slick/images/network/subtv.png
rename to gui/slick/images/network/sub.png
diff --git a/gui/slick/images/network/sun-tv.png b/gui/slick/images/network/sun tv.png
similarity index 100%
rename from gui/slick/images/network/sun-tv.png
rename to gui/slick/images/network/sun tv.png
diff --git a/gui/slick/images/network/sundance.png b/gui/slick/images/network/sundance.png
deleted file mode 100644
index 3a510504afd9e3932038483330a491e6e3889217..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sundance.png and /dev/null differ
diff --git a/gui/slick/images/network/sunrise.png b/gui/slick/images/network/sunrise.png
deleted file mode 100644
index 0e31f6fe664ac72fa7342ef0ac31e55e4209b73b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sunrise.png and /dev/null differ
diff --git a/gui/slick/images/network/super rtl hd.png b/gui/slick/images/network/super rtl hd.png
deleted file mode 100644
index b093530c6ae8ec53daee0362bc153c9236f48f69..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/super rtl hd.png and /dev/null differ
diff --git a/gui/slick/images/network/super rtl.png b/gui/slick/images/network/super rtl.png
deleted file mode 100644
index 10070c02034acf386c73c400a33d8d3ee4b98621..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/super rtl.png and /dev/null differ
diff --git a/gui/slick/images/network/sveriges television.png b/gui/slick/images/network/sveriges television.png
deleted file mode 100644
index c12c2fc7569597eb78892bce907da87a457fb0ba..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/sveriges television.png and /dev/null differ
diff --git a/gui/slick/images/network/svt drama.png b/gui/slick/images/network/svt drama.png
deleted file mode 100644
index dcb6fbb04662a3ade7b165b5b2c97b0e3315da3c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/svt drama.png and /dev/null differ
diff --git a/gui/slick/images/network/swr hd.png b/gui/slick/images/network/swr hd.png
deleted file mode 100644
index 6793852ad3c1faea744b719c25809247072f74ef..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/swr hd.png and /dev/null differ
diff --git a/gui/slick/images/network/syndicated.png b/gui/slick/images/network/syndicated.png
deleted file mode 100644
index ece0dd551f17741aed21fa74fc9ffa2b945605bd..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/syndicated.png and /dev/null differ
diff --git a/gui/slick/images/network/tagesschau24.png b/gui/slick/images/network/tagesschau24.png
deleted file mode 100644
index 9a812a07a2c2e2459dd6d2e726ff46fe5f0ff025..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tagesschau24.png and /dev/null differ
diff --git a/gui/slick/images/network/tbs superstation.png b/gui/slick/images/network/tbs superstation.png
deleted file mode 100644
index 1792567faf689a27ff4cb66d5e9056358934c194..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tbs superstation.png and /dev/null differ
diff --git a/gui/slick/images/network/techtv canada.png b/gui/slick/images/network/techtv canada.png
deleted file mode 100644
index 7c5739273b12737429493565c851699633bad6d9..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/techtv canada.png and /dev/null differ
diff --git a/gui/slick/images/network/tele 5 hd.png b/gui/slick/images/network/tele 5 hd.png
deleted file mode 100644
index 1d268f01d352e83c235c6c1be19b9173926a2fd1..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tele 5 hd.png and /dev/null differ
diff --git a/gui/slick/images/network/tele-quebec.png b/gui/slick/images/network/tele-quebec.png
deleted file mode 100644
index c883a1bc8e181932c51235cda6300a06b0896130..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tele-quebec.png and /dev/null differ
diff --git a/gui/slick/images/network/the anime network.png b/gui/slick/images/network/the anime network.png
deleted file mode 100644
index d462d377a762509bc5f37521610cf19617883b50..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/the anime network.png and /dev/null differ
diff --git a/gui/slick/images/network/the discovery channel.png b/gui/slick/images/network/the discovery channel.png
deleted file mode 100644
index 3765c50cd5db02e041349fc5b8e0bb3d7064947a..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/the discovery channel.png and /dev/null differ
diff --git a/gui/slick/images/network/the science channel.png b/gui/slick/images/network/the science channel.png
deleted file mode 100644
index 759dbcd61edc9139c49407a668a5ce8a099629a4..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/the science channel.png and /dev/null differ
diff --git a/gui/slick/images/network/the-cw.png b/gui/slick/images/network/the-cw.png
deleted file mode 100644
index 67f5abb1a66ae8c669906aaa4cd9ad73729b58f3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/the-cw.png and /dev/null differ
diff --git a/gui/slick/images/network/thecw.png b/gui/slick/images/network/thecw.png
deleted file mode 100644
index 67f5abb1a66ae8c669906aaa4cd9ad73729b58f3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/thecw.png and /dev/null differ
diff --git a/gui/slick/images/network/thewb.png b/gui/slick/images/network/thewb.png
deleted file mode 100644
index a20f74bbb585b27fa25f083bb35d286d3c8a67b5..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/thewb.png and /dev/null differ
diff --git a/gui/slick/images/network/manhattan neighborhood network.png b/gui/slick/images/network/tmf.png
similarity index 63%
rename from gui/slick/images/network/manhattan neighborhood network.png
rename to gui/slick/images/network/tmf.png
index 65f53b03608a35bbc45401d2a30e005af97e1469..8fc18f0290b847a94b0154557b2e4bc1af84cf33 100644
Binary files a/gui/slick/images/network/manhattan neighborhood network.png and b/gui/slick/images/network/tmf.png differ
diff --git a/gui/slick/images/network/tnt drama.png b/gui/slick/images/network/tnt drama.png
deleted file mode 100644
index 46cfc765a66da6b2e1cad10c3b8e17dc6896e064..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tnt drama.png and /dev/null differ
diff --git a/gui/slick/images/network/tnt glitz.png b/gui/slick/images/network/tnt glitz.png
deleted file mode 100644
index 637bd7e1fea969fc49d71cc6e55b025f01f9fcad..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tnt glitz.png and /dev/null differ
diff --git a/gui/slick/images/network/tntdrama.png b/gui/slick/images/network/tntdrama.png
deleted file mode 100644
index 46cfc765a66da6b2e1cad10c3b8e17dc6896e064..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tntdrama.png and /dev/null differ
diff --git a/gui/slick/images/network/toei animation.png b/gui/slick/images/network/toei animation.png
deleted file mode 100644
index 66b7aa8c6cb96c4ef35cf207f0870efc6e5740c7..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/toei animation.png and /dev/null differ
diff --git a/gui/slick/images/network/at5.png b/gui/slick/images/network/travel channel (uk).png
similarity index 55%
rename from gui/slick/images/network/at5.png
rename to gui/slick/images/network/travel channel (uk).png
index 8269dcacc8eed12d3a83e0fbcc2c2180f71155aa..b65d1c848fe4e37d0395b55529df6ca1f485fd4e 100644
Binary files a/gui/slick/images/network/at5.png and b/gui/slick/images/network/travel channel (uk).png differ
diff --git a/gui/slick/images/network/treehouse tv.png b/gui/slick/images/network/treehouse tv.png
deleted file mode 100644
index 4be61faa6d798f6d2286ae8dba0878a87fdf6925..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/treehouse tv.png and /dev/null differ
diff --git a/gui/slick/images/network/tv guide.png b/gui/slick/images/network/tv guide.png
deleted file mode 100644
index 81c7652daba55e5cd5dba076798d66aa614c7089..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv guide.png and /dev/null differ
diff --git a/gui/slick/images/network/tv kanagawa.png b/gui/slick/images/network/tv kanagawa.png
deleted file mode 100644
index 27bb05da53f7223f8dbe5c59a9d02db3d341d45b..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv kanagawa.png and /dev/null differ
diff --git a/gui/slick/images/network/tv norge.png b/gui/slick/images/network/tv norge.png
deleted file mode 100644
index 4a17b749b64ca746f81274aba86092c75cbad370..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv norge.png and /dev/null differ
diff --git a/gui/slick/images/network/tv one nz.png b/gui/slick/images/network/tv one (nz).png
similarity index 100%
rename from gui/slick/images/network/tv one nz.png
rename to gui/slick/images/network/tv one (nz).png
diff --git a/gui/slick/images/network/tv saitama.png b/gui/slick/images/network/tv saitama.png
deleted file mode 100644
index dece55b51d9ba3a0ee6a9a8b287b4d5b0959532e..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv saitama.png and /dev/null differ
diff --git a/gui/slick/images/network/tv west.png b/gui/slick/images/network/tv west.png
deleted file mode 100644
index 0d64d9c3c8a29afcbf384b12fba7e84acaaf4d20..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv west.png and /dev/null differ
diff --git a/gui/slick/images/network/tv1.png b/gui/slick/images/network/tv1.png
deleted file mode 100644
index a8da66b843c7396a7e12adb2dc034d575a274bd9..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv1.png and /dev/null differ
diff --git a/gui/slick/images/network/tv3, norway.png b/gui/slick/images/network/tv3 (no).png
similarity index 100%
rename from gui/slick/images/network/tv3, norway.png
rename to gui/slick/images/network/tv3 (no).png
diff --git a/gui/slick/images/network/tv3.png b/gui/slick/images/network/tv3.png
deleted file mode 100644
index 84b134e357a3e2bb6fe94d0d1630e58b65614ad7..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv3.png and /dev/null differ
diff --git a/gui/slick/images/network/tv7.png b/gui/slick/images/network/tv7.png
deleted file mode 100644
index d5bb6dfe09e0abeaae73874f9513f90355d76df3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tv7.png and /dev/null differ
diff --git a/gui/slick/images/network/tve1.png b/gui/slick/images/network/tve1.png
deleted file mode 100644
index 4afdab277683e3d39ae0896220326553c39256db..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tve1.png and /dev/null differ
diff --git a/gui/slick/images/network/tve2.png b/gui/slick/images/network/tve2.png
deleted file mode 100644
index a016440e43fb6b704729d61988f49084049781b7..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tve2.png and /dev/null differ
diff --git a/gui/slick/images/network/tvp sa.png b/gui/slick/images/network/tvp sa.png
deleted file mode 100644
index 9043c8d615eefa55569b02ef9f9f03e49b127946..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/tvp sa.png and /dev/null differ
diff --git a/gui/slick/images/network/usa-network.png b/gui/slick/images/network/usa-network.png
deleted file mode 100644
index 8bec2afc8664331b0650e4fd2f0532662c1f8d9f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/usa-network.png and /dev/null differ
diff --git a/gui/slick/images/network/usa.png b/gui/slick/images/network/usa.png
deleted file mode 100644
index 8bec2afc8664331b0650e4fd2f0532662c1f8d9f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/usa.png and /dev/null differ
diff --git a/gui/slick/images/network/usanetwork.png b/gui/slick/images/network/usanetwork.png
deleted file mode 100644
index 8bec2afc8664331b0650e4fd2f0532662c1f8d9f..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/usanetwork.png and /dev/null differ
diff --git a/gui/slick/images/network/vier.png b/gui/slick/images/network/vier.png
new file mode 100644
index 0000000000000000000000000000000000000000..25b32afd457bbc625d96fa172da4cc3d0c6ee638
Binary files /dev/null and b/gui/slick/images/network/vier.png differ
diff --git a/gui/slick/images/network/vijftv.jpg b/gui/slick/images/network/vijftv.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5509b16a3f0bba93c81bfa4dd6602e20533c1952
Binary files /dev/null and b/gui/slick/images/network/vijftv.jpg differ
diff --git a/gui/slick/images/network/viva hd.png b/gui/slick/images/network/viva hd.png
deleted file mode 100644
index dccd438f202348b777e75cadec91453b6b078e92..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/viva hd.png and /dev/null differ
diff --git a/gui/slick/images/network/viva.png b/gui/slick/images/network/viva.png
deleted file mode 100644
index 4659368464115984888dad8b0a19962d8c3d4d4c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/viva.png and /dev/null differ
diff --git a/gui/slick/images/network/viz media.png b/gui/slick/images/network/viz media.png
deleted file mode 100644
index 7825b7970ce66a8a3d5903f526995afc119ad9bf..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/viz media.png and /dev/null differ
diff --git a/gui/slick/images/network/vox hd.png b/gui/slick/images/network/vox hd.png
deleted file mode 100644
index 1f5d157fc1761253ea35e6da70cb23dad886de21..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/vox hd.png and /dev/null differ
diff --git a/gui/slick/images/network/vt4.png b/gui/slick/images/network/vt4.png
new file mode 100644
index 0000000000000000000000000000000000000000..7bb4ea1cde32b613378d0c0a477da443e4c1d5e4
Binary files /dev/null and b/gui/slick/images/network/vt4.png differ
diff --git a/gui/slick/images/network/vtmkzoom.jpg b/gui/slick/images/network/vtmkzoom.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c3d69598d24554b8f6db773f71cc29612c3429b1
Binary files /dev/null and b/gui/slick/images/network/vtmkzoom.jpg differ
diff --git a/gui/slick/images/network/wdr hd.png b/gui/slick/images/network/wdr hd.png
deleted file mode 100644
index d78c900f86ba59652b9d8e138c70eb06dda786ed..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/wdr hd.png and /dev/null differ
diff --git a/gui/slick/images/network/wdr.png b/gui/slick/images/network/wdr.png
deleted file mode 100644
index 0a2dd0e1eab1a1451a7012edbe61c4a611c9c18c..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/wdr.png and /dev/null differ
diff --git a/gui/slick/images/network/wnl.png b/gui/slick/images/network/wnl.png
new file mode 100644
index 0000000000000000000000000000000000000000..581b309ad156156783714a135233724ed095df78
Binary files /dev/null and b/gui/slick/images/network/wnl.png differ
diff --git a/gui/slick/images/network/xbox live.png b/gui/slick/images/network/xbox live.png
deleted file mode 100644
index 40c0cabc0a64048e8aeec0713a506913922335d4..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/xbox live.png and /dev/null differ
diff --git a/gui/slick/images/network/xebec.png b/gui/slick/images/network/xebec.png
deleted file mode 100644
index dd9620c969b9f16e5e43e1c7a1909271b75889a0..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/xebec.png and /dev/null differ
diff --git a/gui/slick/images/network/yahoo! screen.png b/gui/slick/images/network/yahoo! screen.png
deleted file mode 100644
index e9efa1b67cad4d95336b9c57e528dd21289321e6..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/yahoo! screen.png and /dev/null differ
diff --git a/gui/slick/images/network/yorin.png b/gui/slick/images/network/yorin.png
deleted file mode 100644
index cf6a306d01ada1c083afa5cf0fda78a864b06b51..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/yorin.png and /dev/null differ
diff --git a/gui/slick/images/network/ytv.png b/gui/slick/images/network/ytv (ca).png
similarity index 100%
rename from gui/slick/images/network/ytv.png
rename to gui/slick/images/network/ytv (ca).png
diff --git a/gui/slick/images/network/zdf hd.png b/gui/slick/images/network/zdf hd.png
deleted file mode 100644
index bafb93fdef414b71186756042d4545dc24b4841e..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/zdf hd.png and /dev/null differ
diff --git a/gui/slick/images/network/zdf.kultur hd.png b/gui/slick/images/network/zdf.kultur hd.png
deleted file mode 100644
index fc6b2018af8a42f6309089562f5d4aae37a39ea0..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/zdf.kultur hd.png and /dev/null differ
diff --git a/gui/slick/images/network/zdfinfo hd.png b/gui/slick/images/network/zdfinfo hd.png
deleted file mode 100644
index 9c38daf86cea83c3fa729a93b0da7da8c7b263f5..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/zdfinfo hd.png and /dev/null differ
diff --git a/gui/slick/images/network/zdfinfo.png b/gui/slick/images/network/zdfinfo.png
deleted file mode 100644
index 95933a288b49b3345b8adede56286c60ebb6ff4a..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/zdfinfo.png and /dev/null differ
diff --git a/gui/slick/images/network/zdfneo hd.png b/gui/slick/images/network/zdfneo hd.png
deleted file mode 100644
index f99b505dcf414aee15e46ffb534f6658bfbc54c3..0000000000000000000000000000000000000000
Binary files a/gui/slick/images/network/zdfneo hd.png and /dev/null differ
diff --git a/gui/slick/images/providers/animezb.png b/gui/slick/images/providers/animenzb.png
similarity index 100%
rename from gui/slick/images/providers/animezb.png
rename to gui/slick/images/providers/animenzb.png
diff --git a/gui/slick/images/providers/BLUETIGERS.png b/gui/slick/images/providers/bluetigers.png
similarity index 100%
rename from gui/slick/images/providers/BLUETIGERS.png
rename to gui/slick/images/providers/bluetigers.png
diff --git a/gui/slick/images/providers/FNT.png b/gui/slick/images/providers/fnt.png
similarity index 100%
rename from gui/slick/images/providers/FNT.png
rename to gui/slick/images/providers/fnt.png
diff --git a/gui/slick/images/providers/kat.png b/gui/slick/images/providers/kickasstorrents.png
similarity index 100%
rename from gui/slick/images/providers/kat.png
rename to gui/slick/images/providers/kickasstorrents.png
diff --git a/gui/slick/images/providers/scc.png b/gui/slick/images/providers/sceneaccess.png
similarity index 100%
rename from gui/slick/images/providers/scc.png
rename to gui/slick/images/providers/sceneaccess.png
diff --git a/gui/slick/images/providers/shazbat.png b/gui/slick/images/providers/shazbat_tv.png
similarity index 100%
rename from gui/slick/images/providers/shazbat.png
rename to gui/slick/images/providers/shazbat_tv.png
diff --git a/gui/slick/images/providers/getstrike.png b/gui/slick/images/providers/strike.png
similarity index 100%
rename from gui/slick/images/providers/getstrike.png
rename to gui/slick/images/providers/strike.png
diff --git a/gui/slick/images/providers/torrentbytes.png b/gui/slick/images/providers/torrentbytes.png
index 9c0f5e1320a02d3d44fcf33284d01cfbc86dba4c..d9138065f1bd4495da00c85de0c312f504bcf82a 100644
Binary files a/gui/slick/images/providers/torrentbytes.png and b/gui/slick/images/providers/torrentbytes.png differ
diff --git a/gui/slick/js/_bower.min.js b/gui/slick/js/_bower.min.js
index 91e9e09910f4f92bd16a2013b4b8ff0d744f4093..aad0ddf1ce56d0457e6d9e1978f112a66e0d3d3e 100644
Binary files a/gui/slick/js/_bower.min.js and b/gui/slick/js/_bower.min.js differ
diff --git a/gui/slick/js/addTrendingShow.js b/gui/slick/js/addTrendingShow.js
deleted file mode 100644
index 12f5440b51ce13b1860095821dcb45c42008fe1f..0000000000000000000000000000000000000000
--- a/gui/slick/js/addTrendingShow.js
+++ /dev/null
@@ -1,21 +0,0 @@
-$(document).ready(function() {
-    var trendingRequestXhr = null;
-
-    function loadContent() {
-        if (trendingRequestXhr) trendingRequestXhr.abort();
-
-        $('#trendingShows').html('<img id="searchingAnim" src="' + srRoot + '/images/loading32' + themeSpinner + '.gif" height="32" width="32" /> loading trending shows...');
-        trendingRequestXhr = $.ajax({
-            url: srRoot + '/home/addShows/getTrendingShows/',
-            timeout: 60 * 1000,
-            error: function () {
-                $('#trendingShows').empty().html('Trakt timed out, refresh page to try again');
-            },
-            success: function (data) {
-                $('#trendingShows').html(data);
-            }
-        });
-    }
-
-    loadContent();
-});
diff --git a/gui/slick/js/apibuilder.js b/gui/slick/js/apibuilder.js
index c5f456ffb23902a2abd85aeb54733da7550277ce..80e27e157271f699b31197992fad60aa81c288c1 100644
--- a/gui/slick/js/apibuilder.js
+++ b/gui/slick/js/apibuilder.js
@@ -5,7 +5,7 @@ $(document).ready(function() {
         var profile = $('#option-profile').is(':checked');
         var targetId = $(this).data('target');
         var timeId = $(this).data('time');
-        var url = $('#' + $(this).data('base-url')).text();
+        var url = srRoot + $('#' + $(this).data('base-url')).text();
         var urlId = $(this).data('url');
 
         $.each(parameters, function (index, item) {
diff --git a/gui/slick/js/new/addTrendingShow.js b/gui/slick/js/new/addTrendingShow.js
new file mode 100644
index 0000000000000000000000000000000000000000..84f00c70e938ca90e69effeb330964b3bcd8394e
--- /dev/null
+++ b/gui/slick/js/new/addTrendingShow.js
@@ -0,0 +1,14 @@
+$.fn.loadContent = function(path, loadingTxt, errorTxt) {
+    $(this).html('<img id="searchingAnim" src="' + srRoot + '/images/loading32' + themeSpinner + '.gif" height="32" width="32" />&nbsp;' + loadingTxt);
+    $(this).load(srRoot + path + ' #container', function(response, status, xhr) {
+        if (status == "error") $(this).empty().html(errorTxt);
+    });
+};
+
+$(document).ready(function() {
+    $('#trendingShows').loadContent('/home/addShows/getTrendingShows/', 'Loading trending shows...', 'Trakt timed out, refresh page to try again');
+    $('#container').isotope({
+        itemSelector: '.trakt_show',
+        layoutMode: 'fitRows'
+    });
+});
diff --git a/gui/slick/js/new/comingEpisodes.js b/gui/slick/js/new/comingEpisodes.js
deleted file mode 100644
index c6de5a697996f4f61cb9a67b4e746b294f533c2e..0000000000000000000000000000000000000000
--- a/gui/slick/js/new/comingEpisodes.js
+++ /dev/null
@@ -1,41 +0,0 @@
-$(document).ready(function(){
-    if(isMeta('sickbeard.COMING_EPS_LAYOUT', ['list'])){
-        var sortCodes = {'date': 0, 'show': 1, 'network': 4};
-        var sort = getMeta('sickbeard.COMING_EPS_SORT');
-        var sortList = (sort in sortCodes) ? [[sortCodes[sort], 0]] : [[0, 0]];
-
-        $('#showListTable:has(tbody tr)').tablesorter({
-            widgets: ['stickyHeaders'],
-            sortList: sortList,
-            textExtraction: {
-                0: function(node) { return $(node).find('time').attr('datetime'); },
-                5: function(node) { return $(node).find('span').text().toLowerCase(); }
-            },
-            headers: {
-                0: { sorter: 'realISODate' },
-                1: { sorter: 'loadingNames' },
-                2: { sorter: false },
-                3: { sorter: false },
-                4: { sorter: 'loadingNames' },
-                5: { sorter: 'quality' },
-                6: { sorter: false },
-                7: { sorter: false },
-                8: { sorter: false }
-            }
-        });
-
-        $('#srRoot').ajaxEpSearch();
-    }
-
-    if(isMeta('sickbeard.COMING_EPS_LAYOUT', ['banner', 'poster'])){
-        $('#srRoot').ajaxEpSearch({'size': 16, 'loadingImage': 'loading16' + themeSpinner + '.gif'});
-        $('.ep_summary').hide();
-        $('.ep_summaryTrigger').click(function() {
-            $(this).next('.ep_summary').slideToggle('normal', function() {
-                $(this).prev('.ep_summaryTrigger').attr('src', function(i, src) {
-                    return $(this).next('.ep_summary').is(':visible') ? src.replace('plus','minus') : src.replace('minus','plus');
-                });
-            });
-        });
-    }
-});
diff --git a/gui/slick/js/script.js b/gui/slick/js/new/core.js
similarity index 100%
rename from gui/slick/js/script.js
rename to gui/slick/js/new/core.js
diff --git a/gui/slick/js/new/recommendedShows.js b/gui/slick/js/new/recommendedShows.js
new file mode 100644
index 0000000000000000000000000000000000000000..17a78a4649dd6b982de8a90f2606e072712dd631
--- /dev/null
+++ b/gui/slick/js/new/recommendedShows.js
@@ -0,0 +1,14 @@
+$.fn.loadContent = function(path, loadingTxt, errorTxt) {
+    $(this).html('<img id="searchingAnim" src="' + srRoot + '/images/loading32' + themeSpinner + '.gif" height="32" width="32" />&nbsp;' + loadingTxt);
+    $(this).load(srRoot + path + ' #container', function(response, status, xhr) {
+        if (status == "error") $(this).empty().html(errorTxt);
+    });
+};
+
+$(document).ready(function() {
+    $('#trendingShows').loadContent('/home/addShows/getRecommendedShows/', 'Loading recommended shows...', 'Trakt timed out, refresh page to try again');
+    $('#container').isotope({
+        itemSelector: '.trakt_show',
+        layoutMode: 'fitRows'
+    });
+});
diff --git a/gui/slick/js/new/schedule.js b/gui/slick/js/new/schedule.js
new file mode 100644
index 0000000000000000000000000000000000000000..a3b0883ecb474f22158c979f66fdc00babab0b5f
--- /dev/null
+++ b/gui/slick/js/new/schedule.js
@@ -0,0 +1,64 @@
+$(document).ready(function(){
+    if(isMeta('sickbeard.COMING_EPS_LAYOUT', ['list'])){
+        var sortCodes = {'date': 0, 'show': 2, 'network': 5};
+        var sort = getMeta('sickbeard.COMING_EPS_SORT');
+        var sortList = (sort in sortCodes) ? [[sortCodes[sort], 0]] : [[0, 0]];
+
+        $('#showListTable:has(tbody tr)').tablesorter({
+            widgets: ['stickyHeaders', 'filter', 'columnSelector', 'saveSort'],
+            sortList: sortList,
+            textExtraction: {
+                0: function(node) { return $(node).find('time').attr('datetime'); },
+                1: function(node) { return $(node).find('time').attr('datetime'); },
+                7: function(node) { return $(node).find('span').text().toLowerCase(); }
+            },
+            headers: {
+                0: { sorter: 'realISODate' },
+                1: { sorter: 'realISODate' },
+                2: { sorter: 'loadingNames' },
+                4: { sorter: 'loadingNames' },
+                7: { sorter: 'quality' },
+                8: { sorter: false },
+                9: { sorter: false }
+            },
+            widgetOptions: (function() {
+                if (metaToBool('sickbeard.FILTER_ROW')) {
+                    return {
+                        filter_columnFilters: true,
+                        filter_hideFilters: true,
+                        filter_saveFilters: true,
+                        columnSelector_mediaquery: false
+                    };
+                } else {
+                    return {
+                        filter_columnFilters: false,
+                        columnSelector_mediaquery: false
+                    };
+                }
+            }())
+        });
+
+        $('#srRoot').ajaxEpSearch();
+    }
+
+    if(isMeta('sickbeard.COMING_EPS_LAYOUT', ['banner', 'poster'])){
+        $('#srRoot').ajaxEpSearch({'size': 16, 'loadingImage': 'loading16' + themeSpinner + '.gif'});
+        $('.ep_summary').hide();
+        $('.ep_summaryTrigger').click(function() {
+            $(this).next('.ep_summary').slideToggle('normal', function() {
+                $(this).prev('.ep_summaryTrigger').attr('src', function(i, src) {
+                    return $(this).next('.ep_summary').is(':visible') ? src.replace('plus','minus') : src.replace('minus','plus');
+                });
+            });
+        });
+    }
+
+    $('#popover').popover({
+        placement: 'bottom',
+        html: true, // required if content has HTML
+        content: '<div id="popover-target"></div>'
+    }).on('shown.bs.popover', function () { // bootstrap popover event triggered when the popover opens
+        // call this function to copy the column selection code into the popover
+        $.tablesorter.columnSelector.attachTo( $('#showListTable'), '#popover-target');
+    });
+});
diff --git a/gui/slick/js/new/viewlogs.js b/gui/slick/js/new/viewlogs.js
index 1f4ca36c09ac30a675af85e0dddb12b571ca54df..d0ed8eee15b3e348826ea5336e9711271ae2a898 100644
--- a/gui/slick/js/new/viewlogs.js
+++ b/gui/slick/js/new/viewlogs.js
@@ -1,13 +1,11 @@
 $(document).ready(function(){
-    $('#minLevel,#logFilter,#logSearch').on('keyup change', function(){
+    $('#minLevel,#logFilter,#logSearch').on('keyup change', _.debounce(function (e) {
         if ($('#logSearch').val().length > 0){
-            $('#logSearch').prop('disabled', true);
             $('#logFilter option[value="<NONE>"]').prop('selected', true);
             $('#minLevel option[value=5]').prop('selected', true);
         }
         $('#minLevel').prop('disabled', true);
         $('#logFilter').prop('disabled', true);
-        $('#logSearch').prop('disabled', true);
         document.body.style.cursor='wait';
         url = srRoot + '/errorlogs/viewlog/?minLevel='+$('select[name=minLevel]').val()+'&logFilter='+$('select[name=logFilter]').val()+'&logSearch='+$('#logSearch').val();
         $.get(url, function(data){
@@ -15,8 +13,7 @@ $(document).ready(function(){
             $('pre').html($(data).find('pre').html());
             $('#minLevel').prop('disabled', false);
             $('#logFilter').prop('disabled', false);
-            $('#logSearch').prop('disabled', false);
             document.body.style.cursor='default';
         });
-    });
+    }, 500));
 });
diff --git a/gui/slick/js/recommendedShows.js b/gui/slick/js/recommendedShows.js
deleted file mode 100644
index 81c60af84987c92521239f9532a96fdc23e7c817..0000000000000000000000000000000000000000
--- a/gui/slick/js/recommendedShows.js
+++ /dev/null
@@ -1,21 +0,0 @@
-$(document).ready(function() {
-    var trendingRequestXhr = null;
-
-    function loadContent() {
-        if (trendingRequestXhr) trendingRequestXhr.abort();
-
-        $('#trendingShows').html('<img id="searchingAnim" src="' + srRoot + '/images/loading32' + themeSpinner + '.gif" height="32" width="32" /> Loading Recommended Shows...');
-        trendingRequestXhr = $.ajax({
-            url: srRoot + '/home/addShows/getRecommendedShows/',
-            timeout: 60 * 1000,
-            error: function () {
-                $('#trendingShows').empty().html('Trakt timed out, refresh page to try again');
-            },
-            success: function (data) {
-                $('#trendingShows').html(data);
-            }
-        });
-    }
-
-    loadContent();
-});
diff --git a/gui/slick/views/apiBuilder.mako b/gui/slick/views/apiBuilder.mako
index 8f91c4c768a3163f24a627acfc8d02e3603af422..acf688a02d2d1a48fec9708c525fb36ac9643d62 100644
--- a/gui/slick/views/apiBuilder.mako
+++ b/gui/slick/views/apiBuilder.mako
@@ -26,6 +26,25 @@
     <meta name="msapplication-TileImage" content="${srRoot}/images/ico/favicon-144.png">
     <meta name="msapplication-config" content="${srRoot}/css/browserconfig.xml">
 
+    <meta data-var="srRoot" data-content="${srRoot}">
+    <meta data-var="themeSpinner" data-content="${('', '-dark')[sickbeard.THEME_NAME == 'dark']}">
+    <meta data-var="anonURL" data-content="${sickbeard.ANON_REDIRECT}">
+
+    <meta data-var="sickbeard.ANIME_SPLIT_HOME" data-content="${sickbeard.ANIME_SPLIT_HOME}">
+    <meta data-var="sickbeard.COMING_EPS_LAYOUT" data-content="${sickbeard.COMING_EPS_LAYOUT}">
+    <meta data-var="sickbeard.COMING_EPS_SORT" data-content="${sickbeard.COMING_EPS_SORT}">
+    <meta data-var="sickbeard.DATE_PRESET" data-content="${sickbeard.DATE_PRESET}">
+    <meta data-var="sickbeard.FILTER_ROW" data-content="${sickbeard.FILTER_ROW}">
+    <meta data-var="sickbeard.FUZZY_DATING" data-content="${sickbeard.FUZZY_DATING}">
+    <meta data-var="sickbeard.HISTORY_LAYOUT" data-content="${sickbeard.HISTORY_LAYOUT}">
+    <meta data-var="sickbeard.HOME_LAYOUT" data-content="${sickbeard.HOME_LAYOUT}">
+    <meta data-var="sickbeard.POSTER_SORTBY" data-content="${sickbeard.POSTER_SORTBY}">
+    <meta data-var="sickbeard.POSTER_SORTDIR" data-content="${sickbeard.POSTER_SORTDIR}">
+    <meta data-var="sickbeard.ROOT_DIRS" data-content="${sickbeard.ROOT_DIRS}">
+    <meta data-var="sickbeard.SORT_ARTICLE" data-content="${sickbeard.SORT_ARTICLE}">
+    <meta data-var="sickbeard.TIME_PRESET" data-content="${sickbeard.TIME_PRESET}">
+    <meta data-var="sickbeard.TRIM_ZERO" data-content="${sickbeard.TRIM_ZERO}">
+
     <link rel="shortcut icon" href="${srRoot}/images/ico/favicon.ico">
     <link rel="icon" sizes="16x16 32x32 64x64" href="${srRoot}/images/ico/favicon.ico">
     <link rel="icon" type="image/png" sizes="196x196" href="${srRoot}/images/ico/favicon-196.png">
@@ -167,6 +186,8 @@ var commands = ${sorted(commands)};
 var episodes = ${episodes};
 </script>
 <script type="text/javascript" src="${srRoot}/js/_bower.min.js?${sbPID}"></script>
+<script type="text/javascript" src="${srRoot}/js/new/meta.js?${sbPID}"></script>
+<script type="text/javascript" src="${srRoot}/js/new/core.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/apibuilder.js?${sbPID}"></script>
 </body>
 </html>
diff --git a/gui/slick/views/config_general.mako b/gui/slick/views/config_general.mako
index bc4681f545cc9eb6d9eb0689a5a15fcbdb83829e..1de330c00fdc3982ba8a7fb8442411afe4cb1749 100644
--- a/gui/slick/views/config_general.mako
+++ b/gui/slick/views/config_general.mako
@@ -75,11 +75,11 @@
                                 <span class="component-title">Initial page</span>
                                 <span class="component-desc">
                                     <select id="default_page" name="default_page" class="form-control input-sm">
-                                        <option value="news" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'news']}>News</option>
-                                        <option value="IRC" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'IRC']}>IRC</option>
                                         <option value="home" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'home']}>Shows</option>
-                                        <option value="comingEpisodes" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'comingEpisodes']}>Schedule</option>
+                                        <option value="schedule" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'schedule']}>Schedule</option>
                                         <option value="history" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'history']}>History</option>
+                                        <option value="news" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'news']}>News</option>
+                                        <option value="IRC" ${('', 'selected="selected"')[sickbeard.DEFAULT_PAGE == 'IRC']}>IRC</option>
                                     </select>
                                     <span>when launching SickRage interface</span>
                                 </span>
diff --git a/gui/slick/views/errorlogs.mako b/gui/slick/views/errorlogs.mako
index 8f793011f7d04eb7189897c8cc918325410e83ba..a3ff82f76259370f5a72a05b0544367eec5de429 100644
--- a/gui/slick/views/errorlogs.mako
+++ b/gui/slick/views/errorlogs.mako
@@ -4,6 +4,15 @@
     from sickbeard import classes
     from sickbeard.logger import reverseNames
 %>
+<%block name="css">
+<style>
+pre {
+  overflow: auto;
+  word-wrap: normal;
+  white-space: pre;
+}
+</style>
+</%block>
 <%block name="scripts">
 <script type="text/javascript" src="${srRoot}/js/new/errorlogs.js"></script>
 </%block>
diff --git a/gui/slick/views/history.mako b/gui/slick/views/history.mako
index 1d82113df68f2c21daf8401ee4b3cc9379bb3608..495e8968e6a1d337aa3b4edafdb1fe21a1128448 100644
--- a/gui/slick/views/history.mako
+++ b/gui/slick/views/history.mako
@@ -36,6 +36,8 @@
         <option value="100" ${('', 'selected="selected"')[limit == 100]}>100</option>
         <option value="250" ${('', 'selected="selected"')[limit == 250]}>250</option>
         <option value="500" ${('', 'selected="selected"')[limit == 500]}>500</option>
+        <option value="750" ${('', 'selected="selected"')[limit == 750]}>750</option>
+        <option value="1000" ${('', 'selected="selected"')[limit == 1000]}>1000</option>
         <option value="0"   ${('', 'selected="selected"')[limit == 0  ]}>All</option>
     </select>
 
diff --git a/gui/slick/views/home_recommendedShows.mako b/gui/slick/views/home_recommendedShows.mako
index 4405682a5838f912154bc9d57acfd4a15f925519..f1ede43278fa67abcc7e8f250d2293186fe5970a 100644
--- a/gui/slick/views/home_recommendedShows.mako
+++ b/gui/slick/views/home_recommendedShows.mako
@@ -3,7 +3,7 @@
     import sickbeard
 %>
 <%block name="scripts">
-<script type="text/javascript" src="${srRoot}/js/recommendedShows.js?${sbPID}"></script>
+<script type="text/javascript" src="${srRoot}/js/new/recommendedShows.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/rootDirs.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/plotTooltip.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/new/home_recommendedShows.js"></script>
diff --git a/gui/slick/views/home_trendingShows.mako b/gui/slick/views/home_trendingShows.mako
index 8c2739b68e66916323741d1141d6c7bd1da331cf..a6aa74f07e755e26005a0c10fbba84da72832761 100644
--- a/gui/slick/views/home_trendingShows.mako
+++ b/gui/slick/views/home_trendingShows.mako
@@ -9,7 +9,7 @@
     from sickbeard.helpers import anon_url
 %>
 <%block name="scripts">
-<script type="text/javascript" src="${srRoot}/js/addTrendingShow.js?${sbPID}"></script>
+<script type="text/javascript" src="${srRoot}/js/new/addTrendingShow.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/rootDirs.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/plotTooltip.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/new/home_trendingShows.js"></script>
diff --git a/gui/slick/views/layouts/main.mako b/gui/slick/views/layouts/main.mako
index b1843c5a0371e910cd7217491ca84410f2a81f2f..6f1d52092e5a4bdfe0e8535944f6b251233d6e78 100644
--- a/gui/slick/views/layouts/main.mako
+++ b/gui/slick/views/layouts/main.mako
@@ -127,8 +127,8 @@
                             <div style="clear:both;"></div>
                         </li>
 
-                        <li id="NAVcomingEpisodes"${('', ' class="active"')[topmenu == 'comingEpisodes']}>
-                            <a href="${srRoot}/comingEpisodes/">Schedule</a>
+                        <li id="NAVschedule"${('', ' class="active"')[topmenu == 'schedule']}>
+                            <a href="${srRoot}/schedule/">Schedule</a>
                         </li>
 
                         <li id="NAVhistory"${('', ' class="active"')[topmenu == 'history']}>
@@ -221,7 +221,9 @@
                                 <li><a href="${srRoot}/home/updateCheck?pid=${sbPID}"><i class="menu-icon-update"></i>&nbsp;Check For Updates</a></li>
                                 <li><a href="${srRoot}/home/restart/?pid=${sbPID}" class="confirm restart"><i class="menu-icon-restart"></i>&nbsp;Restart</a></li>
                                 <li><a href="${srRoot}/home/shutdown/?pid=${sbPID}" class="confirm shutdown"><i class="menu-icon-shutdown"></i>&nbsp;Shutdown</a></li>
-                                <li><a href="${srRoot}/logout" class="confirm logout"><i class="menu-icon-shutdown"></i>&nbsp;Logout</a></li>
+                                % if sbLogin != True:
+                                    <li><a href="${srRoot}/logout" class="confirm logout"><i class="menu-icon-shutdown"></i>&nbsp;Logout</a></li>
+                                % endif
                                 <li role="separator" class="divider"></li>
                                 <li><a href="${srRoot}/home/status/"><i class="menu-icon-help"></i>&nbsp;Server Status</a></li>
                             </ul>
@@ -355,7 +357,7 @@
         <script type="text/javascript" src="${srRoot}/js/lib/pnotify.custom.min.js?${sbPID}"></script><!-- Needs to be removed -->
         <script type="text/javascript" src="${srRoot}/js/new/parsers.js?${sbPID}"></script>
         <script type="text/javascript" src="${srRoot}/js/new/meta.js?${sbPID}"></script>
-        <script type="text/javascript" src="${srRoot}/js/script.js?${sbPID}"></script>
+        <script type="text/javascript" src="${srRoot}/js/new/core.js?${sbPID}"></script>
         <script type="text/javascript" src="${srRoot}/js/lib/jquery.scrolltopcontrol-1.1.js?${sbPID}"></script>
         <script type="text/javascript" src="${srRoot}/js/browser.js?${sbPID}"></script>
         <script type="text/javascript" src="${srRoot}/js/ajaxNotifications.js?${sbPID}"></script>
diff --git a/gui/slick/views/comingEpisodes.mako b/gui/slick/views/schedule.mako
similarity index 89%
rename from gui/slick/views/comingEpisodes.mako
rename to gui/slick/views/schedule.mako
index d8b18ab5be2a70702736536f7cf4f257c0bfdd3f..c1b03a6e10ed2ba170e837b2bdfd3473c4745a05 100644
--- a/gui/slick/views/comingEpisodes.mako
+++ b/gui/slick/views/schedule.mako
@@ -10,7 +10,7 @@
 <%block name="scripts">
 <script type="text/javascript" src="${srRoot}/js/ajaxEpSearch.js?${sbPID}"></script>
 <script type="text/javascript" src="${srRoot}/js/plotTooltip.js?${sbPID}"></script>
-<script type="text/javascript" src="${srRoot}/js/new/comingEpisodes.js"></script>
+<script type="text/javascript" src="${srRoot}/js/new/schedule.js"></script>
 </%block>
 <%block name="css">
 <style type="text/css">
@@ -23,29 +23,33 @@
 <%namespace file="/inc_defs.mako" import="renderQualityPill"/>
 <h1 class="header">${header}</h1>
 <div class="h2footer pull-right">
-    <span>Layout:
-        <select name="layout" class="form-control form-control-inline input-sm" onchange="location = this.options[this.selectedIndex].value;">
-            <option value="${srRoot}/setComingEpsLayout/?layout=poster" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'poster']} >Poster</option>
-            <option value="${srRoot}/setComingEpsLayout/?layout=calendar" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'calendar']} >Calendar</option>
-            <option value="${srRoot}/setComingEpsLayout/?layout=banner" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'banner']} >Banner</option>
-            <option value="${srRoot}/setComingEpsLayout/?layout=list" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'list']} >List</option>
-        </select>
-    </span>
-    &nbsp;
-
+% if layout == 'list':
+    <button id="popover" type="button" class="btn btn-inline">Select Columns <b class="caret"></b></button>
+% else:
     <span>Sort By:
         <select name="sort" class="form-control form-control-inline input-sm" onchange="location = this.options[this.selectedIndex].value;">
-            <option value="${srRoot}/setComingEpsSort/?sort=date" ${('', 'selected="selected"')[sickbeard.COMING_EPS_SORT == 'date']} >Date</option>
-            <option value="${srRoot}/setComingEpsSort/?sort=network" ${('', 'selected="selected"')[sickbeard.COMING_EPS_SORT == 'network']} >Network</option>
-            <option value="${srRoot}/setComingEpsSort/?sort=show" ${('', 'selected="selected"')[sickbeard.COMING_EPS_SORT == 'show']} >Show</option>
+            <option value="${srRoot}/setScheduleSort/?sort=date" ${('', 'selected="selected"')[sickbeard.COMING_EPS_SORT == 'date']} >Date</option>
+            <option value="${srRoot}/setScheduleSort/?sort=network" ${('', 'selected="selected"')[sickbeard.COMING_EPS_SORT == 'network']} >Network</option>
+            <option value="${srRoot}/setScheduleSort/?sort=show" ${('', 'selected="selected"')[sickbeard.COMING_EPS_SORT == 'show']} >Show</option>
         </select>
     </span>
+% endif
     &nbsp;
 
     <span>View Paused:
         <select name="viewpaused" class="form-control form-control-inline input-sm" onchange="location = this.options[this.selectedIndex].value;">
-            <option value="${srRoot}/toggleComingEpsDisplayPaused" ${('', 'selected="selected"')[not bool(sickbeard.COMING_EPS_DISPLAY_PAUSED)]}>Hidden</option>
-            <option value="${srRoot}/toggleComingEpsDisplayPaused" ${('', 'selected="selected"')[bool(sickbeard.COMING_EPS_DISPLAY_PAUSED)]}>Shown</option>
+            <option value="${srRoot}/toggleScheduleDisplayPaused" ${('', 'selected="selected"')[not bool(sickbeard.COMING_EPS_DISPLAY_PAUSED)]}>Hidden</option>
+            <option value="${srRoot}/toggleScheduleDisplayPaused" ${('', 'selected="selected"')[bool(sickbeard.COMING_EPS_DISPLAY_PAUSED)]}>Shown</option>
+        </select>
+    </span>
+    &nbsp;
+
+    <span>Layout:
+        <select name="layout" class="form-control form-control-inline input-sm" onchange="location = this.options[this.selectedIndex].value;">
+            <option value="${srRoot}/setScheduleLayout/?layout=poster" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'poster']} >Poster</option>
+            <option value="${srRoot}/setScheduleLayout/?layout=calendar" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'calendar']} >Calendar</option>
+            <option value="${srRoot}/setScheduleLayout/?layout=banner" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'banner']} >Banner</option>
+            <option value="${srRoot}/setScheduleLayout/?layout=list" ${('', 'selected="selected"')[sickbeard.COMING_EPS_LAYOUT == 'list']} >List</option>
         </select>
     </span>
 </div>
@@ -75,10 +79,12 @@
     <thead>
         <tr>
             <th>Airdate (${('local', 'network')[sickbeard.TIMEZONE_DISPLAY == 'network']})</th>
+            <th>Ends</th>
             <th>Show</th>
-            <th nowrap="nowrap">Next Ep</th>
+            <th>Next Ep</th>
             <th>Next Ep Name</th>
             <th>Network</th>
+            <th>Run time</th>
             <th>Quality</th>
             <th>Indexers</th>
             <th>Search</th>
@@ -116,6 +122,11 @@
                 <time datetime="${airDate.isoformat('T')}" class="date">${sbdatetime.sbdatetime.sbfdatetime(airDate)}</time>
             </td>
 
+            <td align="center" nowrap="nowrap">
+                <% ends = sbdatetime.sbdatetime.convert_to_setting(cur_ep_enddate) %>
+                <time datetime="${ends.isoformat('T')}" class="date">${sbdatetime.sbdatetime.sbfdatetime(ends)}</time>
+            </td>
+
             <td class="tvShow" nowrap="nowrap"><a href="${srRoot}/home/displayShow?show=${cur_result['showid']}">${cur_result['show_name']}</a>
 % if int(cur_result['paused']):
                 <span class="pause">[paused]</span>
@@ -139,6 +150,10 @@
                 ${cur_result['network']}
             </td>
 
+            <td align="center">
+            ${run_time}min
+            </td>
+
             <td align="center">
                 ${renderQualityPill(cur_result['quality'], showTitle=True)}
             </td>
@@ -196,7 +211,7 @@
     % if 'network' == sort:
         <% show_network = ('no network', cur_result['network'])[bool(cur_result['network'])] %>
         % if cur_segment != show_network:
-            <div class="comingepheader">
+            <div>
                <br><h2 class="network">${show_network}</h2>
 
             <% cur_segment = cur_result['network'] %>
@@ -242,7 +257,7 @@
         % endif
 
         % if cur_ep_airdate == today.date() and not today_header:
-            <div class="comingepheader">
+            <div>
             <br /><h2 class="day">${datetime.date.fromordinal(cur_ep_airdate.toordinal()).strftime('%A').decode(sickbeard.SYS_ENCODING).capitalize()} <span style="font-size: 14px; vertical-align: top;">[Today]</span></h2>
             <% today_header = True %>
         % endif
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 33c2f5c54ad645ac973621d214f2d51ada12ce26..ca3f7d8dcb1df1dbebb77283cf030162178a34c3 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -654,6 +654,8 @@ def initialize(consoleLogging=True):
         DEBUG = bool(check_setting_int(CFG, 'General', 'debug', 0))
 
         DEFAULT_PAGE = check_setting_str(CFG, 'General', 'default_page', 'home')
+        if DEFAULT_PAGE not in ('home', 'schedule', 'history', 'news', 'IRC'):
+            DEFAULT_PAGE = 'home'
 
         ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
         LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR))
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 98869dc847a735e3e6dbc784c04e7d6fd10b002d..6b44cd198f98d5fe863435b1b4912b5134d2abd3 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -93,20 +93,17 @@ def indentXML(elem, level=0):
     '''
     Does our pretty printing, makes Matt very happy
     '''
     i = "\n" + level * "  "
     if len(elem):
         if not elem.text or not elem.text.strip():
             elem.text = i + "  "
         if not elem.tail or not elem.tail.strip():
             elem.tail = i
         for elem in elem:
             indentXML(elem, level + 1)
         if not elem.tail or not elem.tail.strip():
             elem.tail = i
     else:
-        # Strip out the newlines from text
-        if elem.text:
-            elem.text = elem.text.replace('\n', ' ')
         if level and (not elem.tail or not elem.tail.strip()):
             elem.tail = i
 
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index ce373096216cac8bb74dc3cf8db44cd7109ed005..bca39795f077234504aa4582eff8a9031e53b75c 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -217,7 +217,7 @@ class Logger(object):
                 try:
                     title_Error = ss(str(curError.title))
                     if not len(title_Error) or title_Error == 'None':
-                        title_Error = re.match(r"^[A-Z0-9\-\[\] :]+::\s*(.*)$", ss(str(curError.message))).group(1)
+                        title_Error = re.match(r"^[A-Z0-9\-\[\] :]+::\s*(.*)$", ss(curError.message)).group(1)
 
                     if len(title_Error) > 1000:
                         title_Error = title_Error[0:1000]
@@ -267,7 +267,7 @@ class Logger(object):
                     return re.search(r'Loaded module.*not found in sys\.modules', title) is not None
 
                 def is_ascii_error(title):
-                    return re.search(r"'ascii' codec can't encode character .* in position .*: ordinal not in range.*", title) is not None
+                    return re.search(r"'.*' codec can't encode character .* in position .*:", title) is not None
 
                 mako_error = is_mako_error(title_Error)
                 ascii_error = is_ascii_error(title_Error)
diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py
index e1787077b424e26fd53d8d33b4f7347ad941b172..d70ec0ecdaefb1b032818b17be6b97e28e133a9a 100644
--- a/sickbeard/metadata/generic.py
+++ b/sickbeard/metadata/generic.py
@@ -292,9 +292,8 @@ class GenericMetadata:
                 indexerid = showXML.find('id')
 
                 root = showXML.getroot()
-
-                if indexerid:
-                    indexerid.text = show_obj.indexerid
+                if indexerid is not None:
+                    indexerid.text = str(show_obj.indexerid)
                 else:
                     etree.SubElement(root, "id").text = str(show_obj.indexerid)
 
diff --git a/sickbeard/metadata/kodi_12plus.py b/sickbeard/metadata/kodi_12plus.py
index ca6787127c66d8303d3a1365263c3814c729f361..3a95ed1d9c388a28d5046e6d62452ec0b727847e 100644
--- a/sickbeard/metadata/kodi_12plus.py
+++ b/sickbeard/metadata/kodi_12plus.py
@@ -186,16 +186,13 @@ class KODI_12PlusMetadata(generic.GenericMetadata):
                 cur_actor = etree.SubElement(tv_node, "actor")
 
                 cur_actor_name = etree.SubElement(cur_actor, "name")
-                if getattr(actor, 'name', None) is not None:
-                    cur_actor_name.text = actor['name'].strip()
+                cur_actor_name.text = actor['name'].strip()
 
                 cur_actor_role = etree.SubElement(cur_actor, "role")
-                if getattr(actor, 'role', None) is not None:
-                    cur_actor_role.text = actor['role']
+                cur_actor_role.text = actor['role']
 
                 cur_actor_thumb = etree.SubElement(cur_actor, "thumb")
-                if getattr(actor, 'image', None) is not None:
-                    cur_actor_thumb.text = actor['image']
+                cur_actor_thumb.text = actor['image']
 
         # Make it purdy
         helpers.indentXML(tv_node)
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index fd035fa6222c160d8f7f2c390aa364d391ee6d79..1f0c322ef69905952cc2f80c9187e954a49074ff 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -1,7 +1,7 @@
 # Author: Bill Nasty
 # URL: https://github.com/SiCKRAGETV/SickRage
 #
 # This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -67,14 +67,6 @@ class AlphaRatioProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'alpharatio.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
         login_params = {'username': self.username,
                         'password': self.password,
@@ -84,65 +76,17 @@ class AlphaRatioProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Invalid Username/password', response) \
                 or re.search('<title>Login :: AlphaRatio.cc</title>', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -150,13 +94,15 @@ class AlphaRatioProvider(generic.TorrentProvider):
         if not self._doLogin():
             return results
 
-        for mode in search_params.keys():
-            for search_string in search_params[mode]:
+        for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
+            for search_string in search_strings[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (search_string, self.catagories)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 data = self.getURL(searchURL)
                 if not data:
@@ -169,8 +115,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
 
                         #Continue only if one Release is found
                         if len(torrent_rows) < 2:
-                            logger.log(u"The Data returned from " + self.name + " does not contain any torrents",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         for result in torrent_rows[1:]:
@@ -184,43 +129,36 @@ class AlphaRatioProvider(generic.TorrentProvider):
                                 id = link['href'][-6:]
                                 seeders = cells[len(cells)-2].contents[0]
                                 leechers = cells[len(cells)-1].contents[0]
+                                #FIXME
+                                size = -1
                             except (AttributeError, TypeError):
                                 continue
 
-                            #Filter unseeded torrent
-                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not download_url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            item = title, download_url, id, seeders, leechers
-                            logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
@@ -263,7 +201,7 @@ class AlphaRatioCache(tvcache.TVCache):
         self.minTime = 20
 
     def _getRSSData(self):
-        search_params = {'RSS': ['']}
-        return {'entries': self.provider._doSearch(search_params)}
+        search_strings = {'RSS': ['']}
+        return {'entries': self.provider._doSearch(search_strings)}
 
 provider = AlphaRatioProvider()
diff --git a/sickbeard/providers/animenzb.py b/sickbeard/providers/animenzb.py
index d2ff88e659f90db32e227aee6e9449a1121780d9..fca25fb445ab3d1e76c6dd2a5392c9a1f63c8461 100644
--- a/sickbeard/providers/animenzb.py
+++ b/sickbeard/providers/animenzb.py
@@ -51,9 +51,6 @@ class animenzb(generic.NZBProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'animenzb.gif'
-
     def _get_season_search_strings(self, ep_obj):
         return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
 
@@ -61,8 +58,10 @@ class animenzb(generic.NZBProvider):
         return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
 
     def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
+
+        logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
         if self.show and not self.show.is_anime:
-            logger.log(u"" + str(self.show.name) + " is not an anime skiping ...")
             return []
 
         params = {
@@ -71,21 +70,19 @@ class animenzb(generic.NZBProvider):
             "max": "100"
         }
 
-        search_url = self.url + "rss?" + urllib.urlencode(params)
-
-        logger.log(u"Search url: " + search_url, logger.DEBUG)
-
+        searchURL = self.url + "rss?" + urllib.urlencode(params)
+        logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
         results = []
-        for curItem in self.cache.getRSSFeed(search_url, items=['entries'])['entries'] or []:
+        for curItem in self.cache.getRSSFeed(searchURL, items=['entries'])['entries'] or []:
             (title, url) = self._get_title_and_url(curItem)
 
             if title and url:
                 results.append(curItem)
-            else:
-                logger.log(
-                    u"The data returned from the " + self.name + " is incomplete, this result is unusable",
-                    logger.DEBUG)
+                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
+        #For each search mode sort all the items by seeders if available
+        results.sort(key=lambda tup: tup[0], reverse=True)
+        
         return results
 
     def findPropers(self, date=None):
@@ -101,7 +98,6 @@ class animenzb(generic.NZBProvider):
                 if result_date:
                     result_date = datetime.datetime(*result_date[0:6])
             else:
-                logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
                 continue
 
             if not date or result_date > date:
@@ -129,8 +125,6 @@ class animenzbCache(tvcache.TVCache):
 
         rss_url = self.provider.url + 'rss?' + urllib.urlencode(params)
 
-        logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
-
         return self.getRSSFeed(rss_url)
 
 provider = animenzb()
diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py
index d1462ea1b8db3d1ad550e0ea4177d31942f06a01..3b7725268d87c5d322a5e9c49716bdbf65d0dee5 100644
--- a/sickbeard/providers/binsearch.py
+++ b/sickbeard/providers/binsearch.py
@@ -100,7 +100,7 @@ class BinSearchCache(tvcache.TVCache):
 
             url += urllib.urlencode(urlArgs)
 
-            logger.log(u"BinSearch cache update URL: " + url, logger.DEBUG)
+            logger.log(u"Cache update URL: %s " % url, logger.DEBUG)
 
             for item in self.getRSSFeed(url)['entries'] or []:
                 ci = self._parseItem(item)
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
index 3025b7ecb5241262623dac0ac58522d10fc682e9..02c18bc8e494b164e53fa7a83259593b13927eeb 100644
--- a/sickbeard/providers/bitsoup.py
+++ b/sickbeard/providers/bitsoup.py
@@ -1,7 +1,7 @@
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -66,17 +66,9 @@ class BitSoupProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'bitsoup.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _checkAuth(self):
         if not self.username or not self.password:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
 
         return True
 
@@ -90,63 +82,15 @@ class BitSoupProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Username or password incorrect', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -156,8 +100,12 @@ class BitSoupProvider(generic.TorrentProvider):
             return results
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
-                logger.log(u"Search string: " + search_string, logger.DEBUG)
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 self.search_params['search'] = search_string
 
                 data = self.getURL(self.urls['search'], params=self.search_params)
@@ -171,8 +119,7 @@ class BitSoupProvider(generic.TorrentProvider):
 
                         #Continue only if one Release is found
                         if len(torrent_rows) < 2:
-                             logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                 logger.DEBUG)
+                             logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                              continue
 
                         for result in torrent_rows[1:]:
@@ -190,43 +137,36 @@ class BitSoupProvider(generic.TorrentProvider):
                                 id = int(id)
                                 seeders = int(cells[10].getText())
                                 leechers = int(cells[11].getText())
+                                #FIXME
+                                size = -1
                             except (AttributeError, TypeError):
                                 continue
 
-                            #Filter unseeded torrent
-                            if seeders < self.minseed or leechers < self.minleech:
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not download_url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            item = title, download_url, id, seeders, leechers
-                            logger.log(u"Found result: " + title.replace(' ','.') + " (" + search_string + ")", logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py
index e280265b56d960785d366f6de95ea7323134bdcb..f2af9ed926327f7e90462d5edf55ca9aee71da78 100644
--- a/sickbeard/providers/bluetigers.py
+++ b/sickbeard/providers/bluetigers.py
@@ -2,7 +2,7 @@
 # Author: raver2046 <raver2046@gmail.com>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage. 
 #
 # Sick Beard is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -66,13 +66,6 @@ class BLUETIGERSProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'BLUETIGERS.png'
-
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
             return True
@@ -83,76 +76,31 @@ class BLUETIGERSProvider(generic.TorrentProvider):
             'take_login' : '1'
             }
 
-        logger.log('Performing authentication to BLUETIGERS', logger.DEBUG)
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('/account-logout.php', response):
-            logger.log(u'Login to %s was successful.' % self.name, logger.DEBUG)
             return True
         else:
-            logger.log(u'Login to %s was unsuccessful.' % self.name, logger.DEBUG)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
+
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
-                logger.log(u"Search string: " + search_string, logger.DEBUG)
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+  
                 self.search_params['search'] = search_string
 
                 data = self.getURL(self.urls['search'], params=self.search_params)
@@ -164,45 +112,44 @@ class BLUETIGERSProvider(generic.TorrentProvider):
                         result_linkz = html.findAll('a',  href=re.compile("torrents-details"))
 
                         if not result_linkz:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         if result_linkz:
                             for link in result_linkz:
                                 title = link.text
-                                logger.log(u"BLUETIGERS TITLE TEMP: " + title, logger.DEBUG)
                                 download_url =   self.urls['base_url']  + "/" + link['href']
                                 download_url = download_url.replace("torrents-details","download")
-                                logger.log(u"BLUETIGERS downloadURL: " + download_url, logger.DEBUG)
+                                #FIXME
+                                size = -1
+                                seeders = 1
+                                leechers = 0
 
                                 if not title or not download_url:
                                    continue
 
-                                item = title, download_url
-                                logger.log(u"Found result: " + title.replace(' ','.') + " (" + download_url + ")", logger.DEBUG)
+                                #Filter unseeded torrent
+                                #if seeders < self.minseed or leechers < self.minleech:
+                                #    if mode != 'RSS':
+                                #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                #    continue
+
+                                item = title, download_url, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                                 items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return title, url
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py
index b608613f80904e7d11c4064bb72e9c00dec389f7..0e62f6f528a214288d981ec0f5ea9403f8c16515 100644
--- a/sickbeard/providers/btdigg.py
+++ b/sickbeard/providers/btdigg.py
@@ -3,7 +3,7 @@
 #
 #Ported to sickrage by: matigonkas
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage. 
 #
 # Sick Beard is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -36,123 +36,68 @@ class BTDIGGProvider(generic.TorrentProvider):
 
         self.supportsBacklog = True
         self.public = True
-        
+
         self.urls = {'url': u'https://btdigg.org/',
                      'api': u'https://api.btdigg.org/',
                      }
         self.url = self.urls['url']
-        
+
         self.cache = BTDiggCache(self)
 
     def isEnabled(self):
         return self.enabled
 
+    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
-    def imageName(self):
-        return 'btdigg.png'
-
-
-    def _get_airbydate_season_range(self, season):
-        if season == None:
-            return ()
-        year, month = map(int, season.split('-'))
-        min_date = datetime.date(year, month, 1)
-        if month == 12:
-            max_date = datetime.date(year, month, 31)
-        else:
-            max_date = datetime.date(year, month+1, 1) -  datetime.timedelta(days=1)
-        return (min_date, max_date)
-
-
-    def _get_season_search_strings(self, show, season=None):
-        search_string = []
-
-        if not (show and season):
-            return []
-
-        myDB = db.DBConnection()
-
-        if show.air_by_date:
-            (min_date, max_date) = self._get_airbydate_season_range(season)
-            sqlResults = myDB.select("SELECT DISTINCT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ? AND status = ?", [show.tvdbid,  min_date.toordinal(), max_date.toordinal(), WANTED])
-        else:
-            sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? AND season = ? AND status = ?", [show.tvdbid, season, WANTED])
-
-        for sqlEp in sqlResults:
-            for show_name in set(show_name_helpers.allPossibleShowNames(show)):
-                if show.air_by_date:
-                    ep_string = sanitizeSceneName(show_name) +' '+ str(datetime.date.fromordinal(sqlEp["airdate"])).replace('-', '.')
-                    search_string.append(ep_string)
-                else:
-                    ep_string = sanitizeSceneName(show_name) + ' S%02d' % sqlEp["season"]
-                    search_string.append(ep_string)
-
-        return search_string
-
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        if not ep_obj:
-            return []
-
-        search_string = []
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-            ep_string = sanitizeSceneName(show_name)
-            if ep_obj.show.air_by_date:
-                ep_string += ' ' + str(ep_obj.airdate).replace('-', '.')
-            else:
-                ep_string += ' ' + naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
-
-            if len(add_string):
-                ep_string += ' %s' % add_string
-
-            search_string.append(ep_string)
-
-        return search_string
-
-
-    def _get_title_and_url(self, item):
-        title, url, size = item
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
+        results = []
+        items = {'Season': [], 'Episode': [], 'RSS': []}
 
+        for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
+            for search_string in search_strings[mode]:
 
-    def _get_size(self, item):
-        title, url, size = item
-        logger.log(u'Size: %s' % size, logger.DEBUG)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s" %  search_string, logger.DEBUG)
 
-        return size
+                searchURL = self.urls['api'] + "api/private-341ada3245790954/s02?q=" + search_string + "&p=0&order=1"
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
 
+                jdata = self.getURL(searchURL, json=True)
+                if not jdata:
+                    logger.log("No data returned to be parsed!!!")
+                    return []
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+                for torrent in jdata:
+                    if not torrent['ff']:
+                        title = torrent['name']
+                        download_url = torrent['magnet']
+                        size = torrent['size']
+                        #FIXME
+                        seeders = 1
+                        leechers = 0
 
-        logger.log("Performing Search: {0}".format(search_params))
+                        if not all([title, download_url]):
+                            continue

-        # TODO: Make order configurable. 0: weight, 1: req, 2: added, 3: size, 4: files, 5
-        searchUrl = self.urls['api'] + "api/private-341ada3245790954/s02?q=" + search_params + "&p=0&order=1"
+                        #Filter unseeded torrent
+                        #if seeders < self.minseed or leechers < self.minleech:
+                        #    if mode != 'RSS':
+                        #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                        #    continue

-        jdata = self.getURL(searchUrl, json=True)
-        if not jdata:
-            logger.log("No data returned to be parsed!!!")
-            return []
+                        item = title, download_url, size, seeders, leechers
+                        if mode != 'RSS':
+                            logger.log(u"Found result: %s " % title, logger.DEBUG)

-        logger.log("URL to be parsed: " + searchUrl, logger.DEBUG)
+                        items[mode].append(item)
 
-        results = []
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
-        for torrent in jdata:
-            if not torrent['ff']:
-                results.append((torrent['name'], torrent['magnet'], torrent['size']))
+            results += items[mode]
 
         return results
 
-
 class BTDiggCache(tvcache.TVCache):
     def __init__(self, provider):
 
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index e38484ad05b45c51b197a45327b1f4f4796901b6..736a688c84bd308290a9058cf950bacd2cc88d16 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -2,7 +2,7 @@
 # Author: Daniel Heimans
 # URL: http://code.google.com/p/sickbeard
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -62,12 +62,9 @@ class BTNProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'btn.png'
-
     def _checkAuth(self):
         if not self.api_key:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            logger.log(u"Invalid api key. Check your settings", logger.WARNING)
 
         return True
 
@@ -77,8 +74,7 @@ class BTNProvider(generic.TorrentProvider):
             return self._checkAuth()
 
         if 'api-error' in parsedJSON:
-            logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['api-error'],
-                       logger.DEBUG)
+            logger.log(u"Incorrect authentication credentials: %s" % parsedJSON['api-error'], logger.DEBUG)
             raise AuthException(
                 "Your authentication credentials for " + self.name + " are incorrect, check your config.")
 
@@ -98,10 +94,11 @@ class BTNProvider(generic.TorrentProvider):
 
         if search_params:
             params.update(search_params)
+            logger.log(u"Search string: %s" %  search_params, logger.DEBUG)
 
         parsedJSON = self._api_call(apikey, params)
         if not parsedJSON:
-            logger.log(u"No data returned from " + self.name, logger.ERROR)
+            logger.log("No data returned from provider", logger.DEBUG)
             return results
 
         if self._checkAuthFromData(parsedJSON):
@@ -135,8 +132,10 @@ class BTNProvider(generic.TorrentProvider):
                 (title, url) = self._get_title_and_url(torrent_info)
 
                 if title and url:
+                    logger.log(u"Found result: %s " % title, logger.DEBUG)
                     results.append(torrent_info)
 
+        #FIXME SORT RESULTS
         return results
 
     def _api_call(self, apikey, params={}, results_per_page=1000, offset=0):
@@ -150,24 +149,24 @@ class BTNProvider(generic.TorrentProvider):
 
         except jsonrpclib.jsonrpc.ProtocolError, error:
             if error.message == 'Call Limit Exceeded':
-                logger.log(u"You have exceeded the limit of 150 calls per hour, per API key which is unique to your user account.", logger.WARNING)
+                logger.log(u"You have exceeded the limit of 150 calls per hour, per API key which is unique to your user account", logger.WARNING)
             else:
-                logger.log(u"JSON-RPC protocol error while accessing " + self.name + ": " + ex(error), logger.ERROR)
+                logger.log(u"JSON-RPC protocol error while accessing provider. Error: %s " % repr(error), logger.ERROR)
             parsedJSON = {'api-error': ex(error)}
             return parsedJSON
 
         except socket.timeout:
-            logger.log(u"Timeout while accessing " + self.name, logger.WARNING)
+            logger.log(u"Timeout while accessing provider", logger.WARNING)
 
         except socket.error, error:
             # Note that sometimes timeouts are thrown as socket errors
-            logger.log(u"Socket error while accessing " + self.name + ": " + error[1], logger.ERROR)
+            logger.log(u"Socket error while accessing provider. Error: %s " % error[1], logger.WARNING)
 
         except Exception, error:
             errorstring = str(error)
             if (errorstring.startswith('<') and errorstring.endswith('>')):
                 errorstring = errorstring[1:-1]
-            logger.log(u"Unknown error while accessing " + self.name + ": " + errorstring, logger.ERROR)
+            logger.log(u"Unknown error while accessing provider. Error: %s " % errorstring, logger.WARNING)
 
         return parsedJSON
 
@@ -363,10 +362,10 @@ class BTNProvider(generic.TorrentProvider):
                 myParser = NameParser(False)
                 parse_result = myParser.parse(title)
             except InvalidNameException:
-                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)  # @UndefinedVariable
+                logger.log(u"Unable to parse the filename %s into a valid episode" % title, logger.DEBUG)
                 continue
             except InvalidShowException:
-                logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
+                logger.log(u"Unable to parse the filename %s into a valid show" % title, logger.DEBUG)
                 continue
 
             showObj = parse_result.show
@@ -379,14 +378,13 @@ class BTNProvider(generic.TorrentProvider):
                 if search_mode == 'sponly':
                     if len(parse_result.episode_numbers):
                         logger.log(
-                            u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
-                            logger.DEBUG)
+                            u"This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it" % title, logger.DEBUG)
                         addCacheEntry = True
                     if len(parse_result.episode_numbers) and (
                                     parse_result.season_number not in set([ep.season for ep in episodes]) or not [ep for ep in episodes if
                                                                                  ep.scene_episode in parse_result.episode_numbers]):
                         logger.log(
-                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
+                            u"The result %s doesn't seem to be a valid episode that we are trying to snatch, ignoring" % title,
                             logger.DEBUG)
                         addCacheEntry = True
                 else:
@@ -400,7 +398,7 @@ class BTNProvider(generic.TorrentProvider):
                     elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
                                                                     ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                         logger.log(
-                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
+                            u"The result %s doesn't seem to be a valid episode that we are trying to snatch, ignoring" % title,
                             logger.DEBUG)
                         addCacheEntry = True
 
@@ -411,7 +409,7 @@ class BTNProvider(generic.TorrentProvider):
             else:
                 if not (parse_result.is_air_by_date):
                     logger.log(
-                        u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
+                        u"This is supposed to be a date search but the result %s didn't parse as one, skipping it" % title,
                         logger.DEBUG)
                     addCacheEntry = True
                 else:
@@ -423,7 +421,7 @@ class BTNProvider(generic.TorrentProvider):
 
                     if len(sql_results) != 1:
                         logger.log(
-                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
+                            u"Tried to look up the date for the episode %s but the database didn't give proper results, skipping it" % title,
                             logger.WARNING)
                         addCacheEntry = True
 
@@ -433,7 +431,7 @@ class BTNProvider(generic.TorrentProvider):
 
             # add parsed result to cache for usage later on
             if addCacheEntry:
-                logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
+                logger.log(u"Adding item from search to cache: %s " % title, logger.DEBUG)
                 ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
                 if ci is not None:
                     cl.append(ci)
@@ -448,13 +446,11 @@ class BTNProvider(generic.TorrentProvider):
 
             if not wantEp:
                 logger.log(
-                    u"Ignoring result " + title + " because we don't want an episode that is " +
-                    Quality.qualityStrings[
-                        quality], logger.INFO)
+                    u"Ignoring result %s because we don't want an episode that is %s" % (title, Quality.qualityStrings[quality]), logger.DEBUG)
 
                 continue
 
-            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
+            logger.log(u"Found result: %s " % title, logger.DEBUG)
 
             # make a result object
             epObj = []
@@ -472,11 +468,10 @@ class BTNProvider(generic.TorrentProvider):
 
             if len(epObj) == 1:
                 epNum = epObj[0].episode
-                logger.log(u"Single episode result.", logger.DEBUG)
+                logger.log(u"Single episode result", logger.DEBUG)
             elif len(epObj) > 1:
                 epNum = MULTI_EP_RESULT
-                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
-                    parse_result.episode_numbers), logger.DEBUG)
+                logger.log(u"Separating multi-episode result to check for later - result contains episodes: %s" % parse_result.episode_numbers,logger.DEBUG)
             elif len(epObj) == 0:
                 epNum = SEASON_RESULT
                 logger.log(u"Separating full season result to check for later", logger.DEBUG)
@@ -513,8 +508,8 @@ class BTNCache(tvcache.TVCache):
         # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, older things will need to be done through backlog
         if seconds_since_last_update > 86400:
             logger.log(
-                u"The last known successful update on " + self.provider.name + " was more than 24 hours ago, only trying to fetch the last 24 hours!",
-                logger.WARNING)
+                u"The last known successful update was more than 24 hours ago, only trying to fetch the last 24 hours!",
+                logger.DEBUG)
             seconds_since_last_update = 86400
 
         return {'entries': self.provider._doSearch(search_params=None, age=seconds_since_last_update)}
diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py
index 3381fce7578f0fa7b14ccf80d5b951b0d18b3e95..74d68e590d20b6c68575f2b42951b4072d37297e 100644
--- a/sickbeard/providers/cpasbien.py
+++ b/sickbeard/providers/cpasbien.py
@@ -2,7 +2,7 @@
 # Author: Guillaume Serre <guillaume.serre@gmail.com>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage.
 #
 # Sick Beard is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -32,91 +32,40 @@ from sickbeard import db
 from sickbeard import helpers
 from sickbeard import classes
 from sickbeard.helpers import sanitizeSceneName
+from sickbeard import tvcache
 
 
 
 class CpasbienProvider(generic.TorrentProvider):
 
     def __init__(self):
-        
+
         generic.TorrentProvider.__init__(self, "Cpasbien")
 
         self.supportsBacklog = True
         self.public = True
         self.ratio = None
-        
+        self.cache = CpasbienCache(self)
         self.url = "http://www.cpasbien.pw"
-        
-        
-    def isEnabled(self):
-        
-        return self.enabled
-    
-    def imageName(self):
-        return 'cpasbien.png'
-    
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
 
-        search_string = {'Episode': []}
 
-        if not ep_obj:
-            return []
+    def isEnabled(self):
+        return self.enabled
 
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
-
-        return [search_string]
-        
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
-        
+
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
-        
-        for mode in search_params.keys():
 
+        for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
-        
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 searchURL = self.url + '/recherche/'+search_string.replace('.','-')+'.html'
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
                 data = self.getURL(searchURL)
 
                 if not data:
@@ -124,27 +73,25 @@ class CpasbienProvider(generic.TorrentProvider):
 
                 try:
                     with BS4Parser(data, features=["html5lib", "permissive"]) as html:
-                        
-                        lin=0
-                        erlin=0
-                        resultdiv=[]
-                        while erlin==0:
+                        lin= erlin = 0
+                        resultdiv= []
+                        while erlin == 0:
                             try:
                                 classlin='ligne'+str(lin)
                                 resultlin=html.findAll(attrs = {'class' : [classlin]})
                                 if resultlin:
                                     for ele in resultlin:
                                         resultdiv.append(ele)
-                                    lin+=1
+                                    lin += 1
                                 else:
-                                    erlin=1
+                                    erlin = 1
                             except:
-                                erlin=1
-                        
+                                erlin = 1
+
                         for row in resultdiv:
                             try:
                                 link = row.find("a", title=True)
-                                torrent_name = str(link.text).lower().strip()  
+                                title = str(link.text).lower().strip()
                                 pageURL = link['href']
 
                                 #downloadTorrentLink = torrentSoup.find("a", title.startswith('Cliquer'))
@@ -153,36 +100,34 @@ class CpasbienProvider(generic.TorrentProvider):
                                 downloadTorrentLink = ('http://www.cpasbien.pw/telechargement/%s' % tmp)
 
                                 if downloadTorrentLink:
-                
-                                    torrent_download_url = downloadTorrentLink
+                                    download_url = downloadTorrentLink
+                                    #FIXME
+                                    size = -1
+                                    seeders = 1
+                                    leechers = 0
+
                             except (AttributeError, TypeError):
                                     continue
-                            
-                            if not torrent_name or not torrent_download_url:
+
+                            if not all([title, download_url]):
                                 continue
 
-                            item = torrent_name, torrent_download_url
-                            logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),logger.ERROR)
-            results += items[mode]
-        return results
-    
-    def _get_title_and_url(self, item):
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-        title, url = item
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
+            results += items[mode]
 
-        if url:
-            url = str(url).replace('&amp;', '&')
+        return results
 
-        return title, url
-    
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
@@ -210,8 +155,19 @@ class CpasbienProvider(generic.TorrentProvider):
                     results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
 
         return results
-    
+
     def seedRatio(self):
         return self.ratio
 
+class CpasbienCache(tvcache.TVCache):
+    def __init__(self, provider):
+
+        tvcache.TVCache.__init__(self, provider)
+
+        self.minTime = 30
+
+    def _getRSSData(self):
+        search_strings = {'RSS': ['']}
+        return {'entries': {}}
+
 provider = CpasbienProvider()
diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py
index c06a334bc48ddd2504e90cf83792633c740f4f49..1b094d8b1cb5404d03f950d1ec590a3f323472db 100644
--- a/sickbeard/providers/extratorrent.py
+++ b/sickbeard/providers/extratorrent.py
@@ -1,7 +1,7 @@
 # Author: duramato <matigonkas@outlook.com>
 # Author: miigotu
 # URL: https://github.com/SiCKRAGETV/sickrage
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -59,69 +59,33 @@ class ExtraTorrentProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string.strip())
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_strings = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-            if ep_obj.show.air_by_date:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                                str(ep_obj.airdate).replace('-', '|')
-            elif ep_obj.show.sports:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                                str(ep_obj.airdate).replace('-', '|') + '|' + \
-                                ep_obj.airdate.strftime('%b')
-            elif ep_obj.show.anime:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                                "%i" % int(ep_obj.scene_absolute_number)
-            else:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode}
-
-            if add_string:
-                ep_string += ' %s' % add_string
-
-            search_strings['Episode'].append(re.sub(r'\s+', ' ', ep_string))
-
-        return [search_strings]
-
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 try:
                     self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string.strip()})
                     data = self.getURL(self.urls['rss'], params=self.search_params)
                     if not data:
-                        logger.log(u'No response, skipping...', logger.DEBUG)
+                        logger.log("No data returned from provider", logger.DEBUG)
+                        continue
+
+                    try:
+                        data = xmltodict.parse(data)
+                    except ExpatError as e:
+                        logger.log(u"Failed parsing provider. Traceback: %r\n%r" % (traceback.format_exc(), data), logger.ERROR)
                         continue
 
-                    data = xmltodict.parse(data)
                     if not all([data, 'rss' in data, 'channel' in data['rss'], 'item' in data['rss']['channel']]):
-                        logger.log(u'Malformed rss returned, skipping...', logger.DEBUG)
+                        logger.log(u"Malformed rss returned, skipping", logger.DEBUG)
                         continue
 
                     # https://github.com/martinblech/xmltodict/issues/111
@@ -134,21 +98,33 @@ class ExtraTorrentProvider(generic.TorrentProvider):
                         size = int(item['size'])
                         seeders = helpers.tryInt(item['seeders'],0)
                         leechers = helpers.tryInt(item['leechers'],0)
-                        url = item['enclosure']['@url'] if 'enclosure' in item else self._magnet_from_details(item['link'])
+                        download_url = item['enclosure']['@url'] if 'enclosure' in item else self._magnet_from_details(item['link'])
+
+                        if not all([title, download_url]):
+                            continue
 
-                        if not all([title, url, seeders, seeders >= self.minseed, leechers >= self.minleech, size]):
+                            #Filter unseeded torrent
+                        if seeders < self.minseed or leechers < self.minleech:
+                            if mode != 'RSS':
+                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                             continue
 
-                        items[mode].append((title, url, seeders, leechers, size, info_hash))
+                        item = title, download_url, size, seeders, leechers
+                        if mode != 'RSS':
+                            logger.log(u"Found result: %s " % title, logger.DEBUG)
+
+                        items[mode].append(item)
 
                 except (AttributeError, TypeError, KeyError, ValueError):
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-
     def _magnet_from_details(self, link):
         details = self.getURL(link)
         if not details:
@@ -160,25 +136,6 @@ class ExtraTorrentProvider(generic.TorrentProvider):
 
         return match.group(1)
 
-    def _get_title_and_url(self, item):
-        #pylint: disable=W0612
-        title, url, seeders, leechers, size, info_hash = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = url.replace('&amp;', '&')
-
-
-        return (title, url)
-
-
-    def _get_size(self, item):
-        #pylint: disable=W0612
-        title, url, seeders, leechers, size, info_hash = item
-        return size
-
     def findPropers(self, search_date=datetime.datetime.today()-datetime.timedelta(days=1)):
         results = []
         myDB = db.DBConnection()
diff --git a/sickbeard/providers/fnt.py b/sickbeard/providers/fnt.py
index ecb9a54ee94d095d2af43485bac74f32f874857f..6c329cde36cd9247ed79636abb830fe1bd27e0df 100644
--- a/sickbeard/providers/fnt.py
+++ b/sickbeard/providers/fnt.py
@@ -2,7 +2,7 @@
 # Author: raver2046 <raver2046@gmail.com> from djoole <bobby.djoole@gmail.com>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage.
 #
 # Sick Beard is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -66,13 +66,6 @@ class FNTProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'FNT.png'
-
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
@@ -83,72 +76,21 @@ class FNTProvider(generic.TorrentProvider):
                         'submit' : 'Se loguer'
                        }
 
-        logger.log('Performing authentication to FNT', logger.DEBUG)
         response = self.getURL(self.urls['login'], post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('/account-logout.php', response):
-            logger.log(u'Login to ' + self.name + ' was successful.', logger.DEBUG)
             return True
         else:
-            logger.log(u'Login to ' + self.name + ' was unsuccessful.', logger.DEBUG)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            sickbeard.config.naming_ep_type[2] % {
-                                'seasonnumber': ep_obj.scene_season,
-                                'episodenumber': ep_obj.scene_episode
-                                } + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub(r'\s+', '.', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
+
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
@@ -157,8 +99,12 @@ class FNTProvider(generic.TorrentProvider):
             return results
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
-                logger.log(u"Search string: " + search_string, logger.DEBUG)
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 self.search_params['recherche'] = search_string
 
                 data = self.getURL(self.urls['search'], params=self.search_params)
@@ -170,8 +116,7 @@ class FNTProvider(generic.TorrentProvider):
                         result_table = html.find('table', {'id': 'tablealign3bis'})
 
                         if not result_table:
-                            logger.log(
-                                u"The Data returned from %s does not contain any torrents" % self.name, logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         if result_table:
@@ -183,55 +128,46 @@ class FNTProvider(generic.TorrentProvider):
                                 if link:
                                     try:
                                         title = link.text
-                                        logger.log(u"FNT TITLE : " + title, logger.DEBUG)
                                         download_url = self.urls['base_url'] + "/" + row.find("a", href=re.compile("download\.php"))['href']
                                     except (AttributeError, TypeError):
                                         continue
 
-                                    if not title or not download_url:
-                                        continue
-
                                     try:
                                         id = download_url.replace(self.urls['base_url'] + "/" + 'download.php?id=', '').replace('&amp;dl=oui', '').replace('&dl=oui', '')
-                                        logger.log(u"FNT id du torrent  " + str(id), logger.DEBUG)
                                         defailseedleech = link['mtcontent']
                                         seeders =  int(defailseedleech.split("<font color='#00b72e'>")[1].split("</font>")[0])
-                                        logger.log(u"FNT seeders :  " + str(seeders), logger.DEBUG)
                                         leechers = int(defailseedleech.split("<font color='red'>")[1].split("</font>")[0])
-                                        logger.log(u"FNT leechers :  " + str(leechers), logger.DEBUG)
+                                        #FIXME
+                                        size = -1
                                     except:
-                                        logger.log(u"Unable to parse torrent id & seeders leechers  " + self.name + " Traceback: " + traceback.format_exc(), logger.DEBUG)
+                                        logger.log(u"Unable to parse torrent id & seeders & leechers. Traceback: %s " % traceback.format_exc(), logger.DEBUG)
+                                        continue
+
+                                    if not all([title, download_url]):
                                         continue
 
                                     #Filter unseeded torrent
-                                    if not seeders or seeders < self.minseed or leechers < self.minleech:
-                                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                    if seeders < self.minseed or leechers < self.minleech:
+                                        if mode != 'RSS':
+                                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                         continue
 
-                                    item = title, download_url , id, seeders, leechers
-                                    logger.log(u"Found result: " + title.replace(' ','.') + " (" + download_url + ")", logger.DEBUG)
+                                    item = title, download_url, size, seeders, leechers
+                                    if mode != 'RSS':
+                                        logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                                     items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return title, url
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/frenchtorrentdb.py b/sickbeard/providers/frenchtorrentdb.py
index 7d68c4777979fa24d6ee435870cd6216060e5137..6d1cd5cfbfdf90cbefd90a9ce4ab893542dd3fec 100644
--- a/sickbeard/providers/frenchtorrentdb.py
+++ b/sickbeard/providers/frenchtorrentdb.py
@@ -2,7 +2,7 @@
 #          adaur <adaur.underground@gmail.com>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage.
 #
 # Sick Beard is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -71,13 +71,6 @@ class FrenchTorrentDBProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'frenchtorrentdb.png'
-
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         params = {
@@ -141,42 +134,8 @@ class FrenchTorrentDBProvider(generic.TorrentProvider):
             secureLogin += decodeChallenge(challenge)
         return secureLogin
 
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            sickbeard.config.naming_ep_type[2] % {
-                                'seasonnumber': ep_obj.scene_season,
-                                'episodenumber': ep_obj.scene_episode
-                                } + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub(r'\s+', '.', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
+
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
@@ -185,48 +144,56 @@ class FrenchTorrentDBProvider(generic.TorrentProvider):
             return results
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
-                logger.log(u"Search string: " + search_string, logger.DEBUG)
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 self.search_params['name'] = search_string
 
                 r = self.getURL(self.url, params=self.search_params)
                 with BS4Parser(r, features=["html5lib", "permissive"]) as html:
                     resultsTable = html.find("div", {"class": "DataGrid"})
-                    logger.log(u"Page opened", logger.DEBUG)
 
                     if resultsTable:
-                        logger.log(u"We have results ", logger.DEBUG)
                         rows = resultsTable.findAll("ul")
 
                         for row in rows:
                             link = row.find("a", title=True)
                             title = link['title']
+                            #FIXME
+                            size = -1
+                            seeders = 1
+                            leechers = 0
 
                             autogetURL = self.url +'/' + (row.find("li", {"class": "torrents_name"}).find('a')['href'][1:]).replace('#FTD_MENU' ,'&menu=4')
                             r = self.getURL(autogetURL)
                             with BS4Parser(r, features=["html5lib", "permissive"]) as html:
-                                downloadURL = html.find("div", {"class" : "autoget"}).find('a')['href']
-                                item = title, downloadURL
-                                logger.log(u"Download URL : " + downloadURL, logger.DEBUG)
 
-                                items[mode].append(item)
+                                download_url = html.find("div", {"class" : "autoget"}).find('a')['href']
 
-            results += items[mode]
+                                if not all([title, download_url]):
+                                    continue
 
-        return results
+                                #Filter unseeded torrent
+                                #if seeders < self.minseed or leechers < self.minleech:
+                                #    if mode != 'RSS':
+                                #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                #    continue
 
-    def _get_title_and_url(self, item):
+                                item = title, download_url, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
 
-        title, url = item
+                                items[mode].append(item)
 
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
-        if url:
-            url = str(url).replace('&amp;', '&')
+            results += items[mode]
 
-        return (title, url)
+        return results
 
     def findPropers(self, search_date=datetime.datetime.today()):
 
@@ -259,22 +226,6 @@ class FrenchTorrentDBProvider(generic.TorrentProvider):
     def seedRatio(self):
         return self.ratio
 
-
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
 class FrenchTorrentDBAuth(AuthBase):
     """Attaches HTTP Authentication to the given Request object."""
     def __init__(self, token):
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index dd7ab17ef0781bde80c97336f8821ffc8105e25d..e51264be92047f83d622d169e493ae9250ee2b53 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -1,7 +1,7 @@
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -71,18 +71,10 @@ class FreshOnTVProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'freshontv.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _checkAuth(self):
 
         if not self.username or not self.password:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
 
         return True
 
@@ -100,11 +92,10 @@ class FreshOnTVProvider(generic.TorrentProvider):
 
             response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
             if not response:
-                logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+                logger.log(u"Unable to connect to provider", logger.WARNING)
                 return False
 
             if re.search('/logout.php', response):
-                logger.log(u'Login to ' + self.name + ' was successful.', logger.DEBUG)
 
                 try:
                     if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
@@ -116,68 +107,18 @@ class FreshOnTVProvider(generic.TorrentProvider):
                         }
                         return True
                 except:
-                    logger.log(u'Unable to obtain cookie for FreshOnTV', logger.WARNING)
+                    logger.log(u"Unable to login to provider (cookie)", logger.WARNING)
                     return False
 
             else:
-                logger.log(u'Login to ' + self.name + ' was unsuccessful.', logger.DEBUG)
                 if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response):
-                    logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+                    logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
 
                 if re.search('DDoS protection by CloudFlare', response):
-                    logger.log(u'Unable to login to ' + self.name + ' due to CloudFlare DDoS javascript check.', logger.ERROR)
+                    logger.log(u"Unable to login to provider due to CloudFlare DDoS javascript check", logger.WARNING)
 
                     return False
 
-
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -189,18 +130,19 @@ class FreshOnTVProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (freeleech, search_string)
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
                 init_html = self.getURL(searchURL)
                 max_page_number = 0
 
                 if not init_html:
-                    logger.log(u"The opening search response from " + self.name + " is empty.",logger.DEBUG)
+                    logger.log("No data returned from provider", logger.DEBUG)
                     continue
 
                 try:
@@ -228,7 +170,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
                         if max_page_number > 3 and mode is 'RSS':
                             max_page_number = 3
                 except:
-                    logger.log(u"BS4 parser unable to process response " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
                     continue
 
                 data_response_list = []
@@ -240,11 +182,10 @@ class FreshOnTVProvider(generic.TorrentProvider):
 
                         time.sleep(1)
                         page_searchURL = searchURL + '&page=' + str(i)
-                        logger.log(u"Search string: " + page_searchURL, logger.DEBUG)
+                        #logger.log(u"Search string: " + page_searchURL, logger.DEBUG)
                         page_html = self.getURL(page_searchURL)
 
                         if not page_html:
-                            logger.log(u"The search response for page number " + str(i) + " is empty." + self.name,logger.DEBUG)
                             continue
 
                         data_response_list.append(page_html)
@@ -259,7 +200,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
 
                             #Continue only if a Release is found
                             if len(torrent_rows) == 0:
-                                logger.log(u"The Data returned from " + self.name + " does not contain any torrent", logger.DEBUG)
+                                logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                                 continue
 
                             for individual_torrent in torrent_rows:
@@ -271,7 +212,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
                                 try:
                                     title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title']
                                 except:
-                                    logger.log(u"Unable to parse torrent title " + self.name + " Traceback: " + traceback.format_exc(), logger.DEBUG)
+                                    logger.log(u"Unable to parse torrent title. Traceback: %s " % traceback.format_exc(), logger.WARNING)
                                     continue
 
                                 try:
@@ -279,55 +220,45 @@ class FreshOnTVProvider(generic.TorrentProvider):
                                     id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip())
                                     download_url = self.urls['download'] % (str(id))
                                 except:
-                                    logger.log(u"Unable to parse torrent id & download url  " + self.name + " Traceback: " + traceback.format_exc(), logger.DEBUG)
                                     continue
 
                                 try:
                                     seeders = int(individual_torrent.find('td', {'class': 'table_seeders'}).find('span').text.strip())
                                 except:
-                                    logger.log(u"Unable to parse torrent seeders content  " + self.name + " Traceback: " + traceback.format_exc(), logger.DEBUG)
                                     seeders = 1
                                 try:
                                     leechers = int(individual_torrent.find('td', {'class': 'table_leechers'}).find('a').text.strip())
                                 except:
-                                    logger.log(u"Unable to parse torrent leechers content " + self.name + " Traceback: " + traceback.format_exc(), logger.DEBUG)
                                     leechers = 0
 
-                                #Filter unseeded torrent
-                                if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                #FIXME
+                                size = -1
+
+                                if not all([title, download_url]):
                                     continue
 
-                                if not title or not download_url:
+                                #Filter unseeded torrent
+                                if seeders < self.minseed or leechers < self.minleech:
+                                    if mode != 'RSS':
+                                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                     continue
 
-                                item = title, download_url, id, seeders, leechers
-                                logger.log(u"Found result: " + title + " (" + searchURL + ")", logger.DEBUG)
+                                item = title, download_url, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                                 items[mode].append(item)
 
                 except Exception as e:
-                    logger.log(u"Failed parsing " + " Traceback: " + traceback.format_exc(), logger.DEBUG)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 10009610fc83db8b413f92b7ff7dcdfe69ce0246..1340986113c083de75ebf4995105da4736733830 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -2,7 +2,7 @@
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -39,7 +39,7 @@ from sickbeard.common import Quality
 from sickbeard.common import user_agents
 from sickrage.helper.encoding import ek
 from sickrage.helper.exceptions import ex
-
+from sickbeard import show_name_helpers
 
 class GenericProvider:
     NZB = "nzb"
@@ -344,8 +344,6 @@ class GenericProvider:
                 # get single episode search results
                 search_strings = self._get_episode_search_strings(epObj)
 
-            if search_strings:
-                logger.log(u'search_strings = %s' % repr(search_strings), logger.DEBUG)
             first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
             if first:
                 logger.log(u'First search_string has rid', logger.DEBUG)
@@ -550,6 +548,72 @@ class TorrentProvider(GenericProvider):
 
         self.providerType = GenericProvider.TORRENT
 
+    def getQuality(self, item, anime=False):
+        quality = Quality.sceneQuality(item[0], anime)
+        return quality
+
+    def _get_title_and_url(self, item):
+
+        title = item[0]
+        download_url = item[1]
+
+        if title:
+            title = self._clean_title_from_provider(title)
+        if download_url:
+            download_url = download_url.replace('&amp;', '&')
+
+        return (title, download_url)
+
+
+    def _get_size(self, item):
+
+        size = item[2]
+        if not size:
+            size = -1
+
+        return size
+
+    def _get_season_search_strings(self, ep_obj):
+
+        search_string = {'Season': []}
+        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+            if ep_obj.show.air_by_date or ep_obj.show.sports:
+                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
+            elif ep_obj.show.anime:
+                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
+            else:
+                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName.SXX
+
+            search_string['Season'].append(ep_string)
+
+        return [search_string]
+
+    def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+        search_string = {'Episode': []}
+
+        if not ep_obj:
+            return []
+
+        for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
+            ep_string = show_name + ' '
+            if ep_obj.show.air_by_date:
+                ep_string += str(ep_obj.airdate).replace('-', '|')
+            elif ep_obj.show.sports:
+                ep_string += str(ep_obj.airdate).replace('-', '|') + '|' + \
+                        ep_obj.airdate.strftime('%b')
+            elif ep_obj.show.anime:
+                ep_string += "%i" % int(ep_obj.scene_absolute_number)
+            else:
+                ep_string += sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+                                                              'episodenumber': ep_obj.scene_episode}
+            if add_string:
+                ep_string = ep_string + ' %s' % add_string
+
+            search_string['Episode'].append(ep_string)
+
+        return [search_string]
+
     def _clean_title_from_provider(self, title):
         if title:
             title = u'' + title.replace(' ', '.')
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index b8d3c128a0d4dca008a5e3fd18f563340f58c061..f40a014d446fc9c7c6fa5f4e03366ac13f08a2b7 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -1,4 +1,4 @@
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -65,10 +65,7 @@ class HDBitsProvider(generic.TorrentProvider):
 
         if 'status' in parsedJSON and 'message' in parsedJSON:
             if parsedJSON.get('status') == 5:
-                logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'],
-                           logger.DEBUG)
-                raise AuthException(
-                    "Your authentication credentials for " + self.name + " are incorrect, check your config.")
+                logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
 
         return True
 
@@ -91,12 +88,13 @@ class HDBitsProvider(generic.TorrentProvider):
         return (title, url)
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+
+        #FIXME 
         results = []
 
-        self._checkAuth()
+        logger.log(u"Search string: %s" %  search_params, logger.DEBUG)
 
-        logger.log(u"Search url: " + self.urls['search'] + " search_params: " + search_params,
-                   logger.DEBUG)
+        self._checkAuth()
 
         parsedJSON = self.getURL(self.urls['search'], post_data=search_params, json=True)
         if not parsedJSON:
@@ -106,12 +104,12 @@ class HDBitsProvider(generic.TorrentProvider):
             if parsedJSON and 'data' in parsedJSON:
                 items = parsedJSON['data']
             else:
-                logger.log(u"Resulting JSON from " + self.name + " isn't correct, not parsing it", logger.ERROR)
+                logger.log(u"Resulting JSON from provider isn't correct, not parsing it", logger.ERROR)
                 items = []
 
             for item in items:
                 results.append(item)
-
+        #FIXME SORTING
         return results
 
     def findPropers(self, search_date=None):
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index b70ab3101f58ef9e03f6da79499280e518d21410..6fcc6a6711f76412131b2c06cde826bf86f3efb9 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -2,7 +2,7 @@
 # Modified by jkaberg, https://github.com/jkaberg for SceneAccess
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -34,7 +34,7 @@ from BeautifulSoup import BeautifulSoup as soup
 from unidecode import unidecode
 from sickbeard.helpers import sanitizeSceneName
 from datetime import datetime
-
+import traceback
 
 class HDTorrentsProvider(generic.TorrentProvider):
     def __init__(self):
@@ -53,6 +53,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
         self.urls = {'base_url': 'https://hd-torrents.org',
                      'login': 'https://hd-torrents.org/login.php',
                      'search': 'https://hd-torrents.org/torrents.php?search=%s&active=1&options=0%s',
+                     'rss': 'https://hd-torrents.org/torrents.php?search=&active=1&options=0%s',
                      'home': 'https://hd-torrents.org/%s'
         }
 
@@ -65,13 +66,10 @@ class HDTorrentsProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'hdtorrents.png'
-
     def _checkAuth(self):
 
         if not self.username or not self.password:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
 
         return True
 
@@ -86,160 +84,116 @@ class HDTorrentsProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('You need cookies enabled to log in.', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-        if not ep_obj:
-            return []
-
-        search_strings = []
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % ep_obj.scene_season
-
-            search_strings.append(ep_string)
-
-        return [search_strings]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-        if not ep_obj:
-            return []
-
-        search_strings = []
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if self.show.air_by_date:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-            elif self.show.sports:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-            elif self.show.anime:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-            else:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode}
-            if add_string:
-                ep_string += ' %s' % add_string
-
-            search_strings.append(ep_string)
-
-        return [search_strings]
-
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
+        items = {'Season': [], 'Episode': [], 'RSS': []}
 
         if not self._doLogin():
             return results
 
-        for search_string in search_params if search_params else '':
-            if isinstance(search_string, unicode):
-                search_string = unidecode(search_string)
-
-
-            searchURL = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')), self.categories)
-            logger.log(u"Search string: " + searchURL, logger.DEBUG)
-            data = self.getURL(searchURL)
-            if not data:
-                logger.log(u'No grabs for you', logger.DEBUG)
-                continue
-
-            html = soup(data)
-            if not html:
-                continue
-
-            empty = html.find('No torrents here')
-            if empty:
-                logger.log(u"Could not find any torrents", logger.ERROR)
-                continue
-
-            tables = html.find('table', attrs={'class': 'mainblockcontenttt'})
-            if not tables:
-                logger.log(u"Could not find table of torrents mainblockcontenttt", logger.ERROR)
-                continue
-
-            torrents = tables.findChildren('tr')
-            if not torrents:
-                 continue
-
-            # Skip column headers
-            for result in torrents[1:]:
-                try:
-                    cells = result.findChildren('td', attrs={'class': re.compile(r'(green|yellow|red|mainblockcontent)')})
-                    if not cells:
-                        continue
+        for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
+            for search_string in search_strings[mode]:
 
-                    title = url = seeders = leechers = None
-                    size = 0
-                    for cell in cells:
-                        try:
-                            if None is title and cell.get('title') and cell.get('title') in 'Download':
-                                title = re.search('f=(.*).torrent', cell.a['href']).group(1).replace('+', '.')
-                                url = self.urls['home'] % cell.a['href']
-                            if None is seeders and cell.get('class')[0] and cell.get('class')[0] in 'green' 'yellow' 'red':
-                                seeders = int(cell.text)
-                            elif None is leechers and cell.get('class')[0] and cell.get('class')[0] in 'green' 'yellow' 'red':
-                                leechers = int(cell.text)
-
-                            # Skip torrents released before the episode aired (fakes)
-                            if re.match('..:..:..  ..:..:....', cells[6].text):
-                                if (datetime.strptime(cells[6].text, '%H:%M:%S  %m/%d/%Y') -
-                                    datetime.combine(epObj.airdate, datetime.min.time())).days < 0:
-                                    continue
-
-                            # Need size for failed downloads handling
-                            if re.match('[0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+', cells[7].text):
-                                size = self._convertSize(cells[7].text)
-
-                            if not title or not url or not seeders or leechers is None or not size or \
-                                    seeders < self.minseed or leechers < self.minleech:
-                                continue
-
-                            item = title, url, seeders, leechers, size
-                            logger.log(u"Found result: " + title + " (" + searchURL + ")", logger.DEBUG)
-
-                            results.append(item)
-
-                        except:
-                            raise
-
-                except (AttributeError, TypeError, KeyError, ValueError):
-                    continue
+                if mode != 'RSS':
+                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')), self.categories)
+                else:
+                    searchURL = self.urls['rss'] % self.categories
 
-        results.sort(key=lambda tup: tup[3], reverse=True)
-        return results
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s" %  search_string, logger.DEBUG)
 
-    def _get_title_and_url(self, item):
+                data = self.getURL(searchURL)
+                if not data:
+                    logger.log("No data returned from provider", logger.DEBUG)
+                    continue
 
-        title, url, seeders, leechers, size = item
+                html = soup(data)
+                if not html:
+                    logger.log("No html data parsed from provider", logger.DEBUG)
+                    continue
 
-        if title:
-            title = self._clean_title_from_provider(title)
+                empty = html.find('No torrents here')
+                if empty:
+                    logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                    continue
 
-        if url:
-            url = str(url).replace('&amp;', '&')
+                tables = html.find('table', attrs={'class': 'mainblockcontenttt'})
+                if not tables:
+                    logger.log(u"Could not find table of torrents mainblockcontenttt", logger.ERROR)
+                    continue
 
-        return (title, url)
+                torrents = tables.findChildren('tr')
+                if not torrents:
+                    continue
 
-    def _get_size(self, item):
+                # Skip column headers
+                for result in torrents[1:]:
+                    try:
+                        cells = result.findChildren('td', attrs={'class': re.compile(r'(green|yellow|red|mainblockcontent)')})
+                        if not cells:
+                            continue
+
+                        title = download_url = seeders = leechers = None
+                        size = 0
+                        for cell in cells:
+                            try:
+                                if None is title and cell.get('title') and cell.get('title') in 'Download':
+                                    title = re.search('f=(.*).torrent', cell.a['href']).group(1).replace('+', '.')
+                                    download_url = self.urls['home'] % cell.a['href']
+                                if None is seeders and cell.get('class')[0] and cell.get('class')[0] in 'green' 'yellow' 'red':
+                                    seeders = int(cell.text)
+                                    if not seeders:
+                                        seeders = 1
+                                elif None is leechers and cell.get('class')[0] and cell.get('class')[0] in 'green' 'yellow' 'red':
+                                    leechers = int(cell.text)
+                                    if not leechers:
+                                        seeders = 0
+
+                                # Need size for failed downloads handling
+                                if re.match(r'[0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+', cells[7].text):
+                                    size = self._convertSize(cells[7].text)
+                                    if not size:
+                                        size = -1
+
+                            except:
+                                logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+
+                        if not all([title, download_url]):
+                            continue
+
+                        #Filter unseeded torrent
+                        if seeders < self.minseed or leechers < self.minleech:
+                            if mode != 'RSS':
+                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            continue
+
+                        item = title, download_url, size, seeders, leechers
+                        if mode != 'RSS':
+                            logger.log(u"Found result: %s " % title, logger.DEBUG)
+
+                        items[mode].append(item)
+
+                    except (AttributeError, TypeError, KeyError, ValueError):
+                        continue
 
-        title, url, seeders, leechers, size = item
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
-        return size
+            results += items[mode]
+
+        return results
 
     def findPropers(self, search_date=datetime.today()):
 
@@ -298,11 +252,10 @@ class HDTorrentsCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
 
         # only poll HDTorrents every 10 minutes max
-        self.minTime = 20
+        self.minTime = 10
 
     def _getRSSData(self):
-        search_params = []
-        return {'entries': self.provider._doSearch(search_params)}
-
+        search_strings = {'RSS': ['']}
+        return {'entries': self.provider._doSearch(search_strings)}
 
 provider = HDTorrentsProvider()
diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py
index 036c96aa230368016b132c7c254d231f27f34c13..48701a4a1ea5a9c06525f5e149b80c15721a8cb6 100644
--- a/sickbeard/providers/hounddawgs.py
+++ b/sickbeard/providers/hounddawgs.py
@@ -1,7 +1,7 @@
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -76,14 +76,6 @@ class HoundDawgsProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'hounddawgs.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         login_params = {'username': self.username,
@@ -95,65 +87,17 @@ class HoundDawgsProvider(generic.TorrentProvider):
         self.getURL(self.urls['base_url'], timeout=30)
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Dit brugernavn eller kodeord er forkert.', response) \
                 or re.search('<title>Login :: HoundDawgs</title>', response) \
                 or re.search('Dine cookies er ikke aktiveret.', response):
-            logger.log(u'Invalid username or password, check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode}
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -163,8 +107,12 @@ class HoundDawgsProvider(generic.TorrentProvider):
             return results
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
-                logger.log(u"Search string: " + search_string, logger.DEBUG)
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 self.search_params['searchstr'] = search_string
 
                 data = self.getURL(self.urls['search'], params=self.search_params)
@@ -180,8 +128,7 @@ class HoundDawgsProvider(generic.TorrentProvider):
                         result_table = html.find('table', {'id': 'torrent_table'})
 
                         if not result_table:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         result_tbody = result_table.find('tbody')
@@ -213,6 +160,10 @@ class HoundDawgsProvider(generic.TorrentProvider):
 
                                 download_url = self.urls['base_url']+allAs[0].attrs['href']
                                 id = link.replace(self.urls['base_url']+'torrents.php?id=','')
+                                #FIXME
+                                size = -1
+                                seeders = 1
+                                leechers = 0
 
                             except (AttributeError, TypeError):
                                 continue
@@ -220,31 +171,28 @@ class HoundDawgsProvider(generic.TorrentProvider):
                             if not title or not download_url:
                                 continue
 
-                            item = title, download_url
-                            logger.log(u"Found result: " + title.replace(' ','.') + " (" + download_url + ")", logger.DEBUG)
+                            #Filter unseeded torrent
+                            #if seeders < self.minseed or leechers < self.minleech:
+                            #    if mode != 'RSS':
+                            #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            #    continue
+
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url = item
-
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 8aa5e896ff109afe919a6712c7db2bc0327c6349..1f0e24cb70b2f9b09023c74cdf91e61ee56d3290 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -1,7 +1,7 @@
 # Author: seedboy
 # URL: https://github.com/seedboy
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -52,6 +52,8 @@ class IPTorrentsProvider(generic.TorrentProvider):
         self.password = None
         self.ratio = None
         self.freeleech = False
+        self.minseed = None
+        self.minleech = None
 
         self.cache = IPTorrentsCache(self)
 
@@ -67,14 +69,6 @@ class IPTorrentsProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'iptorrents.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _checkAuth(self):
 
         if not self.username or not self.password:
@@ -92,51 +86,16 @@ class IPTorrentsProvider(generic.TorrentProvider):
         self.getURL(self.urls['login'], timeout=30)
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('tries left', response) \
                 or re.search('<title>IPT</title>', response):
-            logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', ' ')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', ' ') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + '|' + \
-                            sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
 
         self._checkAuth()
@@ -146,7 +105,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
         itemList = []
 
         if search_mode == 'sponly':
-            logger.log(u"This provider doesn't support season pack. Consider setting Season search mode to episodes only and unchecked Season search fallback", logger.WARNING)
+            logger.log(u"Provider doesn't support season pack. Consider setting Season search mode to episodes only and unchecked Season search fallback", logger.WARNING)
             search_mode = 'eponly'
 
         for epObj in episodes:
@@ -195,10 +154,10 @@ class IPTorrentsProvider(generic.TorrentProvider):
                 myParser = NameParser(False)
                 parse_result = myParser.parse(title)
             except InvalidNameException:
-                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)
+                logger.log(u"Unable to parse the filename %s into a valid episode" % title, logger.DEBUG)
                 continue
             except InvalidShowException:
-                logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
+                logger.log(u"Unable to parse the filename %s into a valid show" % title, logger.DEBUG)
                 continue
 
             showObj = parse_result.show
@@ -211,14 +170,14 @@ class IPTorrentsProvider(generic.TorrentProvider):
                 if search_mode == 'sponly':
                     if len(parse_result.episode_numbers):
                         logger.log(
-                            u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
+                            u"This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it" % title,
                             logger.DEBUG)
                         addCacheEntry = True
                     if len(parse_result.episode_numbers) and (
                                     parse_result.season_number not in set([ep.season for ep in episodes]) or not [ep for ep in episodes if
                                                                                  ep.scene_episode in parse_result.episode_numbers]):
                         logger.log(
-                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
+                            u"The result %s doesn't seem to be a valid episode that we are trying to snatch, ignoring" % title,
                             logger.DEBUG)
                         addCacheEntry = True
                 else:
@@ -226,13 +185,13 @@ class IPTorrentsProvider(generic.TorrentProvider):
                                                                                                      episodes if
                                                                                                      ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                         logger.log(
-                            u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
+                            u"The result %s doesn't seem to be a valid season that we are trying to snatch, ignoring" % title,
                             logger.DEBUG)
                         addCacheEntry = True
                     elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
                                                                     ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                         logger.log(
-                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
+                            u"The result %s doesn't seem to be a valid episode that we are trying to snatch, ignoring" % title,
                             logger.DEBUG)
                         addCacheEntry = True
 
@@ -243,7 +202,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
             else:
                 if not (parse_result.is_air_by_date):
                     logger.log(
-                        u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
+                        u"This is supposed to be a date search but the result %s didn't parse as one, skipping it" % title,
                         logger.DEBUG)
                     addCacheEntry = True
                 else:
@@ -255,7 +214,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
                     if len(sql_results) != 1:
                         logger.log(
-                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
+                            u"Tried to look up the date for the episode %s but the database didn't give proper results, skipping it" % title,
                             logger.WARNING)
                         addCacheEntry = True
 
@@ -265,7 +224,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
             # add parsed result to cache for usage later on
             if addCacheEntry:
-                logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
+                logger.log(u"Adding item from search to cache: %s " % title, logger.DEBUG)
                 ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
                 if ci is not None:
                     cl.append(ci)
@@ -280,13 +239,12 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
             if not wantEp:
                 logger.log(
-                    u"Ignoring result " + title + " because we don't want an episode that is " +
-                    Quality.qualityStrings[
-                        quality], logger.INFO)
+                    u"Ignoring result %s because we don't want an episode that is %s" % (title,Quality.qualityStrings[quality]),
+                    logger.DEBUG)
 
                 continue
 
-            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
+            logger.log(u"Found result %s at %s " % (title, url), logger.DEBUG)
 
             # make a result object
             epObj = []
@@ -304,11 +262,11 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
             if len(epObj) == 1:
                 epNum = epObj[0].episode
-                logger.log(u"Single episode result.", logger.DEBUG)
+                logger.log(u"Single episode result", logger.DEBUG)
             elif len(epObj) > 1:
                 epNum = MULTI_EP_RESULT
-                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
-                    parse_result.episode_numbers), logger.DEBUG)
+                logger.log(u"Separating multi-episode result to check for later - result contains episodes: %s " % parse_result.episode_numbers,
+                logger.DEBUG)
             elif len(epObj) == 0:
                 epNum = SEASON_RESULT
                 logger.log(u"Separating full season result to check for later", logger.DEBUG)
@@ -336,15 +294,16 @@ class IPTorrentsProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
                 searchURL = self.urls['search'] % (self.categorie, freeleech, search_string)
                 searchURL += ';o=seeders' if mode != 'RSS' else ''
-
-                logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
 
                 data = self.getURL(searchURL)
                 if not data:
@@ -354,11 +313,11 @@ class IPTorrentsProvider(generic.TorrentProvider):
                     data = re.sub(r'(?im)<button.+?<[\/]button>', '', data, 0)
                     with BS4Parser(data, features=["html5lib", "permissive"]) as html:
                         if not html:
-                            logger.log(u"Invalid HTML data: " + str(data), logger.DEBUG)
+                            logger.log("No data returned from provider", logger.DEBUG)
                             continue
 
                         if html.find(text='No Torrents Found!'):
-                            logger.log(u"No results found for: " + search_string + " (" + searchURL + ")", logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         torrent_table = html.find('table', attrs={'class': 'torrents'})
@@ -366,55 +325,47 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
                         #Continue only if one Release is found
                         if len(torrents) < 2:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.WARNING)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         for result in torrents[1:]:
 
                             try:
                                 torrent = result.find_all('td')[1].find('a')
-                                torrent_name = torrent.string
-                                torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href']
-                                torrent_details_url = self.urls['base_url'] + torrent['href']
-                                torrent_seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).string)
-                                ## Not used, perhaps in the future ##
-                                #torrent_id = int(torrent['href'].replace('/details.php?id=', ''))
-                                #torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
+                                title = torrent.string
+                                download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href']
+                                details_url = self.urls['base_url'] + torrent['href']
+                                seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).string)
+                                id = int(torrent['href'].replace('/details.php?id=', ''))
+                                leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
                             except (AttributeError, TypeError):
                                 continue
 
-                            # Filter unseeded torrent and torrents with no name/url
-                            if mode != 'RSS' and torrent_seeders == 0:
+                            if not all([title, download_url]):
                                 continue
 
-                            if not torrent_name or not torrent_download_url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            item = torrent_name, torrent_download_url
-                            logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG)
+                            item = title, download_url, -1, seeders, leechers  # size not parsed on this page
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url = item
-
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 7b6896d27156a15f2967e8293c39d78d118fe75d..61ef2d54e354acf237d5c45cd423ddd9526ef40e 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -23,6 +23,8 @@ import traceback
 import re
 import datetime
 import xmltodict
+from urllib import urlencode
+from xml.parsers.expat import ExpatError
 
 import sickbeard
 from sickbeard import logger
@@ -71,73 +72,36 @@ class KATProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'kat.png'
-
-    def _get_season_search_strings(self, ep_obj):
-        search_string = {'Season': []}
-
-        for show_name in set(allPossibleShowNames(ep_obj.show)):
-            ep_string = sanitizeSceneName(show_name) + ' '
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string += str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string += "%02d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = '%s S%02d -S%02dE category:tv' % (sanitizeSceneName(show_name), ep_obj.scene_season, ep_obj.scene_season) #1) showName SXX -SXXE
-                search_string['Season'].append(ep_string)
-                ep_string = '%s "Season %d" -Ep* category:tv' % (sanitizeSceneName(show_name), ep_obj.scene_season) # 2) showName "Season X"
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-        search_string = {'Episode': []}
-
-        for show_name in set(allPossibleShowNames(ep_obj.show)):
-            ep_string = sanitizeSceneName(show_name) + ' '
-            if ep_obj.show.air_by_date:
-                ep_string += str(ep_obj.airdate).replace('-', ' ')
-            elif ep_obj.show.sports:
-                ep_string += str(ep_obj.airdate).replace('-', ' ') + '|' + ep_obj.airdate.strftime('%b')
-            elif ep_obj.show.anime:
-                ep_string += "%02d" % ep_obj.scene_absolute_number
-            else:
-                ep_string += sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                   'episodenumber': ep_obj.scene_episode} + '|' + \
-                             sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season,
-                                                                   'episodenumber': ep_obj.scene_episode}
-            if add_string:
-                ep_string += ' ' + add_string
-
-            search_string['Episode'].append(re.sub(r'\s+', ' ', ep_string.strip()))
-
-        return [search_string]
-
-    def _get_size(self, item):
-        #pylint: disable=W0612
-        title, url, info_hash, seeders, leechers, size, pubdate = item
-        return size or -1
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
+
                 self.search_params.update({'q': search_string, 'field': ('seeders', 'time_add')[mode == 'RSS']})
-                logger.log(u"Search string: %s" % unicode(self.search_params), logger.DEBUG)
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s" % search_string, logger.DEBUG)
 
                 try:
-                    data = self.getURL(self.urls[('search', 'rss')[mode == 'RSS']], params=self.search_params)
+                    searchURL = self.urls[('search', 'rss')[mode == 'RSS']] + '?' + urlencode(self.search_params)
+                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                    data = self.getURL(searchURL)
+                    #data = self.getURL(self.urls[('search', 'rss')[mode == 'RSS']], params=self.search_params)
                     if not data:
-                        logger.log(u'No response, skipping...', logger.DEBUG)
+                        logger.log("No data returned from provider", logger.DEBUG)
+                        continue
+
+                    try:
+                        data = xmltodict.parse(data)
+                    except ExpatError as e:
+                        logger.log(u"Failed parsing provider. Traceback: %r\n%r" % (traceback.format_exc(), data), logger.ERROR)
                         continue
 
-                    data = xmltodict.parse(data)
                     if not all([data, 'rss' in data, 'channel' in data['rss'], 'item' in data['rss']['channel']]):
-                        logger.log(u'Malformed rss returned, skipping...', logger.DEBUG)
+                        logger.log(u"Malformed rss returned, skipping", logger.DEBUG)
                         continue
 
                     # https://github.com/martinblech/xmltodict/issues/111
@@ -152,9 +116,9 @@ class KATProvider(generic.TorrentProvider):
                             # unless it is not torcache or we are not using blackhole
                             # because we want to use magnets if connecting direct to client
                             # so that proxies work.
-                            url = item['enclosure']['@url']
-                            if sickbeard.TORRENT_METHOD != "blackhole" or 'torcache' not in url:
-                                url = item['torrent:magnetURI']
+                            download_url = item['enclosure']['@url']
+                            if sickbeard.TORRENT_METHOD != "blackhole" or 'torcache' not in download_url:
+                                download_url = item['torrent:magnetURI']
 
                             seeders = int(item['torrent:seeds'])
                             leechers = int(item['torrent:peers'])
@@ -167,50 +131,40 @@ class KATProvider(generic.TorrentProvider):
                         except (AttributeError, TypeError, KeyError):
                             continue
 
-                        # Dont let RSS add items with no seeders either -.-
-                        if not seeders or seeders < self.minseed or leechers < self.minleech:
-                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
-                            continue
+                        try:
+                            pubdate = datetime.datetime.strptime(item['pubDate'], '%a, %d %b %Y %H:%M:%S +0000')
+                        except Exception:
+                            pubdate = datetime.datetime.today()
 
-                        if self.confirmed and not verified:
-                            logger.log(u"KAT Provider found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG)
+                        if not all([title, download_url]):
                             continue
 
-                        if not title or not url:
+                        #Filter unseeded torrent
+                        if seeders < self.minseed or leechers < self.minleech:
+                            if mode != 'RSS':
+                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                             continue
 
-                        try:
-                            pubdate = datetime.datetime.strptime(item['pubDate'], '%a, %d %b %Y %H:%M:%S +0000')
-                        except Exception:
-                            pubdate = datetime.datetime.today()
+                        if self.confirmed and not verified and mode != 'RSS':
+                            logger.log(u"Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG)
+                            continue
 
-                        item = title, url, info_hash, seeders, leechers, size, pubdate
+                        item = title, download_url, size, seeders, leechers
+                        if mode != 'RSS':
+                            logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                         items[mode].append(item)
 
                 except Exception:
-                    logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
-                               logger.WARNING)
+                    logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-        #pylint: disable=W0612
-        title, url, info_hash, seeders, leechers, size, pubdate = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = url.replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()-datetime.timedelta(days=1)):
         results = []
 
diff --git a/sickbeard/providers/libertalia.py b/sickbeard/providers/libertalia.py
index f7a7220faf343c3a0a4782efbc9a1f929a2996e5..17d020a1d1effda350be4e1fce37fcaaa028536b 100644
--- a/sickbeard/providers/libertalia.py
+++ b/sickbeard/providers/libertalia.py
@@ -4,7 +4,7 @@
 # based on tpi.py
 # URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -37,7 +37,7 @@ from sickbeard import helpers
 from sickbeard import classes
 from unidecode import unidecode
 from sickbeard.helpers import sanitizeSceneName
-
+from sickbeard import tvcache
 
 class LibertaliaProvider(generic.TorrentProvider):
 
@@ -62,64 +62,11 @@ class LibertaliaProvider(generic.TorrentProvider):
         self.minseed = None
         self.minleech = None
 
+        self.cache = LibertaliaCache(self)
+
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'libertalia.png'
-
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
-
-        return [search_string]
-
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
@@ -129,17 +76,15 @@ class LibertaliaProvider(generic.TorrentProvider):
                             'password': self.password
         }
 
-        logger.log('Performing authentication to Libertalia', logger.DEBUG)
         response = self.getURL(self.url + '/login.php',  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('upload.php', response):
-            logger.log(u'Login to ' + self.name + ' was successful.', logger.DEBUG)
             return True
         else:
-            logger.log(u'Login to ' + self.name + ' was unsuccessful.', logger.WARNING)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
@@ -147,8 +92,6 @@ class LibertaliaProvider(generic.TorrentProvider):
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
-        logger.log(u"_doSearch started with ..." + str(search_params), logger.DEBUG)
-
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
@@ -157,16 +100,14 @@ class LibertaliaProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
-
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urlsearch % (urllib.quote(search_string), self.categories)
-
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
-
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 data = self.getURL(searchURL)
                 if not data:
                     continue
@@ -174,25 +115,42 @@ class LibertaliaProvider(generic.TorrentProvider):
                 with BS4Parser(data, features=["html5lib", "permissive"]) as html:
                     resultsTable = html.find("table", { "class" : "torrent_table"  })
                     if resultsTable:
-                        logger.log(u"Libertalia found result table ! " , logger.DEBUG)
                         rows = resultsTable.findAll("tr" ,  {"class" : "torrent_row  new  "}  )  # torrent_row new
 
                         for row in rows:
 
                             #bypass first row because title only
                             columns = row.find('td', {"class" : "torrent_name"} )
-                            logger.log(u"Libertalia found rows ! " , logger.DEBUG)
                             isvfclass = row.find('td', {"class" : "sprite-vf"} )
                             isvostfrclass = row.find('td', {"class" : "sprite-vostfr"} )
                             link = columns.find("a",  href=re.compile("torrents"))
                             if link:
                                 title = link.text
                                 recherched=searchURL.replace(".","(.*)").replace(" ","(.*)").replace("'","(.*)")
-                                logger.log(u"Libertalia title : " + title, logger.DEBUG)
                                 downloadURL =  row.find("a",href=re.compile("torrent_pass"))['href']
-                                logger.log(u"Libertalia download URL : " + downloadURL, logger.DEBUG)
-                                item = title, downloadURL
+                                #FIXME
+                                size = -1
+                                seeders = 1
+                                leechers = 0
+
+                                if not all([title, downloadURL]):
+                                    continue
+
+                                #Filter unseeded torrent
+                                #if seeders < self.minseed or leechers < self.minleech:
+                                #    if mode != 'RSS':
+                                #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                #    continue
+
+                                item = title, downloadURL, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                                 items[mode].append(item)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
             results += items[mode]
 
         return results
@@ -228,17 +186,15 @@ class LibertaliaProvider(generic.TorrentProvider):
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url = item
+class LibertaliaCache(tvcache.TVCache):
+    def __init__(self, provider):
 
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
+        tvcache.TVCache.__init__(self, provider)
 
-        if url:
-            url = str(url).replace('&amp;', '&')
+        self.minTime = 10
 
-        return (title, url)
+    def _getRSSData(self):
+        search_strings = {'RSS': ['']}
+        return {'entries': self.provider._doSearch(search_strings)}
 
 provider = LibertaliaProvider()
diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py
index 1dbebd06c1ce9de0a89ebfda68cff0c741b11978..c1974a7a16c83b5f42ddd91c3bd7f670979ee089 100644
--- a/sickbeard/providers/morethantv.py
+++ b/sickbeard/providers/morethantv.py
@@ -1,7 +1,7 @@
 # Author: Seamus Wassman
 # URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -74,14 +74,6 @@ class MoreThanTVProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'morethantv.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _checkAuth(self):
 
         if not self.username or not self.password:
@@ -103,63 +95,15 @@ class MoreThanTVProvider(generic.TorrentProvider):
 
             response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
             if not response:
-                logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+                logger.log(u"Unable to connect to provider", logger.WARNING)
                 return False
 
             if re.search('Your username or password was incorrect.', response):
-                logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+                logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
                 return False
 
             return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d*' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(re.sub('\.', '+', ep_string))
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\.', '+', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -171,14 +115,14 @@ class MoreThanTVProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (search_string)
-
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 # returns top 15 results by default, expandable in user profile to 100
                 data = self.getURL(searchURL)
@@ -192,8 +136,7 @@ class MoreThanTVProvider(generic.TorrentProvider):
 
                         #Continue only if one Release is found
                         if len(torrent_rows) < 2:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         # skip colheader
@@ -204,13 +147,11 @@ class MoreThanTVProvider(generic.TorrentProvider):
 
                             link_str = str(link['href'])
 
-                            logger.log(u"link=" + link_str, logger.DEBUG)
-
                             #skip if torrent has been nuked due to poor quality
                             if cells[1].find('img', alt='Nuked') != None:
                                 continue
                             torrent_id_long = link['href'].replace('torrents.php?action=download&id=', '')
-                            torrent_id = torrent_id_long.split('&', 1)[0]
+                            id = torrent_id_long.split('&', 1)[0]
 
 
                             try:
@@ -224,49 +165,38 @@ class MoreThanTVProvider(generic.TorrentProvider):
 
                                 leechers = cells[7].contents[0]
 
+                                #FIXME
+                                size = -1
+
                             except (AttributeError, TypeError):
                                 continue
 
 
-                            #Filter unseeded torrent
-                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not download_url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-# Debug
-#                            logger.log(u"title = " + title + ", download_url = " + download_url + ", torrent_id = " + torrent_id + ", seeders = " + seeders + ", leechers = " + leechers, logger.DEBUG)
-
-
-                            item = title, download_url, torrent_id, seeders, leechers
-                            logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 37fe0f583d158e0cb326bf6cceff028a88116a95..5253e172a1c2f7154dcba72a7283de520382aca8 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -34,6 +34,7 @@ from sickbeard.common import Quality
 from sickbeard.providers import generic
 from sickrage.helper.encoding import ek
 from sickrage.helper.exceptions import AuthException
+from sickbeard.common import USER_AGENT
 
 
 class NewznabProvider(generic.NZBProvider):
@@ -48,6 +49,8 @@ class NewznabProvider(generic.NZBProvider):
 
         self.url = self.urls['base_url']
 
+        self.headers.update({'User-Agent': USER_AGENT})
+
         self.key = key
 
         self.search_mode = search_mode
@@ -111,12 +114,12 @@ class NewznabProvider(generic.NZBProvider):
             data = self.cache.getRSSFeed("%s/api?%s" % (self.url, urllib.urlencode(params)))
         except Exception:
             logger.log(u"Error getting html for [%s]" %
-                       ("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x, y) for x, y in params.iteritems()))), logger.DEBUG)
+                       ("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x, y) for x, y in params.iteritems()))), logger.WARNING)
             return (False, return_categories, "Error getting html for [%s]" %
                     ("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x, y) for x, y in params.iteritems()))))
 
         if not self._checkAuthFromData(data):
-            logger.log(u"Error parsing xml for [%s]" % (self.name), logger.DEBUG)
+            logger.log(u"Error parsing xml", logger.DEBUG)
             return False, return_categories, "Error parsing xml for [%s]" % (self.name)
 
         try:
@@ -197,8 +200,7 @@ class NewznabProvider(generic.NZBProvider):
     def _checkAuth(self):
 
         if self.needs_auth and not self.key:
-            logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing",
-                       logger.WARNING)
+            logger.log(u"Invalid api key. Check your settings", logger.WARNING)
             #raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
 
         return True
@@ -228,7 +230,7 @@ class NewznabProvider(generic.NZBProvider):
         elif bozo == 1:
             raise Exception(bozo_exception)
         else:
-            logger.log(u"Unknown error given from " + self.name + ": " + err_desc, logger.ERROR)
+            logger.log(u"Unknown error: %s" % err_desc, logger.ERROR)
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
@@ -241,6 +243,7 @@ class NewznabProvider(generic.NZBProvider):
 
         if search_params:
             params.update(search_params)
+            logger.log(u'Search parameters: %s' % repr(search_params), logger.DEBUG)
 
         # category ids
         if self.show and self.show.is_sports:
@@ -269,7 +272,7 @@ class NewznabProvider(generic.NZBProvider):
             while(datetime.datetime.now() - self.last_search).seconds < 5:
                 time.sleep(1)
 
-            logger.log(u"Search url: " + search_url, logger.DEBUG)
+            logger.log(u"Search url: %s" % search_url, logger.DEBUG)
 
             data = self.cache.getRSSFeed(search_url)
 
@@ -284,10 +287,6 @@ class NewznabProvider(generic.NZBProvider):
 
                 if title and url:
                     results.append(item)
-                else:
-                    logger.log(
-                        u"The data returned from the " + self.name + " is incomplete, this result is unusable",
-                        logger.DEBUG)
 
             # get total and offset attribs
             try:
@@ -312,7 +311,7 @@ class NewznabProvider(generic.NZBProvider):
                 logger.log(u'%d' % (total - offset) + ' more items to be fetched from provider.' +
                            'Fetching another %d' % int(params['limit']) + ' items.', logger.DEBUG)
             else:
-                logger.log(u'No more searches needed.', logger.DEBUG)
+                logger.log(u'No more searches needed', logger.DEBUG)
                 break
 
         return results
@@ -373,7 +372,7 @@ class NewznabCache(tvcache.TVCache):
         while (datetime.datetime.now() - self.last_search).seconds < 5:
             time.sleep(1)
 
-        logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
+        logger.log("Cache update URL: %s " % rss_url, logger.DEBUG)
         data = self.getRSSFeed(rss_url)
 
         self.last_search = datetime.datetime.now()
@@ -390,12 +389,9 @@ class NewznabCache(tvcache.TVCache):
         self._checkItemAuth(title, url)
 
         if not title or not url:
-            logger.log(
-                u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
-                logger.DEBUG)
             return None
 
         tvrageid = 0
 
-        logger.log(u"Attempting to add item from RSS to cache: " + title, logger.DEBUG)
+        logger.log(u"Attempting to add item from RSS to cache: %s" % title, logger.DEBUG)
         return self._addCacheEntry(title, url, indexer_id=tvrageid)
diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py
index 2c168c5c5007782a817c69fc02aa0c7dcf22b510..d399c37c3dc63d7c73b6832f6081fb83ad840513 100644
--- a/sickbeard/providers/nextgen.py
+++ b/sickbeard/providers/nextgen.py
@@ -1,7 +1,7 @@
 # Author: seedboy
 # URL: https://github.com/seedboy
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -70,14 +70,6 @@ class NextGenProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'nextgen.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def getLoginParams(self):
         return {
             'username': self.username,
@@ -125,57 +117,9 @@ class NextGenProvider(generic.TorrentProvider):
             self.login_opener = None
 
         self.login_opener = None
-        logger.log(u'Failed to login:' + str(error), logger.ERROR)
+        logger.log(u"Failed to login: %s" % error, logger.ERROR)
         return False
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub(r'\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -185,12 +129,14 @@ class NextGenProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
-
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
-                logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
+                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
                 data = self.getURL(searchURL)
                 if not data:
                     continue
@@ -200,8 +146,7 @@ class NextGenProvider(generic.TorrentProvider):
                         resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'})
 
                         if not resultsTable:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         # Collecting entries
@@ -222,54 +167,47 @@ class NextGenProvider(generic.TorrentProvider):
                                     torrentId = (
                                     ((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace(
                                         'download.php?id=', '')
-                                    torrent_name = str(torrentName)
-                                    torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
+                                    title = str(torrentName)
+                                    download_url = (self.urls['download'] % torrentId).encode('utf8')
                                     torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8')
-                                    #torrent_seeders = int(result.find('div', attrs = {'id' : 'torrent-seeders'}).find('a')['class'][0])
+                                    seeders = int(result.find('div', attrs = {'id' : 'torrent-seeders'}).find('a')['class'][0])
                                     ## Not used, perhaps in the future ##
                                     #torrent_id = int(torrent['href'].replace('/details.php?id=', ''))
-                                    #torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
+                                    leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
+                                    #FIXME
+                                    size = -1
                                 except (AttributeError, TypeError):
                                     continue
 
-                                # Filter unseeded torrent and torrents with no name/url
-                                #if mode != 'RSS' and torrent_seeders == 0:
-                                #    continue
+                                if not all([title, download_url]):
+                                    continue
 
-                                if not torrent_name or not torrent_download_url:
+                                #Filter unseeded torrent
+                                if seeders < self.minseed or leechers < self.minleech:
+                                    if mode != 'RSS':
+                                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                     continue
 
-                                item = torrent_name, torrent_download_url
-                                logger.log(u"Found result: " + torrent_name.replace(' ','.') + " (" + torrent_details_url + ")",
-                                           logger.DEBUG)
+                                item = title, download_url, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                                 items[mode].append(item)
 
                         else:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.WARNING)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
-                               logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url = item
-
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return title, url
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index 1a1b1129e94a9d452f1ac527923e1348608e73d4..086c12553eaa3141d8bc6b31319676308eccf3d0 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -1,7 +1,7 @@
 # Author: Mr_Orange
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -49,14 +49,6 @@ class NyaaProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'nyaatorrents.png'
-
-    def getQuality(self, item, anime=False):
-        title = item.get('title')
-        quality = Quality.sceneQuality(title, anime)
-        return quality
-
     def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
         return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch, downCurQuality)
 
@@ -67,10 +59,12 @@ class NyaaProvider(generic.TorrentProvider):
         return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
 
     def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
+        #FIXME
         if self.show and not self.show.is_anime:
-            logger.log(u"" + str(self.show.name) + " is not an anime skiping " + str(self.name))
             return []
 
+        logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
         params = {
             "term": search_string.encode('utf-8'),
             "cats": '1_0',  # All anime
@@ -78,28 +72,38 @@ class NyaaProvider(generic.TorrentProvider):
         }
 
         searchURL = self.url + '?page=rss&' + urllib.urlencode(params)
+        logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
 
-        logger.log(u"Search string: " + searchURL, logger.DEBUG)
 
         results = []
         for curItem in self.cache.getRSSFeed(searchURL, items=['entries'])['entries'] or []:
-            (title, url) = self._get_title_and_url(curItem)
+            title = curItem[0]
+            download_url = curItem[1]
+            #FIXME
+            size = -1
+            seeders = 1
+            leechers = 0
 
-            if title and url:
-                results.append(curItem)
-            else:
-                logger.log(
-                    u"The data returned from the " + self.name + " is incomplete, this result is unusable",
-                    logger.DEBUG)
+            if not all([title, download_url]):
+                continue
 
-        return results
+            #Filter unseeded torrent
+            #if seeders < self.minseed or leechers < self.minleech:
+            #    if mode != 'RSS':
+            #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+            #    continue
 
-    def _get_title_and_url(self, item):
-        return generic.TorrentProvider._get_title_and_url(self, item)
+            item = title, download_url, size, seeders, leechers
+            logger.log(u"Found result: %s " % title, logger.DEBUG)
+
+            #FIX ME SORTING
+            results.append(item)
+
+        return results
 
     def _extract_name_from_filename(self, filename):
         name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
-        logger.log(u"Comparing " + name_regex + " against " + filename, logger.DEBUG)
+        logger.log(u"Comparing %s against %s" % (name_regex, filename), logger.DEBUG)
         match = re.match(name_regex, filename, re.I)
         if match:
             return match.group(1)
@@ -125,7 +129,7 @@ class NyaaCache(tvcache.TVCache):
 
         url = self.provider.url + '?' + urllib.urlencode(params)
 
-        logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
+        logger.log(u"Cache update URL: %s" % url, logger.DEBUG)
 
         return self.getRSSFeed(url)
 
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index 1f323381d49daf1c120e580e0e2e34383e2a7522..0a8bb42ca86f781ea32b48464a109fb9724838be 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -49,7 +49,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
     def _checkAuth(self):
 
         if not self.username or not self.api_key:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            logger.log(u"Invalid api key. Check your settings", logger.WARNING)
 
         return True
 
@@ -68,16 +68,13 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 description_text = parsedJSON.get('notice')
 
                 if 'information is incorrect' in parsedJSON.get('notice'):
-                    logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text),
-                               logger.DEBUG)
-                    raise AuthException(
-                        "Your authentication credentials for " + self.name + " are incorrect, check your config.")
+                    logger.log(u"Invalid api key. Check your settings", logger.WARNING)
 
                 elif '0 results matched your terms' in parsedJSON.get('notice'):
                     return True
 
                 else:
-                    logger.log(u"Unknown error given from " + self.name + " : " + str(description_text), logger.DEBUG)
+                    logger.log(u"Unknown error: %s"  % description_text, logger.DEBUG)
                     return False
 
             return True
@@ -113,10 +110,11 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         if retention or not params['retention']:
             params['retention'] = retention
 
-        search_url = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params)
-        logger.log(u"Search url: " + search_url, logger.DEBUG)
+        searchURL = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params)
+        logger.log(u"Search string: %s" % params, logger.DEBUG)
+        logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
 
-        parsedJSON = self.getURL(search_url, json=True)
+        parsedJSON = self.getURL(searchURL, json=True)
         if not parsedJSON:
             return []
 
@@ -125,6 +123,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 
             for item in parsedJSON:
                 if 'release' in item and 'getnzb' in item:
+                    logger.log(u"Found result: %s " % item.get('title'), logger.DEBUG)
                     results.append(item)
 
             return results
@@ -184,7 +183,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
 
         rss_url = 'https://rss.omgwtfnzbs.org/rss-download.php?' + urllib.urlencode(params)
 
-        logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
+        logger.log(u"Cache update URL: %s" % rss_url, logger.DEBUG)
 
         return self.getRSSFeed(rss_url)
 
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index a399fe48cb7287895a9ab5721aadeeb01a3c6a94..7866e2c5101837f560befb3fd1310c4d2ced4110 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -2,7 +2,7 @@
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -79,7 +79,7 @@ class RarbgProvider(generic.TorrentProvider):
 
         self.defaultOptions = self.urlOptions['categories'].format(categories='tv') + \
                                 self.urlOptions['limit'].format(limit='100') + \
-                                self.urlOptions['format'].format(format='json')
+                                self.urlOptions['format'].format(format='json_extended')
 
         self.next_request = datetime.datetime.now()
 
@@ -90,9 +90,6 @@ class RarbgProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'rarbg.png'
-
     def _doLogin(self):
         if self.token and self.tokenExpireDate and datetime.datetime.now() < self.tokenExpireDate:
             return True
@@ -101,7 +98,7 @@ class RarbgProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['token'], timeout=30, json=True)
         if not response:
-            logger.log(u'Unable to connect to %s provider.' % self.name, logger.WARNING)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         try:
@@ -110,65 +107,11 @@ class RarbgProvider(generic.TorrentProvider):
                 self.tokenExpireDate = datetime.datetime.now() + datetime.timedelta(minutes=14)
                 return True
         except Exception as e:
-            logger.log(u'%s provider: No token found' % self.name, logger.WARNING)
-            logger.log(u'%s provider: No token found: %s' % (self.name, ex(e)), logger.DEBUG)
+            logger.log(u"No token found", logger.WARNING)
+            logger.log(u"No token found: %s" % repr(e), logger.DEBUG)
 
         return False
 
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = show_name + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = show_name + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = show_name + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = show_name + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode}
-                if add_string:
-                    ep_string = ep_string + ' %s' % add_string
-
-                search_string['Episode'].append(ep_string)
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -185,7 +128,12 @@ class RarbgProvider(generic.TorrentProvider):
             ep_indexer = None
 
         for mode in search_params.keys(): #Mode = RSS, Season, Episode
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 if mode == 'RSS':
                     searchURL = self.urls['listing'] + self.defaultOptions
                 elif mode == 'Season':
@@ -199,7 +147,7 @@ class RarbgProvider(generic.TorrentProvider):
                     else:
                         searchURL = self.urls['search'].format(search_string=search_string) + self.defaultOptions
                 else:
-                    logger.log(u'{name} invalid search mode:{mode}'.format(name=self.name, mode=mode), logger.ERROR)
+                    logger.log(u"Invalid search mode: %s " % mode, logger.ERROR)
 
                 if self.minleech:
                     searchURL += self.urlOptions['leechers'].format(min_leechers=int(self.minleech))
@@ -213,7 +161,7 @@ class RarbgProvider(generic.TorrentProvider):
                 if self.ranked:
                     searchURL += self.urlOptions['ranked'].format(ranked=int(self.ranked))
 
-                logger.log(u'{name} search page URL: {url}'.format(name=self.name, url=searchURL), logger.DEBUG)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) 
 
                 try:
                     retry = 3
@@ -228,96 +176,89 @@ class RarbgProvider(generic.TorrentProvider):
                         self.next_request = datetime.datetime.now() + datetime.timedelta(seconds=10)
 
                         if not data:
-                            logger.log(u'{name} no data returned.'.format(name=self.name), logger.DEBUG)
+                            logger.log("No data returned from provider", logger.DEBUG)
                             raise GetOutOfLoop
                         if re.search('ERROR', data):
-                            logger.log(u'{name} returned an error.'.format(name=self.name), logger.DEBUG)
+                            logger.log(u"Error returned from provider", logger.DEBUG)
                             raise GetOutOfLoop
                         if re.search('No results found', data):
-                            logger.log(u'{name} no results found.'.format(name=self.name), logger.DEBUG)
+                            logger.log(u"No results found", logger.DEBUG)
                             raise GetOutOfLoop
                         if re.search('Invalid token set!', data):
-                            logger.log(u'{name} Invalid token set!'.format(name=self.name), logger.ERROR)
+                            logger.log(u"Invalid token!", logger.WARNING)
                             return results
                         if re.search('Too many requests per minute. Please try again later!', data):
-                            logger.log(u'{name} Too many requests per minute.'.format(name=self.name), logger.DEBUG)
+                            logger.log(u"Too many requests per minute", logger.WARNING)
                             retry = retry - 1
                             time.sleep(10)
                             continue
                         if re.search('Cant find search_tvdb in database. Are you sure this imdb exists?', data):
-                            logger.log(u'{name} no results found. Search tvdb id do not exist on server.'.format(name=self.name), logger.DEBUG)
+                            logger.log(u"No results found. The tvdb id: %s do not exist on provider" % ep_indexerid, logger.WARNING)
                             raise GetOutOfLoop
                         if re.search('Invalid token. Use get_token for a new one!', data):
-                            logger.log(u'{name} Invalid token, retrieving new token'.format(name=self.name), logger.DEBUG)
+                            logger.log(u"Invalid token, retrieving new token", logger.DEBUG)
                             retry = retry - 1
                             self.token = None
                             self.tokenExpireDate = None
                             if not self._doLogin():
-                                logger.log(u'{name} Failed retrieving new token'.format(name=self.name), logger.DEBUG)
+                                logger.log(u"Failed retrieving new token", logger.DEBUG)
                                 return results
-                            logger.log(u'{name} Using new token'.format(name=self.name), logger.DEBUG)
+                            logger.log(u"Using new token", logger.DEBUG)
                             continue
                         if re.search('<div id="error">.*</div>', data):
-                            logger.log(u'{name} {proxy} does not support https.'.format(name=self.name, proxy=self.proxy.getProxyURL()), logger.DEBUG)
+                            logger.log(u"Proxy %s does not support https" % self.proxy.getProxyURL(), logger.DEBUG)
                             searchURL = searchURL.replace(u'https', 'http')
                             continue
 
                         #No error found break
                         break
                     else:
-                        logger.log(u'{name} Retried 3 times without getting results.'.format(name=self.name), logger.DEBUG)
+                        logger.log(u"Retried 3 times without getting results", logger.DEBUG)
                         continue
                 except GetOutOfLoop:
                     continue
 
                 try:
-                    data = re.search('\[\{\"filename\".*\}\]', data)
+                    data = re.search(r'\[\{\"title\".*\}\]', data)
                     if data is not None:
                         data_json = json.loads(data.group())
                     else:
                         data_json = {}
                 except Exception as e:
-                    logger.log(u'{name} json load failed: {traceback_info}'.format(name=self.name, traceback_info=traceback.format_exc()), logger.DEBUG)
-                    logger.log(u'{name} json load failed. Data dump = {data}'.format(name=self.name, data=data), logger.DEBUG)
-                    logger.log(u'{name} json load failed.'.format(name=self.name), logger.ERROR)
+                    logger.log(u"JSON load failed: %s" % traceback.format_exc(), logger.ERROR)
+                    logger.log(u"JSON load failed. Data dump: %s" % data, logger.DEBUG)
                     continue
 
                 try:
                     for item in data_json:
                         try:
-                            torrent_title = item['filename']
-                            torrent_download = item['download']
-                            if torrent_title and torrent_download:
-                                items[mode].append((torrent_title, torrent_download))
-                            else:
-                                logger.log(u'{name} skipping invalid result'.format(name=self.name), logger.DEBUG)
+                            title = item['title']
+                            download_url = item['download']
+                            size = item['size']
+                            seeders = item['seeders']
+                            leechers = item['leechers']
+                            pubdate = item['pubdate']
+                            
+                            if not all([title, download_url]):
+                                continue
+
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
+                            items[mode].append(item)
+
                         except Exception:
-                            logger.log(u'{name} skipping invalid result: {traceback_info}'.format(name=self.name, traceback_info=traceback.format_exc()), logger.DEBUG)
+                            logger.log(u"Skipping invalid result. JSON item: %s" % item, logger.DEBUG)
+ 
                 except Exception:
-                    logger.log(u'{name} failed parsing data: {traceback_info}'.format(name=self.name, traceback_info=traceback.format_exc()), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+
+            # For each search mode sort all the items by seeders
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-        """
-        Retrieves the title and URL data from the item XML node
-
-        item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
-
-        Returns: A tuple containing two strings representing title and URL respectively
-        """
-
-        title, url = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return title, url
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index 76722a8c06cdff179ea0d7661ed964520b0f9c78..cc3fc7c75f0e7cb1f59440bb40fd698b372cf844 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -1,6 +1,6 @@
 # Author: Mr_Orange
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -144,9 +144,9 @@ class TorrentRssProvider(generic.TorrentProvider):
             fileOut.close()
             helpers.chmodAsParent(dumpName)
         except IOError, e:
-            logger.log("Unable to save the file: " + ex(e), logger.ERROR)
+            logger.log("Unable to save the file: %s " % repr(e), logger.ERROR)
             return False
-        logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.INFO)
+        logger.log(u"Saved custom_torrent html dump %s " % dumpName, logger.INFO)
         return True
 
     def seedRatio(self):
@@ -159,7 +159,7 @@ class TorrentRssCache(tvcache.TVCache):
         self.minTime = 15
 
     def _getRSSData(self):
-        logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
+        logger.log(u"Cache update URL: %s" % self.provider.url, logger.DEBUG)
 
         if self.provider.cookies:
             self.provider.headers.update({'Cookie': self.provider.cookies})
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index f71151790b156fa06be28ced2e4b420bd2d662f8..5b54e930eeed747c02caa2e1a4f9684ca7d69864 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -2,7 +2,7 @@
 # Modified by jkaberg, https://github.com/jkaberg for SceneAccess
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -71,14 +71,6 @@ class SCCProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'scc.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         login_params = {'username': self.username,
@@ -89,73 +81,31 @@ class SCCProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Username or password incorrect', response) \
                 or re.search('<title>SceneAccess \| Login</title>', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_strings = []
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                sp_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                sp_string = show_name + ' %d' % ep_obj.scene_absolute_number
-            else:
-                sp_string = show_name + ' S%02d' % int(ep_obj.scene_season)
-
-            search_strings.append(sp_string)
-
-        return search_strings
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_strings = []
-
-        if not ep_obj:
-            return []
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if self.show.air_by_date:
-                ep_string = sanitizeSceneName(show_name) + ' ' + str(ep_obj.airdate).replace('-', '.')
-            elif self.show.sports:
-                ep_string = sanitizeSceneName(show_name) + ' ' + str(ep_obj.airdate).replace('-', '.') + '|' + \
-                        ep_obj.airdate.strftime('%b')
-            elif self.show.anime:
-                ep_string = sanitizeSceneName(show_name) + ' %i' % int(ep_obj.scene_absolute_number)
-            else:
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                        sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode}
-
-            if len(add_string):
-                ep_string += ' %s' % add_string
-
-            search_strings.append(ep_string)
-
-        return search_strings
-
     def _isSection(self, section, text):
         title = '<title>.+? \| %s</title>' % section
         return re.search(title, text, re.IGNORECASE)
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
-
+        #FIXME ADD MODE
         results = data = []
 
         if not self._doLogin():
             return results
 
         for search_string in [search_params]:
-
-            if isinstance(search_string, unicode):
-                search_string = unidecode(search_string)
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
+            if mode != 'RSS':
+                logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
             searchURLS = []
             if search_mode == 'sponly':
@@ -166,12 +116,12 @@ class SCCProvider(generic.TorrentProvider):
                 searchURLS += [self.urls['foreign'] % (urllib.quote(search_string))]
 
             for searchURL in searchURLS:
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
                 try:
+                    logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
                     data = self.getURL(searchURL)
                     time.sleep(cpu_presets[sickbeard.CPU_PRESET])
                 except Exception as e:
-                    logger.log(u"Unable to fetch data reason: {0}".format(str(e)), logger.WARNING)
+                    logger.log(u"Unable to fetch data. Error: %s" % repr(e), logger.WARNING)
 
                 if not data:
                     continue
@@ -182,10 +132,7 @@ class SCCProvider(generic.TorrentProvider):
 
                     #Continue only if at least one Release is found
                     if len(torrent_rows) < 2:
-                        info = u'The Data returned from %s does not contain any torrent' % self.name
-                        if html.title:
-                            info += ' (%s)' % html.title
-                        logger.log(info, logger.DEBUG)
+                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                         continue
 
                     for result in torrent_table.find_all('tr')[1:]:
@@ -209,34 +156,28 @@ class SCCProvider(generic.TorrentProvider):
                             id = int(link['href'].replace('details?id=', ''))
                             seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
                             leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
+                            #FIXME
+                            size = -1
                         except (AttributeError, TypeError):
                             continue
 
-                        if not title or not download_url or seeders < self.minseed or leechers < self.minleech:
-                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                        if not all([title, download_url]):
                             continue
 
-                        item = title, download_url, id, seeders, leechers
-                        logger.log(u"Found result: " + title.replace(' ','.') + " (" + searchURL + ")", logger.DEBUG)
-
-                        results.append(item)
+                        #Filter unseeded torrent
+                        if seeders < self.minseed or leechers < self.minleech:
+                            if mode != 'RSS':
+                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            continue
 
-        results.sort(key=lambda tup: tup[3], reverse=True)
+                        item = title, download_url, size, seeders, leechers
+                        if mode != 'RSS':
+                            logger.log(u"Found result: %s " % title, logger.DEBUG)
 
+                        results.append(item)
+        #FIX ME SORTING
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py
index bad45bd4f3a23811445a352e20e1613cf884d515..f8347aea936b19588d0a2df3a422e706af3e1a4a 100644
--- a/sickbeard/providers/scenetime.py
+++ b/sickbeard/providers/scenetime.py
@@ -1,7 +1,7 @@
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage.  
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -66,14 +66,6 @@ class SceneTimeProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'scenetime.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         login_params = {'username': self.username,
@@ -82,63 +74,15 @@ class SceneTimeProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Username or password incorrect', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -148,14 +92,14 @@ class SceneTimeProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
-
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
 
                 data = self.getURL(searchURL)
                 if not data:
@@ -168,8 +112,7 @@ class SceneTimeProvider(generic.TorrentProvider):
 
                         #Continue only if one Release is found
                         if len(torrent_rows) < 2:
-                            logger.log(u"The Data returned from %s does not contain any torrent links" % self.name,
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         # Scenetime apparently uses different number of cells in #torrenttable based
@@ -195,45 +138,37 @@ class SceneTimeProvider(generic.TorrentProvider):
                                 id = int(torrent_id)
                                 seeders = int(cells[labels.index('Seeders')].get_text())
                                 leechers = int(cells[labels.index('Leechers')].get_text())
+                                #FIXME
+                                size = -1
 
                             except (AttributeError, TypeError):
                                 continue
 
-                            #Filter unseeded torrent
-                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not download_url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            item = title, download_url, id, seeders, leechers
-                            logger.log(u"Found result: " + title.replace(' ','.') + " (" + searchURL + ")", logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py
index 282c706e63c593c70ab8e31c5d4da9599afefa74..6ec17fe9d3ebbe62436a359172c2407e55889372 100644
--- a/sickbeard/providers/shazbat.py
+++ b/sickbeard/providers/shazbat.py
@@ -1,7 +1,7 @@
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -45,9 +45,6 @@ class ShazbatProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'shazbat.png'
-
     def _checkAuth(self):
         if not self.passkey:
             raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
@@ -58,9 +55,7 @@ class ShazbatProvider(generic.TorrentProvider):
         if not self.passkey:
             self._checkAuth()
         elif not (data['entries'] and data['feed']):
-            logger.log(u"Incorrect authentication credentials for " + self.name, logger.DEBUG)
-            raise AuthException(
-                u"Your authentication credentials for " + self.name + " are incorrect, check your config")
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
 
         return True
 
@@ -78,7 +73,7 @@ class ShazbatCache(tvcache.TVCache):
     def _getRSSData(self):
 
         rss_url = self.provider.urls['base_url'] + 'rss/recent?passkey=' + provider.passkey + '&fname=true'
-        logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
+        logger.log(u"Cache update URL: %s" % rss_url, logger.DEBUG)
 
         return self.getRSSFeed(rss_url, items=['entries', 'feed'])
 
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index a57b7e430a73cb75d92899338a29819a784b7193..85226ef805b7ce9608fefec32149b43a648d05f1 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -1,7 +1,7 @@
 # Author: Mr_Orange
 # URL: https://github.com/mr-orange/Sick-Beard
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -64,14 +64,6 @@ class SpeedCDProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'speedcd.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         login_params = {'username': self.username,
@@ -80,64 +72,15 @@ class SpeedCDProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.',logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Incorrect username or Password. Please try again.', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        #If Every episode in Season is a wanted Episode then search for Season first
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -147,9 +90,11 @@ class SpeedCDProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                logger.log(u"Search string: " + search_string, logger.DEBUG)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 search_string = '+'.join(search_string.split())
 
@@ -171,39 +116,34 @@ class SpeedCDProvider(generic.TorrentProvider):
                         continue
 
                     title = re.sub('<[^>]*>', '', torrent['name'])
-                    url = self.urls['download'] % (torrent['id'])
+                    download_url = self.urls['download'] % (torrent['id'])
                     seeders = int(torrent['seed'])
                     leechers = int(torrent['leech'])
+                    #FIXME
+                    size = -1
 
-                    if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                    if not all([title, download_url]):
                         continue
 
-                    if not title or not url:
+                    #Filter unseeded torrent
+                    if seeders < self.minseed or leechers < self.minleech:
+                        if mode != 'RSS':
+                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                         continue
 
-                    item = title, url, seeders, leechers
+                    item = title, download_url, size, seeders, leechers
+                    if mode != 'RSS':
+                        logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                     items[mode].append(item)
 
-            #For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/strike.py b/sickbeard/providers/strike.py
index a4af145aa9e928dd5bfe3a3218fc956fc42fb436..de6f8a342cfcb7fb32e943786eb23aadbff2217b 100644
--- a/sickbeard/providers/strike.py
+++ b/sickbeard/providers/strike.py
@@ -1,7 +1,7 @@
 # Author: matigonkas
 # URL: https://github.com/SiCKRAGETV/sickrage
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -22,8 +22,6 @@ import generic
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard import show_name_helpers
-from sickbeard import db
-from sickbeard.common import WANTED
 from sickbeard.config import naming_ep_type
 from sickbeard.helpers import sanitizeSceneName
 
@@ -38,118 +36,57 @@ class STRIKEProvider(generic.TorrentProvider):
 
         self.cache = StrikeCache(self)
         self.minseed, self.minleech = 2 * [None]
-		
+
     def isEnabled(self):
         return self.enabled
 
+    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
-    def imageName(self):
-        return 'getstrike.png'
-
-
-    def _get_airbydate_season_range(self, season):
-        if season == None:
-            return ()
-        year, month = map(int, season.split('-'))
-        min_date = datetime.date(year, month, 1)
-        if month == 12:
-            max_date = datetime.date(year, month, 31)
-        else:
-            max_date = datetime.date(year, month+1, 1) -  datetime.timedelta(days=1)
-        return (min_date, max_date)
-
-
-    def _get_season_search_strings(self, show, season=None):
-        search_string = []
-
-        if not (show and season):
-            return []
-
-        myDB = db.DBConnection()
-
-        if show.air_by_date:
-            (min_date, max_date) = self._get_airbydate_season_range(season)
-            sqlResults = myDB.select("SELECT DISTINCT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ? AND status = ?", [show.tvdbid,  min_date.toordinal(), max_date.toordinal(), WANTED])
-        else:
-            sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? AND season = ? AND status = ?", [show.tvdbid, season, WANTED])
-
-        for sqlEp in sqlResults:
-            for show_name in set(show_name_helpers.allPossibleShowNames(show)):
-                if show.air_by_date:
-                    ep_string = sanitizeSceneName(show_name) +' '+ str(datetime.date.fromordinal(sqlEp["airdate"])).replace('-', '.')
-                    search_string.append(ep_string)
-                else:
-                    ep_string = sanitizeSceneName(show_name) + ' S%02d' % sqlEp["season"]
-                    search_string.append(ep_string)
-
-        return search_string
-
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        if not ep_obj:
-            return []
-
-        search_string = []
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-            ep_string = sanitizeSceneName(show_name)
-            if ep_obj.show.air_by_date:
-                ep_string += ' ' + str(ep_obj.airdate).replace('-', '.')
-            else:
-                ep_string += ' ' + naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
-
-            if len(add_string):
-                ep_string += ' %s' % add_string
-
-            search_string.append(ep_string)
-
-        return search_string
-
-
-    def _get_title_and_url(self, item):
-        title, url, size = item
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
+        results = []
+        items = {'Season': [], 'Episode': [], 'RSS': []}
 
+        for mode in search_strings.keys(): #Mode = RSS, Season, Episode
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
+            for search_string in search_strings[mode]:
 
-    def _get_size(self, item):
-        title, url, size = item
-        logger.log(u'Size: %s' % size, logger.DEBUG)
+                if mode != 'RSS':
+                    logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)
 
-        return size
+                searchURL = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
+                jdata = self.getURL(searchURL, json=True)
+                if not jdata:
+                    logger.log("No data returned from provider", logger.DEBUG)
+                    continue
 
+
 
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+                for item in jdata['torrents']:
+                    seeders = ('seeds' in item and item['seeds']) or 0
+                    leechers = ('leeches' in item and item['leeches']) or 0
+                    title = ('torrent_title' in item and item['torrent_title']) or ''
+                    size = ('size' in item and item['size']) or 0
+                    download_url = ('magnet_uri' in item and item['magnet_uri']) or ''
 
-        logger.log("Performing Search: {0}".format(search_params))
+                    if not all([title, download_url]):
+                        continue
 
-        searchUrl = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_params
+                    #Filter unseeded torrent
+                    if seeders < self.minseed or leechers < self.minleech:
+                        if mode != 'RSS':
+                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                        continue
 
-        jdata = self.getURL(searchUrl, json=True)
-        if not jdata:
-            logger.log("No data returned to be parsed!!!")
-            return []
+                    if mode != 'RSS':
+                        logger.log(u"Found result: %s " % title, logger.DEBUG)
 
-        logger.log("URL to be parsed: " + searchUrl, logger.DEBUG)
+                    item = title, download_url, size, seeders, leechers
+                    items[mode].append(item)
 
-        results = []
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
-        for item in jdata['torrents']:
-            seeders = ('seeds' in item and item['seeds']) or 0
-            leechers = ('leeches' in item and item['leeches']) or 0
-            name = ('torrent_title' in item and item['torrent_title']) or ''
-            if seeders < self.minseed or leechers < self.minleech:
-                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(name, seeders, leechers), logger.DEBUG)
-                continue
-            magnet = ('magnet_uri' in item and item['magnet_uri']) or ''
-            if name and magnet:
-                results.append((name, magnet, seeders))
+            results += items[mode]
 
         return results
 
diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py
index a4c7a8787c5c044c6777abc259dabafdc75855ad..b02bd94d318aa914f7164ee5ee648b16944eb590 100644
--- a/sickbeard/providers/t411.py
+++ b/sickbeard/providers/t411.py
@@ -2,7 +2,7 @@
 # Author: djoole <bobby.djoole@gmail.com>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage. 
 #
 # Sick Beard is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -63,28 +63,18 @@ class T411Provider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 't411.png'
-
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         if self.token is not None:
             if time.time() < (self.tokenLastUpdate + 30 * 60):
-                logger.log('T411 Authentication token is still valid', logger.DEBUG)
                 return True
 
         login_params = {'username': self.username,
                         'password': self.password}
 
-        logger.log('Performing authentication to T411', logger.DEBUG)
-
         response = self.getURL(self.urls['login_page'], post_data=login_params, timeout=30, json=True)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.WARNING)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if response and 'token' in response:
@@ -92,57 +82,13 @@ class T411Provider(generic.TorrentProvider):
             self.tokenLastUpdate = time.time()
             self.uid = response['uid'].encode('ascii', 'ignore')
             self.session.auth = T411Auth(self.token)
-            logger.log('Using T411 Authorization token : ' + self.token, logger.DEBUG)
             return True
         else:
-            logger.log('T411 token not found in authentication response', logger.WARNING)
+            logger.log(u"Token not found in authentication response", logger.WARNING)
             return False
 
-    def _get_season_search_strings(self, ep_obj):
-        search_string = {'Season': []}
-        if not ep_obj:
-            return [search_string]
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-        search_string = {'Episode': []}
-        if not ep_obj:
-            return [search_string]
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            ep_string = sanitizeSceneName(show_name) + '.'
-            if self.show.air_by_date:
-                ep_string += str(ep_obj.airdate).replace('-', '|')
-            elif self.show.sports:
-                ep_string += str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-            elif self.show.anime:
-                ep_string += "%i" % int(ep_obj.scene_absolute_number)
-            else:
-                 ep_string += sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                   'episodenumber': ep_obj.scene_episode}
-
-            if add_string:
-                ep_string += ' %s' % add_string
-            search_string['Episode'].append(re.sub('\s+', '.', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
-        logger.log(u"_doSearch started with ..." + str(search_params), logger.DEBUG)
-
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
@@ -150,66 +96,68 @@ class T411Provider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
-
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 for sc in self.subcategories:
                     searchURL = self.urls['search'] % (search_string, sc)
-                    logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
-
+                    logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
                     data = self.getURL(searchURL, json=True)
                     if not data:
                         continue
                     try:
 
                         if 'torrents' not in data:
-                            logger.log(
-                                u"The Data returned from " + self.name + " do not contains any torrent : " + str(data),
-                                logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         torrents = data['torrents']
 
                         if not torrents:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         for torrent in torrents:
                             try:
-                                torrent_name = torrent['name']
+                                title = torrent['name']
                                 torrent_id = torrent['id']
-                                torrent_download_url = (self.urls['download'] % torrent_id).encode('utf8')
+                                download_url = (self.urls['download'] % torrent_id).encode('utf8')
+                                #FIXME
+                                size = -1
+                                seeders = 1
+                                leechers = 0
 
-                                if not torrent_name or not torrent_download_url:
+                                if not all([title, download_url]):
                                     continue
 
-                                item = torrent_name, torrent_download_url
-                                logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",
-                                           logger.DEBUG)
+                                #Filter unseeded torrent
+                                #if seeders < self.minseed or leechers < self.minleech:
+                                #    if mode != 'RSS':
+                                #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                #    continue
+
+                                item = title, download_url, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                                 items[mode].append(item)
+
                             except Exception as e:
-                                logger.log(u"Invalid torrent data, skipping results: {0}".format(str(torrent)), logger.DEBUG)
+                                logger.log(u"Invalid torrent data, skipping result: %s" % torrent, logger.DEBUG)
                                 continue
 
                     except Exception, e:
-                        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
-                                   logger.ERROR)
-            results += items[mode]
-
-        return results
+                        logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-    def _get_title_and_url(self, item):
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
-        title, url = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
+            results += items[mode]
 
-        return title, url
+        return results
 
     def findPropers(self, search_date=datetime.datetime.today()):
 
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 401b4331e0099e001842254b199595a26a3000ff..0a4e76ace9901f4321ab3cb0980045af7ecfcdeb 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -20,6 +20,7 @@ from __future__ import with_statement
 
 import re
 import datetime
+from urllib import urlencode
 
 import sickbeard
 from sickbeard.providers import generic
@@ -75,60 +76,24 @@ class ThePirateBayProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_strings = {'Season': []}
-        for show_name in set(allPossibleShowNames(ep_obj.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-                search_strings['Season'].append(ep_string)
-                ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' %02d' % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)
-                search_strings['Season'].append(ep_string)
-                ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*'
-
-            search_strings['Season'].append(ep_string)
-
-        return [search_strings]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_strings = {'Episode': []}
-        for show_name in set(allPossibleShowNames(ep_obj.show)):
-            ep_string = sanitizeSceneName(show_name) + ' '
-            if ep_obj.show.air_by_date:
-                ep_string += str(ep_obj.airdate).replace('-', ' ')
-            elif ep_obj.show.sports:
-                ep_string += str(ep_obj.airdate).replace('-', '|') + '|' + ep_obj.airdate.strftime('%b')
-            elif ep_obj.show.anime:
-                ep_string += "%02i" % int(ep_obj.scene_absolute_number)
-            else:
-                ep_string += sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                   'episodenumber': ep_obj.scene_episode} + '|' + \
-                sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season,
-                                                      'episodenumber': ep_obj.scene_episode}
-
-            if add_string:
-                ep_string += ' %s' % add_string
-
-            search_strings['Episode'].append(re.sub(r'\s+', ' ', ep_string).strip())
-
-        return [search_strings]
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
+
                 self.search_params.update({'q': search_string.strip()})
-                logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)
 
-                data = self.getURL(self.urls[('search', 'rss')[mode == 'RSS']], params=self.search_params)
+                if mode != 'RSS':
+                    logger.log(u"Search string: " + search_string, logger.DEBUG)
+
+                searchURL = self.urls[('search', 'rss')[mode == 'RSS']] + '?' + urlencode(self.search_params)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                data = self.getURL(searchURL)
+                #data = self.getURL(self.urls[('search', 'rss')[mode == 'RSS']], params=self.search_params)
                 if not data:
                     continue
 
@@ -136,32 +101,33 @@ class ThePirateBayProvider(generic.TorrentProvider):
                 matches = re.compile(re_title_url, re.DOTALL).finditer(data)
                 for torrent in matches:
                     title = torrent.group('title')
-                    url = torrent.group('url')
+                    download_url = torrent.group('url')
                     #id = int(torrent.group('id'))
                     size = self._convertSize(torrent.group('size'))
                     seeders = int(torrent.group('seeders'))
                     leechers = int(torrent.group('leechers'))
 
-                    # Continue before we check if we need to log anything,
-                    # if there is no url or title.
-                    if not title or not url:
+                    if not all([title, download_url]):
                         continue
 
                     #Filter unseeded torrent
-                    if not seeders or seeders < self.minseed or leechers < self.minleech:
-                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                    if seeders < self.minseed or leechers < self.minleech:
+                        if mode != 'RSS':
+                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                         continue
 
                     #Accept Torrent only from Good People for every Episode Search
-                    if self.confirmed and re.search(r'(VIP|Trusted|Helper|Moderator)', torrent.group(0)) is None:
-                        logger.log(u"ThePirateBay Provider found result " + title + " but that doesn't seem like a trusted result so I'm ignoring it", logger.DEBUG)
+                    if self.confirmed and re.search(r'(VIP|Trusted|Helper|Moderator)', torrent.group(0)) is None and mode != 'RSS':
+                        logger.log(u"Found result %s but that doesn't seem like a trusted result so I'm ignoring it" % title, logger.DEBUG)
                         continue
 
-                    item = title, url, size, seeders, leechers
+                    item = title, download_url, size, seeders, leechers
+                    if mode != 'RSS':
+                        logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                     items[mode].append(item)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
@@ -181,23 +147,6 @@ class ThePirateBayProvider(generic.TorrentProvider):
             size = size * 1024**4
         return size
 
-    def _get_size(self, item):
-        # pylint: disable=W0612
-        title, url, size, seeders, leechers = item
-        return size
-
-    def _get_title_and_url(self, item):
-        # pylint: disable=W0612
-        title, url, size, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = url.replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()-datetime.timedelta(days=1)):
 
         results = []
diff --git a/sickbeard/providers/titansoftv.py b/sickbeard/providers/titansoftv.py
index 0f1fa667de6b9b01e49688ecdaaf71978f3d8d4f..480802de02ae7c56730aef1c399a4eecf59e1413 100644
--- a/sickbeard/providers/titansoftv.py
+++ b/sickbeard/providers/titansoftv.py
@@ -2,7 +2,7 @@
 # URL: http://code.google.com/p/sickbeard
 # Originally written for SickGear
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -41,9 +41,6 @@ class TitansOfTVProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'titansoftv.png'
-
     def seedRatio(self):
         return self.ratio
 
@@ -56,14 +53,12 @@ class TitansOfTVProvider(generic.TorrentProvider):
     def _checkAuthFromData(self, data):
 
         if 'error' in data:
-            logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + data['error'],
-                       logger.DEBUG)
-            raise AuthException(
-                'Your authentication credentials for ' + self.name + ' are incorrect, check your config.')
+            logger.log(u"Invalid api key. Check your settings", logger.WARNING)
 
         return True
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+        #FIXME ADD MODE
         self._checkAuth()
         results = []
         params = {}
@@ -72,13 +67,14 @@ class TitansOfTVProvider(generic.TorrentProvider):
         if search_params:
             params.update(search_params)
 
-        search_url = self.url + '?' + urllib.urlencode(params)
-        logger.log(u'Search url: %s' % search_url)
+        searchURL = self.url + '?' + urllib.urlencode(params)
+        logger.log(u"Search string: %s " % search_params, logger.DEBUG)
+        logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
 
-        parsedJSON = self.getURL(search_url, json=True)  # do search
+        parsedJSON = self.getURL(searchURL, json=True)  # do search
 
         if not parsedJSON:
-            logger.log(u'No data returned from ' + self.name, logger.ERROR)
+            logger.log("No data returned from provider", logger.DEBUG)
             return results
 
         if self._checkAuthFromData(parsedJSON):
@@ -89,20 +85,31 @@ class TitansOfTVProvider(generic.TorrentProvider):
                 found_torrents = {}
 
             for result in found_torrents:
-                (title, url) = self._get_title_and_url(result)
+                title = result['release_name']
+                id = result['id']
+                download_url = self.download_url % (id, self.api_key)
+                #FIXME
+                size = -1
+                seeders = 1
+                leechers = 0
 
-                if title and url:
-                    results.append(result)
+                if not all([title, download_url]):
+                    continue
 
-        return results
+                #Filter unseeded torrent
+                #if seeders < self.minseed or leechers < self.minleech:
+                #    if mode != 'RSS':
+                #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                #    continue
 
-    def _get_title_and_url(self, parsedJSON):
+                item = title, download_url, size, seeders, leechers
 
-        title = parsedJSON['release_name']
-        id = parsedJSON['id']
-        url = self.download_url % (id, self.api_key)
+                logger.log(u"Found result: %s " % title, logger.DEBUG)
+                results.append(item)
 
-        return title, url
+        #FIXME SORTING
+
+        return results
 
     def _get_season_search_strings(self, ep_obj):
         search_params = {'limit': 100}
diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py
index ffdb9f84bac8241f55b6097ea1e41d46807dd4e5..cbe223c257e8ebf578a659e34c4060d4904c24bd 100644
--- a/sickbeard/providers/tntvillage.py
+++ b/sickbeard/providers/tntvillage.py
@@ -1,7 +1,7 @@
 # Author: Giovanni Borri
 # Modified by gborri, https://github.com/gborri for TNTVillage
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -127,14 +127,6 @@ class TNTVillageProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'tntvillage.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _checkAuth(self):
 
         if not self.username or not self.password:
@@ -152,62 +144,16 @@ class TNTVillageProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Sono stati riscontrati i seguenti errori', response) \
         or re.search('<title>Connettiti</title>', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-            search_string['Season'].append(ep_string)
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _reverseQuality(self, quality):
 
         quality_string = ''
@@ -246,11 +192,11 @@ class TNTVillageProvider(generic.TorrentProvider):
                 try:
                     file_quality = file_quality + " " + img_type['src'].replace("style_images/mkportal-636/","").replace(".gif","").replace(".png","")
                 except Exception:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: "  + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing quality. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
         else:
             file_quality = (torrent_rows.find_all('td'))[1].get_text()
-            logger.log(u"Episode quality: " + str(file_quality), logger.DEBUG)
+            logger.log(u"Episode quality: %s" % file_quality, logger.DEBUG)
 
         checkName = lambda list, func: func([re.search(x, file_quality, re.I) for x in list])
 
@@ -265,9 +211,6 @@ class TNTVillageProvider(generic.TorrentProvider):
 
         webdl = checkName(["webdl", "webmux", "webrip", "dl-webmux", "web-dlmux", "webdl-mux", "web-dl", "webdlmux", "dlmux"], any)
 
-        logger.log(u"Episode options: dvdOptions: " + str(dvdOptions) + ", bluRayOptions: " + str(bluRayOptions) + \
-                   ", sdOptions: " + str(sdOptions) + ", hdOptions: " + str(hdOptions) + ", fullHD: " + str(fullHD) + ", webdl: " + str(webdl), logger.DEBUG)
-
         if sdOptions and not dvdOptions and not fullHD and not hdOptions:
             return Quality.SDTV
         elif dvdOptions:
@@ -317,10 +260,10 @@ class TNTVillageProvider(generic.TorrentProvider):
             myParser = NameParser(tryIndexers=True, trySceneExceptions=True)
             parse_result = myParser.parse(name)
         except InvalidNameException:
-            logger.log(u"Unable to parse the filename " + str(name) + " into a valid episode", logger.DEBUG)
+            logger.log(u"Unable to parse the filename %s into a valid episode" % name, logger.DEBUG)
             return False
         except InvalidShowException:
-            logger.log(u"Unable to parse the filename " + str(name) + " into a valid show", logger.DEBUG)
+            logger.log(u"Unable to parse the filename %s into a valid show" % name, logger.DEBUG)
             return False
 
         myDB = db.DBConnection()
@@ -340,11 +283,9 @@ class TNTVillageProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
                 if mode == 'RSS':
                     self.page = 2
 
@@ -366,11 +307,13 @@ class TNTVillageProvider(generic.TorrentProvider):
                     else:
                         searchURL = self.urls['search_page'].format(z,self.categories)
 
-                    logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                    if mode != 'RSS':
+                        logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
+                    logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
                     data = self.getURL(searchURL)
                     if not data:
-                        logger.log(u"Received no data from the server", logger.DEBUG)
+                        logger.log("No data returned from provider", logger.DEBUG)
                         continue
 
                     try:
@@ -380,12 +323,10 @@ class TNTVillageProvider(generic.TorrentProvider):
 
                             #Continue only if one Release is found
                             if len(torrent_rows)<3:
-                                logger.log(u"The server returned no torrents", logger.DEBUG)
+                                logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                                 last_page=1
                                 continue
 
-                            logger.log(u"Parsing results from page " + str(x+1), logger.DEBUG)
-
                             if len(torrent_rows) < 42:
                                 last_page=1
 
@@ -400,14 +341,18 @@ class TNTVillageProvider(generic.TorrentProvider):
                                     leechers = int(leechers.strip('[]'))
                                     seeders = result.find_all('td')[3].find_all('td')[2].text
                                     seeders = int(seeders.strip('[]'))
+                                    #FIXME
+                                    size = -1
                                 except (AttributeError, TypeError):
                                     continue
 
-                                if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                if not all([title, download_url]):
                                     continue
 
-                                if not title or not download_url:
+                                #Filter unseeded torrent
+                                if seeders < self.minseed or leechers < self.minleech:
+                                    if mode != 'RSS':
+                                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                     continue
 
                                 filename_qt = self._reverseQuality(self._episodeQuality(result))
@@ -421,7 +366,7 @@ class TNTVillageProvider(generic.TorrentProvider):
                                     title += filename_qt
 
                                 if not self._is_italian(result) and not self.subtitle:
-                                    logger.log(u"Subtitled, skipping "  + title + "(" + searchURL + ")", logger.DEBUG)
+                                    logger.log(u"Torrent is subtitled, skipping: %s "  % title, logger.DEBUG)
                                     continue
 
                                 search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0]
@@ -432,39 +377,27 @@ class TNTVillageProvider(generic.TorrentProvider):
                                     ep_params = title[rindex.start():]
                                 if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower():
                                     new_title = search_show + ep_params
-                                    logger.log(u"WARNING - Changing found title from: " + title + " to: " + new_title, logger.DEBUG)
                                     title = new_title
 
                                 if self._is_season_pack(title):
                                     title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title)
 
-                                item = title, download_url, id, seeders, leechers
-                                logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+                                item = title, download_url, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                                 items[mode].append(item)
 
                     except Exception:
-                        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                        logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-                #For each search mode sort all the items by seeders
+                #For each search mode sort all the items by seeders if available
                 items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
                 results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py
index c4266af2d6681e14975bb0c42cee83eef9740103..b488dbf71d942bacc10f41c7bf6a0e8d7398ba4a 100644
--- a/sickbeard/providers/tokyotoshokan.py
+++ b/sickbeard/providers/tokyotoshokan.py
@@ -1,7 +1,7 @@
 # Author: Mr_Orange
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -49,29 +49,9 @@ class TokyoToshokanProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'tokyotoshokan.png'
-
-    def _get_title_and_url(self, item):
-
-        title, url = item
-
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
-
-        if url:
-            url = url.replace('&amp;', '&')
-
-        return (title, url)
-
     def seedRatio(self):
         return self.ratio
 
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
         return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch, downCurQuality)
 
@@ -82,21 +62,21 @@ class TokyoToshokanProvider(generic.TorrentProvider):
         return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
 
     def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
+        #FIXME ADD MODE
         if self.show and not self.show.is_anime:
-            logger.log(u"" + str(self.show.name) + " is not an anime skiping " + str(self.name))
             return []
 
+        logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
         params = {
             "terms": search_string.encode('utf-8'),
             "type": 1, # get anime types
         }
 
         searchURL = self.url + 'search.php?' + urllib.urlencode(params)
-
+        logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
         data = self.getURL(searchURL)
 
-        logger.log(u"Search string: " + searchURL, logger.DEBUG)
-
         if not data:
             return []
 
@@ -110,21 +90,33 @@ class TokyoToshokanProvider(generic.TorrentProvider):
                         a = 1
                     else:
                         a = 0
-    
+
                     for top, bottom in zip(torrent_rows[a::2], torrent_rows[a::2]):
                         title = top.find('td', attrs={'class': 'desc-top'}).text
-                        url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
-    
-                        if not title or not url:
+                        title = title.lstrip()
+                        download_url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
+                        #FIXME
+                        size = -1
+                        seeders = 1
+                        leechers = 0
+
+                        if not all([title, download_url]):
+                            continue
+
+                        #Filter unseeded torrent
+                        if seeders < self.minseed or leechers < self.minleech:
+                            if search_mode != 'RSS':
+                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                             continue
-    
-                        item = title.lstrip(), url
+
+                        item = title, download_url, size, seeders, leechers
+
                         results.append(item)
 
         except Exception, e:
-            logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
-
+            logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
+        #FIXME SORTING
         return results
 
 
@@ -142,7 +134,7 @@ class TokyoToshokanCache(tvcache.TVCache):
 
         url = self.provider.url + 'rss.php?' + urllib.urlencode(params)
 
-        logger.log(u"TokyoToshokan cache update URL: " + url, logger.DEBUG)
+        logger.log(u"Cache update URL: %s" % url, logger.DEBUG)
 
         return self.getRSSFeed(url)
 
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index 3170ed9ea7f28300b43de40fd6525460c85a4763..2680da69bd6d548364f4cb1e072dadba6c124bb7 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -1,7 +1,7 @@
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -66,14 +66,6 @@ class TorrentBytesProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'torrentbytes.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         login_params = {'username': self.username,
@@ -83,63 +75,15 @@ class TorrentBytesProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Username or password incorrect', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -149,14 +93,14 @@ class TorrentBytesProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
-
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" %  searchURL, logger.DEBUG) 
 
                 data = self.getURL(searchURL)
                 if not data:
@@ -169,8 +113,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
 
                         #Continue only if one Release is found
                         if len(torrent_rows) < 2:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         for result in torrent_rows[1:]:
@@ -190,44 +133,36 @@ class TorrentBytesProvider(generic.TorrentProvider):
                                 id = int(torrent_id)
                                 seeders = int(cells[8].find('span').contents[0])
                                 leechers = int(cells[9].find('span').contents[0])
+                                #FIXME
+                                size = -1
                             except (AttributeError, TypeError):
                                 continue
 
-                            #Filter unseeded torrent
-                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not download_url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            item = title, download_url, id, seeders, leechers
-                            logger.log(u"Found result: " + title.replace(' ','.') + " (" + searchURL + ")", logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 45e7119e40bc0e6ae4fe88037d2bc73edf42205e..b740aef6958d9d2ad7afd653c1491bce845e3ff1 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -1,6 +1,6 @@
 # Author: Mr_Orange <mr_orange@hotmail.it>
 #
-# This file is part of SickRage.
+# This file is part of SickRage. 
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -67,14 +67,6 @@ class TorrentDayProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'torrentday.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
@@ -92,11 +84,11 @@ class TorrentDayProvider(generic.TorrentProvider):
 
             response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
             if not response:
-                logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+                logger.log(u"Unable to connect to provider", logger.WARNING)
                 return False
 
             if re.search('You tried too often', response):
-                logger.log(u'Too many login access for ' + self.name + ', can''t retrive any data', logger.ERROR)
+                logger.log(u"Too many login access attempts", logger.WARNING)
                 return False
 
             try:
@@ -111,59 +103,9 @@ class TorrentDayProvider(generic.TorrentProvider):
             except:
                 pass
 
-            logger.log(u'Unable to obtain cookie for TorrentDay', logger.WARNING)
+            logger.log(u"Unable to obtain cookie", logger.WARNING)
             return False
 
-
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-                search_string['Season'].append(ep_string)
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -175,9 +117,11 @@ class TorrentDayProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                logger.log(u"Search string: " + search_string, logger.DEBUG)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 search_string = '+'.join(search_string.split())
 
@@ -189,49 +133,46 @@ class TorrentDayProvider(generic.TorrentProvider):
 
                 parsedJSON = self.getURL(self.urls['search'], post_data=post_data, json=True)
                 if not parsedJSON:
-                    logger.log(u"No result returned for {0}".format(search_string), logger.DEBUG)
+                    logger.log("No data returned from provider", logger.DEBUG)
                     continue
 
                 try:
                     torrents = parsedJSON.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
                 except:
-                    logger.log(u"No torrents found in JSON for {0}".format(search_string), logger.DEBUG)
+                    logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                     continue
 
                 for torrent in torrents:
 
                     title = re.sub(r"\[.*\=.*\].*\[/.*\]", "", torrent['name'])
-                    url = self.urls['download'] % ( torrent['id'], torrent['fname'] )
+                    download_url = self.urls['download'] % ( torrent['id'], torrent['fname'] )
                     seeders = int(torrent['seed'])
                     leechers = int(torrent['leech'])
+                    #FIXME
+                    size = -1
 
-                    if not title or not url:
-                        logger.log(u"Discarding torrent because there's no title or url", logger.DEBUG)
+                    if not all([title, download_url]):
                         continue
 
-                    if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                    #Filter unseeded torrent
+                    if seeders < self.minseed or leechers < self.minleech:
+                        if mode != 'RSS':
+                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                         continue
 
-                    item = title, url, seeders, leechers
+                    item = title, download_url, size, seeders, leechers
+                    if mode != 'RSS':
+                        logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                     items[mode].append(item)
 
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url = item[0], item[1]
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index 777a7470b7e35c31edfef51a62f35dfcec441a6c..7c814ed213c532f45ba89da62ba03686320eb161 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -1,7 +1,7 @@
 # Author: Idan Gutman
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -68,14 +68,6 @@ class TorrentLeechProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'torrentleech.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         login_params = {'username': self.username,
@@ -86,63 +78,15 @@ class TorrentLeechProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['login'],  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Invalid Username/password', response) or re.search('<title>Login :: TorrentLeech.org</title>', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
-
-        return [search_string]
-
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -152,19 +96,17 @@ class TorrentLeechProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
                 if mode == 'RSS':
                     searchURL = self.urls['index'] % self.categories
                 else:
                     searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
-
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 data = self.getURL(searchURL)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 if not data:
                     continue
 
@@ -175,8 +117,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
 
                         #Continue only if one Release is found
                         if len(torrent_rows) < 2:
-                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         for result in torrent_table.find_all('tr')[1:]:
@@ -189,44 +130,36 @@ class TorrentLeechProvider(generic.TorrentProvider):
                                 id = int(link['href'].replace('/torrent/', ''))
                                 seeders = int(result.find('td', attrs={'class': 'seeders'}).string)
                                 leechers = int(result.find('td', attrs={'class': 'leechers'}).string)
+                                #FIXME
+                                size = -1
                             except (AttributeError, TypeError):
                                 continue
 
-                            #Filter unseeded torrent
-                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not download_url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            item = title, download_url, id, seeders, leechers
-                            logger.log(u"Found result: " + title.replace(' ','.') + " (" + download_url + ")", logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index dc81feeb531b5eac3cb4aacbbcd3863da9d4fd1b..fb3bee16862052cac8499eaaa116e76010ae18dd 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -1,7 +1,7 @@
 # Author: duramato <matigonkas@outlook.com>
 # URL: https://github.com/SiCKRAGETV/sickrage
 #
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -35,7 +35,7 @@ class TORRENTPROJECTProvider(generic.TorrentProvider):
 
         self.supportsBacklog = True
         self.public = True
-        
+
         self.urls = {'api': u'https://torrentproject.se/',}
         self.url = self.urls['api']
         self.minseed = None
@@ -45,11 +45,6 @@ class TORRENTPROJECTProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-
-    def imageName(self):
-        return 'torrentproject.png'
-
-
     def _get_airbydate_season_range(self, season):
         if season == None:
             return ()
@@ -61,106 +56,66 @@ class TORRENTPROJECTProvider(generic.TorrentProvider):
             max_date = datetime.date(year, month+1, 1) -  datetime.timedelta(days=1)
         return (min_date, max_date)
 
+    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
-    def _get_season_search_strings(self, show, season=None):
-        search_string = []
-
-        if not (show and season):
-            return []
-
-        myDB = db.DBConnection()
-
-        if show.air_by_date:
-            (min_date, max_date) = self._get_airbydate_season_range(season)
-            sqlResults = myDB.select("SELECT DISTINCT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ? AND status = ?", [show.tvdbid,  min_date.toordinal(), max_date.toordinal(), WANTED])
-        else:
-            sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? AND season = ? AND status = ?", [show.tvdbid, season, WANTED])
-
-        for sqlEp in sqlResults:
-            for show_name in set(show_name_helpers.allPossibleShowNames(show)):
-                if show.air_by_date:
-                    ep_string = sanitizeSceneName(show_name) +' '+ str(datetime.date.fromordinal(sqlEp["airdate"])).replace('-', '.')
-                    search_string.append(ep_string)
-                else:
-                    ep_string = sanitizeSceneName(show_name) + ' S%02d' % sqlEp["season"]
-                    search_string.append(ep_string)
-
-        return search_string
-
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        if not ep_obj:
-            return []
-
-        search_string = []
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-            ep_string = sanitizeSceneName(show_name)
-            if ep_obj.show.air_by_date:
-                ep_string += ' ' + str(ep_obj.airdate).replace('-', '.')
-            else:
-                ep_string += ' ' + naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
-
-            if len(add_string):
-                ep_string += ' %s' % add_string
-
-            search_string.append(ep_string)
-
-        return search_string
-
-
-    def _get_title_and_url(self, item):
-        title, url, size = item
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
-
-    def _get_size(self, item):
-        title, url, size = item
-        logger.log(u'Size: %s' % size, logger.DEBUG)
-
-        return size
-
-
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
-
-        logger.log("Performing Search: {0}".format(search_params))
-
-        searchUrl = self.urls['api'] + "?s=" + search_params + "&out=json"
-		
-        torrents = self.getURL(searchUrl, json=True)
-        del torrents["total_found"]
-		
-		
         results = []
-        for i in torrents:
-            name = torrents[i]["title"]
-            seeders = torrents[i]["seeds"]
-            leechers = torrents[i]["leechs"]
-            if seeders < self.minseed or leechers < self.minleech:
-                logger.log("Torrent doesn't meet minimum seeds & leechers not selecting :   " + name, logger.DEBUG)
-                continue
-            hash = torrents[i]["torrent_hash"]
-            size = torrents[i]["torrent_size"]
-            trackerUrl = self.urls['api'] + "" + hash + "/trackers_json"
-            logger.log(u'The tracker list is: ' + trackerUrl, logger.DEBUG)
-            jdata = self.getURL(trackerUrl, json=True)
-            magnet = "magnet:?xt=urn:btih:" + hash + "&dn=" + name + "".join(["&tr=" + s for s in jdata])
-            logger.log(u'Magnet URL is: ' + magnet, logger.DEBUG)
-            results.append((name, magnet, size))
-
-        logger.log("URL to be parsed: " + searchUrl, logger.DEBUG)
-
+        items = {'Season': [], 'Episode': [], 'RSS': []}
+
+        for mode in search_strings.keys(): #Mode = RSS, Season, Episode
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
+            for search_string in search_strings[mode]:
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
+                searchURL = self.urls['api'] + "?s=" + search_string + "&out=json"
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                torrents = self.getURL(searchURL, json=True)
+                if int(torrents["total_found"]) == 0:
+                    logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                    continue
+                del torrents["total_found"]
+
+                if not torrents:
+                    logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                    continue
+
+                results = []
+                for i in torrents:
+                    title = torrents[i]["title"]
+                    seeders = torrents[i]["seeds"]
+                    leechers = torrents[i]["leechs"]
+                    if seeders < self.minseed or leechers < self.minleech:
+                        logger.log("Torrent doesn't meet minimum seeds & leechers not selecting :   " + title, logger.DEBUG)
+                        continue
+                    hash = torrents[i]["torrent_hash"]
+                    size = torrents[i]["torrent_size"]
+                    trackerUrl = self.urls['api'] + "" + hash + "/trackers_json"
+                    jdata = self.getURL(trackerUrl, json=True)
+                    download_url = "magnet:?xt=urn:btih:" + hash + "&dn=" + title + "".join(["&tr=" + s for s in jdata])
+
+                    if not all([title, download_url]):
+                        continue
+
+                    #Filter unseeded torrent
+                    if seeders < self.minseed or leechers < self.minleech:
+                        if mode != 'RSS':
+                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                        continue
+
+                    item = title, download_url, size, seeders, leechers
+
+                    if mode != 'RSS':
+                        logger.log(u"Found result: %s " % title, logger.DEBUG)
+                    items[mode].append(item)
+
+            # For each search mode sort all the items by seeders
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
+            results += items[mode]
 
         return results
 
-
 class TORRENTPROJECTCache(tvcache.TVCache):
     def __init__(self, provider):
 
@@ -172,6 +127,7 @@ class TORRENTPROJECTCache(tvcache.TVCache):
     def _getRSSData(self):
         # no rss for torrentproject afaik,& can't search with empty string
         # newest results are always > 1 day since added anyways
+        search_strings = {'RSS': ['']}
         return {'entries': {}}
 
 provider = TORRENTPROJECTProvider()
diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py
index 0779e5207ab8a297f071406d423df71ad4690c7b..6770e1cdaee615dd0f128c0a632a2c75e7dc4269 100644
--- a/sickbeard/providers/transmitthenet.py
+++ b/sickbeard/providers/transmitthenet.py
@@ -1,4 +1,4 @@
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -64,14 +64,6 @@ class TransmitTheNetProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'transmitthenet.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _checkAuth(self):
 
         if not self.username or not self.password:
@@ -90,65 +82,15 @@ class TransmitTheNetProvider(generic.TorrentProvider):
 
         response = self.getURL(self.urls['index'], params={'page': 'login'}, post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('Username Incorrect', response) or re.search('Password Incorrect', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
-        else:
-            logger.log(u'Login successful for ' + self.name, logger.DEBUG)
 
         return True
 
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  # 1) showName SXX
-
-            search_string['Season'].append(ep_string.strip())
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', ' ', ep_string.replace('.', ' ').strip()))
-
-        return [search_string]
-
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -160,18 +102,17 @@ class TransmitTheNetProvider(generic.TorrentProvider):
         for mode in search_strings.keys():
             for search_string in search_strings[mode]:
 
-                self.search_params['search'] = search_string
-                logger.log(u"Search string: " + self.search_params['search'] + " for " + self.name, logger.DEBUG)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 data = self.getURL(self.urls['index'], params=self.search_params)
-                url_searched = self.urls['index'] + "?" + urlencode(self.search_params)
+                searchURL = self.urls['index'] + "?" + urlencode(self.search_params)
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
 
                 if not data:
-                    logger.log(u"The response from (" + url_searched + ") is empty.", logger.DEBUG)
+                    logger.log("No data returned from provider", logger.DEBUG)
                     continue
 
-                logger.log(u"Search query from (" + url_searched + ") returned data.", logger.DEBUG)
-
                 try:
                     with BS4Parser(data) as html:
 
@@ -186,8 +127,7 @@ class TransmitTheNetProvider(generic.TorrentProvider):
 
                         # Continue only if one Release is found
                         if len(torrent_rows) < 1:
-                            logger.log(u"The Data returned from " + self.name + " did not contain any torrent",
-                                       logger.DEBUG)
+                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         for torrent_row in torrent_rows:
@@ -197,27 +137,27 @@ class TransmitTheNetProvider(generic.TorrentProvider):
                             id = torrent_row.find('a', {"data-src": True})['href'].split("&id=", 1)[1]
                             seeders = int(torrent_row.findAll('a', {'title': 'Click here to view peers details'})[0].text.strip())
                             leechers = int(torrent_row.findAll('a', {'title': 'Click here to view peers details'})[1].text.strip())
+                            download_url = self.urls['base_url'] + download_href
+                            #FIXME
+                            size = -1
 
-                            # Filter unseeded torrent
-                            if seeders < self.minseed:
-                                logger.log(
-                                    u"Discarding torrent because it doesn't meet the minimum seeders: {0} (S:{1})".format(
-                                        title, seeders), logger.DEBUG)
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not download_href:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            download_url = self.urls['base_url'] + download_href
-
-                            item = title, download_url, id, seeders, leechers
-                            logger.log(u"Found result: " + title.replace(' ', '.') + " (" + download_url + ")",
-                                       logger.DEBUG)
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                 except:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
 
             # For each search mode sort all the items by seeders
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
@@ -226,18 +166,6 @@ class TransmitTheNetProvider(generic.TorrentProvider):
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, id, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py
index d0fa2cb2a514b73b7190383ee5fc88cee1224aeb..d5b179883cdc4efc49e7bb9b29b26d7da306bd95 100644
--- a/sickbeard/providers/tvchaosuk.py
+++ b/sickbeard/providers/tvchaosuk.py
@@ -1,4 +1,4 @@
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -67,33 +67,12 @@ class TVChaosUKProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'tvchaosuk.png'
-
-    def getQuality(self, item, anime=False):
-        return Quality.sceneQuality(item[0], anime)
-
     def _checkAuth(self):
         if self.username and self.password:
             return True
 
         raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.')
 
-    def _doLogin(self):
-
-        login_params = {'username': self.username, 'password': self.password}
-        response = self.getURL(self.urls['login'], post_data=login_params, timeout=30)
-        if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
-            return False
-
-        if re.search('Error: Username or password incorrect!', response):
-            logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
-            return False
-
-        logger.log(u'Login successful for ' + self.name, logger.DEBUG)
-        return True
-
     def _get_season_search_strings(self, ep_obj):
 
         search_string = {'Season': []}
@@ -138,6 +117,20 @@ class TVChaosUKProvider(generic.TorrentProvider):
 
         return [search_string]
 
+    def _doLogin(self):
+
+        login_params = {'username': self.username, 'password': self.password}
+        response = self.getURL(self.urls['login'], post_data=login_params, timeout=30)
+        if not response:
+            logger.log(u"Unable to connect to provider", logger.WARNING)
+            return False
+
+        if re.search('Error: Username or password incorrect!', response):
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
+            return False
+
+        return True
+
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
@@ -147,34 +140,36 @@ class TVChaosUKProvider(generic.TorrentProvider):
             return results
 
         for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
-                self.search_params['keywords'] = search_string.strip()
-                logger.log(u'Search string: ' + self.search_params['keywords'] + ' for ' + self.name, logger.DEBUG)
 
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
+                self.search_params['keywords'] = search_string.strip()
                 data = self.getURL(self.urls['search'], params=self.search_params)
                 url_searched = self.urls['search'] + '?' + urlencode(self.search_params)
 
                 if not data:
-                    logger.log(u'The response from (' + url_searched + ') is empty.',logger.DEBUG)
+                    logger.log("No data returned from provider", logger.DEBUG)
                     continue
 
-                logger.log(u'Search query from (' + url_searched + ') returned data.',logger.DEBUG)
-
                 with BS4Parser(data) as html:
                     torrent_table = html.find(id='listtorrents').find_all('tr')
                     for torrent in torrent_table:
                         try:
                             title = torrent.find(attrs={'class':'tooltip-content'}).text.strip()
-                            url = torrent.find(title="Click to Download this Torrent!").parent['href'].strip()
+                            download_url = torrent.find(title="Click to Download this Torrent!").parent['href'].strip()
                             seeders = int(torrent.find(title='Seeders').text.strip())
                             leechers = int(torrent.find(title='Leechers').text.strip())
 
-                            #Filter unseeded torrent
-                            if not seeders or seeders < self.minseed or leechers < self.minleech:
-                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            if not all([title, download_url]):
                                 continue
 
-                            if not title or not url:
+                            #Filter unseeded torrent
+                            if seeders < self.minseed or leechers < self.minleech:
+                                if mode != 'RSS':
+                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
                             # Chop off tracker/channel prefix or we cant parse the result!
@@ -189,33 +184,25 @@ class TVChaosUKProvider(generic.TorrentProvider):
                             # Strip year from the end or we can't parse it!
                             title = re.sub(r'[\. ]?\(\d{4}\)', '', title)
 
-                            item = title, url, seeders, leechers
-                            logger.log(u'Found result: ' + title.replace(' ', '.') + ' (' + url + ')', logger.DEBUG)
+                            #FIXME
+                            size = -1
+
+                            item = title, download_url, size, seeders, leechers
+                            if mode != 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items[mode].append(item)
 
                         except:
                             continue
 
-            #For each search mode sort all the items by seeders
+            #For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)
 
             results += items[mode]
 
         return results
 
-    def _get_title_and_url(self, item):
-
-        title, url, seeders, leechers = item
-
-        if title:
-            title = self._clean_title_from_provider(title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
     def findPropers(self, search_date=datetime.datetime.today()):
 
         results = []
diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py
index 4e11676858e6802ea2c8a4cffffe30673d2b4413..3bfa59c4ebefb7263a0f02943e615e68aa5c8d49 100644
--- a/sickbeard/providers/womble.py
+++ b/sickbeard/providers/womble.py
@@ -57,7 +57,7 @@ class WombleCache(tvcache.TVCache):
                     self.provider.url + 'rss/?sec=tv-sd&fr=false',
                     self.provider.url + 'rss/?sec=tv-dvd&fr=false',
                     self.provider.url + 'rss/?sec=tv-hd&fr=false']:
-            logger.log(u'Womble\'s Index cache update URL: ' + url, logger.DEBUG)
+            logger.log(u"Cache update URL: %s" % url, logger.DEBUG)
 
             for item in self.getRSSFeed(url)['entries'] or []:
                 ci = self._parseItem(item)
@@ -72,4 +72,3 @@ class WombleCache(tvcache.TVCache):
         return data if data['feed'] and data['feed']['title'] != 'Invalid Link' else None
 
 provider = WombleProvider()
-
diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py
index bdb587eece5cc5759162f44f540805e8e09d1f8e..af5158efba3608ff331e188800b7ede875e109a5 100644
--- a/sickbeard/providers/xthor.py
+++ b/sickbeard/providers/xthor.py
@@ -2,7 +2,7 @@
 # Author: adaur <adaur.underground@gmail.com>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of SickRage.
+# This file is part of SickRage.
 #
 # SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -58,74 +58,6 @@ class XthorProvider(generic.TorrentProvider):
     def isEnabled(self):
         return self.enabled
 
-    def imageName(self):
-        return 'xthor.png'
-
-    def _get_season_search_strings(self, ep_obj):
-
-        search_string = {'Season': []}
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
-            elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
-            else:
-                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
-
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        elif self.show.anime:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + '.' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
-
-                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
-
-        return [search_string]
-
-    def _get_title_and_url(self, item):
-
-        title, url = item
-
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
-
-        if url:
-            url = str(url).replace('&amp;', '&')
-
-        return (title, url)
-
-    def getQuality(self, item, anime=False):
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
     def _doLogin(self):
 
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
@@ -136,26 +68,21 @@ class XthorProvider(generic.TorrentProvider):
                         'submitme': 'X'
         }
 
-        logger.log('Performing authentication to Xthor', logger.DEBUG)
-
         response = self.getURL(self.url + '/takelogin.php',  post_data=login_params, timeout=30)
         if not response:
-            logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
+            logger.log(u"Unable to connect to provider", logger.WARNING)
             return False
 
         if re.search('donate.php', response):
-            logger.log(u'Login to ' + self.name + ' was successful.', logger.DEBUG)
             return True
         else:
-            logger.log(u'Login to ' + self.name + ' was unsuccessful.', logger.DEBUG)
+            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
             return False
 
         return True
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
 
-        logger.log(u"_doSearch started with ..." + str(search_params), logger.DEBUG)
-
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
@@ -164,16 +91,14 @@ class XthorProvider(generic.TorrentProvider):
             return results
 
         for mode in search_params.keys():
-
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_params[mode]:
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 searchURL = self.urlsearch % (urllib.quote(search_string), self.categories)
-
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
-
+                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
                 data = self.getURL(searchURL)
 
                 if not data:
@@ -187,12 +112,32 @@ class XthorProvider(generic.TorrentProvider):
                             link = row.find("a",href=re.compile("details.php"))
                             if link:
                                 title = link.text
-                                logger.log(u"Xthor title : " + title, logger.DEBUG)
                                 downloadURL =  self.url + '/' + row.find("a",href=re.compile("download.php"))['href']
-                                logger.log(u"Xthor download URL : " + downloadURL, logger.DEBUG)
-                                item = title, downloadURL
+                                #FIXME
+                                size = -1
+                                seeders = 1
+                                leechers = 0
+
+                                if not all([title, downloadURL]):
+                                    continue
+
+                                #Filter unseeded torrent
+                                #if seeders < self.minseed or leechers < self.minleech:
+                                #    if mode != 'RSS':
+                                #        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                                #    continue
+
+                                item = title, downloadURL, size, seeders, leechers
+                                if mode != 'RSS':
+                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
+
                                 items[mode].append(item)
+
+            #For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
             results += items[mode]
+
         return results
 
     def seedRatio(self):
diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index a7a710b90026502e84b92ea281ac5c2626ac71db..961c9e11e83a41260c74db04056764d30d26776f 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -155,16 +155,22 @@ class BacklogSearcher:
 
             if bestQualities:
                 highestBestQuality = max(bestQualities)
+                lowestBestQuality = min(bestQualities)
             else:
                 highestBestQuality = 0
+                lowestBestQuality = 0
+
 
             # if we need a better one then say yes
             if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER) and curQuality < highestBestQuality) or curStatus == common.WANTED:
                 epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
-                if epObj.season not in wanted:
-                    wanted[epObj.season] = [epObj]
-                else:
-                    wanted[epObj.season].append(epObj)
+
+                # only fetch if not archiving on first match, or if current quality is lower than the lowest expected quality
+                if epObj.show.archive_firstmatch == 0 or curQuality < lowestBestQuality:
+                    if epObj.season not in wanted:
+                        wanted[epObj.season] = [epObj]
+                    else:
+                        wanted[epObj.season].append(epObj)
 
         return wanted
 
diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py
index 8b7248401b6d8bddaf67f00890ec31d62681b683..b0754d6a1d2d369772a4cda9ed4009513884520e 100644
--- a/sickbeard/traktChecker.py
+++ b/sickbeard/traktChecker.py
@@ -78,7 +78,7 @@ class TraktChecker():
         if sickbeard.TRAKT_SYNC_WATCHLIST:
             self.todoWanted = []  # its about to all get re-added
             if len(sickbeard.ROOT_DIRS.split('|')) < 2:
-                logger.log(u"No default root directory", logger.ERROR)
+                logger.log(u"No default root directory", logger.WARNING)
                 return
 
             try:
@@ -110,7 +110,7 @@ class TraktChecker():
 
             traktShow = filter(lambda x: int(indexerid) in [int(x['show']['ids']['tvdb'] or 0), int(x['show']['ids']['tvrage'] or 0)], library)
         except traktException as e:
-            logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
+            logger.log(u"Could not connect to Trakt service. Aborting library check. Error: %s" % repr(e), logger.WARNING)
 
         return traktShow
 
@@ -133,11 +133,12 @@ class TraktChecker():
             else:
                 data['shows'][0]['ids']['tvrage'] = show_obj.indexerid
 
-            logger.log(u"Removing " + show_obj.name + " from trakt.tv library", logger.DEBUG)
+            logger.log(u"Removing %s from trakt.tv library" % show_obj.name, logger.DEBUG)
+
             try:
                 self.trakt_api.traktRequest("sync/collection/remove", data, method='POST')
             except traktException as e:
-                logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
+                logger.log(u"Could not connect to Trakt service. Aborting removing show %s from Trakt library. Error: %s" % (show_obj.name, repr(e)), logger.WARNING)
 
     def addShowToTraktLibrary(self, show_obj):
         """
@@ -166,12 +167,12 @@ class TraktChecker():
                 data['shows'][0]['ids']['tvrage'] = show_obj.indexerid
 
         if len(data):
-            logger.log(u"Adding " + show_obj.name + " to trakt.tv library", logger.DEBUG)
+            logger.log(u"Adding %s to trakt.tv library" % show_obj.name, logger.DEBUG)
 
             try:
                 self.trakt_api.traktRequest("sync/collection", data, method='POST')
             except traktException as e:
-                logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
+                logger.log(u"Could not connect to Trakt service. Aborting adding show %s to Trakt library. Error: %s" % (show_obj.name, repr(e)), logger.WARNING)
                 return
 
     def syncLibrary(self):
@@ -197,13 +198,17 @@ class TraktChecker():
                     if self._checkInList(trakt_id,str(cur_episode["showid"]),str(cur_episode["season"]),str(cur_episode["episode"]), List='Collection'):
                         if cur_episode["location"] == '':
                             logger.log(u"Removing Episode %s S%02dE%02d from collection" %
-                            (cur_episode["show_name"],cur_episode["season"],cur_episode["episode"]), logger.DEBUG)
+                            (cur_episode["show_name"], cur_episode["season"], cur_episode["episode"]), logger.DEBUG)
                             trakt_data.append((cur_episode["showid"],cur_episode["indexer"],cur_episode["show_name"],cur_episode["startyear"],cur_episode["season"], cur_episode["episode"]))
 
             if len(trakt_data):
-                data = self.trakt_bulk_data_generate(trakt_data)
-                self.trakt_api.traktRequest("sync/collection/remove", data, method='POST')
-                self._getShowCollection()
+
+                try:
+                    data = self.trakt_bulk_data_generate(trakt_data)
+                    self.trakt_api.traktRequest("sync/collection/remove", data, method='POST')
+                    self._getShowCollection()
+                except traktException as e:
+                    logger.log(u"Could not connect to Trakt service. Aborting removing episode %s S%02dE%02d from Trakt collection. Error: %s" % (cur_episode["show_name"], cur_episode["season"], cur_episode["episode"], repr(e)), logger.WARNING)
 
             logger.log(u"COLLECTION::REMOVE::FINISH - Look for Episodes to Remove From Trakt Collection", logger.DEBUG)
 
@@ -220,13 +225,17 @@ class TraktChecker():
                     trakt_id = sickbeard.indexerApi(cur_episode["indexer"]).config['trakt_id']
                     if not self._checkInList(trakt_id,str(cur_episode["showid"]),str(cur_episode["season"]),str(cur_episode["episode"]), List='Collection'):
                         logger.log(u"Adding Episode %s S%02dE%02d to collection" %
-                        (cur_episode["show_name"],cur_episode["season"],cur_episode["episode"]), logger.DEBUG)
+                        (cur_episode["show_name"], cur_episode["season"], cur_episode["episode"]), logger.DEBUG)
                         trakt_data.append((cur_episode["showid"],cur_episode["indexer"],cur_episode["show_name"],cur_episode["startyear"],cur_episode["season"], cur_episode["episode"]))
 
                 if len(trakt_data):
-                    data = self.trakt_bulk_data_generate(trakt_data)
-                    self.trakt_api.traktRequest("sync/collection", data, method='POST')
-                    self._getShowCollection()
+                    
+                    try:
+                        data = self.trakt_bulk_data_generate(trakt_data)
+                        self.trakt_api.traktRequest("sync/collection", data, method='POST')
+                        self._getShowCollection()
+                    except traktException as e:
+                        logger.log(u"Could not connect to Trakt service. Aborting adding episode to Trakt collection. Error: %s" % repr(e), logger.WARNING)
 
             logger.log(u"COLLECTION::ADD::FINISH - Look for Episodes to Add to Trakt Collection", logger.DEBUG)
 
@@ -259,13 +268,17 @@ class TraktChecker():
                     if self._checkInList(trakt_id,str(cur_episode["showid"]),str(cur_episode["season"]),str(cur_episode["episode"])):
                         if cur_episode["status"] not in Quality.SNATCHED + Quality.SNATCHED_PROPER + [UNKNOWN] + [WANTED]:
                             logger.log(u"Removing Episode %s S%02dE%02d from watchlist" %
-                            (cur_episode["show_name"],cur_episode["season"],cur_episode["episode"]), logger.DEBUG)
+                            (cur_episode["show_name"], cur_episode["season"], cur_episode["episode"]), logger.DEBUG)
                             trakt_data.append((cur_episode["showid"],cur_episode["indexer"],cur_episode["show_name"],cur_episode["startyear"],cur_episode["season"], cur_episode["episode"]))
 
             if len(trakt_data):
-                data = self.trakt_bulk_data_generate(trakt_data)
-                self.trakt_api.traktRequest("sync/watchlist/remove", data, method='POST')
-                self._getEpisodeWatchlist()
+                
+                try:
+                    data = self.trakt_bulk_data_generate(trakt_data)
+                    self.trakt_api.traktRequest("sync/watchlist/remove", data, method='POST')
+                    self._getEpisodeWatchlist()
+                except traktException as e:
+                    logger.log(u"Could not connect to Trakt service. Aborting removing episode %s S%02dE%02d from Trakt watchlist. Error: %s" % (cur_episode["show_name"], cur_episode["season"], cur_episode["episode"], repr(e)), logger.WARNING)
 
             logger.log(u"WATCHLIST::REMOVE::FINISH - Look for Episodes to Remove from Trakt Watchlist", logger.DEBUG)
 
@@ -282,13 +295,18 @@ class TraktChecker():
                     trakt_id = sickbeard.indexerApi(cur_episode["indexer"]).config['trakt_id']
                     if not self._checkInList(trakt_id,str(cur_episode["showid"]),str(cur_episode["season"]),str(cur_episode["episode"])):
                         logger.log(u"Adding Episode %s S%02dE%02d to watchlist" %
-                        (cur_episode["show_name"],cur_episode["season"],cur_episode["episode"]), logger.DEBUG)
-                        trakt_data.append((cur_episode["showid"],cur_episode["indexer"],cur_episode["show_name"],cur_episode["startyear"],cur_episode["season"], cur_episode["episode"]))
+                        (cur_episode["show_name"], cur_episode["season"], cur_episode["episode"]), logger.DEBUG)
+                        trakt_data.append((cur_episode["showid"],cur_episode["indexer"],cur_episode["show_name"],cur_episode["startyear"],cur_episode["season"],
+                        cur_episode["episode"]))
 
                 if len(trakt_data):
-                    data = self.trakt_bulk_data_generate(trakt_data)
-                    self.trakt_api.traktRequest("sync/watchlist", data, method='POST')
-                    self._getEpisodeWatchlist()
+
+                    try:
+                        data = self.trakt_bulk_data_generate(trakt_data)
+                        self.trakt_api.traktRequest("sync/watchlist", data, method='POST')
+                        self._getEpisodeWatchlist()
+                    except traktException as e:
+                        logger.log(u"Could not connect to Trakt service. Aborting adding episode %s S%02dE%02d to Trakt watchlist. Error: %s" % (cur_episode["show_name"], cur_episode["season"], cur_episode["episode"], repr(e)), logger.WARNING)
 
             logger.log(u"WATCHLIST::ADD::FINISH - Look for Episodes to Add to Trakt Watchlist", logger.DEBUG)
 
@@ -310,9 +328,13 @@ class TraktChecker():
                         trakt_data.append(show_el)
 
                 if len(trakt_data):
-                    data = {'shows': trakt_data}
-                    self.trakt_api.traktRequest("sync/watchlist", data, method='POST')
-                    self._getShowWatchlist()
+                    
+                    try:
+                        data = {'shows': trakt_data}
+                        self.trakt_api.traktRequest("sync/watchlist", data, method='POST')
+                        self._getShowWatchlist()
+                    except traktException as e:
+                        logger.log(u"Could not connect to Trakt service. Aborting adding show %s to Trakt watchlist. Error: %s" % (show.name, repr(e)), logger.WARNING)
 
             logger.log(u"SHOW_WATCHLIST::ADD::FINISH - Look for Shows to Add to Trakt Watchlist", logger.DEBUG)
 
@@ -326,12 +348,12 @@ class TraktChecker():
                         try:
                             progress = self.trakt_api.traktRequest("shows/" + show.imdbid + "/progress/watched") or []
                         except traktException as e:
-                            logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
+                            logger.log(u"Could not connect to Trakt service. Aborting removing show %s from SickRage. Error: %s" % (show.name, repr(e)), logger.WARNING)
                             return
 
                         if 'aired' in progress and 'completed' in progress and progress['aired'] == progress['completed']:
                             sickbeard.showQueueScheduler.action.removeShow(show, full=True)
-                            logger.log(u"Show: " + show.name + " has been removed from SickRage", logger.DEBUG)
+                            logger.log(u"Show: %s has been removed from SickRage" % show.name, logger.DEBUG)
 
             logger.log(u"SHOW_SICKRAGE::REMOVE::FINISH - Trakt Show Watchlist", logger.DEBUG)
 
@@ -400,7 +422,7 @@ class TraktChecker():
                             for episode_el in show['seasons'][season_el]['episodes']:
                                 setEpisodeToWanted(newShow, season, int(episode_el))
             except TypeError:
-                logger.log(u"Could not parse the output from trakt for " + show["title"], logger.DEBUG)
+                logger.log(u"Could not parse the output from trakt for %s " % show["title"], logger.DEBUG)
         logger.log(u"SHOW_WATCHLIST::CHECK::FINISH - Trakt Episode Watchlist", logger.DEBUG)
 
     def addDefaultShow(self, indexer, indexer_id, name, status):
@@ -420,7 +442,7 @@ class TraktChecker():
                 showPath = ek(os.path.join, location, helpers.sanitizeFileName(name))
                 dir_exists = helpers.makeDir(showPath)
                 if not dir_exists:
-                    logger.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
+                    logger.log(u"Unable to create the folder %s , can't add the show" % showPath, logger.WARNING)
                     return
                 else:
                     helpers.chmodAsParent(showPath)
@@ -433,7 +455,7 @@ class TraktChecker():
                                                             default_status_after=status,
                                                             archive=sickbeard.ARCHIVE_DEFAULT)
             else:
-                logger.log(u"There was an error creating the show, no root directory setting found", logger.ERROR)
+                logger.log(u"There was an error creating the show, no root directory setting found", logger.WARNING)
                 return
 
     def manageNewShow(self, show):
@@ -499,7 +521,7 @@ class TraktChecker():
                     self.ShowWatchlist[tvrage_id + '_id'][showid] = { 'id': showid , 'title' : title , 'year': year }
 
         except traktException as e:
-            logger.log(u"Could not connect to trakt service, cannot download Show Watchlist: %s" % ex(e), logger.ERROR)
+            logger.log(u"Could not connect to trakt service, cannot download Show Watchlist: %s" % repr(e), logger.WARNING)
             return False
 
         return True
@@ -551,7 +573,7 @@ class TraktChecker():
                         self.EpisodeWatchlist[tvrage_id + '_id'][showid]['seasons'][season]['episodes'][episode] = episode
 
         except traktException as e:
-            logger.log(u"Could not connect to trakt service, cannot download Episode Watchlist: %s" % ex(e), logger.WARNING)
+            logger.log(u"Could not connect to trakt service, cannot download Episode Watchlist: %s" % repr(e), logger.WARNING)
             return False
 
         return True
@@ -609,7 +631,7 @@ class TraktChecker():
                                     self.Collectionlist[tvrage_id + '_id'][showid]['seasons'][season]['episodes'][episode] = episode
 
         except traktException as e:
-            logger.log(u"Could not connect to trakt service, cannot download Show Collection: %s" % ex(e), logger.ERROR)
+            logger.log(u"Could not connect to trakt service, cannot download Show Collection: %s" % repr(e), logger.WARNING)
             return False
 
         return True
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index 6022cf8bb15d66ca20a15bc46faaaea544bb8ac6..300070eb4b5430953badbfe74e7ab23c099f3c0b 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -492,7 +492,8 @@ class TVShow(object):
         if self.dvdorder != 0:
             lINDEXER_API_PARMS['dvdorder'] = True
 
-        logger.log(u"lINDEXER_API_PARMS: " + str(lINDEXER_API_PARMS), logger.DEBUG)
+        #logger.log(u"lINDEXER_API_PARMS: " + str(lINDEXER_API_PARMS), logger.DEBUG)
+        #Spamming log
         t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
 
         cachedShow = t[self.indexerid]
@@ -500,25 +501,26 @@ class TVShow(object):
 
         for curResult in sqlResults:
 
-            logger.log(u"loadEpisodesFromDB curResult: " + str(curResult), logger.DEBUG)
-            deleteEp = False
-
             curSeason = int(curResult["season"])
             curEpisode = int(curResult["episode"])
+            curShowid = int(curResult['showid'])
+
+            logger.log(u"%s: loading Episodes from DB" % curShowid, logger.DEBUG)
+            deleteEp = False
 
             if curSeason not in cachedSeasons:
                 try:
                     cachedSeasons[curSeason] = cachedShow[curSeason]
                 except sickbeard.indexer_seasonnotfound, e:
-                    logger.log(u"Error when trying to load the episode from " + sickbeard.indexerApi(
-                        self.indexer).name + ": " + e.message, logger.WARNING)
+                    logger.log(u"%s: Error when trying to load the episode from %s. Message: %s " %
+                    (curShowid, sickbeard.indexerApi(self.indexer).name, e.message), logger.WARNING)
                     deleteEp = True
 
             if not curSeason in scannedEps:
                 logger.log(u"Not curSeason in scannedEps", logger.DEBUG)
                 scannedEps[curSeason] = {}
 
-            logger.log(u"Loading episode S%02dE%02d from the DB" % (curSeason, curEpisode), logger.DEBUG)
+            logger.log(u"%s: Loading episode S%02dE%02d from the DB" % (curShowid, curSeason, curEpisode), logger.DEBUG)
 
             try:
                 curEp = self.getEpisode(curSeason, curEpisode)
@@ -1500,7 +1502,7 @@ class TVEpisode(object):
                 logger.log(u'%s: No subtitles found for S%02dE%02d on any provider' % (self.show.indexerid, self.season, self.episode), logger.DEBUG)
                 return
 
-            subliminal.save_subtitles(foundSubs, directory=subs_path, single=not sickbeard.SUBTITLES_MULTI)
+            subliminal.save_subtitles(foundSubs, directory=subs_path.encode(sickbeard.SYS_ENCODING), single=not sickbeard.SUBTITLES_MULTI)
 
             for video, subs in foundSubs.iteritems():
                 for sub in subs:
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 85ed9b4480a4f06b69f2bff8d7ff5557827291a7..52449e566097cd2cb17ef26547f7dbfa64250efa 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -415,7 +415,7 @@ class WebRoot(WebHandler):
 
         return self.redirect("/home/displayShow?show=" + show)
 
-    def setComingEpsLayout(self, layout):
+    def setScheduleLayout(self, layout):
         if layout not in ('poster', 'banner', 'list', 'calendar'):
             layout = 'banner'
 
@@ -424,15 +424,15 @@ class WebRoot(WebHandler):
 
         sickbeard.COMING_EPS_LAYOUT = layout
 
-        return self.redirect("/comingEpisodes/")
+        return self.redirect("/schedule/")
 
-    def toggleComingEpsDisplayPaused(self):
+    def toggleScheduleDisplayPaused(self):
 
         sickbeard.COMING_EPS_DISPLAY_PAUSED = not sickbeard.COMING_EPS_DISPLAY_PAUSED
 
-        return self.redirect("/comingEpisodes/")
+        return self.redirect("/schedule/")
 
-    def setComingEpsSort(self, sort):
+    def setScheduleSort(self, sort):
         if sort not in ('date', 'network', 'show'):
             sort = 'date'
 
@@ -442,9 +442,9 @@ class WebRoot(WebHandler):
 
         sickbeard.COMING_EPS_SORT = sort
 
-        return self.redirect("/comingEpisodes/")
+        return self.redirect("/schedule/")
 
-    def comingEpisodes(self, layout=None):
+    def schedule(self, layout=None):
         next_week = datetime.date.today() + datetime.timedelta(days=7)
         next_week1 = datetime.datetime.combine(next_week, datetime.time(tzinfo=network_timezones.sb_timezone))
         results = ComingEpisodes.get_coming_episodes(ComingEpisodes.categories, sickbeard.COMING_EPS_SORT, False)
@@ -454,26 +454,26 @@ class WebRoot(WebHandler):
             {
                 'title': 'Sort by:',
                 'path': {
-                    'Date': 'setComingEpsSort/?sort=date',
-                    'Show': 'setComingEpsSort/?sort=show',
-                    'Network': 'setComingEpsSort/?sort=network',
+                    'Date': 'setScheduleSort/?sort=date',
+                    'Show': 'setScheduleSort/?sort=show',
+                    'Network': 'setScheduleSort/?sort=network',
                 }
             },
             {
                 'title': 'Layout:',
                 'path': {
-                    'Banner': 'setComingEpsLayout/?layout=banner',
-                    'Poster': 'setComingEpsLayout/?layout=poster',
-                    'List': 'setComingEpsLayout/?layout=list',
-                    'Calendar': 'setComingEpsLayout/?layout=calendar',
+                    'Banner': 'setScheduleLayout/?layout=banner',
+                    'Poster': 'setScheduleLayout/?layout=poster',
+                    'List': 'setScheduleLayout/?layout=list',
+                    'Calendar': 'setScheduleLayout/?layout=calendar',
                 }
             },
             {
                 'title': 'View Paused:',
                 'path': {
-                    'Hide': 'toggleComingEpsDisplayPaused'
+                    'Hide': 'toggleScheduleDisplayPaused'
                 } if sickbeard.COMING_EPS_DISPLAY_PAUSED else {
-                    'Show': 'toggleComingEpsDisplayPaused'
+                    'Show': 'toggleScheduleDisplayPaused'
                 }
             },
         ]
@@ -484,9 +484,9 @@ class WebRoot(WebHandler):
         else:
             layout = sickbeard.COMING_EPS_LAYOUT
 
-        t = PageTemplate(rh=self, file='comingEpisodes.mako')
+        t = PageTemplate(rh=self, file='schedule.mako')
         return t.render(submenu=submenu, next_week=next_week1, today=today, results=results, layout=layout,
-                        title='Schedule', header='Schedule', topmenu='comingEpisodes')
+                        title='Schedule', header='Schedule', topmenu='schedule')
 
 
 class CalendarHandler(BaseHandler):
@@ -4866,19 +4866,23 @@ class ErrorLogs(WebRoot):
     def __init__(self, *args, **kwargs):
         super(ErrorLogs, self).__init__(*args, **kwargs)
 
-    def ErrorLogsMenu(self):
+    def ErrorLogsMenu(self, level):
         menu = [
-            {'title': 'Clear Errors', 'path': 'errorlogs/clearerrors/', 'requires': self.haveErrors(), 'icon': 'ui-icon ui-icon-trash'},
-            {'title': 'Clear Warnings', 'path': 'errorlogs/clearerrors/?level='+str(logger.WARNING), 'requires': self.haveWarnings(), 'icon': 'ui-icon ui-icon-trash'},
-            {'title': 'Submit Errors', 'path': 'errorlogs/submit_errors/', 'requires': self.haveErrors(), 'class':'sumbiterrors', 'confirm': True, 'icon': 'ui-icon ui-icon-arrowreturnthick-1-n'},
+            {'title': 'Clear Errors', 'path': 'errorlogs/clearerrors/', 'requires': self.haveErrors() and level == logger.ERROR, 'icon': 'ui-icon ui-icon-trash'},
+            {'title': 'Clear Warnings', 'path': 'errorlogs/clearerrors/?level='+str(logger.WARNING), 'requires': self.haveWarnings() and level == logger.WARNING, 'icon': 'ui-icon ui-icon-trash'},
+            {'title': 'Submit Errors', 'path': 'errorlogs/submit_errors/', 'requires': self.haveErrors() and level == logger.ERROR, 'class':'sumbiterrors', 'confirm': True, 'icon': 'ui-icon ui-icon-arrowreturnthick-1-n'},
         ]
 
         return menu
 
     def index(self, level=logger.ERROR):
+        try:
+            level = int(level)
+        except:
+            level = logger.ERROR
 
         t = PageTemplate(rh=self, file="errorlogs.mako")
-        return t.render(header="Logs &amp; Errors", title="Logs &amp; Errors", topmenu="system", submenu=self.ErrorLogsMenu(), logLevel=int(level))
+        return t.render(header="Logs &amp; Errors", title="Logs &amp; Errors", topmenu="system", submenu=self.ErrorLogsMenu(level), logLevel=level)
 
     def haveErrors(self):
         if len(classes.ErrorViewer.errors) > 0:
@@ -4980,17 +4984,14 @@ class ErrorLogs(WebRoot):
                 with ek(codecs.open, *[logger.logFile + "." + str(i), 'r', 'utf-8']) as f:
                         data += Get_Data(minLevel, f.readlines(), len(data), regex, logFilter, logSearch, maxLines)
 
-        return t.render(header="Log File", title="Logs", topmenu="system", submenu=self.ErrorLogsMenu(),
+        return t.render(header="Log File", title="Logs", topmenu="system",
                 logLines="".join(data), minLevel=minLevel, logNameFilters=logNameFilters,
                 logFilter=logFilter, logSearch=logSearch)
 
     def submit_errors(self):
-        if not (sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD):
-            ui.notifications.error("Missing information", "Please set your GitHub username and password in the config.")
-            logger.log(u'Please set your GitHub username and password in the config, unable to submit issue ticket to GitHub!')
-        else:
-            submitter_result, issue_id = logger.submit_errors()
-            logger.log(submitter_result, (logger.INFO, logger.WARNING)[issue_id is None])
-            ui.notifications.message(submitter_result)
+        submitter_result, issue_id = logger.submit_errors()
+        logger.log(submitter_result, (logger.INFO, logger.WARNING)[issue_id is None])
+        submitter_notification = ui.notifications.error if issue_id is None else ui.notifications.message
+        submitter_notification(submitter_result)
 
         return self.redirect("/errorlogs/")
diff --git a/tests/all_tests.py b/tests/all_tests.py
index 5351032c1ca3b7a25adf9b4461e56bd61863dab2..010946da9ea0ff772dff8a2268efc3a0c7be66a3 100755
--- a/tests/all_tests.py
+++ b/tests/all_tests.py
@@ -18,8 +18,6 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
-import glob
-import unittest
 import sys, os.path
 
 tests_dir=os.path.abspath(__file__)[:-len(os.path.basename(__file__))]
@@ -27,9 +25,12 @@ tests_dir=os.path.abspath(__file__)[:-len(os.path.basename(__file__))]
 sys.path.insert(1, os.path.join(tests_dir, '../lib'))
 sys.path.insert(1, os.path.join(tests_dir, '..'))
 
+import glob
+import unittest
+
 class AllTests(unittest.TestCase):
     #Block issue_submitter_tests to avoid issue tracker spam on every build
-    blacklist = [tests_dir + 'all_tests.py', tests_dir + 'issue_submitter_tests.py']
+    blacklist = [tests_dir + 'all_tests.py', tests_dir + 'issue_submitter_tests.py', tests_dir + 'search_tests.py']
     def setUp(self):
         self.test_file_strings = [ x for x in glob.glob(tests_dir + '*_tests.py') if not x in self.blacklist ]
         self.module_strings = [file_string[len(tests_dir):len(file_string) - 3] for file_string in self.test_file_strings]
diff --git a/tests/common_tests.py b/tests/common_tests.py
index 5e2563067b77460c399afa6f7ce5606df1317bb7..024652ab38293e98ca436ac168bc34288ef8d9d2 100644
--- a/tests/common_tests.py
+++ b/tests/common_tests.py
@@ -1,11 +1,11 @@
-import unittest
-
 import sys
 import os.path
 
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+
 from sickbeard import common
 
 class QualityTests(unittest.TestCase):
diff --git a/tests/config_tests.py b/tests/config_tests.py
index 7b0e61cf3b28b370d2465d1e77df38a44d8785f9..36619ed1a7c0e1262df60da13cdb82069e0b3b3d 100644
--- a/tests/config_tests.py
+++ b/tests/config_tests.py
@@ -1,11 +1,11 @@
-import unittest
-
 import sys
 import os.path
 
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+
 from sickbeard import config
 
 class QualityTests(unittest.TestCase):
diff --git a/tests/encoding_tests.py b/tests/encoding_tests.py
index 0caf4b3e8317d849a1416142b18b68d24bc57398..fb67ccef49f4da13e3ab06f59b3c6c7759c213e6 100644
--- a/tests/encoding_tests.py
+++ b/tests/encoding_tests.py
@@ -1,12 +1,13 @@
 # coding=utf-8
 
-import locale
-import unittest
 import sys, os.path
 
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import locale
+import unittest
+
 import sickbeard
 from sickbeard.helpers import sanitizeFileName
 from sickrage.helper.encoding import ek
diff --git a/tests/feedparser_tests.py b/tests/feedparser_tests.py
index b5c02f1f8136f2b7b6760c320111540d3d388dc7..61603bbd525f6e43a572a44fb748704d907733c2 100644
--- a/tests/feedparser_tests.py
+++ b/tests/feedparser_tests.py
@@ -1,10 +1,11 @@
-import unittest
 import sys, os.path
-import test_lib as test
 
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+import test_lib as test
+
 from sickbeard.rssfeeds import RSSFeeds
 from sickbeard.tvcache import TVCache
 class FeedParserTests(unittest.TestCase):
diff --git a/tests/issue_submitter_tests.py b/tests/issue_submitter_tests.py
index 36d1bfe28a5c57c95587fb35a42691476be999b6..cd318f404b8fadfe79c8f33ea254222345cfbf17 100644
--- a/tests/issue_submitter_tests.py
+++ b/tests/issue_submitter_tests.py
@@ -19,15 +19,15 @@
 
 from __future__ import with_statement
 
-import unittest
 import sys, os.path
 
-from sickbeard import logger
-from sickrage.helper.exceptions import ex
-
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+
+from sickbeard import logger
+from sickrage.helper.exceptions import ex
 
 def error():
     try:
diff --git a/tests/name_parser_tests.py b/tests/name_parser_tests.py
index df73515edfb2812ab77ef96abdd875a500213469..febd47ce0ef7c0533f24358430a3d49ca4ab47f1 100644
--- a/tests/name_parser_tests.py
+++ b/tests/name_parser_tests.py
@@ -1,11 +1,11 @@
-import datetime
-import unittest
-import test_lib as test
-
 import sys, os.path
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import datetime
+import unittest
+
+import test_lib as test
 from sickbeard.name_parser import parser
 
 import sickbeard
@@ -31,7 +31,7 @@ simple_test_cases = {
               'Show.Name.S06E01.Other.WEB-DL': parser.ParseResult(None, 'Show Name', 6, [1], 'Other.WEB-DL' ),
               'Show.Name.S06E01 Some-Stuff Here': parser.ParseResult(None, 'Show Name', 6, [1], 'Some-Stuff Here' ),
               },
-              
+
               'fov': {
               'Show_Name.1x02.Source_Quality_Etc-Group': parser.ParseResult(None, 'Show Name', 1, [2], 'Source_Quality_Etc', 'Group'),
               'Show Name 1x02': parser.ParseResult(None, 'Show Name', 1, [2]),
@@ -52,7 +52,7 @@ simple_test_cases = {
               'Show Name - S01E02 - S01E03 - S01E04 - Ep Name': parser.ParseResult(None, 'Show Name', 1, [2,3,4], 'Ep Name'),
               'Show.Name.S01E02.S01E03.WEB-DL': parser.ParseResult(None, 'Show Name', 1, [2,3], 'WEB-DL'),
               },
-              
+
               'fov_repeat': {
               'Show.Name.1x02.1x03.Source.Quality.Etc-Group': parser.ParseResult(None, 'Show Name', 1, [2,3], 'Source.Quality.Etc', 'Group'),
               'Show.Name.1x02.1x03': parser.ParseResult(None, 'Show Name', 1, [2,3]),
@@ -68,12 +68,12 @@ simple_test_cases = {
               'the.event.401.hdtv-lol': parser.ParseResult(None, 'the event', 4, [1], 'hdtv', 'lol'),
               'show.name.2010.special.hdtv-blah': None,
               },
-              
+
               'stupid': {
               'tpz-abc102': parser.ParseResult(None, None, 1, [2], None, 'tpz'),
               'tpz-abc.102': parser.ParseResult(None, None, 1, [2], None, 'tpz'),
               },
-              
+
               'no_season': {
               'Show Name - 01 - Ep Name': parser.ParseResult(None, 'Show Name', None, [1], 'Ep Name'),
               '01 - Ep Name': parser.ParseResult(None, None, None, [1], 'Ep Name'),
@@ -101,7 +101,7 @@ simple_test_cases = {
               'Show Name Season 2': parser.ParseResult(None, 'Show Name', 2),
               'Season 02': parser.ParseResult(None, None, 2),
               },
-              
+
               'scene_date_format': {
               'Show.Name.2010.11.23.Source.Quality.Etc-Group': parser.ParseResult(None, 'Show Name', None, [], 'Source.Quality.Etc', 'Group', datetime.date(2010,11,23)),
               'Show Name - 2010.11.23': parser.ParseResult(None, 'Show Name', air_date = datetime.date(2010,11,23)),
@@ -116,7 +116,7 @@ combination_test_cases = [
                           ('/test/path/to/Season 02/03 - Ep Name.avi',
                            parser.ParseResult(None, None, 2, [3], 'Ep Name'),
                            ['no_season', 'season_only']),
-                          
+
                           ('Show.Name.S02.Source.Quality.Etc-Group/tpz-sn203.avi',
                            parser.ParseResult(None, 'Show Name', 2, [3], 'Source.Quality.Etc', 'Group'),
                            ['stupid', 'season_only']),
@@ -124,11 +124,11 @@ combination_test_cases = [
                           ('MythBusters.S08E16.720p.HDTV.x264-aAF/aaf-mb.s08e16.720p.mkv',
                            parser.ParseResult(None, 'MythBusters', 8, [16], '720p.HDTV.x264', 'aAF'),
                            ['standard']),
-                           
+
                           ('/home/drop/storage/TV/Terminator The Sarah Connor Chronicles/Season 2/S02E06 The Tower is Tall, But the Fall is Short.mkv',
                            parser.ParseResult(None, None, 2, [6], 'The Tower is Tall, But the Fall is Short'),
                            ['standard']),
-                           
+
                           (r'/Test/TV/Jimmy Fallon/Season 2/Jimmy Fallon - 2010-12-15 - blah.avi',
                            parser.ParseResult(None, 'Jimmy Fallon', extra_info = 'blah', air_date = datetime.date(2010,12,15)),
                            ['scene_date_format']),
@@ -136,11 +136,11 @@ combination_test_cases = [
                           (r'/X/30 Rock/Season 4/30 Rock - 4x22 -.avi',
                            parser.ParseResult(None, '30 Rock', 4, [22]),
                            ['fov']),
-                           
+
                           ('Season 2\\Show Name - 03-04 - Ep Name.ext',
                            parser.ParseResult(None, 'Show Name', 2, [3,4], extra_info = 'Ep Name'),
                            ['no_season', 'season_only']),
-                           
+
                           ('Season 02\\03-04-05 - Ep Name.ext',
                            parser.ParseResult(None, None, 2, [3,4,5], extra_info = 'Ep Name'),
                            ['no_season', 'season_only']),
@@ -159,7 +159,7 @@ failure_cases = ['7sins-jfcs01e09-720p-bluray-x264']
 
 
 class UnicodeTests(test.SickbeardTestDBCase):
-    
+
     def _test_unicode(self, name, result):
         np = parser.NameParser(True)
 
@@ -170,47 +170,47 @@ class UnicodeTests(test.SickbeardTestDBCase):
 
         # this shouldn't raise an exception
         a = repr(str(parse_result))
-    
+
     def test_unicode(self):
         for (name, result) in unicode_test_cases:
             self._test_unicode(name, result)
 
 class FailureCaseTests(test.SickbeardTestDBCase):
-    
+
     def _test_name(self, name):
         np = parser.NameParser(True)
         try:
             parse_result = np.parse(name)
         except (parser.InvalidNameException, parser.InvalidShowException):
             return True
-        
+
         if VERBOSE:
             print 'Actual: ', parse_result.which_regex, parse_result
         return False
-    
+
     def test_failures(self):
         for name in failure_cases:
             self.assertTrue(self._test_name(name))
 
 class ComboTests(test.SickbeardTestDBCase):
-    
+
     def _test_combo(self, name, result, which_regexes):
-        
+
         if VERBOSE:
             print
-            print 'Testing', name 
-        
+            print 'Testing', name
+
         np = parser.NameParser(True)
 
         try:
             test_result = np.parse(name)
         except parser.InvalidShowException:
             return False
-        
+
         if DEBUG:
             print test_result, test_result.which_regex
             print result, which_regexes
-            
+
 
         self.assertEqual(test_result, result)
         for cur_regex in which_regexes:
@@ -218,7 +218,7 @@ class ComboTests(test.SickbeardTestDBCase):
         self.assertEqual(len(which_regexes), len(test_result.which_regex))
 
     def test_combos(self):
-        
+
         for (name, result, which_regexes) in combination_test_cases:
             # Normalise the paths. Converts UNIX-style paths into Windows-style
             # paths when test is run on Windows.
@@ -245,7 +245,7 @@ class BasicTests(test.SickbeardTestDBCase):
                 return
             else:
                 test_result = np.parse(cur_test)
-            
+
             if DEBUG or verbose:
                 print 'air_by_date:', test_result.is_air_by_date, 'air_date:', test_result.air_date
                 print 'anime:', test_result.is_anime, 'ab_episode_numbers:', test_result.ab_episode_numbers
diff --git a/tests/pp_tests.py b/tests/pp_tests.py
index 5db84328002b015589da2a5909a40bbdf183523d..29d13985c58de8b6fc844a47681c15d8c4520b5e 100644
--- a/tests/pp_tests.py
+++ b/tests/pp_tests.py
@@ -17,15 +17,15 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
+import sys, os.path
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
 import random
 import unittest
 
 import test_lib as test
 
-import sys, os.path
-sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
-sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-
 from sickbeard.postProcessor import PostProcessor
 import sickbeard
 from sickbeard.tv import TVEpisode, TVShow
diff --git a/tests/scene_helpers_tests.py b/tests/scene_helpers_tests.py
index b044894f4dcc489304c959235fbfa5c9b2cf810c..63ffad6feb8523758df0f92aa12b53e0b927617a 100644
--- a/tests/scene_helpers_tests.py
+++ b/tests/scene_helpers_tests.py
@@ -1,10 +1,10 @@
-import unittest
-import test_lib as test
-
 import sys, os.path
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+
+import test_lib as test
 from sickbeard import show_name_helpers, scene_exceptions, common, name_cache
 
 import sickbeard
diff --git a/tests/search_tests.py b/tests/search_tests.py
new file mode 100755
index 0000000000000000000000000000000000000000..5f00ccce8de8351b6aa517a3fbde3858b46ea46e
--- /dev/null
+++ b/tests/search_tests.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python2.7
+# coding=UTF-8
+# Author: Dennis Lutter <lad1337@gmail.com>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
+
+import sys, os.path
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+import random
+import unittest
+
+import test_lib as test
+
+import sickbeard.search as search
+import sickbeard
+from sickbeard.tv import TVEpisode, TVShow
+import sickbeard.common as c
+
+from sickbeard.providers.generic import GenericProvider
+
+tests = {"Game of Thrones":
+               {"tvdbid": 121361, "s": 5, "e": [10],
+                "s_strings": [{"Season": [u"Game of Thrones S05"]}],
+                "e_strings": [{"Episode": [u"Game of Thrones S05E10"]}]}}
+
+class SearchTest(test.SickbeardTestDBCase):
+
+    def __init__(self, something):
+        super(SearchTest, self).__init__(something)
+
+
+def test_generator(curData, name, provider, forceSearch):
+
+    def test(self):
+        show = TVShow(1, int(curData["tvdbid"]))
+        show.name = name
+        show.quality = c.ANY | c.Quality.UNKNOWN | c.Quality.RAWHDTV
+        show.saveToDB()
+        sickbeard.showList.append(show)
+
+        for epNumber in curData["e"]:
+            episode = TVEpisode(show, curData["s"], epNumber)
+            episode.status = c.WANTED
+
+            # We aren't updating scene numbers, so fake it here
+            episode.scene_season = curData["s"]
+            episode.scene_episode = epNumber
+
+            episode.saveToDB()
+
+            provider.show = show
+            season_strings = provider._get_season_search_strings(episode)
+            episode_strings = provider._get_episode_search_strings(episode)
+
+            fail = False
+            for cur_string in season_strings, episode_strings:
+                if not all([isinstance(cur_string, list), isinstance(cur_string[0], dict)]):
+                    print " %s is using a wrong string format!" % provider.name
+                    print cur_string
+                    fail = True
+                    continue
+
+            if fail:
+                continue
+
+            try:
+                assert(season_strings == curData["s_strings"])
+                assert(episode_strings == curData["e_strings"])
+            except AssertionError:
+                print " %s is using a wrong string format!" % provider.name
+                print cur_string
+                continue
+
+            search_strings = episode_strings[0]
+            #search_strings.update(season_strings[0])
+            #search_strings.update({"RSS":['']})
+
+            #print search_strings
+
+            if not provider.public:
+                continue
+
+            items = provider._doSearch(search_strings)
+            if not items:
+                print "No results from provider?"
+                continue
+
+            title, url = provider._get_title_and_url(items[0])
+            for word in show.name.split(" "):
+                if not word in title:
+                    print "Show name not in title: %s" % title
+                    continue
+
+            if not url:
+                print "url is empty"
+                continue
+
+            quality = provider.getQuality(items[0])
+            size = provider._get_size(items[0])
+            if not show.quality & quality:
+                print "Quality not in common.ANY, %r" % quality
+                continue
+
+    return test
+
+if __name__ == '__main__':
+    print "=================="
+    print "STARTING - Search TESTS"
+    print "=================="
+    print "######################################################################"
+    # create the test methods
+    for forceSearch in (True, False):
+        for name, curData in tests.items():
+            fname = name.replace(' ', '_')
+
+            for provider in sickbeard.providers.sortedProviderList():
+                if provider.providerType == GenericProvider.TORRENT:
+                    if forceSearch:
+                        test_name = 'test_manual_%s_%s_%s' % (fname, curData["tvdbid"], provider.name)
+                    else:
+                        test_name = 'test_%s_%s_%s' % (fname, curData["tvdbid"], provider.name)
+                    test = test_generator(curData, name, provider, forceSearch)
+                    setattr(SearchTest, test_name, test)
+
+    suite = unittest.TestLoader().loadTestsFromTestCase(SearchTest)
+    unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/tests/snatch_tests.py b/tests/snatch_tests.py
index 6f68c9366103d0f45c34ee7fc6af82208f871055..d22926594e1c73c0b25c84c8192e876af4f12aed 100644
--- a/tests/snatch_tests.py
+++ b/tests/snatch_tests.py
@@ -17,15 +17,15 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
+import sys, os.path
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
 import random
 import unittest
 
 import test_lib as test
 
-import sys, os.path
-sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
-sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-
 import sickbeard.search as search
 import sickbeard
 from sickbeard.tv import TVEpisode, TVShow
diff --git a/tests/ssl_sni_tests.py b/tests/ssl_sni_tests.py
index 88303cc8cdcd1683983c81d7d64451aee06f1de7..f7adf5bd398713007e0ce67386dcc030380ce772 100644
--- a/tests/ssl_sni_tests.py
+++ b/tests/ssl_sni_tests.py
@@ -17,12 +17,13 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
-import unittest
 import sys, os.path
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
 import requests
+
 import sickbeard.providers as providers
 import certifi
 from sickrage.helper.exceptions import ex
diff --git a/tests/test_lib.py b/tests/test_lib.py
index fa6c10c934a118640d6bb5654b49ca0b822024e8..f7a44c2ec75453cce50d6e2ec8fb2f3b9c52ba0a 100644
--- a/tests/test_lib.py
+++ b/tests/test_lib.py
@@ -19,13 +19,13 @@
 
 from __future__ import with_statement
 
-import unittest
-from configobj import ConfigObj
-
 import sys, os.path
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+
+from configobj import ConfigObj
 import sickbeard
 
 from sickbeard import providers, tvcache
diff --git a/tests/torrent_tests.py b/tests/torrent_tests.py
index d1b4a7063c3990e1834ab4e588aec53f48a0b70c..73b6ff289f14f8b2a436cb7c0b5a9c1ca209408a 100644
--- a/tests/torrent_tests.py
+++ b/tests/torrent_tests.py
@@ -19,12 +19,12 @@
 
 from __future__ import with_statement
 
-import unittest
-
 import sys, os.path
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+
 import urlparse
 import test_lib as test
 from bs4 import BeautifulSoup
diff --git a/tests/tv_tests.py b/tests/tv_tests.py
index a6b8cb30957b46e1a090abd359cd30d8f816a9b5..cefe61d6891877cdb03fae4e845e43f3d96c56dc 100644
--- a/tests/tv_tests.py
+++ b/tests/tv_tests.py
@@ -17,13 +17,14 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage.  If not, see <http://www.gnu.org/licenses/>.
 
-import unittest
-import test_lib as test
-
 import sys, os.path
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import unittest
+
+import test_lib as test
+
 import sickbeard
 from sickbeard.tv import TVEpisode, TVShow
 
diff --git a/tests/xem_tests.py b/tests/xem_tests.py
index a26477f09a0f8e9952f94ff045cdb7f44ac66bb3..0aa22c1b4989846dff02ded21c0dfaea69e3087f 100644
--- a/tests/xem_tests.py
+++ b/tests/xem_tests.py
@@ -19,14 +19,14 @@
 
 from __future__ import with_statement
 
-import unittest
-import datetime
-import re
-
 import sys, os.path
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+import datetime
+import unittest
+import re
+
 import test_lib as test
 import sickbeard
 from sickbeard.helpers import sanitizeSceneName