diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index 483f3797360f74885bb01dca82e6b0d21b75e0da..a32b9ef8c9fda8834ca2e8453ac024c2876ca66a 100644
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -23,7 +23,7 @@ from random import shuffle
 
 import sickbeard
 from sickbeard.providers import btn, newznab, rsstorrent, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \
-    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, cpasbien, fnt, xthor, torrentbytes, \
+    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, fnt, xthor, torrentbytes, \
     freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
     scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents
 
@@ -32,7 +32,7 @@ __all__ = [
     'torrentday', 'hdbits', 'hounddawgs', 'iptorrents', 'omgwtfnzbs',
     'speedcd', 'nyaatorrents', 'torrentbytes', 'freshontv',
     'morethantv', 'bitsoup', 't411', 'tokyotoshokan', 'alpharatio',
-    'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers', 'cpasbien',
+    'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers',
     'fnt', 'xthor', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk',
     'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker',
     'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents'
diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py
index 9182ccdb6f59a0e15c1b6c34a05f1f9446e62265..b4f509d356a2b02ff91832078c6132685de1b860 100644
--- a/sickbeard/providers/cpasbien.py
+++ b/sickbeard/providers/cpasbien.py
@@ -16,8 +16,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import traceback
+import re
 
 from sickbeard import logger
 from sickbeard import tvcache
@@ -39,10 +38,9 @@ class CpasbienProvider(TorrentProvider):
         self.url = "http://www.cpasbien.io"
 
         self.proper_strings = ['PROPER', 'REPACK']
-
         self.cache = CpasbienCache(self)
 
-    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-statements, too-many-branches
+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
         results = []
         for mode in search_strings:
             items = []
@@ -51,57 +49,44 @@ class CpasbienProvider(TorrentProvider):
 
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
-                    searchURL = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html'
+                    search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
                 else:
-                    searchURL = self.url + '/view_cat.php?categorie=series&trie=date-d'
+                    search_url = self.url + '/view_cat.php?categorie=series&trie=date-d'
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                data = self.get_url(searchURL)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
-                try:
-                    with BS4Parser(data, 'html5lib') as html:
-                        line = 0
-                        torrents = []
-                        while True:
-                            resultlin = html.findAll(class_='ligne%i' % line)
-                            if not resultlin:
-                                break
-
-                            torrents += resultlin
-                            line += 1
-
-                        for torrent in torrents:
-                            try:
-                                title = torrent.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
-                                tmp = torrent.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
-                                download_url = (self.url + '/telechargement/%s' % tmp)
-                                seeders = try_int(torrent.find(class_="up").get_text(strip=True))
-                                leechers = try_int(torrent.find(class_="down").get_text(strip=True))
-                                torrent_size = torrent.find(class_="poid").get_text()
-
-                                size = convert_size(torrent_size) or -1
-                            except (AttributeError, TypeError, KeyError, IndexError):
-                                continue
-
+                with BS4Parser(data, 'html5lib') as html:
+                    torrent_rows = html.find_all(class_=re.compile('ligne[01]'))
+                    for result in torrent_rows:
+                        try:
+                            title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
+                            tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
+                            download_url = (self.url + '/telechargement/%s' % tmp)
                             if not all([title, download_url]):
                                 continue
 
-                            # Filter unseeded torrent
+                            seeders = try_int(result.find(class_="up").get_text(strip=True))
+                            leechers = try_int(result.find(class_="down").get_text(strip=True))
                             if seeders < self.minseed or leechers < self.minleech:
                                 if mode != 'RSS':
                                     logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
+                            torrent_size = result.find(class_="poid").get_text(strip=True)
+
+                            units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']
+                            size = convert_size(torrent_size, units=units) or -1
+
                             item = title, download_url, size, seeders, leechers
                             if mode != 'RSS':
                                 logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items.append(item)
-
-                except Exception:
-                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+                        except (AttributeError, TypeError, KeyError, IndexError):
+                            continue
 
             # For each search mode sort all the items by seeders if available
             items.sort(key=lambda tup: tup[3], reverse=True)
diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py
index d3b9f2e5226ba117437b0ed85708de29aef26445..a267c8dc5d281204582e8750048a618244d9132c 100644
--- a/sickbeard/providers/elitetorrent.py
+++ b/sickbeard/providers/elitetorrent.py
@@ -87,10 +87,10 @@ class elitetorrentProvider(TorrentProvider):
                 search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string)
                 self.search_params['buscar'] = search_string.strip() if mode != 'RSS' else ''
 
-                searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL, timeout=30)
+                data = self.get_url(search_url, timeout=30)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 30edf297366e24c7a8c98d678f7947cc1c9bb45c..3efefdf0f1655ced937eed5a4c7bb8b95a10048d 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -116,9 +116,9 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-at
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (freeleech, search_string)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                init_html = self.get_url(searchURL)
+                search_url = self.urls['search'] % (freeleech, search_string)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                init_html = self.get_url(search_url)
                 max_page_number = 0
 
                 if not init_html:
@@ -160,9 +160,9 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-at
                     for i in range(1, max_page_number):
 
                         time.sleep(1)
-                        page_searchURL = searchURL + '&page=' + str(i)
-                        # '.log(u"Search string: " + page_searchURL, logger.DEBUG)
-                        page_html = self.get_url(page_searchURL)
+                        page_search_url = search_url + '&page=' + str(i)
+                        # '.log(u"Search string: " + page_search_url, logger.DEBUG)
+                        page_html = self.get_url(page_search_url)
 
                         if not page_html:
                             continue
diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py
index 72c96af92b48dcfa57c68f1e2eb365383f80cb21..6b381325eb02c3285d1519f449d413b1dcdeb19c 100644
--- a/sickbeard/providers/gftracker.py
+++ b/sickbeard/providers/gftracker.py
@@ -97,11 +97,11 @@ class GFTrackerProvider(TorrentProvider):  # pylint: disable=too-many-instance-a
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (self.categories, search_string)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (self.categories, search_string)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
                 # Returns top 30 results by default, expandable in user profile
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py
index 2fdf868bad03358ac1f309a03b25ce5630524fcd..a5a543e6327f1a3cfdc98c20427da74bce2a3f4b 100644
--- a/sickbeard/providers/hdspace.py
+++ b/sickbeard/providers/hdspace.py
@@ -92,15 +92,15 @@ class HDSpaceProvider(TorrentProvider): # pylint: disable=too-many-instance-attr
             logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),)
+                    search_url = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),)
                 else:
-                    searchURL = self.urls['search'] % ''
+                    search_url = self.urls['search'] % ''
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
                 if mode != 'RSS':
                     logger.log(u"Search string: %s" % search_string, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data or 'please try later' in data:
                     logger.log(u"No data returned from provider", logger.DEBUG)
                     continue
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 105489c6c6ce65d67ff31ac8f4d80f8d040f9825..8754a1b5150b585f32bd7e3821af872212740243 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -92,17 +92,17 @@ class HDTorrentsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
             for search_string in search_strings[mode]:
 
                 if mode != 'RSS':
-                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
+                    search_url = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
                     logger.log(u"Search string: %s" % search_string, logger.DEBUG)
                 else:
-                    searchURL = self.urls['rss'] % self.categories
+                    search_url = self.urls['rss'] % self.categories
 
                 if self.freeleech:
-                    searchURL = searchURL.replace('active=1', 'active=5')
+                    search_url = search_url.replace('active=1', 'active=5')
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data or 'please try later' in data:
                     logger.log(u"No data returned from provider", logger.DEBUG)
                     continue
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index f1055e33ca806e0d87e65547db8fd8910df7c658..3e36754a83ffc42bcc27fc4bcc588cab02d1a213 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -95,11 +95,11 @@ class IPTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-a
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
-                searchURL = self.urls['search'] % (self.categories, freeleech, search_string)
-                searchURL += ';o=seeders' if mode != 'RSS' else ''
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (self.categories, freeleech, search_string)
+                search_url += ';o=seeders' if mode != 'RSS' else ''
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 27105064f994f484769f0de7beae6015e58813fb..8a6a71092dfa2bd449fa6cb0078d18951b48b8fe 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -82,12 +82,12 @@ class KatProvider(TorrentProvider): # pylint: disable=too-many-instance-attribut
 
                 url_fmt_string = 'usearch' if mode != 'RSS' else search_string
                 try:
-                    searchURL = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params)
+                    search_url = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params)
                     if self.custom_url:
-                        searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/'))  # Must use posixpath
+                        search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/'))  # Must use posixpath
 
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL)
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url)
                     if not data:
                         logger.log(u'URL did not return data, maybe try a custom url, or a different one', logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py
index fb5da040356a6278e439be90c0ee3d2d61854901..3a9153b5d48031ff918a1dc0c9abea2112048bdf 100644
--- a/sickbeard/providers/newpct.py
+++ b/sickbeard/providers/newpct.py
@@ -92,10 +92,10 @@ class newpctProvider(TorrentProvider):
                 self.search_params['q'] = search_string.strip() if mode != 'RSS' else ''
                 self.search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy'
 
-                searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL, timeout=30)
+                data = self.get_url(search_url, timeout=30)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index e4f96f4e3207819eeda8d43c2126a9e279ba6a75..e50ba7bf8dc98332e7bbfa077914ab5c030ef0f7 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -67,14 +67,14 @@ class NyaaProvider(TorrentProvider):  # pylint: disable=too-many-instance-attrib
                 if mode != 'RSS':
                     params["term"] = search_string.encode('utf-8')
 
-                searchURL = self.url + '?' + urllib.urlencode(params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.url + '?' + urllib.urlencode(params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
                 summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
                 s = re.compile(summary_regex, re.DOTALL)
 
                 results = []
-                for curItem in self.cache.getRSSFeed(searchURL)['entries'] or []:
+                for curItem in self.cache.getRSSFeed(search_url)['entries'] or []:
                     title = curItem['title']
                     download_url = curItem['link']
                     if not all([title, download_url]):
diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py
index b175728d6eb91f3d012f7e7a5f9562d46bb0be52..121ea1538a178242e1068f03e2638cfd013d1cb0 100644
--- a/sickbeard/providers/pretome.py
+++ b/sickbeard/providers/pretome.py
@@ -91,10 +91,10 @@ class PretomeProvider(TorrentProvider):  # pylint: disable=too-many-instance-att
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 04ce4c10f945917cabe929fa54d787035eb2bc4f..250b2e1ec12b69cea4c8e0b10953d51f0c6a6e3e 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -99,11 +99,11 @@ class SCCProvider(TorrentProvider):  # pylint: disable=too-many-instance-attribu
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories[mode])
+                search_url = self.urls['search'] % (urllib.quote(search_string), self.categories[mode])
 
                 try:
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL)
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url)
                     time.sleep(cpu_presets[sickbeard.CPU_PRESET])
                 except Exception as e:
                     logger.log(u"Unable to fetch data. Error: %s" % repr(e), logger.WARNING)
diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py
index 23d72a4505967fb696515bafb178771995b7cf8e..8e3eba84fb7153e1586852eb50dbbe9ec73d0c17 100644
--- a/sickbeard/providers/scenetime.py
+++ b/sickbeard/providers/scenetime.py
@@ -82,10 +82,10 @@ class SceneTimeProvider(TorrentProvider):  # pylint: disable=too-many-instance-a
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (urllib.quote(search_string), self.categories)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/strike.py b/sickbeard/providers/strike.py
index 6cf31397bf775239a0cd5771be8eaca94c305e25..c3955871da1a408fb8bd4d9677f626939d43dfd8 100644
--- a/sickbeard/providers/strike.py
+++ b/sickbeard/providers/strike.py
@@ -44,9 +44,9 @@ class StrikeProvider(TorrentProvider):
                 if mode != 'RSS':
                     logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)
 
-                searchURL = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                jdata = self.get_url(searchURL, json=True)
+                search_url = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                jdata = self.get_url(search_url, json=True)
                 if not jdata:
                     logger.log(u"No data returned from provider", logger.DEBUG)
                     return []
diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py
index f8804ee77c0c8672aff1ae9b0274e140170eef67..4775e25bf6f119c4f0691ccb61ba6b5197b9476f 100644
--- a/sickbeard/providers/t411.py
+++ b/sickbeard/providers/t411.py
@@ -93,10 +93,10 @@ class T411Provider(TorrentProvider):  # pylint: disable=too-many-instance-attrib
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURLS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS']
-                for searchURL in searchURLS:
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL, json=True)
+                search_urls = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS']
+                for search_url in search_urls:
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url, json=True)
                     if not data:
                         continue
 
diff --git a/sickbeard/providers/titansoftv.py b/sickbeard/providers/titansoftv.py
index dfb3044e8522b235007cfe86b4d0c33c99ba53c4..7cb72144bdb47f318b19041ef6ac827343614cf4 100644
--- a/sickbeard/providers/titansoftv.py
+++ b/sickbeard/providers/titansoftv.py
@@ -64,11 +64,11 @@ class TitansOfTVProvider(TorrentProvider):
         if search_params:
             params.update(search_params)
 
-        searchURL = self.url + '?' + urllib.urlencode(params)
+        search_url = self.url + '?' + urllib.urlencode(params)
         logger.log(u"Search string: %s " % search_params, logger.DEBUG)
-        logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+        logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-        parsedJSON = self.get_url(searchURL, json=True)  # do search
+        parsedJSON = self.get_url(search_url, json=True)  # do search
 
         if not parsedJSON:
             logger.log(u"No data returned from provider", logger.DEBUG)
diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py
index 70e1efb528987e57674e33a77c8f291cecf802a1..e79044502998071b6fbe73567167a36fd14288bb 100644
--- a/sickbeard/providers/tntvillage.py
+++ b/sickbeard/providers/tntvillage.py
@@ -302,15 +302,15 @@ class TNTVillageProvider(TorrentProvider):  # pylint: disable=too-many-instance-
                         break
 
                     if mode != 'RSS':
-                        searchURL = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string)
+                        search_url = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string)
                     else:
-                        searchURL = self.urls['search_page'].format(z, self.categories)
+                        search_url = self.urls['search_page'].format(z, self.categories)
 
                     if mode != 'RSS':
                         logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL)
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url)
                     if not data:
                         logger.log(u"No data returned from provider", logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index 79fb3af8c9730149ccc37ebc943ee4bd708b30b6..08e9cf3ecca9f3da7791c8a42fd9a28172da96b2 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -87,10 +87,10 @@ class TorrentBytesProvider(TorrentProvider):  # pylint: disable=too-many-instanc
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index d4346cca072747fa4f414981795e868ae9b90a1c..b5b15844762d80f47ec971c1b7d78a11581270fe 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -49,12 +49,12 @@ class TorrentProjectProvider(TorrentProvider):
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8'))
+                search_url = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8'))
                 if self.custom_url:
-                    searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath
+                    search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/')) # Must use posixpath
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                torrents = self.get_url(searchURL, json=True)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                torrents = self.get_url(search_url, json=True)
                 if not (torrents and "total_found" in torrents and int(torrents["total_found"]) > 0):
                     logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                     continue
@@ -81,7 +81,7 @@ class TorrentProjectProvider(TorrentProvider):
                         logger.log(u"Torrent has less than 10 seeds getting dyn trackers: " + title, logger.DEBUG)
                         trackerUrl = self.urls['api'] + "" + t_hash + "/trackers_json"
                         if self.custom_url:
-                            searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath
+                            trackerUrl = posixpath.join(self.custom_url, trackerUrl.split(self.url)[1].lstrip('/')) # Must use posixpath
                         jdata = self.get_url(trackerUrl, json=True)
                         assert jdata != "maintenance"
                         download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + "".join(["&tr=" + s for s in jdata])
diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py
index 22271f4f4fa288329c322b5ba717423d5efae370..c5b479fa0b4ea6de4b2e28c9cf201118431c690e 100644
--- a/sickbeard/providers/xthor.py
+++ b/sickbeard/providers/xthor.py
@@ -108,9 +108,9 @@ class XthorProvider(TorrentProvider):  # pylint: disable=too-many-instance-attri
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 search_params['search'] = search_string
-                searchURL = self.urls['search'] + urlencode(search_params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                data = self.get_url(searchURL)
+                search_url = self.urls['search'] + urlencode(search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                data = self.get_url(search_url)
                 if not data:
                     continue