From e11deb67d95f614f6ed0b2a9f63622e4c0481b8c Mon Sep 17 00:00:00 2001
From: miigotu <miigotu@gmail.com>
Date: Mon, 11 Jan 2016 16:27:30 -0800
Subject: [PATCH] Replace all instances of searchURL with search_url in
 providers. Rework cpasbien, but CloudFlare's JS challenge page is blocking
 us; it is disabled in code until this can be resolved

---
 sickbeard/providers/__init__.py       |  4 +-
 sickbeard/providers/cpasbien.py       | 59 ++++++++++-----------------
 sickbeard/providers/elitetorrent.py   |  6 +--
 sickbeard/providers/freshontv.py      | 12 +++---
 sickbeard/providers/gftracker.py      |  6 +--
 sickbeard/providers/hdspace.py        |  8 ++--
 sickbeard/providers/hdtorrents.py     | 10 ++---
 sickbeard/providers/iptorrents.py     |  8 ++--
 sickbeard/providers/kat.py            |  8 ++--
 sickbeard/providers/newpct.py         |  6 +--
 sickbeard/providers/nyaatorrents.py   |  6 +--
 sickbeard/providers/pretome.py        |  6 +--
 sickbeard/providers/scc.py            |  6 +--
 sickbeard/providers/scenetime.py      |  6 +--
 sickbeard/providers/strike.py         |  6 +--
 sickbeard/providers/t411.py           |  8 ++--
 sickbeard/providers/titansoftv.py     |  6 +--
 sickbeard/providers/tntvillage.py     |  8 ++--
 sickbeard/providers/torrentbytes.py   |  6 +--
 sickbeard/providers/torrentproject.py | 10 ++---
 sickbeard/providers/xthor.py          |  6 +--
 21 files changed, 93 insertions(+), 108 deletions(-)

diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index 483f37973..a32b9ef8c 100644
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -23,7 +23,7 @@ from random import shuffle
 
 import sickbeard
 from sickbeard.providers import btn, newznab, rsstorrent, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \
-    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, cpasbien, fnt, xthor, torrentbytes, \
+    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, fnt, xthor, torrentbytes, \
     freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
     scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents
 
@@ -32,7 +32,7 @@ __all__ = [
     'torrentday', 'hdbits', 'hounddawgs', 'iptorrents', 'omgwtfnzbs',
     'speedcd', 'nyaatorrents', 'torrentbytes', 'freshontv',
     'morethantv', 'bitsoup', 't411', 'tokyotoshokan', 'alpharatio',
-    'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers', 'cpasbien',
+    'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers',
     'fnt', 'xthor', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk',
     'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker',
     'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents'
diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py
index 9182ccdb6..b4f509d35 100644
--- a/sickbeard/providers/cpasbien.py
+++ b/sickbeard/providers/cpasbien.py
@@ -16,8 +16,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import traceback
+import re
 
 from sickbeard import logger
 from sickbeard import tvcache
@@ -39,10 +38,9 @@ class CpasbienProvider(TorrentProvider):
         self.url = "http://www.cpasbien.io"
 
         self.proper_strings = ['PROPER', 'REPACK']
-
         self.cache = CpasbienCache(self)
 
-    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-statements, too-many-branches
+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
         results = []
         for mode in search_strings:
             items = []
@@ -51,57 +49,44 @@ class CpasbienProvider(TorrentProvider):
 
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
-                    searchURL = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html'
+                    search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
                 else:
-                    searchURL = self.url + '/view_cat.php?categorie=series&trie=date-d'
+                    search_url = self.url + '/view_cat.php?categorie=series&trie=date-d'
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                data = self.get_url(searchURL)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
-                try:
-                    with BS4Parser(data, 'html5lib') as html:
-                        line = 0
-                        torrents = []
-                        while True:
-                            resultlin = html.findAll(class_='ligne%i' % line)
-                            if not resultlin:
-                                break
-
-                            torrents += resultlin
-                            line += 1
-
-                        for torrent in torrents:
-                            try:
-                                title = torrent.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
-                                tmp = torrent.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
-                                download_url = (self.url + '/telechargement/%s' % tmp)
-                                seeders = try_int(torrent.find(class_="up").get_text(strip=True))
-                                leechers = try_int(torrent.find(class_="down").get_text(strip=True))
-                                torrent_size = torrent.find(class_="poid").get_text()
-
-                                size = convert_size(torrent_size) or -1
-                            except (AttributeError, TypeError, KeyError, IndexError):
-                                continue
-
+                with BS4Parser(data, 'html5lib') as html:
+                    torrent_rows = html.find_all(class_=re.compile('ligne[01]'))
+                    for result in torrent_rows:
+                        try:
+                            title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
+                            tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
+                            download_url = (self.url + '/telechargement/%s' % tmp)
                             if not all([title, download_url]):
                                 continue
 
-                            # Filter unseeded torrent
+                            seeders = try_int(result.find(class_="up").get_text(strip=True))
+                            leechers = try_int(result.find(class_="down").get_text(strip=True))
                             if seeders < self.minseed or leechers < self.minleech:
                                 if mode != 'RSS':
                                     logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                 continue
 
+                            torrent_size = result.find(class_="poid").get_text(strip=True)
+
+                            units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']
+                            size = convert_size(torrent_size, units=units) or -1
+
                             item = title, download_url, size, seeders, leechers
                             if mode != 'RSS':
                                 logger.log(u"Found result: %s " % title, logger.DEBUG)
 
                             items.append(item)
-
-                except Exception:
-                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+                        except StandardError:
+                            continue
 
             # For each search mode sort all the items by seeders if available
             items.sort(key=lambda tup: tup[3], reverse=True)
diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py
index d3b9f2e52..a267c8dc5 100644
--- a/sickbeard/providers/elitetorrent.py
+++ b/sickbeard/providers/elitetorrent.py
@@ -87,10 +87,10 @@ class elitetorrentProvider(TorrentProvider):
                 search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string)
                 self.search_params['buscar'] = search_string.strip() if mode != 'RSS' else ''
 
-                searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL, timeout=30)
+                data = self.get_url(search_url, timeout=30)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 30edf2973..3efefdf0f 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -116,9 +116,9 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-at
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (freeleech, search_string)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                init_html = self.get_url(searchURL)
+                search_url = self.urls['search'] % (freeleech, search_string)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                init_html = self.get_url(search_url)
                 max_page_number = 0
 
                 if not init_html:
@@ -160,9 +160,9 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-at
                     for i in range(1, max_page_number):
 
                         time.sleep(1)
-                        page_searchURL = searchURL + '&page=' + str(i)
-                        # '.log(u"Search string: " + page_searchURL, logger.DEBUG)
-                        page_html = self.get_url(page_searchURL)
+                        page_search_url = search_url + '&page=' + str(i)
+                        # '.log(u"Search string: " + page_search_url, logger.DEBUG)
+                        page_html = self.get_url(page_search_url)
 
                         if not page_html:
                             continue
diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py
index 72c96af92..6b381325e 100644
--- a/sickbeard/providers/gftracker.py
+++ b/sickbeard/providers/gftracker.py
@@ -97,11 +97,11 @@ class GFTrackerProvider(TorrentProvider):  # pylint: disable=too-many-instance-a
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (self.categories, search_string)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (self.categories, search_string)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
                 # Returns top 30 results by default, expandable in user profile
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py
index 2fdf868ba..a5a543e63 100644
--- a/sickbeard/providers/hdspace.py
+++ b/sickbeard/providers/hdspace.py
@@ -92,15 +92,15 @@ class HDSpaceProvider(TorrentProvider): # pylint: disable=too-many-instance-attr
             logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
                 if mode != 'RSS':
-                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),)
+                    search_url = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),)
                 else:
-                    searchURL = self.urls['search'] % ''
+                    search_url = self.urls['search'] % ''
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
                 if mode != 'RSS':
                     logger.log(u"Search string: %s" % search_string, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data or 'please try later' in data:
                     logger.log(u"No data returned from provider", logger.DEBUG)
                     continue
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 105489c6c..8754a1b51 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -92,17 +92,17 @@ class HDTorrentsProvider(TorrentProvider):  # pylint: disable=too-many-instance-
             for search_string in search_strings[mode]:
 
                 if mode != 'RSS':
-                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
+                    search_url = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
                     logger.log(u"Search string: %s" % search_string, logger.DEBUG)
                 else:
-                    searchURL = self.urls['rss'] % self.categories
+                    search_url = self.urls['rss'] % self.categories
 
                 if self.freeleech:
-                    searchURL = searchURL.replace('active=1', 'active=5')
+                    search_url = search_url.replace('active=1', 'active=5')
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data or 'please try later' in data:
                     logger.log(u"No data returned from provider", logger.DEBUG)
                     continue
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index f1055e33c..3e36754a8 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -95,11 +95,11 @@ class IPTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-a
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
-                searchURL = self.urls['search'] % (self.categories, freeleech, search_string)
-                searchURL += ';o=seeders' if mode != 'RSS' else ''
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (self.categories, freeleech, search_string)
+                search_url += ';o=seeders' if mode != 'RSS' else ''
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 27105064f..8a6a71092 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -82,12 +82,12 @@ class KatProvider(TorrentProvider): # pylint: disable=too-many-instance-attribut
 
                 url_fmt_string = 'usearch' if mode != 'RSS' else search_string
                 try:
-                    searchURL = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params)
+                    search_url = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params)
                     if self.custom_url:
-                        searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/'))  # Must use posixpath
+                        search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/'))  # Must use posixpath
 
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL)
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url)
                     if not data:
                         logger.log(u'URL did not return data, maybe try a custom url, or a different one', logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py
index fb5da0403..3a9153b5d 100644
--- a/sickbeard/providers/newpct.py
+++ b/sickbeard/providers/newpct.py
@@ -92,10 +92,10 @@ class newpctProvider(TorrentProvider):
                 self.search_params['q'] = search_string.strip() if mode != 'RSS' else ''
                 self.search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy'
 
-                searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL, timeout=30)
+                data = self.get_url(search_url, timeout=30)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index e4f96f4e3..e50ba7bf8 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -67,14 +67,14 @@ class NyaaProvider(TorrentProvider):  # pylint: disable=too-many-instance-attrib
                 if mode != 'RSS':
                     params["term"] = search_string.encode('utf-8')
 
-                searchURL = self.url + '?' + urllib.urlencode(params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.url + '?' + urllib.urlencode(params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
                 summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
                 s = re.compile(summary_regex, re.DOTALL)
 
                 results = []
-                for curItem in self.cache.getRSSFeed(searchURL)['entries'] or []:
+                for curItem in self.cache.getRSSFeed(search_url)['entries'] or []:
                     title = curItem['title']
                     download_url = curItem['link']
                     if not all([title, download_url]):
diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py
index b175728d6..121ea1538 100644
--- a/sickbeard/providers/pretome.py
+++ b/sickbeard/providers/pretome.py
@@ -91,10 +91,10 @@ class PretomeProvider(TorrentProvider):  # pylint: disable=too-many-instance-att
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 04ce4c10f..250b2e1ec 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -99,11 +99,11 @@ class SCCProvider(TorrentProvider):  # pylint: disable=too-many-instance-attribu
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories[mode])
+                search_url = self.urls['search'] % (urllib.quote(search_string), self.categories[mode])
 
                 try:
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL)
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url)
                     time.sleep(cpu_presets[sickbeard.CPU_PRESET])
                 except Exception as e:
                     logger.log(u"Unable to fetch data. Error: %s" % repr(e), logger.WARNING)
diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py
index 23d72a450..8e3eba84f 100644
--- a/sickbeard/providers/scenetime.py
+++ b/sickbeard/providers/scenetime.py
@@ -82,10 +82,10 @@ class SceneTimeProvider(TorrentProvider):  # pylint: disable=too-many-instance-a
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (urllib.quote(search_string), self.categories)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/strike.py b/sickbeard/providers/strike.py
index 6cf31397b..c3955871d 100644
--- a/sickbeard/providers/strike.py
+++ b/sickbeard/providers/strike.py
@@ -44,9 +44,9 @@ class StrikeProvider(TorrentProvider):
                 if mode != 'RSS':
                     logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)
 
-                searchURL = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                jdata = self.get_url(searchURL, json=True)
+                search_url = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                jdata = self.get_url(search_url, json=True)
                 if not jdata:
                     logger.log(u"No data returned from provider", logger.DEBUG)
                     return []
diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py
index f8804ee77..4775e25bf 100644
--- a/sickbeard/providers/t411.py
+++ b/sickbeard/providers/t411.py
@@ -93,10 +93,10 @@ class T411Provider(TorrentProvider):  # pylint: disable=too-many-instance-attrib
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURLS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS']
-                for searchURL in searchURLS:
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL, json=True)
+                search_urlS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS']
+                for search_url in search_urlS:
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url, json=True)
                     if not data:
                         continue
 
diff --git a/sickbeard/providers/titansoftv.py b/sickbeard/providers/titansoftv.py
index dfb3044e8..7cb72144b 100644
--- a/sickbeard/providers/titansoftv.py
+++ b/sickbeard/providers/titansoftv.py
@@ -64,11 +64,11 @@ class TitansOfTVProvider(TorrentProvider):
         if search_params:
             params.update(search_params)
 
-        searchURL = self.url + '?' + urllib.urlencode(params)
+        search_url = self.url + '?' + urllib.urlencode(params)
         logger.log(u"Search string: %s " % search_params, logger.DEBUG)
-        logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+        logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-        parsedJSON = self.get_url(searchURL, json=True)  # do search
+        parsedJSON = self.get_url(search_url, json=True)  # do search
 
         if not parsedJSON:
             logger.log(u"No data returned from provider", logger.DEBUG)
diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py
index 70e1efb52..e79044502 100644
--- a/sickbeard/providers/tntvillage.py
+++ b/sickbeard/providers/tntvillage.py
@@ -302,15 +302,15 @@ class TNTVillageProvider(TorrentProvider):  # pylint: disable=too-many-instance-
                         break
 
                     if mode != 'RSS':
-                        searchURL = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string)
+                        search_url = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string)
                     else:
-                        searchURL = self.urls['search_page'].format(z, self.categories)
+                        search_url = self.urls['search_page'].format(z, self.categories)
 
                     if mode != 'RSS':
                         logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                    logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                    data = self.get_url(searchURL)
+                    logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                    data = self.get_url(search_url)
                     if not data:
                         logger.log(u"No data returned from provider", logger.DEBUG)
                         continue
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index 79fb3af8c..08e9cf3ec 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -87,10 +87,10 @@ class TorrentBytesProvider(TorrentProvider):  # pylint: disable=too-many-instanc
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_url = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py
index d4346cca0..b5b158447 100644
--- a/sickbeard/providers/torrentproject.py
+++ b/sickbeard/providers/torrentproject.py
@@ -49,12 +49,12 @@ class TorrentProjectProvider(TorrentProvider):
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8'))
+                search_url = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8'))
                 if self.custom_url:
-                    searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath
+                    search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/')) # Must use posixpath
 
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                torrents = self.get_url(searchURL, json=True)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                torrents = self.get_url(search_url, json=True)
                 if not (torrents and "total_found" in torrents and int(torrents["total_found"]) > 0):
                     logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                     continue
@@ -81,7 +81,7 @@ class TorrentProjectProvider(TorrentProvider):
                         logger.log(u"Torrent has less than 10 seeds getting dyn trackers: " + title, logger.DEBUG)
                         trackerUrl = self.urls['api'] + "" + t_hash + "/trackers_json"
                         if self.custom_url:
-                            searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath
+                            search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/')) # Must use posixpath
                         jdata = self.get_url(trackerUrl, json=True)
                         assert jdata != "maintenance"
                         download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + "".join(["&tr=" + s for s in jdata])
diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py
index 22271f4f4..c5b479fa0 100644
--- a/sickbeard/providers/xthor.py
+++ b/sickbeard/providers/xthor.py
@@ -108,9 +108,9 @@ class XthorProvider(TorrentProvider):  # pylint: disable=too-many-instance-attri
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
                 search_params['search'] = search_string
-                searchURL = self.urls['search'] + urlencode(search_params)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-                data = self.get_url(searchURL)
+                search_url = self.urls['search'] + urlencode(search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
-- 
GitLab