Private GIT

Skip to content
Snippets Groups Projects
Commit beee97e4 authored by miigotu's avatar miigotu
Browse files

Merge pull request #2004 from miigotu/patch

Fix and clean SCC, don't spam them with faulty queries, and process e…
parents b679bbbd b85a1d96
Branches
Tags
No related merge requests found
...@@ -110,44 +110,35 @@ class SCCProvider(generic.TorrentProvider): ...@@ -110,44 +110,35 @@ class SCCProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj): def _get_season_search_strings(self, ep_obj):
search_string = {'Season': []} search_strings = []
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
if ep_obj.show.air_by_date or ep_obj.show.sports: if ep_obj.show.air_by_date or ep_obj.show.sports:
ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0] sp_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
elif ep_obj.show.anime: elif ep_obj.show.anime:
ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number sp_string = show_name + ' %d' % ep_obj.scene_absolute_number
else: else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX sp_string = show_name + ' S%02d' % int(ep_obj.scene_season)
search_string['Season'].append(ep_string) search_strings.append(sp_string)
return [search_string] return search_strings
def _get_episode_search_strings(self, ep_obj, add_string=''): def _get_episode_search_strings(self, ep_obj, add_string=''):
search_string = {'Episode': []} search_strings = []
if not ep_obj: if not ep_obj:
return [] return []
if self.show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \ if self.show.air_by_date:
str(ep_obj.airdate).replace('-', '.') ep_string = sanitizeSceneName(show_name) + ' ' + str(ep_obj.airdate).replace('-', '.')
search_string['Episode'].append(ep_string)
elif self.show.sports: elif self.show.sports:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): ep_string = sanitizeSceneName(show_name) + ' ' + str(ep_obj.airdate).replace('-', '.') + '|' + \
ep_string = sanitizeSceneName(show_name) + ' ' + \
str(ep_obj.airdate).replace('-', '.') + '|' + \
ep_obj.airdate.strftime('%b') ep_obj.airdate.strftime('%b')
search_string['Episode'].append(ep_string)
elif self.show.anime: elif self.show.anime:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): ep_string = sanitizeSceneName(show_name) + ' %i' % int(ep_obj.scene_absolute_number)
ep_string = sanitizeSceneName(show_name) + ' ' + \
"%i" % int(ep_obj.scene_absolute_number)
search_string['Episode'].append(ep_string)
else: else:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season, sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
'episodenumber': ep_obj.scene_episode} 'episodenumber': ep_obj.scene_episode}
...@@ -155,35 +146,28 @@ class SCCProvider(generic.TorrentProvider): ...@@ -155,35 +146,28 @@ class SCCProvider(generic.TorrentProvider):
if len(add_string): if len(add_string):
ep_string += ' %s' % add_string ep_string += ' %s' % add_string
search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) search_strings.append(ep_string)
return [search_string] return search_strings
def _isSection(self, section, text): def _isSection(self, section, text):
title = '<title>.+? \| %s</title>' % section title = '<title>.+? \| %s</title>' % section
if re.search(title, text, re.IGNORECASE): return re.search(title, text, re.IGNORECASE)
return True
else:
return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
results = [] results = data = []
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin(): if not self._doLogin():
return results return results
data = [] for search_string in [search_params]:
searchURLS = []
for mode in search_params.keys():
for search_string in search_params[mode]:
if isinstance(search_string, unicode): if isinstance(search_string, unicode):
search_string = unidecode(search_string) search_string = unidecode(search_string)
if mode == 'Season' and search_mode == 'sponly': searchURLS = []
if search_mode == 'sponly':
searchURLS += [self.urls['archive'] % (urllib.quote(search_string))] searchURLS += [self.urls['archive'] % (urllib.quote(search_string))]
else: else:
searchURLS += [self.urls['search'] % (urllib.quote(search_string), self.categories)] searchURLS += [self.urls['search'] % (urllib.quote(search_string), self.categories)]
...@@ -193,27 +177,21 @@ class SCCProvider(generic.TorrentProvider): ...@@ -193,27 +177,21 @@ class SCCProvider(generic.TorrentProvider):
for searchURL in searchURLS: for searchURL in searchURLS:
logger.log(u"Search string: " + searchURL, logger.DEBUG) logger.log(u"Search string: " + searchURL, logger.DEBUG)
try: try:
data += [x for x in [self.getURL(searchURL)] if x] data = self.getURL(searchURL)
time.sleep(cpu_presets[sickbeard.CPU_PRESET]) time.sleep(cpu_presets[sickbeard.CPU_PRESET])
except Exception as e: except Exception as e:
logger.log(u"Unable to fetch data reason: {0}".format(str(e)), logger.WARNING) logger.log(u"Unable to fetch data reason: {0}".format(str(e)), logger.WARNING)
if not len(data): if not data:
continue continue
try: with BS4Parser(data, features=["html5lib", "permissive"]) as html:
for dataItem in data:
with BS4Parser(dataItem, features=["html5lib", "permissive"]) as html:
torrent_table = html.find('table', attrs={'id': 'torrents-table'}) torrent_table = html.find('table', attrs={'id': 'torrents-table'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else [] torrent_rows = torrent_table.find_all('tr') if torrent_table else []
#Continue only if at least one Release is found #Continue only if at least one Release is found
if len(torrent_rows) < 2: if len(torrent_rows) < 2:
if html.title: logger.log(u'The Data returned from %s%s does not contain any torrent' % (self.name, ('', ' (%s)' % html.title)[html.title]), logger.DEBUG)
source = self.name + " (" + html.title.string + ")"
else:
source = self.name
logger.log(u"The Data returned from " + source + " does not contain any torrent", logger.DEBUG)
continue continue
for result in torrent_table.find_all('tr')[1:]: for result in torrent_table.find_all('tr')[1:]:
...@@ -222,7 +200,7 @@ class SCCProvider(generic.TorrentProvider): ...@@ -222,7 +200,7 @@ class SCCProvider(generic.TorrentProvider):
link = result.find('td', attrs={'class': 'ttr_name'}).find('a') link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
all_urls = result.find('td', attrs={'class': 'td_dl'}).find_all('a', limit=2) all_urls = result.find('td', attrs={'class': 'td_dl'}).find_all('a', limit=2)
# Foreign section contain two links, the others one # Foreign section contain two links, the others one
if self._isSection('Foreign', dataItem): if self._isSection('Foreign', data):
url = all_urls[1] url = all_urls[1]
else: else:
url = all_urls[0] url = all_urls[0]
...@@ -240,24 +218,15 @@ class SCCProvider(generic.TorrentProvider): ...@@ -240,24 +218,15 @@ class SCCProvider(generic.TorrentProvider):
except (AttributeError, TypeError): except (AttributeError, TypeError):
continue continue
if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech): if not title or not download_url or seeders < self.minseed or leechers < self.minleech:
continue
if not title or not download_url:
continue continue
item = title, download_url, id, seeders, leechers item = title, download_url, id, seeders, leechers
logger.log(u"Found result: " + title.replace(' ','.') + " (" + searchURL + ")", logger.DEBUG) logger.log(u"Found result: " + title.replace(' ','.') + " (" + searchURL + ")", logger.DEBUG)
items[mode].append(item) results.append(item)
# for each search mode sort all the items by seeders results.sort(key=lambda tup: tup[3], reverse=True)
items[mode].sort(key=lambda tup: tup[3], reverse=True)
results += items[mode]
except Exception, e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
continue
return results return results
...@@ -311,11 +280,11 @@ class SCCCache(tvcache.TVCache): ...@@ -311,11 +280,11 @@ class SCCCache(tvcache.TVCache):
tvcache.TVCache.__init__(self, provider) tvcache.TVCache.__init__(self, provider)
# only poll SCC every 10 minutes max # only poll SCC every 20 minutes max
self.minTime = 20 self.minTime = 20
def _getRSSData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = []
return {'entries': self.provider._doSearch(search_params)} return {'entries': self.provider._doSearch(search_params)}
provider = SCCProvider() provider = SCCProvider()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment