diff --git a/gui/slick/interfaces/default/editShow.tmpl b/gui/slick/interfaces/default/editShow.tmpl index 678dbaef803b3733ff1b5909733f2f6a42db0d8b..a73698500eb8399009e99eba5cd23f6908e3ee90 100644 --- a/gui/slick/interfaces/default/editShow.tmpl +++ b/gui/slick/interfaces/default/editShow.tmpl @@ -63,14 +63,16 @@ This will <b>affect the episode show search</b> on nzb and torrent provider.<br #include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_qualityChooser.tmpl") <br /> +<!-- <b>Default Episode Status:</b><br /> -(this will set a default status to be applied to any newly added episodes)<br /> +(this will set a default status for already aired episodes)<br /> <select name="defaultEpStatus" id="defaultEpStatusSelect" class="form-control form-control-inline input-sm"> #for $curStatus in [$WANTED, $SKIPPED, $ARCHIVED, $IGNORED]: <option value="$curStatus" #if $curStatus == $show.default_ep_status then 'selected="selected"' else ''#>$statusStrings[$curStatus]</option> #end for </select><br /> <br /> +--> <b>Info Language:</b><br /> (this will only affect the language of the retrieved metadata file contents and episode filenames)<br /> diff --git a/gui/slick/interfaces/default/inc_qualityChooser.tmpl b/gui/slick/interfaces/default/inc_qualityChooser.tmpl index 569667d8e85735d8682ff6cc59088f580353695d..235134f80e6a7633145dda95e895aab1561c762d 100644 --- a/gui/slick/interfaces/default/inc_qualityChooser.tmpl +++ b/gui/slick/interfaces/default/inc_qualityChooser.tmpl @@ -21,11 +21,11 @@ <div id="customQualityWrapper"> <div id="customQuality"> <div class="component-group-desc"> - <p>One of the <b>Initial</b> quality selections must be obtained before SickRage will attempt to search and process the <b>Archive</b> selections.</p> + <p><b>Preferred</b> qualities will replace an <b>Allowed</b> quality if found, initially or in the future, even if it is a lower quality.</p> </div> <div style="padding-right: 40px; text-align: left; float: left;"> - 
<h5>Initial</h4> + <h5>Allowed</h4> #set $anyQualityList = filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings) <select id="anyQualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)" class="form-control form-control-inline input-sm"> #for $curQuality in sorted($anyQualityList): @@ -35,7 +35,7 @@ </div> <div style="text-align: left; float: left;"> - <h5>Archive</h4> + <h5>Preferred</h4> #set $bestQualityList = filter(lambda x: x >= $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings) <select id="bestQualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)" class="form-control form-control-inline input-sm"> #for $curQuality in sorted($bestQualityList): diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 36b84eddbb66ba84d4aa27f3abad10608b5fd1d0..4546404c35870069ac9f372b5c38516cc698d2f5 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -36,9 +36,9 @@ from github import Github from sickbeard import providers, metadata, config, webserveInit from sickbeard.providers.generic import GenericProvider -from providers import ezrss, btn, newznab, womble, thepiratebay, oldpiratebay, torrentleech, kat, iptorrents, \ +from providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, torrentbytes, animezb, \ - freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, eztv, scenetime + freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, scenetime from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \ naming_ep_type from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \ diff --git a/sickbeard/common.py b/sickbeard/common.py index 
cafe17c4b1cac597f43b0647c34c586550ec5f4c..2786762a591646858687ad7f73a74d1aae404182 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -23,7 +23,10 @@ import re import uuid INSTANCE_ID = str(uuid.uuid1()) -USER_AGENT = ('SickRage/(' + platform.system() + '; ' + platform.release() + '; ' + INSTANCE_ID + ')') +#Use Sick Beard USER_AGENT until they stop throttling us, +#newznab searching has long been fixed, but we now limit it to 400 results just as they do. +#USER_AGENT = ('SickRage/(' + platform.system() + '; ' + platform.release() + '; ' + INSTANCE_ID + ')') +USER_AGENT = 'Sick Beard/alpha2-master' + ' (' + platform.system() + ' ' + platform.release() + ')' mediaExtensions = ['avi', 'mkv', 'mpg', 'mpeg', 'wmv', 'ogm', 'mp4', 'iso', 'img', 'divx', diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index e44c7514f92fd4d3b05df63a22a02120a5e6d84b..1d76f79847c08e07e060b6d60251b401773a62ee 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -698,41 +698,34 @@ def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=Non return (season, episodes) -def sanitizeSceneName(name, ezrss=False, anime=False): +def sanitizeSceneName(name, anime=False): """ Takes a show name and returns the "scenified" version of it. - - ezrss: If true the scenified version will follow EZRSS's cracksmoker rules as best as possible anime: Some show have a ' in their name(Kuroko's Basketball) and is needed for search. Returns: A string containing the scene version of the show name given. """ - if name: - # anime: removed ' for Kuroko's Basketball - if anime: - bad_chars = u",:()!?\u2019" - # ezrss leaves : and ! 
in their show names as far as I can tell - elif ezrss: - bad_chars = u",()'?\u2019" - else: - bad_chars = u",:()'!?\u2019" + if not name: + return u'' - # strip out any bad chars - for x in bad_chars: - name = name.replace(x, "") + bad_chars = u',:()!?\u2019' + if not anime: + bad_chars += u"'" - # tidy up stuff that doesn't belong in scene names - name = name.replace("- ", ".").replace(" ", ".").replace("&", "and").replace('/', '.') - name = re.sub("\.\.*", ".", name) + # strip out any bad chars + for x in bad_chars: + name = u'' + name.replace(x, "") - if name.endswith('.'): - name = name[:-1] + # tidy up stuff that doesn't belong in scene names + name = name.replace("- ", ".").replace(" ", ".").replace("&", "and").replace('/', '.') + name = re.sub("\.\.*", ".", name) - return name - else: - return '' + if name.endswith('.'): + name = name[:-1] + + return name _binOps = { @@ -1402,7 +1395,7 @@ def download_file(url, filename, session=None): return False except EnvironmentError, e: _remove_file_failed(filename) - logger.log(u"Unable to save the file: " + ex(e), logger.ERROR) + logger.log(u"Unable to save the file: " + ex(e), logger.WARNING) return False except Exception: _remove_file_failed(filename) diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index e2ad166259743d8ff969ae83784cfd97e325d9cb..e0761992e1fab0a8ea3457b6b2031847b47cb87d 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -26,7 +26,8 @@ import os.path import regexes import sickbeard -from sickbeard import logger, helpers, scene_numbering, common, exceptions as ex, scene_exceptions, encodingKludge as ek, db +from sickbeard import logger, helpers, scene_numbering, common, scene_exceptions, encodingKludge as ek, db +from sickbeard.exceptions import ex from dateutil import parser diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py old mode 100755 new mode 100644 index 
453a34eb3b0836e4791abb12158d961789ea2ca1..cca817b2257de0177b1ecfa10cbeeba4b0c38db9 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -16,18 +16,16 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. -__all__ = ['ezrss', - 'womble', +__all__ = ['womble', 'btn', 'thepiratebay', - 'oldpiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents', 'torrentday', 'hdbits', - 'hounddawgs', + 'hounddawgs', 'iptorrents', 'omgwtfnzbs', 'nextgen', @@ -46,7 +44,6 @@ __all__ = ['ezrss', 'rarbg', 'tntvillage', 'binsearch', - 'eztv', 'scenetime', ] diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py old mode 100755 new mode 100644 diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py index a0ae8995fca115469a4fa624b8168d553ca4bf2b..d9afe715b297358064dbd050c5052fb53ad14e93 100644 --- a/sickbeard/providers/binsearch.py +++ b/sickbeard/providers/binsearch.py @@ -96,7 +96,7 @@ class BinSearchCache(tvcache.TVCache): self.setLastUpdate() cl = [] - for group in ['alt.binaries.boneless','alt.binaries.misc','alt.binaries.hdtv','alt.binaries.hdtv.x264','alt.binaries.tv','alt.binaries.tvseries']: + for group in ['alt.binaries.boneless','alt.binaries.misc','alt.binaries.hdtv','alt.binaries.hdtv.x264','alt.binaries.tv','alt.binaries.tvseries','alt.binaries.teevee']: url = self.provider.url + 'rss.php?' urlArgs = {'max': 1000,'g': group} diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py deleted file mode 100644 index d5535c4be2d4de837327fa6598131f15bcaf8829..0000000000000000000000000000000000000000 --- a/sickbeard/providers/ezrss.py +++ /dev/null @@ -1,178 +0,0 @@ -# Author: Nic Wolfe <nic@wolfeden.ca> -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of SickRage. 
-# -# SickRage is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickRage is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickRage. If not, see <http://www.gnu.org/licenses/>. - -import urllib -import re - -try: - import xml.etree.cElementTree as etree -except ImportError: - import elementtree.ElementTree as etree - -import sickbeard -import generic - -from sickbeard.common import Quality -from sickbeard import logger -from sickbeard import tvcache -from sickbeard import helpers - - -class EZRSSProvider(generic.TorrentProvider): - def __init__(self): - - self.urls = {'base_url': 'https://www.ezrss.it/'} - - self.url = self.urls['base_url'] - - generic.TorrentProvider.__init__(self, "EZRSS") - - self.supportsBacklog = True - self.enabled = False - self.ratio = None - - self.cache = EZRSSCache(self) - - def isEnabled(self): - return self.enabled - - def imageName(self): - return 'ezrss.png' - - def getQuality(self, item, anime=False): - - try: - quality = Quality.sceneQuality(item.filename, anime) - except: - quality = Quality.UNKNOWN - - return quality - - def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False): - - self.show = show - - results = {} - - if show.air_by_date or show.sports: - logger.log(self.name + u" doesn't support air-by-date or sports backloging because of limitations on their RSS search.", - logger.WARNING) - return results - - results = generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch, downCurQuality) - - return 
results - - def _get_season_search_strings(self, ep_obj): - - params = {} - - params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8') - - if ep_obj.show.air_by_date or ep_obj.show.sports: - params['season'] = str(ep_obj.airdate).split('-')[0] - elif ep_obj.show.anime: - params['season'] = "%d" % ep_obj.scene_absolute_number - else: - params['season'] = ep_obj.scene_season - - return [params] - - def _get_episode_search_strings(self, ep_obj, add_string=''): - - params = {} - - if not ep_obj: - return params - - params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8') - - if self.show.air_by_date or self.show.sports: - params['date'] = str(ep_obj.airdate) - elif self.show.anime: - params['episode'] = "%i" % int(ep_obj.scene_absolute_number) - else: - params['season'] = ep_obj.scene_season - params['episode'] = ep_obj.scene_episode - - return [params] - - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): - - params = {"mode": "rss"} - - if search_params: - params.update(search_params) - - search_url = self.url + 'search/index.php?' 
+ urllib.urlencode(params) - - logger.log(u"Search string: " + search_url, logger.DEBUG) - - results = [] - for curItem in self.cache.getRSSFeed(search_url, items=['entries'])['entries'] or []: - - (title, url) = self._get_title_and_url(curItem) - - if title and url: - logger.log(u"RSS Feed provider: [" + self.name + "] Attempting to add item to cache: " + title, logger.DEBUG) - results.append(curItem) - - return results - - def _get_title_and_url(self, item): - (title, url) = generic.TorrentProvider._get_title_and_url(self, item) - - try: - new_title = self._extract_name_from_filename(item.filename) - except: - new_title = None - - if new_title: - title = new_title - logger.log(u"Extracted the name " + title + " from the torrent link", logger.DEBUG) - - return (title, url) - - def _extract_name_from_filename(self, filename): - name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$' - logger.log(u"Comparing " + name_regex + " against " + filename, logger.DEBUG) - match = re.match(name_regex, filename, re.I) - if match: - return match.group(1) - return None - - def seedRatio(self): - return self.ratio - - -class EZRSSCache(tvcache.TVCache): - def __init__(self, provider): - - tvcache.TVCache.__init__(self, provider) - - # only poll EZRSS every 15 minutes max - self.minTime = 15 - - def _getRSSData(self): - - rss_url = self.provider.url + 'feed/' - logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG) - - return self.getRSSFeed(rss_url) - -provider = EZRSSProvider() diff --git a/sickbeard/providers/eztv.py b/sickbeard/providers/eztv.py deleted file mode 100644 index e648b590f63df9629568d4ad4965bb1658b02ebb..0000000000000000000000000000000000000000 --- a/sickbeard/providers/eztv.py +++ /dev/null @@ -1,239 +0,0 @@ -# coding=utf-8 -# Author: Nicolas Martinelli <nicolas.martinelli@gmail.com> -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of Sick Beard. 
-# -# Sick Beard is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Sick Beard is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. -import traceback -import re, datetime - -import generic -import sickbeard -from sickbeard import classes -from sickbeard import helpers -from sickbeard import logger, tvcache, db -from sickbeard.common import Quality -from sickbeard.bs4_parser import BS4Parser - -class EZTVProvider(generic.TorrentProvider): - - def __init__(self): - generic.TorrentProvider.__init__(self, "EZTV") - - self.supportsBacklog = False - self.enabled = False - self.ratio = None - - self.cache = EZTVCache(self) - - self.urls = { - 'base_url': 'https://eztv.ch/', - 'rss': 'https://eztv.ch/', - 'episode': 'http://eztvapi.re/show/%s', - } - - self.url = self.urls['base_url'] - - def isEnabled(self): - return self.enabled - - def seedRatio(self): - return self.ratio - - def imageName(self): - return 'eztv_bt_chat.png' - - def _get_episode_search_strings(self, ep_obj, add_string=''): - - search_string = {'Episode': []} - - search_string['Episode'].append({ - 'imdb_id': self.show.imdbid, - 'season': int(ep_obj.scene_season), - 'episode': int(ep_obj.scene_episode), - 'add_string': add_string, - }) - - return [search_string] - - def getQuality(self, item, anime=False): - if 'quality' in item: - if item.get('quality') == "480p": - return Quality.SDTV - elif item.get('quality') == "720p": - return Quality.HDWEBDL - elif item.get('quality') == "1080p": - return 
Quality.FULLHDWEBDL - else: - return Quality.sceneQuality(item.get('title'), anime) - else: - return Quality.sceneQuality(item.get('title'), anime) - - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): - - results = [] - items = {'Season': [], 'Episode': [], 'RSS': []} - - for mode in search_params.keys(): - - if mode == 'RSS': - for search_string in search_params[mode]: - searchURL = self.urls['rss'] - logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG) - - HTML = self.getURL(searchURL) - if not HTML: - logger.log(u"" + self.name + " could not retrieve page URL:" + searchURL, logger.DEBUG) - return results - - try: - with BS4Parser(HTML, features=["html5lib", "permissive"]) as parsedHTML: - resultsTable = parsedHTML.find_all('tr', attrs={'name': 'hover', 'class': 'forum_header_border'}) - - if not resultsTable: - logger.log(u"The Data returned from " + self.name + " do not contains any torrent", - logger.DEBUG) - continue - - for entries in resultsTable: - title = entries.find('a', attrs={'class': 'epinfo'}).contents[0] - for link_type in ('magnet', 'download_1', 'download_3'): - link = entries.find('a', attrs={'class': link_type}) - if link: - link = link.get('href') - else: - continue - - item = { - 'title': title, - 'link': link, - } - - items[mode].append(item) - continue - - except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), - logger.ERROR) - - elif mode == 'Episode': - for search_string in search_params[mode]: - searchURL = self.urls['episode'] % (search_string['imdb_id']) - logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG) - - try: - parsedJSON = self.getURL(searchURL, json=True) - except ValueError as e: - parsedJSON = None - - if not parsedJSON: - logger.log(u"" + self.name + " could not retrieve page URL:" + searchURL, logger.DEBUG) - return results - - try: - for episode in parsedJSON['episodes']: - if 
int(episode.get('season')) == search_string.get('season') and \ - int(episode.get('episode')) == search_string.get('episode'): - - for quality in episode['torrents'].keys(): - link = episode['torrents'][quality]['url'] - if not re.match('magnet', link) and not re.match('http', link): - continue - - # Get title from link: - # 1) try magnet link - # 2) try rarbg link - # 3) try extratorrent link - # 4) try '([^/]+$)' : everything after last slash character (not accurate) - # 5) fallback, title is equal to link - if re.match('.*&dn=(.*?)&', link): - title = re.match('.*&dn=(.*?)&', link).group(1) - elif re.match('http://rarbg.to', link): - title = re.search('([^=]+$)', link).group(0) - elif re.match('http://extratorrent.cc', link): - title = re.search('([^/]+$)', link).group(0) - elif re.search('([^/]+$)', link): - title = re.search('([^/]+$)', link).group(0) - else: - title = link - - title = title.replace('+', '.').replace('%20', '.').replace('%5B', '[').replace('%5D', ']') - item = { - 'title': title, - 'link': link, - 'quality': quality - } - - # re.search in case of PROPER|REPACK. In other cases - # add_string is empty, so condition is met. 
- if 'add_string' in search_string and re.search(search_string.get('add_string'), title): - items[mode].append(item) - - break - - except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), - logger.ERROR) - - else: - logger.log(u"" + self.name + " does not accept " + mode + " mode", logger.DEBUG) - return results - - results += items[mode] - - return results - - def findPropers(self, search_date=datetime.datetime.today()): - - results = [] - - myDB = db.DBConnection() - sqlResults = myDB.select( - 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) - - if not sqlResults: - return [] - - for sqlshow in sqlResults: - self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"])) - - if self.show: - curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"])) - - searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK') - - for item in self._doSearch(searchString[0]): - title, url = self._get_title_and_url(item) - results.append(classes.Proper(title, url, datetime.datetime.today(), self.show)) - - return results - -class EZTVCache(tvcache.TVCache): - def __init__(self, provider): - tvcache.TVCache.__init__(self, provider) - - # Only poll EZTV every 5 minutes max - self.minTime = 5 - - def _getRSSData(self): - search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} - -provider = EZTVProvider() diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py old mode 100755 new mode 100644 diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 
f9a20749a885e7f481627c24b0e3a56709cb4257..ea5c5e8a98f5fb217ebdedef80e7fb7008bfdf57 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -218,7 +218,7 @@ class HDTorrentsProvider(generic.TorrentProvider): continue try: - title = entries[22].find('a')['title'].strip('History - ').replace('Blu-ray', 'bd50') + title = entries[22].find('a')['title'].replace('History - ','').replace('Blu-ray', 'bd50') url = self.urls['home'] % entries[15].find('a')['href'] download_url = self.urls['home'] % entries[15].find('a')['href'] id = entries[23].find('div')['id'] diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 763091ddcebc6e4d7be4a733aa32ee5d551accd5..bdefc78cdb8b2c583fa123e57d10990880e38146 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -214,6 +214,13 @@ class KATProvider(generic.TorrentProvider): return [search_string] + def _get_size(self, item): + title, url, id, seeders, leechers, size, pubdate = item + if not size: + return -1 + + return size + def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): results = [] @@ -243,7 +250,7 @@ class KATProvider(generic.TorrentProvider): seeders = int(item['torrent_seeds']) leechers = int(item['torrent_peers']) size = int(item['torrent_contentlength']) - except (AttributeError, TypeError): + except (AttributeError, TypeError, KeyError): continue if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech): diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py old mode 100755 new mode 100644 diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py old mode 100755 new mode 100644 index 11f6621cc0aca641d5066fe72b495f067f9066ee..96696d1bc1e436543b9ec7b6a2673befef9be7af --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -270,7 +270,8 @@ class NewznabProvider(generic.NZBProvider): results = [] offset = total = 0 - while (total >= 
offset) and (offset < 1000): + # Limit to 400 results, like Sick Beard does, to prevent throttling + while (total >= offset) and (offset <= 400): search_url = self.url + 'api?' + urllib.urlencode(params) logger.log(u"Search url: " + search_url, logger.DEBUG) @@ -300,22 +301,19 @@ class NewznabProvider(generic.NZBProvider): # No items found, prevent from doing another search if total == 0: break - + if offset != params['offset']: logger.log("Tell your newznab provider to fix their bloody newznab responses") break - + params['offset'] += params['limit'] - if (total > int(params['offset'])): + if (total > int(params['offset'])) and (int(params['offset']) <= 400): offset = int(params['offset']) # if there are more items available then the amount given in one call, grab some more - logger.log(str( - total - int(params['offset'])) + " more items to be fetched from provider. Fetching another " + str( - params['limit']) + " items.", logger.DEBUG) + logger.log(u'%d' % (total - offset) + ' more items to be fetched from provider.' + + 'Fetching another %d' % int(params['limit']) + ' items.', logger.DEBUG) else: - logger.log(str( - total - int(params['offset'])) + " No more searches needed, could find anything I was looking for! " + str( - params['limit']) + " items.", logger.DEBUG) + logger.log(u'No more searches needed.', logger.DEBUG) break time.sleep(0.2) @@ -389,8 +387,8 @@ class NewznabCache(tvcache.TVCache): tvcache.TVCache.__init__(self, provider) - # only poll newznab providers every 15 minutes max - self.minTime = 15 + # only poll newznab providers every 30 minutes max, doubled so we don't get throttled again. 
+ self.minTime = 30 def _getRSSData(self): diff --git a/sickbeard/providers/oldpiratebay.py b/sickbeard/providers/oldpiratebay.py deleted file mode 100644 index d6de1213bdd364828a91946016bdf789f4b6bc51..0000000000000000000000000000000000000000 --- a/sickbeard/providers/oldpiratebay.py +++ /dev/null @@ -1,350 +0,0 @@ -# Author: Mr_Orange <mr_orange@hotmail.it> -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of SickRage. -# -# SickRage is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickRage is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
- -from __future__ import with_statement - -import time -import re -import urllib, urllib2, urlparse -import sys -import os -import traceback -import datetime - -import sickbeard -import generic -from sickbeard.common import Quality, cpu_presets -from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickbeard import db -from sickbeard import classes -from sickbeard import logger -from sickbeard import tvcache -from sickbeard import helpers -from sickbeard import clients -from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName -from sickbeard.bs4_parser import BS4Parser -from sickbeard.common import Overview -from sickbeard.exceptions import ex -from sickbeard import encodingKludge as ek -from lib import requests -from lib.requests import exceptions -from lib.unidecode import unidecode - - -class OldPirateBayProvider(generic.TorrentProvider): - def __init__(self): - - generic.TorrentProvider.__init__(self, "OldPirateBay") - - self.supportsBacklog = True - - self.enabled = False - self.ratio = None - self.confirmed = False - self.minseed = None - self.minleech = None - - self.cache = OldPirateBayCache(self) - - self.urls = {'base_url': 'https://oldpiratebay.org/'} - - self.url = self.urls['base_url'] - - self.searchurl = self.url + 'search.php?q=%s&Torrent_sort=seeders.desc' # order by seed - - self.re_title_url = '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>' - - def isEnabled(self): - return self.enabled - - def imageName(self): - return 'oldpiratebay.png' - - def getQuality(self, item, anime=False): - - quality = Quality.sceneQuality(item[0], anime) - return quality - - def _reverseQuality(self, quality): - - quality_string = '' - - if quality == Quality.SDTV: - quality_string = 'HDTV x264' - if quality == Quality.SDDVD: - quality_string = 'DVDRIP' - elif quality == Quality.HDTV: - quality_string = '720p HDTV x264' - elif 
quality == Quality.FULLHDTV: - quality_string = '1080p HDTV x264' - elif quality == Quality.RAWHDTV: - quality_string = '1080i HDTV mpeg2' - elif quality == Quality.HDWEBDL: - quality_string = '720p WEB-DL h264' - elif quality == Quality.FULLHDWEBDL: - quality_string = '1080p WEB-DL h264' - elif quality == Quality.HDBLURAY: - quality_string = '720p Bluray x264' - elif quality == Quality.FULLHDBLURAY: - quality_string = '1080p Bluray x264' - - return quality_string - - def _find_season_quality(self, title, torrent_id, ep_number): - """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """ - - mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', - 'vob', 'dvr-ms', 'wtv', 'ts' - 'ogv', 'rar', 'zip', 'mp4'] - - quality = Quality.UNKNOWN - - fileName = None - - fileURL = self.url + 'torrent/' + str(torrent_id) - data = self.getURL(fileURL) - if not data: - return None - - try: - with BS4Parser(data, features=["html5lib", "permissive"]) as soup: - files_tbody = soup.find('div', attrs={'class': 'description-files'}).find('tbody') - if (not files_tbody): - return None - files = [] - rows = files_tbody.find_all('tr') - for row in rows: - files.append(row.find_all('td')[1].text) - - videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files) - - #Filtering SingleEpisode/MultiSeason Torrent - if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1): - logger.log(u"Result " + title + " have " + str( - ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG) - logger.log( - u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", - logger.DEBUG) - return None - - if Quality.sceneQuality(title) != Quality.UNKNOWN: - return title - - for fileName in videoFiles: - quality = Quality.sceneQuality(os.path.basename(fileName)) - if quality != Quality.UNKNOWN: break - - if fileName is not None and quality == 
Quality.UNKNOWN: - quality = Quality.assumeQuality(os.path.basename(fileName)) - - if quality == Quality.UNKNOWN: - logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG) - return None - - try: - myParser = NameParser(showObj=self.show) - parse_result = myParser.parse(fileName) - except (InvalidNameException, InvalidShowException): - return None - - logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG) - - if parse_result.series_name and parse_result.season_number: - title = parse_result.series_name + ' S%02d' % int( - parse_result.season_number) + ' ' + self._reverseQuality(quality) - - return title - - except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) - - def _get_season_search_strings(self, ep_obj): - - search_string = {'Season': []} - for show_name in set(allPossibleShowNames(self.show)): - if ep_obj.show.air_by_date or ep_obj.show.sports: - ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0] - search_string['Season'].append(ep_string) - ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0] - search_string['Season'].append(ep_string) - elif ep_obj.show.anime: - ep_string = show_name + ' ' + "%02d" % ep_obj.scene_absolute_number - search_string['Season'].append(ep_string) - else: - ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) - search_string['Season'].append(ep_string) - ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*' - search_string['Season'].append(ep_string) - - search_string['Season'].append(ep_string) - - return [search_string] - - def _get_episode_search_strings(self, ep_obj, add_string=''): - - search_string = {'Episode': []} - - if self.show.air_by_date: - for show_name in set(allPossibleShowNames(self.show)): - ep_string = sanitizeSceneName(show_name) + ' ' + \ - str(ep_obj.airdate).replace('-', ' ') - search_string['Episode'].append(ep_string) - 
elif self.show.sports: - for show_name in set(allPossibleShowNames(self.show)): - ep_string = sanitizeSceneName(show_name) + ' ' + \ - str(ep_obj.airdate).replace('-', '|') + '|' + \ - ep_obj.airdate.strftime('%b') - search_string['Episode'].append(ep_string) - elif self.show.anime: - for show_name in set(allPossibleShowNames(self.show)): - ep_string = sanitizeSceneName(show_name) + ' ' + \ - "%02i" % int(ep_obj.scene_absolute_number) - search_string['Episode'].append(ep_string) - else: - for show_name in set(allPossibleShowNames(self.show)): - ep_string = sanitizeSceneName(show_name) + ' ' + \ - sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season, - 'episodenumber': ep_obj.scene_episode} + '|' + \ - sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season, - 'episodenumber': ep_obj.scene_episode} + ' %s' % add_string - search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) - - return [search_string] - - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): - - results = [] - items = {'Season': [], 'Episode': [], 'RSS': []} - - for mode in search_params.keys(): - for search_string in search_params[mode]: - if isinstance(search_string, unicode): - search_string = unidecode(search_string) - - if mode != 'RSS': - searchURL = self.searchurl % (urllib.quote(search_string)) - else: - searchURL = self.url + 'search?iht=8&sort=-created_at' - - logger.log(u"Search string: " + searchURL, logger.DEBUG) - - data = self.getURL(searchURL) - if not data: - continue - - re_title_url = self.proxy._buildRE(self.re_title_url) - - match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data)) - - for torrent in match: - title = torrent.group('title').replace('_', - '.') #Do not know why but SickBeard skip release with '_' in name - url = torrent.group('url') - id = int(torrent.group('id')) - seeders = int(torrent.group('seeders')) - leechers = int(torrent.group('leechers')) - - #Filter 
unseeded torrent - if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech): - continue - - #Accept Torrent only from Good People for every Episode Search - if self.confirmed and re.search('(VIP|Trusted|Helper|Moderator)', torrent.group(0)) is None: - logger.log(u"OldPirateBay Provider found result " + torrent.group( - 'title') + " but that doesn't seem like a trusted result so I'm ignoring it", logger.DEBUG) - continue - - #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent - if mode == 'Season' and search_mode == 'sponly': - ep_number = int(epcount / len(set(allPossibleShowNames(self.show)))) - title = self._find_season_quality(title, id, ep_number) - - if not title or not url: - continue - - item = title, url, id, seeders, leechers - - items[mode].append(item) - - #For each search mode sort all the items by seeders - items[mode].sort(key=lambda tup: tup[3], reverse=True) - - results += items[mode] - - return results - - def _get_title_and_url(self, item): - - title, url, id, seeders, leechers = item - - if title: - title = self._clean_title_from_provider(title) - - if url: - url = url.replace('&', '&') - - return (title, url) - - def findPropers(self, search_date=datetime.datetime.today()): - - results = [] - - myDB = db.DBConnection() - sqlResults = myDB.select( - 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) - - if not sqlResults: - return [] - - for sqlshow in sqlResults: - self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"])) - - if self.show: - curEp = self.show.getEpisode(int(sqlshow["season"]), 
int(sqlshow["episode"])) - - searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK') - - for item in self._doSearch(searchString[0]): - title, url = self._get_title_and_url(item) - if re.search('(PROPER|REPACK)', title, re.I): - results.append(classes.Proper(title, url, datetime.datetime.today(), self.show)) - - return results - - def seedRatio(self): - return self.ratio - - -class OldPirateBayCache(tvcache.TVCache): - def __init__(self, provider): - - tvcache.TVCache.__init__(self, provider) - - # only poll OldPirateBay every 10 minutes max - self.minTime = 20 - - def _getRSSData(self): - search_params = {'RSS': ['rss']} - return {'entries': self.provider._doSearch(search_params)} - -provider = OldPirateBayProvider() diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index d1cb1453c361194d63995acf93b7706b4509cb5c..6d4265474752ce6f6f4dda48e67f5cf437f26c46 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -101,6 +101,14 @@ class OmgwtfnzbsProvider(generic.NZBProvider): def _get_title_and_url(self, item): return (item['release'], item['getnzb']) + def _get_size(self, item): + try: + size = int(item['sizebytes']) + except (ValueError, TypeError, AttributeError, KeyError): + return -1 + + return size + def _doSearch(self, search, search_mode='eponly', epcount=0, retention=0, epObj=None): self._checkAuth() diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index e0d938eea1769cfa59d05f1e1af71be80e231bc4..6b11252a90a9c5536d42f9b0cf89ef00063af1e4 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -65,11 +65,11 @@ class RarbgProvider(generic.TorrentProvider): self.tokenExpireDate = None self.urls = {'url': u'https://rarbg.com', - 'token': u'https://torrentapi.org/pubapi.php?get_token=get_token&format=json', - 'listing': u'https://torrentapi.org/pubapi.php?mode=list', - 'search': 
u'https://torrentapi.org/pubapi.php?mode=search&search_string={search_string}', - 'search_tvdb': u'https://torrentapi.org/pubapi.php?mode=search&search_tvdb={tvdb}&search_string={search_string}', - 'search_tvrage': u'https://torrentapi.org/pubapi.php?mode=search&search_tvrage={tvrage}&search_string={search_string}', + 'token': u'https://torrentapi.org/pubapi_v2.php?get_token=get_token&format=json&app_id=sickrage', + 'listing': u'https://torrentapi.org/pubapi_v2.php?mode=list&app_id=sickrage', + 'search': u'https://torrentapi.org/pubapi_v2.php?mode=search&app_id=sickrage&search_string={search_string}', + 'search_tvdb': u'https://torrentapi.org/pubapi_v2.php?mode=search&app_id=sickrage&search_tvdb={tvdb}&search_string={search_string}', + 'search_tvrage': u'https://torrentapi.org/pubapi_v2.php?mode=search&app_id=sickrage&search_tvrage={tvrage}&search_string={search_string}', 'api_spec': u'https://rarbg.com/pubapi/apidocs.txt', } @@ -85,7 +85,7 @@ class RarbgProvider(generic.TorrentProvider): 'token': '&token={token}', } - self.defaultOptions = self.urlOptions['categories'].format(categories='18;41') + \ + self.defaultOptions = self.urlOptions['categories'].format(categories='tv') + \ self.urlOptions['limit'].format(limit='100') + \ self.urlOptions['format'].format(format='json') @@ -294,7 +294,7 @@ class RarbgProvider(generic.TorrentProvider): continue try: - data = re.search('\[\{\"f\".*\}\]', data) + data = re.search('\[\{\"filename\".*\}\]', data) if data is not None: data_json = json.loads(data.group()) else: @@ -308,8 +308,8 @@ class RarbgProvider(generic.TorrentProvider): try: for item in data_json: try: - torrent_title = item['f'] - torrent_download = item['d'] + torrent_title = item['filename'] + torrent_download = item['download'] if torrent_title and torrent_download: items[mode].append((torrent_title, torrent_download)) else: diff --git a/sickbeard/search.py b/sickbeard/search.py index 
b3e572ebea1932268a6e336118fae35ec0cc24c7..e27f64b4a657c1178ee1ec8f5ea280d31894adcd 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -187,7 +187,7 @@ def snatchEpisode(result, endStatus=SNATCHED): return True -def pickBestResult(results, show, quality_list=None): +def pickBestResult(results, show): results = results if isinstance(results, list) else [results] logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG) @@ -199,18 +199,10 @@ def pickBestResult(results, show, quality_list=None): if show and cur_result.show is not show: continue - # filter out possible bad torrents from providers such as ezrss - if isinstance(cur_result, sickbeard.classes.SearchResult): - if cur_result.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole": - if not cur_result.url.startswith('magnet'): - cur_result.content = cur_result.provider.getURL(cur_result.url) - if not cur_result.content: - continue - else: - if not cur_result.url.startswith('magnet'): - cur_result.content = cur_result.provider.getURL(cur_result.url) - if not cur_result.content: - continue + if not cur_result.url.startswith('magnet'): + cur_result.content = cur_result.provider.getURL(cur_result.url) + if not cur_result.content: + continue # build the black And white list if show.is_anime: @@ -219,7 +211,9 @@ def pickBestResult(results, show, quality_list=None): logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality]) - if quality_list and cur_result.quality not in quality_list: + anyQualities, bestQualities = Quality.splitQuality(show.quality) + + if cur_result.quality not in anyQualities + bestQualities: logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG) continue @@ -244,10 +238,11 @@ def pickBestResult(results, show, quality_list=None): logger.log(cur_result.name + u" has previously failed, rejecting it") continue - if not bestResult or bestResult.quality < 
cur_result.quality and cur_result.quality != Quality.UNKNOWN: + if cur_result.quality in bestQualities and (not bestResult or bestResult.quality < cur_result.quality or bestResult not in bestQualities): bestResult = cur_result - - elif bestResult.quality == cur_result.quality: + elif cur_result.quality in anyQualities and (not bestResult or bestResult not in bestQualities) and (not bestResult or bestResult.quality < cur_result.quality): + bestResult = cur_result + elif bestResult and bestResult.quality == cur_result.quality: if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower(): bestResult = cur_result elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower(): @@ -293,14 +288,7 @@ def isFinalResult(result): return True elif best_qualities and result.quality == max(best_qualities): - - # if this is the best redownload but we have a higher initial download then keep looking - if any_qualities and result.quality < max(any_qualities): - return False - - # if this is the best redownload and we don't have a higher initial download then we're done - else: - return True + return True # if we got here than it's either not on the lists, they're empty, or it's lower than the highest required else: @@ -506,13 +494,10 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False): if not len(foundResults[curProvider.name]): continue - anyQualities, bestQualities = Quality.splitQuality(show.quality) - # pick the best season NZB bestSeasonResult = None if SEASON_RESULT in foundResults[curProvider.name]: - bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show, - anyQualities + bestQualities) + bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show) highest_quality_overall = 0 for cur_episode in foundResults[curProvider.name]: diff --git a/sickbeard/tv.py b/sickbeard/tv.py index 
727fb7a19fb103a3c1646361029333b7e6b171d0..1b0b8199e7ac9fd68a6e0b1103eb0e08da3c5cb6 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -1270,7 +1270,7 @@ class TVShow(object): logger.DEBUG) # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have, or we only have one bestQuality and we do not have that quality yet - if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST and quality in bestQualities and (quality > curQuality or (quality != curQuality and len(bestQualities) == 1)) : + if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST and quality in bestQualities and (quality > curQuality or curQuality not in bestQualities): logger.log(u"Episode already exists but the found episode quality is wanted more, getting found episode", logger.DEBUG) return True diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 9ce20e82af55f667d7101b4d782d65fe57c49325..2aa55de1b22dc6f44dc5127b7daa0e8fd4028ce1 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -2388,9 +2388,11 @@ class CMD_ShowPause(ApiCall): if self.pause: showObj.paused = 1 + showObj.saveToDB() return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has been paused") else: showObj.paused = 0 + showObj.saveToDB() return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has been unpaused") class CMD_ShowRefresh(ApiCall): diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 0a66efbb090f07fab0515421f6ababa1778ff8d9..7d99e1a615aaf4950f3bc2fbb43b31fc95401196 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -1447,7 +1447,7 @@ class Home(WebRoot): showObj.sports = sports showObj.subtitles = subtitles showObj.air_by_date = air_by_date - showObj.default_ep_status = int(defaultEpStatus) + #showObj.default_ep_status = int(defaultEpStatus) if not directCall: showObj.lang = indexer_lang diff --git a/tests/all_tests.py 
b/tests/all_tests.py index b20c0c35745b87dc98d73856639cdf10603b4f33..4f0c9e46008ff1057b77eaab7b20a4f8525738f8 100755 --- a/tests/all_tests.py +++ b/tests/all_tests.py @@ -29,7 +29,6 @@ sys.path.insert(1, os.path.join(tests_dir, '..')) class AllTests(unittest.TestCase): #Block issue_submitter_tests to avoid issue tracker spam on every build - #Block feedparser_tests because http://lolo.sickbeard.com/ has changed api, which makes the test fail blacklist = [tests_dir + 'all_tests.py', tests_dir + 'issue_submitter_tests.py'] def setUp(self): self.test_file_strings = [ x for x in glob.glob(tests_dir + '*_tests.py') if not x in self.blacklist ] diff --git a/tests/test_lib.py b/tests/test_lib.py index f504816a7a1320024e5694d5b44b62ae438c031c..841b3a9688dfd243d716c810896610522b56e079 100644 --- a/tests/test_lib.py +++ b/tests/test_lib.py @@ -85,7 +85,7 @@ sickbeard.NAMING_MULTI_EP = 1 sickbeard.PROVIDER_ORDER = ["sick_beard_index"] -sickbeard.newznabProviderList = providers.getNewznabProviderList("'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040,5060|0|eponly|0!!!NZBs.org|https://nzbs.org/||5030,5040,5060,5070,5090|0|eponly|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0|eponly|0'") +sickbeard.newznabProviderList = providers.getNewznabProviderList("'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/||5030,5040,5060,5070,5090|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0|eponly|0|0|0'") sickbeard.providerList = providers.makeProviderList() sickbeard.PROG_DIR = os.path.abspath('..')