diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
index bd03bda7a08c954c0dcc288169a5ed7fd446e8fa..bc6b46a667431e931bbe0e797d65e42f9582561d 100644
--- a/sickbeard/providers/rarbg.py
+++ b/sickbeard/providers/rarbg.py
@@ -1,6 +1,6 @@
 # coding=utf-8
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
+# Author: Dustyn Gibson <miigotu@gmail.com>
+# URL: https://sickrage.github.io
 #
 # This file is part of SickRage.
 #
@@ -17,25 +17,18 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.

-import traceback
-import re
-import datetime
-import json
 import time
+import datetime
+from urllib import urlencode

-from sickbeard import logger
-from sickbeard import tvcache
-from sickbeard.common import USER_AGENT
+from sickbeard import logger, tvcache
 from sickbeard.indexers.indexer_config import INDEXER_TVDB
+
 from sickrage.helper.common import convert_size
 from sickrage.providers.torrent.TorrentProvider import TorrentProvider


-class GetOutOfLoop(Exception):
-    pass
-
-
-class RarbgProvider(TorrentProvider):
+class RarbgProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
     def __init__(self):

         TorrentProvider.__init__(self, "Rarbg")
@@ -47,68 +40,51 @@ class RarbgProvider(TorrentProvider):
         self.sorting = None
         self.minleech = None
         self.token = None
-        self.tokenExpireDate = None
-
-        self.urls = {'url': u'https://rarbg.com',
-                     'token': u'http://torrentapi.org/pubapi_v2.php?get_token=get_token&format=json&app_id=sickrage2',
-                     'listing': u'http://torrentapi.org/pubapi_v2.php?mode=list&app_id=sickrage2',
-                     'search': u'http://torrentapi.org/pubapi_v2.php?mode=search&app_id=sickrage2&search_string={search_string}',
-                     'search_tvdb': u'http://torrentapi.org/pubapi_v2.php?mode=search&app_id=sickrage2&search_tvdb={tvdb}&search_string={search_string}',
-                     'api_spec': u'https://rarbg.com/pubapi/apidocs.txt'}
-
-        self.url = self.urls['listing']
-
-        self.urlOptions = {
-            'categories': '&category={categories}',
-            'seeders': '&min_seeders={min_seeders}',
-            'leechers': '&min_leechers={min_leechers}',
-            'sorting': '&sort={sorting}',
-            'limit': '&limit={limit}',
-            'format': '&format={format}',
-            'ranked': '&ranked={ranked}',
-            'token': '&token={token}'
-        }
+        self.token_expires = None

-        self.defaultOptions = self.urlOptions['categories'].format(categories='tv') + \
-            self.urlOptions['limit'].format(limit='100') + \
-            self.urlOptions['format'].format(format='json_extended')
+        # Spec: https://torrentapi.org/apidocs_v2.txt
+        self.url = u'https://rarbg.com'
+        self.url_api = u'http://torrentapi.org/pubapi_v2.php'

         self.proper_strings = ['{{PROPER|REPACK}}']

-        self.next_request = datetime.datetime.now()
-
-        self.headers.update({'User-Agent': USER_AGENT})
-
         self.cache = RarbgCache(self)

     def login(self):
-        if self.token and self.tokenExpireDate and datetime.datetime.now() < self.tokenExpireDate:
+        if self.token and self.token_expires and datetime.datetime.now() < self.token_expires:
             return True

-        response = self.get_url(self.urls['token'], timeout=30, json=True)
+        login_params = {
+            'get_token': 'get_token',
+            'format': 'json',
+            'app_id': 'sickrage2'
+        }
+
+        response = self.get_url(self.url_api, params=login_params, timeout=30, json=True)
         if not response:
             logger.log(u"Unable to connect to provider", logger.WARNING)
             return False

-        try:
-            if response['token']:
-                self.token = response['token']
-                self.tokenExpireDate = datetime.datetime.now() + datetime.timedelta(minutes=14)
-                return True
-        except Exception as e:
-            logger.log(u"No token found", logger.WARNING)
-            logger.log(u"No token found: %s" % repr(e), logger.DEBUG)
-
-        return False
-
-    def search(self, search_params, age=0, ep_obj=None):
+        self.token = response.get('token')
+        self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None
+        return self.token is not None

+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches, too-many-locals
         results = []
-        items = {'Season': [], 'Episode': [], 'RSS': []}
-
         if not self.login():
             return results

+        search_params = {
+            'app_id': 'sickrage2',
+            'categories': 'tv',
+            'seeders': int(self.minseed),
+            'leechers': int(self.minleech),
+            'limit': 100,
+            'format': 'json_extended',
+            'ranked': int(self.ranked),
+            'token': self.token,
+        }
+
         if ep_obj is not None:
             ep_indexerid = ep_obj.show.indexerid
             ep_indexer = ep_obj.show.indexer
@@ -116,131 +92,58 @@ class RarbgProvider(TorrentProvider):
             ep_indexerid = None
             ep_indexer = None

-        for mode in search_params.keys():  # Mode = RSS, Season, Episode
+        for mode in search_strings:
+            items = []
             logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
-            for search_string in search_params[mode]:
-
-                if mode != 'RSS':
-                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
-
+            for search_string in search_strings[mode]:
                 if mode == 'RSS':
-                    searchURL = self.urls['listing'] + self.defaultOptions
-                elif mode == 'Season':
-                    if ep_indexer == INDEXER_TVDB:
-                        searchURL = self.urls['search_tvdb'].format(search_string=search_string, tvdb=ep_indexerid) + self.defaultOptions
-                    else:
-                        searchURL = self.urls['search'].format(search_string=search_string) + self.defaultOptions
-                elif mode == 'Episode':
-                    if ep_indexer == INDEXER_TVDB:
-                        searchURL = self.urls['search_tvdb'].format(search_string=search_string, tvdb=ep_indexerid) + self.defaultOptions
-                    else:
-                        searchURL = self.urls['search'].format(search_string=search_string) + self.defaultOptions
+                    search_params['sorting'] = 'last'
+                    search_params['mode'] = 'list'
+                    search_params.pop('search_string', None)
+                    search_params.pop('search_tvdb', None)
                 else:
-                    logger.log(u"Invalid search mode: %s " % mode, logger.ERROR)
-
-                if self.minleech:
-                    searchURL += self.urlOptions['leechers'].format(min_leechers=int(self.minleech))
-
-                if self.minseed:
-                    searchURL += self.urlOptions['seeders'].format(min_seeders=int(self.minseed))
-
-                searchURL += self.urlOptions['sorting'].format(sorting=(self.sorting if self.sorting else 'seeders', 'last')[mode == 'RSS'])
-
-                if self.ranked:
-                    searchURL += self.urlOptions['ranked'].format(ranked=int(self.ranked))
-
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
-
-                try:
-                    retry = 3
-                    while retry > 0:
-                        time_out = 0
-                        while (datetime.datetime.now() < self.next_request) and time_out <= 15:
-                            time_out += 1
-                            time.sleep(1)
-
-                        data = self.get_url(searchURL + self.urlOptions['token'].format(token=self.token))
-
-                        self.next_request = datetime.datetime.now() + datetime.timedelta(seconds=10)
-
-                        if not data:
-                            logger.log(u"No data returned from provider", logger.DEBUG)
-                            raise GetOutOfLoop
-                        if re.search('ERROR', data):
-                            logger.log(u"Error returned from provider", logger.DEBUG)
-                            raise GetOutOfLoop
-                        if re.search('No results found', data):
-                            logger.log(u"No results found", logger.DEBUG)
-                            raise GetOutOfLoop
-                        if re.search('Invalid token set!', data):
-                            logger.log(u"Invalid token!", logger.WARNING)
-                            return results
-                        if re.search('Too many requests per minute. Please try again later!', data):
-                            logger.log(u"Too many requests per minute", logger.WARNING)
-                            retry -= 1
-                            time.sleep(10)
-                            continue
-                        if re.search('Cant find search_tvdb in database. Are you sure this imdb exists?', data):
-                            logger.log(u"No results found. The tvdb id: %s do not exist on provider" % ep_indexerid, logger.WARNING)
-                            raise GetOutOfLoop
-                        if re.search('Invalid token. Use get_token for a new one!', data):
-                            logger.log(u"Invalid token, retrieving new token", logger.DEBUG)
-                            retry -= 1
-                            self.token = None
-                            self.tokenExpireDate = None
-                            if not self.login():
-                                logger.log(u"Failed retrieving new token", logger.DEBUG)
-                                return results
-                            logger.log(u"Using new token", logger.DEBUG)
-                            continue
-
-                        # No error found break
-                        break
-                    else:
-                        logger.log(u"Retried 3 times without getting results", logger.DEBUG)
-                        continue
-                except GetOutOfLoop:
-                    continue
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
+                    search_params['sorting'] = self.sorting if self.sorting else 'seeders'
+                    search_params['mode'] = 'search'

-                try:
-                    data = re.search(r'\[\{\"title\".*\}\]', data)
-                    if data is not None:
-                        data_json = json.loads(data.group())
+                    if ep_indexer == INDEXER_TVDB and ep_indexerid:
+                        search_params['search_tvdb'] = ep_indexerid
                     else:
-                        data_json = {}
-                except Exception:
-                    logger.log(u"JSON load failed: %s" % traceback.format_exc(), logger.ERROR)
-                    logger.log(u"JSON load failed. Data dump: %s" % data, logger.DEBUG)
-                    continue
+                        search_params.pop('search_tvdb', None)

-                try:
-                    for item in data_json:
-                        try:
-                            title = item['title']
-                            download_url = item['download']
-                            seeders = item['seeders']
-                            leechers = item['leechers']
-                            torrent_size = item['size']
-                            size = convert_size(torrent_size) or -1
-                            # pubdate = item['pubdate']
+                    search_params['search_string'] = search_string

-                            if not all([title, download_url]):
-                                continue
+                logger.log(u"Search URL: %s" % self.url_api + '?' + urlencode(search_params), logger.DEBUG)
+                data = self.get_url(self.url_api, params=search_params, json=True)
+                if not all([isinstance(data, dict), data.get('torrent_results')]):
+                    logger.log(u"No data returned from provider", logger.DEBUG)
+                    continue

-                            item = title, download_url, size, seeders, leechers
-                            if mode != 'RSS':
-                                logger.log(u"Found result: %s " % title, logger.DEBUG)
-                            items[mode].append(item)
+                for item in data.get('torrent_results', []):
+                    try:
+                        title = item.get('title')
+                        download_url = item.get('download')
+                        seeders = item.get('seeders', 0)
+                        leechers = item.get('leechers', 0)
+                        size = convert_size(item.get('size', -1)) or -1
+                    except Exception:
+                        logger.log(u"Skipping invalid result. JSON item: %s" % item, logger.DEBUG)
+                        continue
+
+                    if not all([title, download_url]):
+                        continue

-                        except Exception:
-                            logger.log(u"Skipping invalid result. JSON item: %s" % item, logger.DEBUG)
+                    item = title, download_url, size, seeders, leechers
+                    if mode != 'RSS':
+                        logger.log(u"Found result: %s " % title, logger.DEBUG)
+                    items.append(item)

-                except Exception:
-                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
+                time.sleep(10)

             # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[3], reverse=True)
-            results += items[mode]
+            items.sort(key=lambda tup: tup[3], reverse=True)
+            results += items

         return results

@@ -257,8 +160,8 @@ class RarbgCache(tvcache.TVCache):
         self.minTime = 10

     def _getRSSData(self):
-        search_params = {'RSS': ['']}
-        return {'entries': self.provider.search(search_params)}
+        search_strings = {'RSS': ['']}
+        return {'entries': self.provider.search(search_strings)}


 provider = RarbgProvider()