Private GIT

Skip to content
Snippets Groups Projects
Commit dc176961 authored by miigotu's avatar miigotu
Browse files

Merge pull request #688 from SickRage/providers-fixup

Alpharatio fixes
parents 0c94fba9 bad6e178
Branches
Tags
No related merge requests found
...@@ -19,16 +19,16 @@ ...@@ -19,16 +19,16 @@
# along with SickRage. If not, see <http://www.gnu.org/licenses/>. # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re import re
import traceback from urllib import urlencode
from sickbeard import logger from sickbeard import logger
from sickbeard import tvcache from sickbeard import tvcache
from sickbeard.bs4_parser import BS4Parser from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size from sickrage.helper.common import try_int, convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class AlphaRatioProvider(TorrentProvider): class AlphaRatioProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes
def __init__(self): def __init__(self):
...@@ -40,25 +40,23 @@ class AlphaRatioProvider(TorrentProvider): ...@@ -40,25 +40,23 @@ class AlphaRatioProvider(TorrentProvider):
self.minseed = None self.minseed = None
self.minleech = None self.minleech = None
self.urls = {'base_url': 'http://alpharatio.cc/', self.url = 'http://alpharatio.cc/'
'login': 'http://alpharatio.cc/login.php', self.urls = {
'detail': 'http://alpharatio.cc/torrents.php?torrentid=%s', 'login': self.url + 'login.php',
'search': 'http://alpharatio.cc/torrents.php?searchstr=%s%s', 'search': self.url +'torrents.php',
'download': 'http://alpharatio.cc/%s'} }
self.url = self.urls['base_url']
self.categories = "&filter_cat[1]=1&filter_cat[2]=1&filter_cat[3]=1&filter_cat[4]=1&filter_cat[5]=1"
self.proper_strings = ['PROPER', 'REPACK'] self.proper_strings = ['PROPER', 'REPACK']
self.cache = AlphaRatioCache(self) self.cache = AlphaRatioCache(self)
def login(self): def login(self):
login_params = {'username': self.username, login_params = {
'username': self.username,
'password': self.password, 'password': self.password,
'remember_me': 'on', 'remember_me': 'on',
'login': 'submit'} 'login': 'submit'
}
response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
if not response: if not response:
...@@ -72,7 +70,7 @@ class AlphaRatioProvider(TorrentProvider): ...@@ -72,7 +70,7 @@ class AlphaRatioProvider(TorrentProvider):
return True return True
def search(self, search_strings, age=0, ep_obj=None): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches
results = [] results = []
items = {'Season': [], 'Episode': [], 'RSS': []} items = {'Season': [], 'Episode': [], 'RSS': []}
...@@ -80,6 +78,15 @@ class AlphaRatioProvider(TorrentProvider): ...@@ -80,6 +78,15 @@ class AlphaRatioProvider(TorrentProvider):
if not self.login(): if not self.login():
return results return results
search_params = {
'searchstr': '',
'filter_cat[1]':1,
'filter_cat[2]':1,
'filter_cat[3]':1,
'filter_cat[4]':1,
'filter_cat[5]':1
}
for mode in search_strings.keys(): for mode in search_strings.keys():
logger.log(u"Search Mode: %s" % mode, logger.DEBUG) logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
for search_string in search_strings[mode]: for search_string in search_strings[mode]:
...@@ -87,16 +94,16 @@ class AlphaRatioProvider(TorrentProvider): ...@@ -87,16 +94,16 @@ class AlphaRatioProvider(TorrentProvider):
if mode != 'RSS': if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG) logger.log(u"Search string: %s " % search_string, logger.DEBUG)
searchURL = self.urls['search'] % (search_string, self.categories) search_params['searchstr'] = search_string
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) search_url = self.urls['search'] + '?' + urlencode(search_params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
data = self.get_url(searchURL) data = self.get_url(search_url)
if not data: if not data:
continue continue
try:
with BS4Parser(data, 'html5lib') as html: with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', attrs={'id': 'torrent_table'}) torrent_table = html.find('table', id='torrent_table')
torrent_rows = torrent_table.find_all('tr') if torrent_table else [] torrent_rows = torrent_table.find_all('tr') if torrent_table else []
# Continue only if one Release is found # Continue only if one Release is found
...@@ -104,20 +111,30 @@ class AlphaRatioProvider(TorrentProvider): ...@@ -104,20 +111,30 @@ class AlphaRatioProvider(TorrentProvider):
logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
continue continue
def process_column_header(td):
result = ''
if td.a and td.a.img:
result = td.a.img.get('title', td.a.get_text(strip=True))
if not result:
result = td.get_text(strip=True)
return result
# '', '', 'Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers'
labels = [process_column_header(label) for label in torrent_rows[0].find_all('td')]
# Skip column headers
for result in torrent_rows[1:]: for result in torrent_rows[1:]:
cells = result.find_all('td') cells = result.find_all('td')
link = result.find('a', attrs={'dir': 'ltr'}) if len(cells) < len(labels):
url = result.find('a', attrs={'title': 'Download'}) continue
try: try:
num_cells = len(cells) title = cells[labels.index('Name /Year')].find('a', dir='ltr').get_text(strip=True)
title = link.contents[0] if link.contents[0] else None download_url = self.url + cells[labels.index('Name /Year')].find('a', title='Download')['href']
download_url = self.urls['download'] % (url['href']) if url['href'] else None seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
seeders = cells[num_cells - 2].contents[0] if cells[len(cells) - 2].contents[0] else 1 leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
leechers = cells[num_cells - 1].contents[0] if cells[len(cells) - 1].contents[0] else 0 size = convert_size(cells[labels.index('Size')].get_text(strip=True)) or -1
torrent_size = cells[len(cells) - 4].contents[0] except StandardError:
size = convert_size(torrent_size) or -1
except (AttributeError, TypeError, KeyError, ValueError):
continue continue
if not all([title, download_url]): if not all([title, download_url]):
...@@ -135,9 +152,6 @@ class AlphaRatioProvider(TorrentProvider): ...@@ -135,9 +152,6 @@ class AlphaRatioProvider(TorrentProvider):
items[mode].append(item) items[mode].append(item)
except Exception:
logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING)
# For each search mode sort all the items by seeders if available # For each search mode sort all the items by seeders if available
items[mode].sort(key=lambda tup: tup[3], reverse=True) items[mode].sort(key=lambda tup: tup[3], reverse=True)
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.