Private GIT

Skip to content
Snippets Groups Projects
Commit bad6e178 authored by miigotu's avatar miigotu
Browse files

AlphaRatio

parent 6640de1a
No related branches found
No related tags found
No related merge requests found
......@@ -19,16 +19,16 @@
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
from urllib import urlencode
from sickbeard import logger
from sickbeard import tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size
from sickrage.helper.common import try_int, convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class AlphaRatioProvider(TorrentProvider):
class AlphaRatioProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes
def __init__(self):
......@@ -40,25 +40,23 @@ class AlphaRatioProvider(TorrentProvider):
self.minseed = None
self.minleech = None
self.urls = {'base_url': 'http://alpharatio.cc/',
'login': 'http://alpharatio.cc/login.php',
'detail': 'http://alpharatio.cc/torrents.php?torrentid=%s',
'search': 'http://alpharatio.cc/torrents.php?searchstr=%s%s',
'download': 'http://alpharatio.cc/%s'}
self.url = self.urls['base_url']
self.categories = "&filter_cat[1]=1&filter_cat[2]=1&filter_cat[3]=1&filter_cat[4]=1&filter_cat[5]=1"
self.url = 'http://alpharatio.cc/'
self.urls = {
'login': self.url + 'login.php',
'search': self.url +'torrents.php',
}
self.proper_strings = ['PROPER', 'REPACK']
self.cache = AlphaRatioCache(self)
def login(self):
login_params = {'username': self.username,
login_params = {
'username': self.username,
'password': self.password,
'remember_me': 'on',
'login': 'submit'}
'login': 'submit'
}
response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
if not response:
......@@ -72,7 +70,7 @@ class AlphaRatioProvider(TorrentProvider):
return True
def search(self, search_strings, age=0, ep_obj=None):
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
......@@ -80,6 +78,15 @@ class AlphaRatioProvider(TorrentProvider):
if not self.login():
return results
search_params = {
'searchstr': '',
'filter_cat[1]':1,
'filter_cat[2]':1,
'filter_cat[3]':1,
'filter_cat[4]':1,
'filter_cat[5]':1
}
for mode in search_strings.keys():
logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
for search_string in search_strings[mode]:
......@@ -87,16 +94,16 @@ class AlphaRatioProvider(TorrentProvider):
if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)
searchURL = self.urls['search'] % (search_string, self.categories)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_params['searchstr'] = search_string
search_url = self.urls['search'] + '?' + urlencode(search_params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
data = self.get_url(searchURL)
data = self.get_url(search_url)
if not data:
continue
try:
with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', attrs={'id': 'torrent_table'})
torrent_table = html.find('table', id='torrent_table')
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
# Continue only if one Release is found
......@@ -104,20 +111,30 @@ class AlphaRatioProvider(TorrentProvider):
logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
continue
def process_column_header(td):
    """Extract a readable header label from a table header cell.

    Prefers the title of an image inside the cell's link, then the
    link text, and finally falls back to the cell's own text.
    """
    header = ''
    if td.a and td.a.img:
        header = td.a.img.get('title', td.a.get_text(strip=True))
    return header or td.get_text(strip=True)
# '', '', 'Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers'
labels = [process_column_header(label) for label in torrent_rows[0].find_all('td')]
# Skip column headers
for result in torrent_rows[1:]:
cells = result.find_all('td')
link = result.find('a', attrs={'dir': 'ltr'})
url = result.find('a', attrs={'title': 'Download'})
if len(cells) < len(labels):
continue
try:
num_cells = len(cells)
title = link.contents[0] if link.contents[0] else None
download_url = self.urls['download'] % (url['href']) if url['href'] else None
seeders = cells[num_cells - 2].contents[0] if cells[len(cells) - 2].contents[0] else 1
leechers = cells[num_cells - 1].contents[0] if cells[len(cells) - 1].contents[0] else 0
torrent_size = cells[len(cells) - 4].contents[0]
size = convert_size(torrent_size) or -1
except (AttributeError, TypeError, KeyError, ValueError):
title = cells[labels.index('Name /Year')].find('a', dir='ltr').get_text(strip=True)
download_url = self.url + cells[labels.index('Name /Year')].find('a', title='Download')['href']
seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
size = convert_size(cells[labels.index('Size')].get_text(strip=True)) or -1
except StandardError:
continue
if not all([title, download_url]):
......@@ -135,9 +152,6 @@ class AlphaRatioProvider(TorrentProvider):
items[mode].append(item)
except Exception:
logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING)
# For each search mode sort all the items by seeders if available
items[mode].sort(key=lambda tup: tup[3], reverse=True)
......
0% Loading — or an error occurred while loading the diff.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment