Commit 8695768d authored by miigotu

Fix searches with ABNormal

Replace double quotes with single quotes as per our decided convention

ABNormal: sort by Time for RSS searches, or by Seeders for everything else.
parent 23c0d3d1
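The sort-order fix below swaps a hard-coded 'Time' ordering for a per-mode choice, written with Python's boolean tuple-indexing idiom: a bool used as an index evaluates to 0 (False) or 1 (True). A minimal sketch of the idiom as it appears in the diff (the mode names mirror the provider code; the helper function name is ours, for illustration only):

    def pick_sort_order(mode):
        # False == 0 picks the first element, True == 1 picks the second:
        # regular searches rank by seeders, RSS cache updates want newest first.
        return ('Seeders', 'Time')[mode == 'RSS']

    assert pick_sort_order('RSS') == 'Time'
    assert pick_sort_order('Episode') == 'Seeders'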
@@ -36,7 +36,7 @@ class ABNormalProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
     def __init__(self):
 
         # Provider Init
-        TorrentProvider.__init__(self, "ABNormal")
+        TorrentProvider.__init__(self, 'ABNormal')
 
         # Credentials
         self.username = None
@@ -48,14 +48,14 @@ class ABNormalProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
         self.minleech = None
 
         # URLs
-        self.url = "https://abnormal.ws"
+        self.url = 'https://abnormal.ws'
         self.urls = {
-            "login": urljoin(self.url, "login.php"),
-            "search": urljoin(self.url, "torrents.php"),
+            'login': urljoin(self.url, 'login.php'),
+            'search': urljoin(self.url, 'torrents.php'),
         }
 
         # Proper Strings
-        self.proper_strings = ["PROPER"]
+        self.proper_strings = ['PROPER']
 
         # Cache
         self.cache = tvcache.TVCache(self, min_time=30)
@@ -65,17 +65,17 @@ class ABNormalProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
             return True
 
         login_params = {
-            "username": self.username,
-            "password": self.password,
+            'username': self.username,
+            'password': self.password,
        }
 
-        response = self.get_url(self.urls["login"], post_data=login_params, timeout=30, returns="text")
+        response = self.get_url(self.urls['login'], post_data=login_params, timeout=30, returns='text')
         if not response:
-            logger.log("Unable to connect to provider", logger.WARNING)
+            logger.log('Unable to connect to provider', logger.WARNING)
             return False
 
-        if not re.search("torrents.php", response):
-            logger.log("Invalid username or password. Check your settings", logger.WARNING)
+        if not re.search('torrents.php', response):
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
             return False
 
         return True
@@ -87,71 +87,72 @@ class ABNormalProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
 
         # Search Params
         search_params = {
-            "cat[]": ["TV|SD|VOSTFR", "TV|HD|VOSTFR", "TV|SD|VF", "TV|HD|VF", "TV|PACK|FR", "TV|PACK|VOSTFR", "TV|EMISSIONS", "ANIME"],
-            # Sorting: by time. Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size
-            "order": "Time",
-            # Both ASC and DESC are available
-            "way": "DESC"
+            'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'],
+            # Both ASC and DESC are available for sort direction
+            'way': 'DESC'
         }
 
         # Units
-        units = ["O", "KO", "MO", "GO", "TO", "PO"]
+        units = ['O', 'KO', 'MO', 'GO', 'TO', 'PO']
 
         for mode in search_strings:
             items = []
-            logger.log("Search Mode: {}".format(mode), logger.DEBUG)
+            logger.log('Search Mode: {}'.format(mode), logger.DEBUG)
 
             for search_string in search_strings[mode]:
 
-                if mode != "RSS":
-                    logger.log("Search string: {}".format(search_string.decode("utf-8")),
+                if mode != 'RSS':
+                    logger.log('Search string: {}'.format(search_string.decode('utf-8')),
                                logger.DEBUG)
 
-                search_params["search"] = search_string
-                data = self.get_url(self.urls["search"], params=search_params, returns="text")
+                # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size
+                search_params['order'] = ('Seeders', 'Time')[mode == 'RSS']
+                search_params['search'] = re.sub(r'[()]', '', search_string)
+                data = self.get_url(self.urls['search'], params=search_params, returns='text')
                 if not data:
                     continue
 
-                with BS4Parser(data, "html5lib") as html:
-                    torrent_table = html.find("table", class_=re.compile("torrent_table cats"))
-                    torrent_rows = torrent_table.find_all("tr") if torrent_table else []
+                with BS4Parser(data, 'html5lib') as html:
+                    torrent_table = html.find(class_='torrent_table')
+                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
                     # Continue only if at least one Release is found
                     if len(torrent_rows) < 2:
-                        logger.log("Data returned from provider does not contain any torrents", logger.DEBUG)
+                        logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
                         continue
 
                     # Catégorie, Release, Date, DL, Size, C, S, L
-                    labels = [label.get_text(strip=True) for label in torrent_rows[0].find_all("td")]
+                    labels = [label.get_text(strip=True) for label in torrent_rows[0].find_all('td')]
 
                     # Skip column headers
                     for result in torrent_rows[1:]:
-                        cells = result.find_all("td")
+                        cells = result.find_all('td')
                         if len(cells) < len(labels):
                             continue
 
                         try:
-                            title = cells[labels.index("Release")].get_text(strip=True)
-                            download_url = urljoin(self.url, cells[labels.index("DL")].find("a", class_="tooltip")["href"])
+                            title = cells[labels.index('Release')].get_text(strip=True)
+                            download_url = urljoin(self.url, cells[labels.index('DL')].find('a', class_='tooltip')['href'])
                             if not all([title, download_url]):
                                 continue
 
-                            seeders = try_int(cells[labels.index("S")].get_text(strip=True))
-                            leechers = try_int(cells[labels.index("L")].get_text(strip=True))
+                            seeders = try_int(cells[labels.index('S')].get_text(strip=True))
+                            leechers = try_int(cells[labels.index('L')].get_text(strip=True))
 
                             # Filter unseeded torrent
                             if seeders < self.minseed or leechers < self.minleech:
-                                if mode != "RSS":
-                                    logger.log("Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format
+                                if mode != 'RSS':
+                                    logger.log('Discarding torrent because it doesn\'t meet the minimum seeders or leechers: {} (S:{} L:{})'.format
                                                (title, seeders, leechers), logger.DEBUG)
                                 continue
 
-                            torrent_size = cells[labels.index("Size")].get_text()
+                            size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille')
+                            torrent_size = cells[size_index].get_text()
                             size = convert_size(torrent_size, units=units) or -1
 
                             item = title, download_url, size, seeders, leechers
-                            if mode != "RSS":
-                                logger.log("Found result: {} with {} seeders and {} leechers".format
+                            if mode != 'RSS':
+                                logger.log('Found result: {} with {} seeders and {} leechers'.format
                                            (title, seeders, leechers), logger.DEBUG)
 
                             items.append(item)
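One note on the units list in the search hunk: ABNormal is a French tracker and reports sizes in octets (O, Ko, Mo, Go, ...), so convert_size is handed French unit labels instead of the default B/KB/MB. A rough sketch of how such a unit-aware parser can work, as an illustration only (this is an assumed behavior, not SickRage's actual convert_size implementation):

    import re

    FRENCH_UNITS = ['O', 'KO', 'MO', 'GO', 'TO', 'PO']

    def parse_size(size_string, units=FRENCH_UNITS):
        # '1.5 Go' -> 1610612736; each step up the units list is a factor of 1024.
        match = re.match(r'([\d.]+)\s*([A-Za-z]+)', size_string.strip())
        if not match:
            return None
        value, unit = float(match.group(1)), match.group(2).upper()
        if unit not in units:
            return None
        return int(value * 1024 ** units.index(unit))

    assert parse_size('1.5 Go') == int(1.5 * 1024 ** 3)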