Private GIT

Skip to content
Snippets Groups Projects
Commit e533d1ac authored by adaur's avatar adaur Committed by labrys
Browse files

Add providers: ABNormal and PHXBit

* Add ABNormal provider
* Add PHXBit provider
* Remove FNT provider
* Fix Xthor not snatching anything and switch to new error handling
parent f2dd6ac0
Branches
No related tags found
No related merge requests found
File suppressed by a .gitattributes entry, the file's encoding is unsupported, or the file size exceeds the limit.
File suppressed by a .gitattributes entry, the file's encoding is unsupported, or the file size exceeds the limit.
File suppressed by a .gitattributes entry, the file's encoding is unsupported, or the file size exceeds the limit.
......@@ -23,7 +23,7 @@ from random import shuffle
import sickbeard
from sickbeard.providers import btn, newznab, rsstorrent, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, fnt, xthor, torrentbytes, cpasbien,\
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, xthor, abnormal, phxbit, torrentbytes, cpasbien,\
freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents
......@@ -33,7 +33,7 @@ __all__ = [
'speedcd', 'nyaatorrents', 'torrentbytes', 'freshontv', 'cpasbien',
'morethantv', 'bitsoup', 't411', 'tokyotoshokan', 'alpharatio',
'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers',
'fnt', 'xthor', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk',
'xthor', 'abnormal', 'phxbit', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk',
'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker',
'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents'
]
......
# coding=utf-8
# Author: adaur <adaur.underground@gmail.com>
#
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
from requests.utils import dict_from_cookiejar
from urllib import urlencode
from sickbeard import logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size, try_int
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class ABNormalProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
    """Provider for the French private tracker ABNormal (https://abnormal.ws)."""

    def __init__(self):

        TorrentProvider.__init__(self, "ABNormal")

        # Credentials (populated from user settings by the core)
        self.username = None
        self.password = None

        # Torrent stats / filtering thresholds (populated from user settings)
        self.ratio = None
        self.minseed = None
        self.minleech = None

        # URLs
        self.url = 'https://abnormal.ws'
        self.urls = {
            'login': self.url + '/login.php',
            'search': self.url + '/torrents.php?'
        }

        # Proper Strings
        self.proper_strings = ['PROPER']

        # Cache
        self.cache = tvcache.TVCache(self, min_time=30)

    def login(self):
        """Log in to the tracker.

        Returns True when already logged in (session cookies present) or when
        the login POST succeeds, False otherwise.
        """
        if any(dict_from_cookiejar(self.session.cookies).values()):
            return True

        login_params = {
            'username': self.username,
            'password': self.password,
        }

        response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
        if not response:
            logger.log(u"Unable to connect to provider", logger.WARNING)
            return False

        # A successful login lands on a page that links to torrents.php
        if not re.search('torrents.php', response):
            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
            return False

        return True

    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches
        """Search the tracker.

        search_strings is a dict mapping a search mode (e.g. 'RSS', 'Episode')
        to a list of query strings. Returns a list of
        (title, download_url, size, seeders, leechers) tuples, sorted by
        seeders (descending) within each mode.
        """
        results = []
        if not self.login():
            return results

        search_params = {
            'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'],
            # Sorting: by time. Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size
            'order': 'Time',
            # Both ASC and DESC are available
            'way': 'DESC'
        }

        for mode in search_strings:
            items = []
            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
            for search_string in search_strings[mode]:

                if mode != 'RSS':
                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)

                search_params['search'] = search_string
                search_url = self.urls['search'] + urlencode(search_params, doseq=True)
                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
                data = self.get_url(search_url)
                if not data:
                    continue

                with BS4Parser(data, 'html5lib') as html:
                    # NOTE(fix): the original assigned torrent_table twice — a
                    # mode-dependent if/else immediately clobbered by an
                    # unconditional find(); only the final assignment had any
                    # effect, so keep just that one.
                    torrent_table = html.find("table", class_="torrent_table cats")
                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                    # Continue only if at least one release is found
                    if len(torrent_rows) < 2:
                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                        continue

                    # Catégorie, Release, Date, DL, Size, C, S, L
                    labels = [label.get_text(strip=True) for label in torrent_rows[0].find_all('td')]

                    # Skip the header row; parse each result row by column label
                    for result in torrent_rows[1:]:
                        cells = result.find_all('td')
                        if len(cells) < len(labels):
                            continue

                        try:
                            title = cells[labels.index('Release')].get_text(strip=True)
                            download_url = self.url + '/' + cells[labels.index('DL')].find('a', class_='tooltip')['href']
                            if not all([title, download_url]):
                                continue

                            seeders = try_int(cells[labels.index('S')].get_text(strip=True))
                            leechers = try_int(cells[labels.index('L')].get_text(strip=True))

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                continue

                            # The site reports sizes with French unit suffixes (Ko, Mo, Go, ...)
                            torrent_size = cells[labels.index('Size')].get_text(strip=True)
                            french_units = ['O', 'KO', 'MO', 'GO', 'TO', 'PO']
                            size = convert_size(torrent_size, units=french_units) or -1

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)

                            items.append(item)
                        except StandardError:
                            continue

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda tup: tup[3], reverse=True)
            results += items

        return results

    def seed_ratio(self):
        # Per-provider seed ratio configured by the user (may be None)
        return self.ratio


provider = ABNormalProvider()
# coding=utf-8
# Author: raver2046 <raver2046@gmail.com> from djoole <bobby.djoole@gmail.com>
# Author: adaur <adaur.underground@gmail.com>
#
# URL: https://sickrage.github.io
#
......@@ -20,19 +20,20 @@
import re
from requests.utils import dict_from_cookiejar
import traceback
from urllib import urlencode
from sickbeard import logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size, try_int
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class FNTProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes
class PhxBitProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes
def __init__(self):
TorrentProvider.__init__(self, "FNT")
TorrentProvider.__init__(self, "PhxBit")
self.username = None
self.password = None
......@@ -40,31 +41,23 @@ class FNTProvider(TorrentProvider): # pylint: disable=too-many-instance-attribu
self.minseed = None
self.minleech = None
self.cache = tvcache.TVCache(self, min_time=10) # Only poll FNT every 10 minutes max
self.url = 'https://phxbit.com'
self.urls = {
'base_url': 'https://fnt.nu',
'search': 'https://www.fnt.nu/torrents/recherche/',
'login': 'https://fnt.nu/account-login.php',
'login': self.url + '/connect.php',
'search': self.url + '/sphinx.php?'
}
self.url = self.urls['base_url']
self.search_params = {
"afficher": 1, "c118": 1, "c129": 1, "c119": 1, "c120": 1, "c121": 1, "c126": 1,
"c137": 1, "c138": 1, "c146": 1, "c122": 1, "c110": 1, "c109": 1, "c135": 1, "c148": 1,
"c153": 1, "c149": 1, "c150": 1, "c154": 1, "c155": 1, "c156": 1, "c114": 1,
"visible": 1, "freeleech": 0, "nuke": 1, "3D": 0, "sort": "size", "order": "desc"
}
self.proper_strings = ['PROPER']
def login(self):
self.cache = tvcache.TVCache(self, min_time=30)
def login(self):
if any(dict_from_cookiejar(self.session.cookies).values()):
return True
login_params = {
'username': self.username,
'password': self.password,
'submit': 'Se loguer'
}
response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
......@@ -72,79 +65,91 @@ class FNTProvider(TorrentProvider): # pylint: disable=too-many-instance-attribu
logger.log(u"Unable to connect to provider", logger.WARNING)
return False
if not re.search('Pseudo ou mot de passe non valide', response):
return True
else:
if not re.search('dons.php', response):
logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
return False
return True
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches
results = []
if not self.login():
return results
search_params = {
'order': 'desc',
'sort': 'normal',
'group': 'series'
}
for mode in search_strings:
items = []
logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
for search_string in search_strings[mode]:
if mode != 'RSS':
# Use exact=1 parameter if we're doing a backlog or manual search
search_params['exact'] = 1
logger.log(u"Search string: %s " % search_string, logger.DEBUG)
self.search_params['recherche'] = search_string
search_params['q'] = search_string
search_url = self.urls['search'] + urlencode(search_params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
data = self.get_url(self.urls['search'], params=self.search_params)
data = self.get_url(search_url)
if not data:
continue
try:
with BS4Parser(data, 'html5lib') as html:
result_table = html.find('table', {'id': 'tablealign3bis'})
torrent_table = html.find("table")
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
if not result_table:
# Continue only if one Release is found
if len(torrent_rows) < 2:
logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
continue
if result_table:
rows = result_table.findAll("tr", {"class": "ligntorrent"})
for row in rows:
link = row.findAll('td')[1].find("a", href=re.compile("fiche_film"))
if link:
try:
title = link.text
download_url = self.urls['base_url'] + "/" + row.find("a", href=re.compile(r"download\.php"))['href']
except (AttributeError, TypeError):
def process_column_header(td):
result = ''
if td.img:
result = td.img.get('alt', '')
if not result:
result = td.get_text(strip=True)
return result
# Catégorie, Nom, DL, Com, Taille, C, Seed, Leech, Share
labels = [process_column_header(label) for label in torrent_rows[0].find_all('td')]
# Skip column headers
for result in torrent_rows[1:]:
cells = result.find_all('td')
if len(cells) < len(labels):
continue
try:
detailseedleech = link['mtcontent']
seeders = int(detailseedleech.split("<font color='#00b72e'>")[1].split("</font>")[0])
leechers = int(detailseedleech.split("<font color='red'>")[1].split("</font>")[0])
# FIXME
size = -1
except Exception:
logger.log(u"Unable to parse torrent id & seeders & leechers. Traceback: %s " % traceback.format_exc(), logger.DEBUG)
continue
title = cells[labels.index('Nom')].get_text(strip=True)
download_url = cells[labels.index('DL')].find('a')['href']
if not all([title, download_url]):
continue
seeders = try_int(cells[labels.index('Seed')].get_text(strip=True))
leechers = try_int(cells[labels.index('Leech')].get_text(strip=True))
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
continue
torrent_size = cells[labels.index('Taille')].get_text(strip=True)
size = convert_size(torrent_size) or -1
item = title, download_url, size, seeders, leechers
if mode != 'RSS':
logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
items.append(item)
except Exception:
logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
except StandardError:
continue
# For each search mode sort all the items by seeders if available
items.sort(key=lambda tup: tup[3], reverse=True)
......@@ -155,4 +160,4 @@ class FNTProvider(TorrentProvider): # pylint: disable=too-many-instance-attribu
def seed_ratio(self):
return self.ratio
provider = FNTProvider()
provider = PhxBitProvider()
# coding=utf-8
# -*- coding: latin-1 -*-
# Author: adaur <adaur.underground@gmail.com>
# Rewrite: Dustyn Gibson (miigotu) <miigotu@gmail.com>
# URL: https://sickrage.github.io
......@@ -19,7 +19,6 @@
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
import cookielib
from requests.utils import dict_from_cookiejar
from urllib import urlencode
......@@ -36,8 +35,6 @@ class XthorProvider(TorrentProvider): # pylint: disable=too-many-instance-attri
TorrentProvider.__init__(self, "Xthor")
self.cj = cookielib.CookieJar()
self.url = 'https://xthor.bz'
self.urls = {
'login': self.url + '/takelogin.php',
......@@ -124,34 +121,44 @@ class XthorProvider(TorrentProvider): # pylint: disable=too-many-instance-attri
logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
continue
# Catégorie, Nom du Torrent, (Download), (Bookmark), Com., Taille, Complété, Seeders, Leechers
labels = [label.get_text(strip=True) for label in torrent_rows[0].find_all('td')]
def process_column_header(td):
result = ''
if td.a:
result = td.a.get('title', td.a.get_text(strip=True))
if not result:
result = td.get_text(strip=True)
return result
# Catégorie, Nom du Torrent, (Download), (Bookmark), Com., Taille, Complété, Seeders, Leechers
labels = [process_column_header(label) for label in torrent_rows[0].find_all('td')]
for row in torrent_rows[1:]:
try:
cells = row.find_all('td')
if len(cells) < len(labels):
continue
try:
title = cells[labels.index('Nom du Torrent')].get_text(strip=True)
download_url = self.url + '/' + row.find("a", href=re.compile("download.php"))['href']
size = convert_size(cells[labels.index('Taille')].get_text(strip=True))
seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
except (AttributeError, TypeError, KeyError, ValueError):
continue
if not all([title, download_url]):
continue
seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
continue
size = convert_size(cells[labels.index('Taille')].get_text(strip=True))
item = title, download_url, size, seeders, leechers
if mode != 'RSS':
logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
items.append(item)
except StandardError:
continue
# For each search mode sort all the items by seeders if available
items.sort(key=lambda tup: tup[3], reverse=True)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment