diff --git a/gui/slick/images/providers/frenchtorrentdb.png b/gui/slick/images/providers/frenchtorrentdb.png
new file mode 100644
index 0000000000000000000000000000000000000000..26a665b300adc56127ec4e55e36574b66f5f27e5
Binary files /dev/null and b/gui/slick/images/providers/frenchtorrentdb.png differ
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 5a05113cd313dd7a17221dee7ba00c51cb92cdd6..79af1e6cbbcaf7341287405d74aedf24f6ad5a54 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -38,7 +38,7 @@ from sickbeard import providers, metadata, config, webserveInit
 from sickbeard.providers.generic import GenericProvider
 from providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
     omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, fnt, torrentbytes, animezb, \
-    freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, scenetime, btdigg
+    frenchtorrentdb, freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, scenetime, btdigg
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
     naming_ep_type
 from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index a6e634812303ecfb1cc084315aa5e1d93a31ae13..4dc52fe3af4f5bf01363b65a2c30bc27da8885b6 100644
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -34,6 +34,7 @@ __all__ = ['womble',
            'animenzb',
            'torrentbytes',
            'animezb',
+           'frenchtorrentdb',
            'freshontv',
            'morethantv',
            'bitsoup',
diff --git a/sickbeard/providers/frenchtorrentdb.py b/sickbeard/providers/frenchtorrentdb.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a286634f440cc6497bcc71ed148c9b74e544a2e
--- /dev/null
+++ b/sickbeard/providers/frenchtorrentdb.py
@@ -0,0 +1,317 @@
+# Authors: Yannick Croissant <yannick.croissant@gmail.com>
+#          adaur <adaur.underground@gmail.com>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
+
+import traceback
+import re
+import datetime
+import time
+from requests.auth import AuthBase
+import sickbeard
+import generic
+import urllib
+
+import urllib2
+import requests
+import json
+import cookielib
+
+from requests import exceptions
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.common import Quality
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import show_name_helpers
+from sickbeard import db
+from sickbeard import helpers
+from sickbeard import classes
+from lib.unidecode import unidecode
+from sickbeard.helpers import sanitizeSceneName
+from sickbeard.exceptions import ex
+
+
+class FrenchTorrentDBProvider(generic.TorrentProvider):
+
+    def __init__(self):
+
+        generic.TorrentProvider.__init__(self, "FrenchTorrentDB")
+
+        self.supportsBacklog = True
+
+        self.cj = cookielib.CookieJar()
+        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
+
+        self.urls = {'base_url': 'http://www.frenchtorrentdb.com',
+                     'search': 'http://www.frenchtorrentdb.com/?section=TORRENTS&exact=1&name=%s%s&submit=GO',
+                     'login': 'http://www.frenchtorrentdb.com/?section=LOGIN',
+                     }
+        self.url = self.urls['base_url']
+        self.categories = "&adv_cat%5Bs%5D%5B1%5D=95&adv_cat%5Bs%5D%5B2%5D=190&adv_cat%5Bs%5D%5B3%5D=101&adv_cat%5Bs%5D%5B4%5D=191&adv_cat%5Bs%5D%5B5%5D=197&adv_cat%5Bs%5D%5B7%5D=199&adv_cat%5Bs%5D%5B8%5D=201&adv_cat%5Bs%5D%5B9%5D=128"
+        self.enabled = False
+        self.username = None
+        self.password = None
+        self.ratio = None
+        self.minseed = None
+        self.minleech = None
+
+    def isEnabled(self):
+        return self.enabled
+
+    def imageName(self):
+        return 'frenchtorrentdb.png'
+
+    def getQuality(self, item, anime=False):
+        quality = Quality.sceneQuality(item[0], anime)
+        return quality
+
+    def _doLogin(self):
+
+        challenge = self.opener.open(self.url + '/?section=LOGIN&challenge=1')
+
+        rawData = challenge.read()
+
+        data = json.loads(rawData)
+
+        data = urllib.urlencode({
+            'username': self.username,
+            'password': self.password,
+            'secure_login': self._getSecureLogin(data['challenge']),
+            'hash': data['hash']
+        })
+
+        self.opener.open(self.url + '/?section=LOGIN&ajax=1', data).read()
+
+        return True
+
+    def _getSecureLogin(self, challenges):
+
+        def fromCharCode(*args):
+            return ''.join(map(unichr, args))
+
+        def decodeString(p, a, c, k, e, d):
+            a = int(a)
+            c = int(c)
+
+            def e(c):
+                if c < a:
+                    f = ''
+                else:
+                    f = e(c / a)
+                return f + fromCharCode(c % a + 161)
+            while c:
+                c -= 1
+                if k[c]:
+                    regex = re.compile(e(c))
+                    p = re.sub(regex, k[c], p)
+            return p
+
+        def decodeChallenge(challenge):
+            challenge = urllib2.unquote(challenge)
+            regexGetArgs = re.compile('\'([^\']+)\',([0-9]+),([0-9]+),\'([^\']+)\'')
+            regexIsEncoded = re.compile('decodeURIComponent')
+            regexUnquote = re.compile('\'')
+            if (challenge == 'a'):
+                return '05f'
+            if (re.match(regexIsEncoded, challenge) == None):
+                return re.sub(regexUnquote, '', challenge)
+            args = re.findall(regexGetArgs, challenge)
+            decoded = decodeString(args[0][0], args[0][1], args[0][2], args[0][3].split('|'), 0, {})
+            return urllib2.unquote(decoded.decode('utf8'))
+
+        secureLogin = ''
+        for challenge in challenges:
+            secureLogin += decodeChallenge(challenge)
+        return secureLogin
+
+    def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+        search_string = {'Episode': []}
+
+        if not ep_obj:
+            return []
+
+        if self.show.air_by_date:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            str(ep_obj.airdate).replace('-', '|')
+                search_string['Episode'].append(ep_string)
+        elif self.show.sports:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            str(ep_obj.airdate).replace('-', '|') + '|' + \
+                            ep_obj.airdate.strftime('%b')
+                search_string['Episode'].append(ep_string)
+        elif self.show.anime:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            "%i" % int(ep_obj.scene_absolute_number)
+                search_string['Episode'].append(ep_string)
+        else:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) + '.' + \
+                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
+
+                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
+
+        return [search_string]
+
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+
+        logger.log(u"_doSearch started with ..." + str(search_params), logger.DEBUG)
+
+        results = []
+        items = {'Season': [], 'Episode': [], 'RSS': []}
+
+        # check for auth
+        if not self._doLogin():
+            return False
+
+        for mode in search_params.keys():
+
+            for search_string in search_params[mode]:
+
+                if isinstance(search_string, unicode):
+                    search_string = unidecode(search_string)
+
+                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
+
+                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+
+                r = self.opener.open(searchURL)
+                with BS4Parser(r, features=["html5lib", "permissive"]) as html:
+                    resultsTable = html.find("div", {"class": "DataGrid"})
+                    logger.log(u"Page opened", logger.DEBUG)
+
+                    if resultsTable:
+                        logger.log(u"We have results ", logger.DEBUG)
+                        rows = resultsTable.findAll("ul")
+
+                        for row in rows:
+                            link = row.find("a", title=True)
+                            title = link['title']
+
+                            autogetURL = self.url + '/' + (row.find("li", {"class": "torrents_name"}).find('a')['href'][1:]).replace('#FTD_MENU', '&menu=4')
+                            r = self.opener.open(autogetURL, 'wb').read()
+                            with BS4Parser(r, features=["html5lib", "permissive"]) as html:
+                                downloadURL = html.find("div", {"class": "autoget"}).find('a')['href']
+                                item = title, downloadURL
+                                logger.log(u"Download URL : " + downloadURL, logger.DEBUG)
+
+                                items[mode].append(item)
+
+            results += items[mode]
+
+        return results
+
+    def _get_title_and_url(self, item):
+
+        title, url = item
+
+        if title:
+            title = u'' + title
+            title = title.replace(' ', '.')
+
+        if url:
+            # Unescape the HTML entity so the download link is usable.
+            url = str(url).replace('&amp;', '&')
+
+        return (title, url)
+
+    def findPropers(self, search_date=datetime.datetime.today()):
+
+        results = []
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select(
+            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+            ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+            ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+        )
+
+        if not sqlResults:
+            return []
+
+        for sqlshow in sqlResults:
+            self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+            if self.show:
+                curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+                search_params = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+                for item in self._doSearch(search_params[0]):
+                    title, url = self._get_title_and_url(item)
+                    results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
+
+        return results
+
+    def seedRatio(self):
+        return self.ratio
+
+    def headURL(self, result):
+        """
+        Check if URL is valid and the file exists at URL.
+        Original function overwritten because FrenchTorrentDB links only support one request
+        """
+
+        # check for auth
+        if not self._doLogin():
+            return False
+
+        urls, filename = self._makeURL(result)
+
+        for url in urls:
+            return url
+
+        return u''
+
+    def _get_season_search_strings(self, ep_obj):
+
+        search_string = {'Season': []}
+        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+            if ep_obj.show.air_by_date or ep_obj.show.sports:
+                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
+            elif ep_obj.show.anime:
+                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+            else:
+                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
+
+            search_string['Season'].append(ep_string)
+
+        return [search_string]
+
+
+class FrenchTorrentDBAuth(AuthBase):
+    """Attaches HTTP Authentication to the given Request object."""
+    def __init__(self, token):
+        self.token = token
+
+    def __call__(self, r):
+        r.headers['Authorization'] = self.token
+        return r
+
+
+class FrenchTorrentDBCache(tvcache.TVCache):
+    def __init__(self, provider):
+        tvcache.TVCache.__init__(self, provider)
+
+        # Only poll FTDB every 10 minutes max
+        self.minTime = 10
+
+    def _getRSSData(self):
+        search_params = {'RSS': ['']}
+        return {'entries': self.provider._doSearch(search_params)}
+
+provider = FrenchTorrentDBProvider()