diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py
index ad1dd5412b1d4da7079fc73b8e25ad349aef0284..0985f73ce75456c0c0533597e4d23e555574c0c5 100644
--- a/sickbeard/providers/newpct.py
+++ b/sickbeard/providers/newpct.py
@@ -1,3 +1,4 @@
+# coding=utf-8
 # Author: CristianBB
 # Greetings to Mr. Pine-apple
 #
@@ -13,18 +14,19 @@
 # SickRage is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+# GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
 import traceback
+import re
 
 from six.moves import urllib
 
+from sickbeard import helpers
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.providers import generic
-from sickbeard.common import USER_AGENT
 from sickbeard.bs4_parser import BS4Parser
 
@@ -37,13 +39,16 @@ class newpctProvider(generic.TorrentProvider):
         self.onlyspasearch = None
         self.cache = newpctCache(self)
 
+        # Unsupported
+        # self.minseed = None
+        # self.minleech = None
+
         self.urls = {
             'base_url': 'http://www.newpct.com',
             'search': 'http://www.newpct.com/buscar-descargas/'
         }
 
         self.url = self.urls['base_url']
-        self.headers.update({'User-Agent': USER_AGENT})
 
         """
         Search query:
@@ -71,14 +76,14 @@ class newpctProvider(generic.TorrentProvider):
             'q': ''
         }
 
+
     def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
-        lang_info = '' if not epObj or not epObj.show else epObj.show.lang
-
         # Only search if user conditions are true
+        lang_info = '' if not epObj or not epObj.show else epObj.show.lang
         if self.onlyspasearch and lang_info != 'es':
             logger.log(u"Show info is not spanish, skipping provider search", logger.DEBUG)
             return results
@@ -87,6 +92,9 @@ class newpctProvider(generic.TorrentProvider):
             logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
 
+                if mode is not 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
                 self.search_params.update({'q': search_string.strip()})
 
                 logger.log(u"Search URL: %s" % self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params), logger.DEBUG)
@@ -98,42 +106,90 @@ class newpctProvider(generic.TorrentProvider):
                     with BS4Parser(data, features=["html5lib", "permissive"]) as html:
                         torrent_tbody = html.find('tbody')
 
-                        if len(torrent_tbody) < 1:
+                        if not len(torrent_tbody):
                             logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                             continue
 
                         torrent_table = torrent_tbody.findAll('tr')
-                        num_results = len(torrent_table) - 1
+                        if not len(torrent_table):
+                            logger.log(u"Torrent table does not have any rows", logger.DEBUG)
+                            continue
 
-                        iteration = 0
-                        for row in torrent_table:
+                        for row in torrent_table[:-1]:
                             try:
-                                if iteration < num_results:
-                                    torrent_size = row.findAll('td')[2]
-                                    torrent_row = row.findAll('a')[1]
-
-                                    download_url = torrent_row.get('href')
-                                    title_raw = torrent_row.get('title')
-                                    size = self._convertSize(torrent_size.text)
+                                torrent_size = row.findAll('td')[2]
+                                torrent_row = row.findAll('a')[0]
 
-                                    title = self._processTitle(title_raw)
+                                download_url = torrent_row.get('href', '')
+                                size = self._convertSize(torrent_size.text)
+                                title = self._processTitle(torrent_row.get('title', ''))
 
-                                    item = title, download_url, size
-                                    logger.log(u"Found result: %s " % title, logger.DEBUG)
-
-                                    items[mode].append(item)
-                                    iteration += 1
+                                # FIXME: Provider does not provide seeders/leechers
+                                seeders = 1
+                                leechers = 0
 
                             except (AttributeError, TypeError):
                                 continue
 
+                            if not all([title, download_url]):
+                                continue
+
+                            # Filter unseeded torrent (Unsupported)
+                            # if seeders < self.minseed or leechers < self.minleech:
+                            #     if mode is not 'RSS':
+                            #         logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            #     continue
+
+                            item = title, download_url, size, seeders, leechers
+                            if mode is not 'RSS':
+                                logger.log(u"Found result: %s " % title, logger.DEBUG)
+
+                            items[mode].append(item)
+
                 except Exception:
                     logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING)
 
+            # For each search mode sort all the items by seeders if available (Unsupported)
+            # items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
             results += items[mode]
 
         return results
 
+    def downloadResult(self, result):
+        """
+        Save the result to disk.
+        """
+
+        # check for auth
+        if not self._doLogin():
+            return False
+
+        urls, filename = self._makeURL(result)
+
+        for url in urls:
+            # Search results don't return torrent files directly, it returns show sheets so we must parse showSheet to access torrent.
+            data = self.getURL(url)
+            url_torrent = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
+
+            if url_torrent.startswith('http'):
+                self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})
+
+            logger.log(u"Downloading a result from " + self.name + " at " + url)
+
+            if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
+                if self._verify_download(filename):
+                    logger.log(u"Saved result to " + filename, logger.INFO)
+                    return True
+                else:
+                    logger.log(u"Could not download %s" % url, logger.WARNING)
+                    helpers.remove_file_failed(filename)
+
+        if len(urls):
+            logger.log(u"Failed to download any results", logger.WARNING)
+
+        return False
+
     @staticmethod
     def _convertSize(size):
         size, modifier = size.split(' ')
@@ -148,9 +204,12 @@ class newpctProvider(generic.TorrentProvider):
             size = size * 1024**4
         return int(size)
 
-    def _processTitle(self, title):
-        title = title.replace('Descargar ', '')
 
+    @staticmethod
+    def _processTitle(title):
+
+        # Remove "Mas informacion sobre " literal from title
+        title = title[22:]
 
         # Quality
         title = title.replace('[HDTV]', '[720p HDTV x264]')
@@ -169,8 +228,7 @@ class newpctProvider(generic.TorrentProvider):
         title = title.replace('[BluRay MicroHD]', '[1080p BlueRay x264]')
         title = title.replace('[MicroHD 1080p]', '[1080p BlueRay x264]')
 
-        return title
-
+        return title.strip()
 
 
 class newpctCache(tvcache.TVCache):
@@ -178,8 +236,11 @@ class newpctCache(tvcache.TVCache):
 
         tvcache.TVCache.__init__(self, provider_obj)
 
-        self.minTime = 30
+        # set this 0 to suppress log line, since we aren't updating it anyways
+        self.minTime = 0
 
+    def _getRSSData(self):
+        return {'entries': []}
 
 
 provider = newpctProvider()