diff --git a/SickBeard.py b/SickBeard.py
index 2ea7a99f4800a4c34e22ed13744fced670399e9f..f6ce75ba73a9863d2e649cc49284aeb4f80bf2f6 100755
--- a/SickBeard.py
+++ b/SickBeard.py
@@ -103,6 +103,15 @@ class SickRage(object):
         self.log_dir = None
         self.consoleLogging = True
 
+    @staticmethod
+    def clear_cache():
+        try:
+            cache_folder = ek(os.path.join, sickbeard.CACHE_DIR, 'mako')
+            if os.path.isdir(cache_folder):
+                shutil.rmtree(cache_folder)
+        except Exception:
+            logger.log(u"Unable to remove the cache/mako directory!", logger.WARNING)
+
     @staticmethod
     def help_message():
         """
@@ -301,8 +310,12 @@ class SickRage(object):
         # Build from the DB to start with
         self.loadShowsFromDB()
 
+        logger.log(u"Starting up SickRage [%s] from '%s'" % (sickbeard.BRANCH, sickbeard.CONFIG_FILE))
+
+        self.clear_cache()
+
         if self.forcedPort:
-            logger.log(u"Forcing web server to port " + str(self.forcedPort))
+            logger.log(u"Forcing web server to port %s" % self.forcedPort)
             self.startPort = self.forcedPort
         else:
             self.startPort = sickbeard.WEB_PORT
@@ -341,19 +354,6 @@ class SickRage(object):
         self.webserver = SRWebServer(self.web_options)
         self.webserver.start()
 
-        if self.consoleLogging:
-            print "Starting up SickRage " + sickbeard.BRANCH + " from " + sickbeard.CONFIG_FILE
-
-        # Clean up after update
-        if sickbeard.GIT_NEWVER:
-            toclean = ek(os.path.join, sickbeard.CACHE_DIR, 'mako')
-            for root, dirs, files in ek(os.walk, toclean, topdown=False):
-                for name in files:
-                    ek(os.remove, ek(os.path.join, root, name))
-                for name in dirs:
-                    ek(os.rmdir, ek(os.path.join, root, name))
-            sickbeard.GIT_NEWVER = False
-
         # Fire up all our threads
         sickbeard.start()
 
@@ -503,6 +503,9 @@ class SickRage(object):
             except Exception:
                 pass
 
+        # Clean cache
+        self.clear_cache()
+
         # if run as daemon delete the pidfile
         if self.runAsDaemon and self.CREATEPID:
             self.remove_pid_file(self.PIDFILE)
diff --git a/gui/slick/images/providers/limetorrents.png b/gui/slick/images/providers/limetorrents.png
new file mode 100644
index 0000000000000000000000000000000000000000..88fd0c55395545e7ca963d0439980c56e6f9861b
Binary files /dev/null and b/gui/slick/images/providers/limetorrents.png differ
diff --git a/gui/slick/views/apiBuilder.mako b/gui/slick/views/apiBuilder.mako
index aeef6b809d7dcd29c7b5ad7b64d5facf67c04b30..460f8f8ae460443cb3253352d6fd80380caaa145 100644
--- a/gui/slick/views/apiBuilder.mako
+++ b/gui/slick/views/apiBuilder.mako
@@ -261,13 +261,7 @@ var episodes = ${episodes};
                 </select>
             % endif
         % elif parameter == 'tvdbid':
-            <select class="form-control" name="${parameter}" data-command="${command}">
-                <option>${parameter}</option>
-
-                % for show in shows:
-                    <option value="${show.indexerid}">${show.name}</option>
-                % endfor
-            </select>
+            <input class="form-control" name="${parameter}" placeholder="${parameter}" type="number" data-command="${command}" />
         % elif type == 'int':
             % if parameter not in ('episode', 'season'):
                 <input class="form-control" name="${parameter}" placeholder="${parameter}" type="number" data-command="${command}" />
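A minimal standalone sketch of the cache-clearing pattern introduced by `clear_cache()` in the SickBeard.py hunk above, assuming a plain filesystem path; the real method goes through SickRage's `ek()` encoding wrapper and its `logger`, which are omitted here.

```python
# Standalone sketch of the clear_cache() pattern above; the path and the
# print-based logging are placeholders, not SickRage APIs.
import os
import shutil


def clear_mako_cache(cache_dir):
    """Remove the compiled-template cache directory, ignoring failures."""
    cache_folder = os.path.join(cache_dir, 'mako')
    try:
        if os.path.isdir(cache_folder):
            shutil.rmtree(cache_folder)
    except OSError as error:
        # The real code logs a warning and carries on; a stale cache is not fatal.
        print('Unable to remove the cache/mako directory: %s' % error)


if __name__ == '__main__':
    clear_mako_cache('/tmp/sickrage-cache')  # hypothetical cache location
```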
diff --git a/gui/slick/views/displayShow.mako b/gui/slick/views/displayShow.mako
index 39b3909c39099f6715691f66465c663e948d97bf..686eed6f992339599e31606cc8a099e2a69e03f4 100644
--- a/gui/slick/views/displayShow.mako
+++ b/gui/slick/views/displayShow.mako
@@ -250,11 +250,12 @@
         <div class="pull-right clearfix" id="checkboxControls">
             <div style="padding-bottom: 5px;">
+                <% total_snatched = epCounts[Overview.SNATCHED] + epCounts[Overview.SNATCHED_PROPER] + epCounts[Overview.SNATCHED_BEST] %>
                 <label for="wanted"><span class="wanted"><input type="checkbox" id="wanted" checked="checked" /> Wanted: <b>${epCounts[Overview.WANTED]}</b></span></label>
                 <label for="qual"><span class="qual"><input type="checkbox" id="qual" checked="checked" /> Low Quality: <b>${epCounts[Overview.QUAL]}</b></span></label>
                 <label for="good"><span class="good"><input type="checkbox" id="good" checked="checked" /> Downloaded: <b>${epCounts[Overview.GOOD]}</b></span></label>
                 <label for="skipped"><span class="skipped"><input type="checkbox" id="skipped" checked="checked" /> Skipped: <b>${epCounts[Overview.SKIPPED]}</b></span></label>
-                <label for="snatched"><span class="snatched"><input type="checkbox" id="snatched" checked="checked" /> Snatched: <b>${epCounts[Overview.SNATCHED]}</b></span></label>
+                <label for="snatched"><span class="snatched"><input type="checkbox" id="snatched" checked="checked" /> Snatched: <b>${total_snatched}</b></span></label>
             </div>
 
             <button id="popover" type="button" class="btn btn-xs">Select Columns <b class="caret"></b></button>
diff --git a/gui/slick/views/manage_backlogOverview.mako b/gui/slick/views/manage_backlogOverview.mako
index 3a811493e5650f851bd8e3bd1d666b0fb17e8501..5484580449cbd194a3ea4239ce56735f2967c57c 100644
--- a/gui/slick/views/manage_backlogOverview.mako
+++ b/gui/slick/views/manage_backlogOverview.mako
@@ -23,7 +23,7 @@
     showQualSnatched = lambda x: Quality.splitQuality(x.quality)[1]
 
     totalWanted = totalQual = totalQualSnatched = 0
-    backLogShows = sorted([x for x in sickbeard.showList if showCounts[x.indexerid][Overview.QUAL] + showCounts[x.indexerid][Overview.WANTED] + showCounts[x.indexerid][Overview.SNATCHED]], key=lambda x: x.name)
+    backLogShows = sorted([x for x in sickbeard.showList if showCounts[x.indexerid][Overview.QUAL] + showCounts[x.indexerid][Overview.WANTED] + (0, showCounts[x.indexerid][Overview.SNATCHED])[len(showQualSnatched(x)) > 0]], key=lambda x: x.name)
     for curShow in backLogShows:
         totalWanted += showCounts[curShow.indexerid][Overview.WANTED]
         totalQual += showCounts[curShow.indexerid][Overview.QUAL]
@@ -73,9 +73,8 @@ Jump to Show
                 if whichStr not in showCats[curShow.indexerid] or showCats[curShow.indexerid][whichStr] not in (Overview.QUAL, Overview.WANTED, Overview.SNATCHED):
                     continue
-                if not showQualSnatched(curShow):
-                    if showCats[curShow.indexerid][whichStr] == Overview.SNATCHED:
-                        continue
+                if not showQualSnatched(curShow) and showCats[curShow.indexerid][whichStr] == Overview.SNATCHED:
+                    continue
             %>
             <tr class="seasonstyle ${Overview.overviewStrings[showCats[curShow.indexerid][whichStr]]}">
                 <td class="tableleft" align="center">${whichStr}</td>
diff --git a/lib/tvdb_api/tvdb_api.py b/lib/tvdb_api/tvdb_api.py
index e03d2d702e202c698983af1522c798625244e616..4e3640ba11cb6d016ee75fdce56e850c484e5baa 100644
--- a/lib/tvdb_api/tvdb_api.py
+++ b/lib/tvdb_api/tvdb_api.py
@@ -565,8 +565,9 @@ class Tvdb:
 
         # get response from TVDB
         if self.config['cache_enabled']:
-
-            session = CacheControl(sess=self.config['session'], cache=caches.FileCache(self.config['cache_location'], use_dir_lock=True), cache_etags=False)
+            # Lets try without caching sessions to disk for awhile
+            # session = CacheControl(sess=self.config['session'], cache=caches.FileCache(self.config['cache_location'], use_dir_lock=True), cache_etags=False)
+            session = self.config['session']
             if self.config['proxy']:
                 log().debug("Using proxy for URL: %s" % url)
                 session.proxies = {
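The displayShow.mako and manage_backlogOverview.mako hunks above both change how snatched episodes are counted. The sketch below illustrates the two expressions with made-up counts; the `Overview.*` constants and `showQualSnatched()` are replaced by stand-ins.

```python
# Sketch of the two template expressions above, with made-up epCounts data.
SNATCHED, SNATCHED_PROPER, SNATCHED_BEST = 'snatched', 'snatched_proper', 'snatched_best'
ep_counts = {SNATCHED: 3, SNATCHED_PROPER: 1, SNATCHED_BEST: 0}

# displayShow.mako: the "Snatched" checkbox now sums all three snatched states.
total_snatched = ep_counts[SNATCHED] + ep_counts[SNATCHED_PROPER] + ep_counts[SNATCHED_BEST]
print(total_snatched)  # 4

# manage_backlogOverview.mako: (false_value, true_value)[condition] is an old
# Python conditional idiom - the boolean indexes the two-element tuple, and
# both elements are evaluated eagerly.
snatched_quality_list = []  # stand-in for what showQualSnatched(show) returns
snatched_count = (0, ep_counts[SNATCHED])[len(snatched_quality_list) > 0]
print(snatched_count)  # 0 - snatched episodes only count when the show has snatch-list qualities
```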
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 6ebb0f92be55e53172deef461f5360e5e530cc43..98224fc0078606b1bb2b697c540b75b7d1c43556 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -138,7 +138,6 @@ GIT_USERNAME = None
 GIT_PASSWORD = None
 GIT_PATH = None
 GIT_AUTOISSUES = False
-GIT_NEWVER = False
 DEVELOPER = False
 
 NEWS_URL = 'http://sickrage.github.io/sickrage-news/news.md'
@@ -580,7 +579,7 @@ def get_backlog_cycle_time():
 
 def initialize(consoleLogging=True):
     with INIT_LOCK:
-        global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, GIT_NEWVER, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, ENCRYPTION_SECRET, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, WEB_COOKIE_SECRET, WEB_USE_GZIP, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
+        global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, ENCRYPTION_SECRET, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, WEB_COOKIE_SECRET, WEB_USE_GZIP, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
             HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, SAB_FORCED, TORRENT_METHOD, NOTIFY_ON_LOGIN, \
             SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_CATEGORY_BACKLOG, SAB_CATEGORY_ANIME, SAB_CATEGORY_ANIME_BACKLOG, SAB_HOST, \
             NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_CATEGORY_BACKLOG, NZBGET_CATEGORY_ANIME, NZBGET_CATEGORY_ANIME_BACKLOG, NZBGET_PRIORITY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \
@@ -660,7 +659,6 @@ def initialize(consoleLogging=True):
         # git login info
         GIT_USERNAME = check_setting_str(CFG, 'General', 'git_username', '')
         GIT_PASSWORD = check_setting_str(CFG, 'General', 'git_password', '', censor_log=True)
-        GIT_NEWVER = bool(check_setting_int(CFG, 'General', 'git_newver', 0))
         DEVELOPER = bool(check_setting_int(CFG, 'General', 'developer', 0))
 
         # debugging
@@ -1576,7 +1574,7 @@ def halt():
                 pass
 
         __INITIALIZED__ = False
-        started = False 
+        started = False
 
 
 def sig_handler(signum=None, frame=None):
@@ -1612,7 +1610,6 @@ def save_config():
     new_config['General']['git_remote_url'] = GIT_REMOTE_URL
     new_config['General']['cur_commit_hash'] = CUR_COMMIT_HASH
     new_config['General']['cur_commit_branch'] = CUR_COMMIT_BRANCH
-    new_config['General']['git_newver'] = int(GIT_NEWVER)
     new_config['General']['config_version'] = CONFIG_VERSION
     new_config['General']['encryption_version'] = int(ENCRYPTION_VERSION)
    new_config['General']['encryption_secret'] = ENCRYPTION_SECRET
diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py
index 82a70e0d6f4dc6d777ea75a34b4acc166b8d01db..4df2176dc326b66a7d1781990afc71fd84fe5413 100644
--- a/sickbeard/dailysearcher.py
+++ b/sickbeard/dailysearcher.py
@@ -59,7 +59,7 @@ class DailySearcher(object):
         curTime = datetime.datetime.now(network_timezones.sb_timezone)
 
         myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT showid, airdate, season, episode FROM tv_episodes WHERE status = ? AND season > 0 AND (airdate <= ? and airdate > 1)",
+        sqlResults = myDB.select("SELECT showid, airdate, season, episode FROM tv_episodes WHERE status = ? AND (airdate <= ? and airdate > 1)",
                                  [common.UNAIRED, curDate])
 
         sql_l = []
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index b31d52a845139c5c14d47c957854de6b386fa243..a3670695940da91582f75e9c6399b23f17e7a569 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -1379,8 +1379,9 @@ def _setUpSession(session, headers):
     """
 
     # request session
-    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
-    session = CacheControl(sess=session, cache=caches.FileCache(ek(os.path.join, cache_dir, 'sessions'), use_dir_lock=True), cache_etags=False)
+    # Lets try without caching sessions to disk for awhile
+    # cache_dir = sickbeard.CACHE_DIR or _getTempDir()
+    # session = CacheControl(sess=session, cache=caches.FileCache(ek(os.path.join, cache_dir, 'sessions'), use_dir_lock=True), cache_etags=False)
 
     # request session clear residual referer
     # pylint: disable=superfluous-parens
diff --git a/sickbeard/nzbSplitter.py b/sickbeard/nzbSplitter.py
index efacaeed9edfabdc4f722ed6ae17958f158fdf67..9528d44716a66fe089b759f9742e74d35962d158 100644
--- a/sickbeard/nzbSplitter.py
+++ b/sickbeard/nzbSplitter.py
@@ -205,9 +205,7 @@ def split_result(obj):
         want_ep = True
         for ep_num in parsed_obj.episode_numbers:
             if not obj.extraInfo[0].wantEpisode(season, ep_num, obj.quality):
-                # pylint: disable=no-member
-                logger.log(u"Ignoring result " + new_nzb + " because we don't want an episode that is " +
-                           Quality.qualityStrings[obj.quality], logger.INFO)
+                logger.log(u"Ignoring result: " + new_nzb, logger.DEBUG)
                 want_ep = False
                 break
         if not want_ep:
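The helpers.py hunk above (like the earlier tvdb_api.py hunk) comments out the on-disk CacheControl wrapper around the requests session. A hedged sketch of both variants, assuming the `cachecontrol` and `requests` packages are available; the cache path is a placeholder.

```python
# Sketch of the session setup being toggled above; not SickRage's helper itself.
import os

import requests
from cachecontrol import CacheControl
from cachecontrol.caches import FileCache


def make_session(cache_dir=None):
    session = requests.Session()
    if cache_dir:
        # What the commented-out code did: wrap the session so responses are
        # cached on disk under <cache_dir>/sessions.
        session = CacheControl(sess=session,
                               cache=FileCache(os.path.join(cache_dir, 'sessions')),
                               cache_etags=False)
    # With the hunks above applied, the plain requests.Session() is used and
    # nothing is persisted to disk between requests.
    return session
```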
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index 0e393ef446b4e7f89b20e8777049a104963ba0e7..de35e84fb695967d506eeeb33d9c20c788c4b3f2 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -148,7 +148,7 @@ class PostProcessor(object):
                       logger.DEBUG)
             return PostProcessor.DOESNT_EXIST
 
-    def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False, subfolders=False):
+    def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False, subfolders=False):  # pylint: disable=unused-argument
         """
         For a given file path searches for files with the same name but different extension and returns their absolute paths
 
@@ -496,7 +496,7 @@ class PostProcessor(object):
         myDB = db.DBConnection()
         for curName in names:
             search_name = re.sub(r"[\.\- ]", "_", curName)
-            sql_results = myDB.select("SELECT showid, season, quality, version, resource FROM history WHERE resource LIKE ?", [search_name])
+            sql_results = myDB.select("SELECT showid, season, quality, version, resource FROM history WHERE resource LIKE ? AND (action % 100 = 4 OR action % 100 = 6)", [search_name])
 
             if len(sql_results) == 0:
                 continue
@@ -970,8 +970,10 @@ class PostProcessor(object):
             if self.is_proper and new_ep_quality == old_ep_quality:
                 self._log(u"New file is a proper/repack, marking it safe to replace")
             else:
-                self._log(u"File exists and new file is the same or lower quality than existing, marking it unsafe to replace")
-                return False
+                _, preferred_qualities = common.Quality.splitQuality(int(show.quality))
+                if new_ep_quality not in preferred_qualities:
+                    self._log(u"File exists and new file quality is not in a preferred quality list, marking it unsafe to replace")
+                    return False
 
         # Check if the processed file season is already in our indexer. If not, the file is most probably mislabled/fake and will be skipped
         # Only proceed if the file season is > 0
@@ -993,7 +995,7 @@ class PostProcessor(object):
 
         # try to find out if we have enough space to perform the copy or move action.
         if not helpers.isFileLocked(self.file_path, False):
-            if not verify_freespace(self.file_path, ep_obj.show._location, [ep_obj] + ep_obj.relatedEps):
+            if not verify_freespace(self.file_path, ep_obj.show._location, [ep_obj] + ep_obj.relatedEps):  # pylint: disable=protected-access
                 self._log("Not enough space to continue PP, exiting", logger.WARNING)
                 return False
         else:
@@ -1006,7 +1008,7 @@ class PostProcessor(object):
 
                     # clean up any left over folders
                     if cur_ep.location:
-                        helpers.delete_empty_folders(ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
+                        helpers.delete_empty_folders(ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)  # pylint: disable=protected-access
                 except (OSError, IOError):
                     raise EpisodePostProcessingFailedException("Unable to delete the existing files")
 
@@ -1015,16 +1017,16 @@ class PostProcessor(object):
         #    curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
 
         # if the show directory doesn't exist then make it if allowed
-        if not ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
+        if not ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:  # pylint: disable=protected-access
             self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
             try:
-                ek(os.mkdir, ep_obj.show._location)
-                helpers.chmodAsParent(ep_obj.show._location)
+                ek(os.mkdir, ep_obj.show._location)  # pylint: disable=protected-access
+                helpers.chmodAsParent(ep_obj.show._location)  # pylint: disable=protected-access
 
                 # do the library update for synoindex
-                notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
+                notifiers.synoindex_notifier.addFolder(ep_obj.show._location)  # pylint: disable=protected-access
             except (OSError, IOError):
-                raise EpisodePostProcessingFailedException("Unable to create the show directory: " + ep_obj.show._location)
+                raise EpisodePostProcessingFailedException("Unable to create the show directory: " + ep_obj.show._location)  # pylint: disable=protected-access
 
         # get metadata for the show (but not episode because it hasn't been fully processed)
         ep_obj.show.writeMetadata(True)
@@ -1162,7 +1164,7 @@ class PostProcessor(object):
             ep_obj.createMetaFiles()
         except Exception:
             logger.log(u"Could not create/update meta files. Continuing with postProcessing...")
-        
+
         # log it to history
         history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version)
 
@@ -1170,7 +1172,7 @@ class PostProcessor(object):
         # If any notification fails, don't stop postProcessor
         try:
             # send notifications
-            notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
+            notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))  # pylint: disable=protected-access
 
             # do the library update for KODI
             notifiers.kodi_notifier.update_library(ep_obj.show.name)
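The postProcessor.py change above relaxes the "same or lower quality" guard: an existing file may now be replaced when the new quality is on the show's preferred list. Below is a simplified stand-in for that else-branch, with the preferred list passed in directly instead of coming from `common.Quality.splitQuality(show.quality)`.

```python
# Simplified stand-in for the replace decision above: True means the existing
# file is kept. The quality flag values are illustrative.
def unsafe_to_replace(new_quality, old_quality, is_proper, preferred_qualities):
    if is_proper and new_quality == old_quality:
        return False  # a proper/repack of the same quality is safe to replace
    # New behaviour: only refuse when the new quality is not a preferred one.
    return new_quality not in preferred_qualities


HDTV, FULLHDTV = 4, 16  # illustrative quality flags
print(unsafe_to_replace(HDTV, HDTV, False, preferred_qualities=[FULLHDTV]))          # True - keep existing file
print(unsafe_to_replace(FULLHDTV, FULLHDTV, False, preferred_qualities=[FULLHDTV]))  # False - preferred, replace it
```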
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index bd0f335b86b34bf9fadbf76e466b811a89442da8..55c1993661f3501a1b403bfe59ef86ff583acf9e 100644
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -26,7 +26,7 @@ from sickbeard import logger
 from sickbeard.providers import btn, newznab, rsstorrent, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \
     omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, xthor, torrentbytes, \
     freshontv, titansoftv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
-    scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free
+    scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents
 
 __all__ = [
     'womble', 'btn', 'thepiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents',
@@ -36,7 +36,7 @@ __all__ = [
     'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers', 'cpasbien',
     'fnt', 'xthor', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk',
     'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker',
-    'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free'
+    'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents'
 ]
 
diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py
new file mode 100644
index 0000000000000000000000000000000000000000..048bf6913501fa11da2dd4dfe665613f25a6a23b
--- /dev/null
+++ b/sickbeard/providers/limetorrents.py
@@ -0,0 +1,143 @@
+# coding=utf-8
+# Author: Gonçalo (aka duramato/supergonkas) <matigonkas@outlook.com>
+# URL: https://github.com/SickRage/sickrage
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+import traceback
+from bs4 import BeautifulSoup
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard.common import USER_AGENT
+from sickrage.helper.common import try_int
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
+
+
+class LimeTorrentsProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
+    def __init__(self):
+        TorrentProvider.__init__(self, "LimeTorrents")
+
+        self.urls = {
+            'index': 'https://www.limetorrents.cc/',
+            'search': 'https://www.limetorrents.cc/searchrss/20/',
+            'rss': 'https://www.limetorrents.cc/rss/20/'
+        }
+
+        self.url = self.urls['index']
+
+        self.public = True
+        self.ratio = None
+        self.minseed = None
+        self.minleech = None
+        self.headers.update({'User-Agent': USER_AGENT})
+        self.proper_strings = ['PROPER', 'REPACK', 'REAL']
+
+        self.cache = LimeTorrentsCache(self)
+
+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches,too-many-locals
+
+        results = []
+        items = {'Season': [], 'Episode': [], 'RSS': []}
+
+        for mode in search_strings.keys():
+            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
+            for search_string in search_strings[mode]:
+
+                if mode != 'RSS':
+                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)
+
+                try:
+                    url = (self.urls['rss'], self.urls['search'] + search_string)[mode != 'RSS']
+                    logger.log(u"URL: %r " % url, logger.DEBUG)
+                    data = self.get_url(url)
+                    if not data:
+                        logger.log(u"No data returned from provider", logger.DEBUG)
+                        continue
+
+                    if not data.startswith('<?xml'):
+                        logger.log(u'Expected xml but got something else, is your mirror failing?', logger.INFO)
+                        continue
+
+                    data = BeautifulSoup(data, 'html5lib')
+
+                    entries = data.findAll('item')
+                    if not entries:
+                        logger.log(u'Returned xml contained no results', logger.INFO)
+                        continue
+
+                    for item in entries:
+                        try:
+                            title = item.title.text
+                            download_url = item.enclosure['url']
+
+                            if not (title and download_url):
+                                continue
+                            #seeders and leechers are presented diferently when doing a search and when looking for newly added
+                            if mode == 'RSS':
+                                # <![CDATA[
+                                # Category: <a href="http://www.limetorrents.cc/browse-torrents/TV-shows/">TV shows</a><br /> Seeds: 1<br />Leechers: 0<br />Size: 7.71 GB<br /><br /><a href="http://www.limetorrents.cc/Owen-Hart-of-Gold-Djon91-torrent-7180661.html">More @ limetorrents.cc</a><br />
+                                # ]]>
+                                description = item.find('description')
+                                seeders = description.find_all('br')[0].next_sibling.strip().lstrip('Seeds: ')
+                                leechers = description.find_all('br')[1].next_sibling.strip().lstrip('Leechers: ')
+                            else:
+                                #<description>Seeds: 6982 , Leechers 734</description>
+                                description = item.find('description').text.partition(',')
+                                seeders = description[0].lstrip('Seeds: ').strip()
+                                leechers = description[2].lstrip('Leechers ').strip()
+                            size = try_int(item.find('size').text, -1)
+
+                        except (AttributeError, TypeError, KeyError, ValueError):
+                            continue
+
+                        # Filter unseeded torrent
+                        if seeders < self.minseed or leechers < self.minleech:
+                            if mode != 'RSS':
+                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            continue
+
+                        item = title, download_url, size, seeders, leechers
+                        if mode != 'RSS':
+                            logger.log(u"Found result: %s " % title, logger.DEBUG)
+
+                        items[mode].append(item)
+
+                except (AttributeError, TypeError, KeyError, ValueError):
+                    logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR)
+
+            # For each search mode sort all the items by seeders if available
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
+            results += items[mode]
+
+        return results
+
+    def seed_ratio(self):
+        return self.ratio
+
+
+class LimeTorrentsCache(tvcache.TVCache):
+    def __init__(self, provider_obj):
+
+        tvcache.TVCache.__init__(self, provider_obj)
+
+        self.minTime = 20
+
+    def _getRSSData(self):
+        search_strings = {'RSS': ['rss']}
+        return {'entries': self.provider.search(search_strings)}
+
+
+provider = LimeTorrentsProvider()
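The new provider above pulls seeders and leechers out of two differently formatted `<description>` payloads with `str.lstrip`. Note that `lstrip('Seeds: ')` strips a *set of characters*, not a literal prefix; it works here because the next character is always a digit. The hypothetical helper below shows a regex-based equivalent that handles both feed formats.

```python
# Hypothetical illustration of the description parsing above; not part of the
# provider itself.
import re


def parse_seeds_leechers(description):
    seeds = re.search(r'Seeds:?\s*(\d+)', description)
    leechers = re.search(r'Leechers:?\s*(\d+)', description)
    return (int(seeds.group(1)) if seeds else 0,
            int(leechers.group(1)) if leechers else 0)


# Search feed:  <description>Seeds: 6982 , Leechers 734</description>
print(parse_seeds_leechers('Seeds: 6982 , Leechers 734'))                              # (6982, 734)
# RSS feed embeds the counts in HTML markup between <br /> tags.
print(parse_seeds_leechers('Category: TV shows Seeds: 1 Leechers: 0 Size: 7.71 GB'))   # (1, 0)
```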
diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py
index df3ca0b5ba9896f543635d10c4792dbd21c34693..4155bd8c6294e429cce8af25cf14dfae49395924 100644
--- a/sickbeard/subtitles.py
+++ b/sickbeard/subtitles.py
@@ -195,11 +195,11 @@ def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals, too-
                    subtitles_info['episode']), logger.DEBUG)
         return existing_subtitles, None
 
-    for sub in subtitles_list:
-        matches = sub.get_matches(video, hearing_impaired=False)
+    for subtitle in subtitles_list:
+        matches = subtitle.get_matches(video, hearing_impaired=False)
         score = subliminal.subtitle.compute_score(matches, video)
         logger.log(u"[%s] Subtitle score for %s is: %s (min=%s)"
-                   % (sub.provider_name, sub.id, score, user_score), logger.DEBUG)
+                   % (subtitle.provider_name, subtitle.id, score, user_score), logger.DEBUG)
 
     found_subtitles = pool.download_best_subtitles(subtitles_list, video, languages=languages,
                                                    hearing_impaired=sickbeard.SUBTITLES_HEARING_IMPAIRED,
@@ -238,7 +238,7 @@ def download_subtitles(subtitles_info):  # pylint: disable=too-many-locals, too-
             run_subs_extra_scripts(subtitles_info, subtitle, video, single=not sickbeard.SUBTITLES_MULTI)
 
     new_subtitles = sorted({subtitle.language.opensubtitles for subtitle in found_subtitles})
-    current_subtitles = sorted({subtitle for subtitle in new_subtitles + existing_subtitles})
+    current_subtitles = sorted({subtitle for subtitle in new_subtitles + existing_subtitles}) if existing_subtitles else new_subtitles
     if not sickbeard.SUBTITLES_MULTI and len(found_subtitles) == 1:
         new_code = found_subtitles[0].language.opensubtitles
         if new_code not in existing_subtitles:
@@ -371,11 +371,11 @@ class SubtitlesFinder(object):
                                                                min_score=user_score,
                                                                only_one=not sickbeard.SUBTITLES_MULTI)
 
-                        for sub in subtitles_list:
-                            matches = sub.get_matches(video, hearing_impaired=False)
+                        for subtitle in subtitles_list:
+                            matches = subtitle.get_matches(video, hearing_impaired=False)
                             score = subliminal.subtitle.compute_score(matches, video)
                             logger.log(u"[%s] Subtitle score for %s is: %s (min=%s)"
-                                       % (sub.provider_name, sub.id, score, user_score), logger.DEBUG)
+                                       % (subtitle.provider_name, subtitle.id, score, user_score), logger.DEBUG)
 
                         downloaded_languages = set()
                         for subtitle in found_subtitles:
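The subtitles.py change above only merges newly downloaded language codes with the existing ones when there actually are existing codes to merge. A small sketch with made-up opensubtitles-style codes standing in for real subliminal results.

```python
# Sketch of the current_subtitles change above, with plain strings standing in
# for subliminal subtitle objects.
def merge_subtitle_codes(new_subtitles, existing_subtitles):
    # With the hunk above, the set-union path only runs when there are
    # existing codes; otherwise the freshly downloaded list is used as-is.
    if existing_subtitles:
        return sorted({code for code in new_subtitles + existing_subtitles})
    return new_subtitles


print(merge_subtitle_codes(['eng'], ['por', 'eng']))  # ['eng', 'por']
print(merge_subtitle_codes(['eng'], []))              # ['eng']
```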
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index 56d2459ed95ec6e8c3f8a5701d85ea2ae27e85a3..4bb071446b08f92b62381d8b19bcaaadf926d9cc 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1204,20 +1204,8 @@ class TVShow(object):
         toReturn += "anime: " + str(self.is_anime) + "\n"
         return toReturn
 
-    def qualitiesToString(self, qualities=[]):
-        result = u''
-        for quality in qualities:
-            if quality in Quality.qualityStrings:
-                result += Quality.qualityStrings[quality] + u', '
-            else:
-                logger.log(u"Bad quality value: " + str(quality))
-
-        result = re.sub(', $', '', result)
-
-        if not len(result):
-            result = u'None'
-
-        return result
+    def qualitiesToString(self, qualities=None):
+        return ', '.join([Quality.qualityStrings[quality] for quality in qualities or [] if quality and quality in Quality.qualityStrings]) or 'None'
 
     def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuality=False):
 
@@ -1231,7 +1219,8 @@ class TVShow(object):
                     self.qualitiesToString([quality])), logger.DEBUG)
 
         if quality not in allowed_qualities + preferred_qualities or quality is UNKNOWN:
-            logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG)
+            logger.log(u"Skipping %s (S%02dE%02d, %s): Don't want this quality, ignoring found episode" %
+                       (self.name, season or 0, episode or 0, Quality.qualityStrings[quality]), logger.INFO)
             return False
 
         myDB = db.DBConnection()
@@ -1239,7 +1228,8 @@
                                  [self.indexerid, season, episode])
 
        if not sqlResults or not len(sqlResults):
-            logger.log(u"Unable to find a matching episode in database, ignoring found episode", logger.DEBUG)
+            logger.log(u"Skipping %s (S%02dE%02d, %s): Unable to find a matching episode in database, ignoring found episode" %
+                       (self.name, season or 0, episode or 0, Quality.qualityStrings[quality]), logger.INFO)
             return False
 
         epStatus = int(sqlResults[0]["status"])
@@ -1249,14 +1239,15 @@
 
         # if we know we don't want it then just say no
         if epStatus in Quality.ARCHIVED + [UNAIRED, SKIPPED, IGNORED] and not manualSearch:
-            logger.log(u"Existing episode status is unaired/skipped/ignored/archived, ignoring found episode", logger.DEBUG)
+            logger.log(u"Skipping %s (S%02dE%02d, %s): Existing episode status is '%s', ignoring found episode" %
+                       (self.name, season or 0, episode or 0, Quality.qualityStrings[quality], epStatus_text), logger.INFO)
             return False
 
         curStatus, curQuality = Quality.splitCompositeStatus(epStatus)
 
         # if it's one of these then we want it as long as it's in our allowed initial qualities
         if epStatus in (WANTED, SKIPPED, UNKNOWN):
-            logger.log(u"Existing episode status is wanted/skipped/unknown, getting found episode", logger.DEBUG)
+            logger.log(u"Existing episode status is '%s', getting found episode" % epStatus_text, logger.DEBUG)
             return True
         elif manualSearch:
             if (downCurQuality and quality >= curQuality) or (not downCurQuality and quality > curQuality):
@@ -1278,7 +1269,8 @@
                 logger.log(u"Episode already exists and the found episode has same/lower quality, ignoring found episode", logger.DEBUG)
 
-        logger.log(u"None of the conditions were met, ignoring found episode", logger.DEBUG)
+        logger.log(u"Skipping %s (S%02dE%02d, %s): None of the conditions were met, ignoring found episode" %
+                   (self.name, season or 0, episode or 0, Quality.qualityStrings[quality]), logger.INFO)
         return False
 
     def getOverview(self, epStatus):
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index eba44869940a1a2aed6de900b25dd78d22d101bc..62585bbfa6ee7e342c5f75d2c25f5a63f8aa4dc5 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -356,8 +356,7 @@ class TVCache(object):
 
         # if the show says we want that episode then add it to the list
         if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch, downCurQuality):
-            logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
-                       Quality.qualityStrings[curQuality], logger.INFO)
+            logger.log(u"Skipping " + curResult["name"], logger.DEBUG)
             continue
 
         epObj = showObj.getEpisode(curSeason, curEp)
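The rewritten `qualitiesToString()` above collapses the old loop, regex strip and fallback into a single join. A sketch of the same expression with a tiny stand-in for `Quality.qualityStrings`.

```python
# Sketch of the rewritten qualitiesToString() above; quality_strings is a tiny
# stand-in for Quality.qualityStrings, not the real SickRage table.
quality_strings = {1: 'SDTV', 4: '720p HDTV', 16: '1080p HDTV'}


def qualities_to_string(qualities=None):
    return ', '.join([quality_strings[quality] for quality in qualities or []
                      if quality and quality in quality_strings]) or 'None'


print(qualities_to_string([4, 16]))   # 720p HDTV, 1080p HDTV
print(qualities_to_string([4, 999]))  # 720p HDTV  (unknown values are skipped)
print(qualities_to_string())          # None  (also covers the old qualities=[] default)
```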
diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py
index e61f258868c26f42fd92c9f989931a0c1cf6e673..dfa6815474ed94e3a3997aeeed879282a39d69c7 100644
--- a/sickbeard/versionChecker.py
+++ b/sickbeard/versionChecker.py
@@ -638,7 +638,6 @@
 
         if exit_status == 0:
             self._find_installed_version()
-            sickbeard.GIT_NEWVER = True
 
             # Notify update successful
             if sickbeard.NOTIFY_ON_UPDATE:
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index 8c6bb80caa8d3e6cd67048874a951de43fec0265..3d407287cae355bf86642cac9147a086b0fe164e 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -2146,7 +2146,7 @@ class CMD_ShowAddNew(ApiCall):
             default_ep_status_after = self.future_status
 
         indexer_name = None
-        indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()
+        indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid, 'lang': self.lang}).run()
 
         if indexer_result['result'] == result_type_map[RESULT_SUCCESS]:
             if not indexer_result['data']['results']:
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 4d66995f2667af2f6a74974048ea18ea6caeca98..beed26dafef3b55db286e88916d936e93f4bc529 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -699,7 +699,7 @@ class Home(WebRoot):
         myDB = db.DBConnection()
         today = str(datetime.date.today().toordinal())
 
-        status_quality = '(' + ','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER]) + ')'
+        status_quality = '(' + ','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST]) + ')'
         status_download = '(' + ','.join([str(x) for x in Quality.DOWNLOADED + Quality.ARCHIVED]) + ')'
 
         sql_statement = 'SELECT showid, '
diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py
index 9ae4ee51296deaac7eded60babb5469f80611a85..669b51398c1e4bab159945f6948469e559f9e148 100644
--- a/sickrage/providers/GenericProvider.py
+++ b/sickrage/providers/GenericProvider.py
@@ -290,8 +290,7 @@ class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
                     break
 
             if not episode_wanted:
-                logger.log(u'Ignoring result %s because we don\'t want an episode that is %s' % (
-                    title, Quality.qualityStrings[quality]), logger.INFO)
+                logger.log(u'Ignoring result %s.' % (title), logger.DEBUG)
                 continue
 
             logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)
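The webserve.py hunk above adds `Quality.SNATCHED_BEST` to the composite-status list used by the show-stats query. The sketch below builds the same IN-clause string; the numeric codes are illustrative stand-ins for SickRage's composite statuses (base status + 100 × quality).

```python
# Sketch of the status_quality string built above; the values are illustrative
# stand-ins for Quality.SNATCHED / SNATCHED_PROPER / SNATCHED_BEST lists.
snatched = [202, 402]          # e.g. SNATCHED at two different qualities
snatched_proper = [209, 409]
snatched_best = [212, 412]     # previously missing from the Home stats query

status_quality = '(' + ','.join([str(x) for x in snatched + snatched_proper + snatched_best]) + ')'
print(status_quality)  # (202,402,209,409,212,412)

# The string is then interpolated into the stats SQL, roughly along the lines of:
# "... SUM(CASE WHEN status IN " + status_quality + " THEN 1 ELSE 0 END) ..."
```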