diff --git a/gui/slick/images/providers/newpct.png b/gui/slick/images/providers/newpct.png new file mode 100644 index 0000000000000000000000000000000000000000..f9dc12d7b39e9b359559f42654e50e42ce506665 Binary files /dev/null and b/gui/slick/images/providers/newpct.png differ diff --git a/gui/slick/views/config_providers.mako b/gui/slick/views/config_providers.mako index d4c66b35b9d7d1e8e6e9bd0fb0ec93d3689a85a2..41f3664ff5af6c894e619348561e7f8af93cbcf9 100644 --- a/gui/slick/views/config_providers.mako +++ b/gui/slick/views/config_providers.mako @@ -368,7 +368,7 @@ $('#config-components').tabs(); </label> </div> % endif - + % if hasattr(curTorrentProvider, 'pin'): <div class="field-pair"> <label for="${curTorrentProvider.getID()}_pin"> @@ -455,6 +455,30 @@ $('#config-components').tabs(); </div> % endif + % if hasattr(curTorrentProvider, 'onlyspasearch'): + <div class="field-pair"> + <label for="${curTorrentProvider.getID()}_onlyspasearch"> + <span class="component-title">For Spanish torrents</span> + <span class="component-desc"> + <input type="checkbox" name="${curTorrentProvider.getID()}_onlyspasearch" id="${curTorrentProvider.getID()}_onlyspasearch" ${('', 'checked="checked"')[bool(curTorrentProvider.onlyspasearch)]} /> + <p>ONLY search on this provider if show info is defined as "Spanish" (avoid provider's use for VOS shows)</p> + </span> + </label> + </div> + % endif + + % if hasattr(curTorrentProvider, 'append_identifier'): + <div class="field-pair"> + <label for="${curTorrentProvider.getID()}_append_identifier"> + <span class="component-title">Append identifier:</span> + <span class="component-desc"> + <input type="text" name="${curTorrentProvider.getID()}_append_identifier" id="${curTorrentProvider.getID()}_append_identifier" value="${curTorrentProvider.append_identifier}" class="form-control input-sm input350" /> + <p>Append an identifier to every episode snatched by this provider. 
Usefull in combination with "Required Words" on show configuration if you want to download certain shows only from this provider.</p> + </span> + </label> + </div> + % endif + % if hasattr(curTorrentProvider, 'sorting'): <div class="field-pair"> <label for="${curTorrentProvider.getID()}_sorting"> diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index f8021c6e36d5d3d758dc24258e86cb6dd1ec35ab..11974b7751e773399f469fa18f96ff448f5eb7ee 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -36,7 +36,6 @@ from github import Github from sickbeard import metadata from sickbeard import providers from sickbeard.providers.generic import GenericProvider - from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \ naming_ep_type from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \ @@ -283,10 +282,6 @@ NZBS = False NZBS_UID = None NZBS_HASH = None -WOMBLE = False - -BINSEARCH = False - OMGWTFNZBS = False OMGWTFNZBS_USERNAME = None OMGWTFNZBS_APIKEY = None @@ -553,7 +548,7 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b' #TRAKT_API_KEY = 'd4161a7a106424551add171e5470112e4afdaf2438e6ef2fe0548edc75924868' TRAKT_API_KEY = '5c65f55e11d48c35385d9e8670615763a605fad28374c8ae553a7b7a50651ddd' -TRAKT_API_SECRET ='b53e32045ac122a445ef163e6d859403301ffe9b17fb8321d428531b69022a82' +TRAKT_API_SECRET = 'b53e32045ac122a445ef163e6d859403301ffe9b17fb8321d428531b69022a82' TRAKT_PIN_URL = 'https://trakt.tv/pin/4562' TRAKT_OAUTH_URL = 'https://trakt.tv/' TRAKT_API_URL = 'https://api-v2launch.trakt.tv/' @@ -574,7 +569,7 @@ def get_backlog_cycle_time(): def initialize(consoleLogging=True): with INIT_LOCK: - global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, GIT_NEWVER, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, ENCRYPTION_SECRET, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, WEB_COOKIE_SECRET, WEB_USE_GZIP, API_KEY, API_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \ + global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, GIT_NEWVER, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, ENCRYPTION_SECRET, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, WEB_COOKIE_SECRET, WEB_USE_GZIP, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \ HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, SAB_FORCED, TORRENT_METHOD, \ SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_CATEGORY_ANIME, SAB_HOST, \ NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_CATEGORY_ANIME, NZBGET_PRIORITY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \ @@ -583,25 +578,25 @@ def initialize(consoleLogging=True): KODI_UPDATE_LIBRARY, KODI_HOST, KODI_USERNAME, KODI_PASSWORD, BACKLOG_FREQUENCY, \ USE_TRAKT, TRAKT_USERNAME, TRAKT_ACCESS_TOKEN, TRAKT_REFRESH_TOKEN, TRAKT_REMOVE_WATCHLIST, TRAKT_SYNC_WATCHLIST, TRAKT_REMOVE_SHOW_FROM_SICKRAGE, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, TRAKT_USE_RECOMMENDED, TRAKT_SYNC, TRAKT_SYNC_REMOVE, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_TIMEOUT, TRAKT_BLACKLIST_NAME, \ USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, USE_PLEX_CLIENT, PLEX_CLIENT_USERNAME, PLEX_CLIENT_PASSWORD, \ - PLEX_SERVER_HOST, PLEX_SERVER_TOKEN, 
PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, SKIP_REMOVED_FILES, \ + PLEX_SERVER_HOST, PLEX_SERVER_TOKEN, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, MIN_BACKLOG_FREQUENCY, SKIP_REMOVED_FILES, \ USE_EMBY, EMBY_HOST, EMBY_APIKEY, \ showUpdateScheduler, __INITIALIZED__, INDEXER_DEFAULT_LANGUAGE, EP_DEFAULT_DELETED_STATUS, LAUNCH_BROWSER, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, SORT_ARTICLE, showList, loadingShowList, \ NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, INDEXER_DEFAULT, INDEXER_TIMEOUT, USENET_RETENTION, TORRENT_DIR, \ QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, STATUS_DEFAULT_AFTER, \ GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, USE_FREEMOBILE, FREEMOBILE_ID, FREEMOBILE_APIKEY, FREEMOBILE_NOTIFY_ONSNATCH, FREEMOBILE_NOTIFY_ONDOWNLOAD, FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD, \ - USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, \ + USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, \ USE_PYTIVO, PYTIVO_NOTIFY_ONSNATCH, PYTIVO_NOTIFY_ONDOWNLOAD, PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD, PYTIVO_UPDATE_LIBRARY, PYTIVO_HOST, PYTIVO_SHARE_NAME, PYTIVO_TIVO_NAME, \ USE_NMA, NMA_NOTIFY_ONSNATCH, NMA_NOTIFY_ONDOWNLOAD, NMA_NOTIFY_ONSUBTITLEDOWNLOAD, NMA_API, NMA_PRIORITY, \ USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \ USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \ versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, NOTIFY_ON_UPDATE, PROCESS_AUTOMATICALLY, NO_DELETE, UNPACK, CPU_PRESET, \ - KEEP_PROCESSED_DIR, PROCESS_METHOD, DELRARCONTENTS, TV_DOWNLOAD_DIR, MIN_DAILYSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, DEFAULT_SHOWUPDATE_HOUR, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \ + KEEP_PROCESSED_DIR, PROCESS_METHOD, DELRARCONTENTS, TV_DOWNLOAD_DIR, UPDATE_FREQUENCY, \ showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TIMEZONE_DISPLAY, \ NAMING_PATTERN, NAMING_MULTI_EP, NAMING_ANIME_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_ANIME_PATTERN, NAMING_CUSTOM_ANIME, NAMING_STRIP_YEAR, \ RENAME_EPISODES, AIRDATE_EPISODES, FILE_TIMESTAMP_TIMEZONE, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ - WOMBLE, BINSEARCH, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \ + providerList, newznabProviderList, torrentRssProviderList, \ EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, DAILYSEARCH_FREQUENCY, TWITTER_DMTO, TWITTER_USEDM, \ - USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ + USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ USE_BOXCAR2, BOXCAR2_ACCESSTOKEN, BOXCAR2_NOTIFY_ONDOWNLOAD, BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR2_NOTIFY_ONSNATCH, \ USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_APIKEY, PUSHOVER_DEVICE, PUSHOVER_NOTIFY_ONDOWNLOAD, 
PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, PUSHOVER_SOUND, \ USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, USE_SYNOINDEX, \ @@ -609,16 +604,16 @@ def initialize(consoleLogging=True): USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \ USE_LISTVIEW, METADATA_KODI, METADATA_KODI_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \ NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, SYNC_FILES, POSTPONE_IF_SYNC_FILES, dailySearchScheduler, NFO_RENAME, \ - GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DISPLAY_FILESIZE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, THEME_NAME, FILTER_ROW, \ + GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, THEME_NAME, FILTER_ROW, \ POSTER_SORTBY, POSTER_SORTDIR, HISTORY_LIMIT, CREATE_MISSING_SHOW_DIRS, ADD_SHOWS_WO_DIR, \ METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, IGNORED_SUBS_LIST, REQUIRE_WORDS, CALENDAR_UNPROTECTED, NO_RESTART, \ USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, SUBTITLES_MULTI, EMBEDDED_SUBTITLES_ALL, SUBTITLES_EXTRA_SCRIPTS, subtitlesFinderScheduler, \ SUBTITLES_HEARING_IMPAIRED, ADDIC7ED_USER, ADDIC7ED_PASS, LEGENDASTV_USER, LEGENDASTV_PASS, OPENSUBTITLES_USER, OPENSUBTITLES_PASS, \ - USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, DEFAULT_PAGE, PROXY_SETTING, PROXY_INDEXERS, \ - AUTOPOSTPROCESSER_FREQUENCY, SHOWUPDATE_HOUR, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \ + USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, DEBUG, DEFAULT_PAGE, PROXY_SETTING, PROXY_INDEXERS, \ + AUTOPOSTPROCESSER_FREQUENCY, SHOWUPDATE_HOUR, \ ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \ - ANIME_SPLIT_HOME, SCENE_DEFAULT, ARCHIVE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_ORG, GIT_REPO, GIT_USERNAME, GIT_PASSWORD, \ - GIT_AUTOISSUES, DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_URL, NEWS_LAST_READ, NEWS_LATEST, NEWS_UNREAD, SHOWS_RECENT + ANIME_SPLIT_HOME, SCENE_DEFAULT, ARCHIVE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_USERNAME, GIT_PASSWORD, \ + GIT_AUTOISSUES, DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT if __INITIALIZED__: return False @@ -1144,8 +1139,7 @@ def initialize(consoleLogging=True): SUBTITLES_FINDER_FREQUENCY = check_setting_int(CFG, 'Subtitles', 'subtitles_finder_frequency', 1) SUBTITLES_MULTI = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_multi', 1)) - SUBTITLES_EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'Subtitles', 'subtitles_extra_scripts', '').split('|') if - x.strip()] + SUBTITLES_EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'Subtitles', 'subtitles_extra_scripts', '').split('|') if x.strip()] ADDIC7ED_USER = check_setting_str(CFG, 'Subtitles', 
'addic7ed_username', '', censor_log=True) ADDIC7ED_PASS = check_setting_str(CFG, 'Subtitles', 'addic7ed_password', '', censor_log=True) @@ -1206,7 +1200,7 @@ def initialize(consoleLogging=True): TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'local') POSTER_SORTBY = check_setting_str(CFG, 'GUI', 'poster_sortby', 'name') POSTER_SORTDIR = check_setting_int(CFG, 'GUI', 'poster_sortdir', 1) - FILTER_ROW = bool(check_setting_int(CFG, 'GUI', 'filter_row', 1)) + FILTER_ROW = bool(check_setting_int(CFG, 'GUI', 'filter_row', 1)) DISPLAY_ALL_SEASONS = bool(check_setting_int(CFG, 'General', 'display_all_seasons', 1)) # initialize NZB and TORRENT providers @@ -1243,10 +1237,9 @@ def initialize(consoleLogging=True): curTorrentProvider.getID() + '_passkey', '', censor_log=True) if hasattr(curTorrentProvider, 'pin'): curTorrentProvider.pin = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_pin', '', censor_log=True) + curTorrentProvider.getID() + '_pin', '', censor_log=True) if hasattr(curTorrentProvider, 'proxy'): - curTorrentProvider.proxy.enabled = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_proxy', 0)) + curTorrentProvider.proxy.enabled = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), curTorrentProvider.getID() + '_proxy', 0)) if hasattr(curTorrentProvider.proxy, 'url'): curTorrentProvider.proxy.url = check_setting_str(CFG, curTorrentProvider.getID().upper(), curTorrentProvider.getID() + '_proxy_url', '') @@ -1255,15 +1248,23 @@ def initialize(consoleLogging=True): curTorrentProvider.getID() + '_confirmed', 1)) if hasattr(curTorrentProvider, 'ranked'): curTorrentProvider.ranked = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_ranked', 1)) + curTorrentProvider.getID() + '_ranked', 1)) if hasattr(curTorrentProvider, 'engrelease'): curTorrentProvider.engrelease = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_engrelease', 0)) + curTorrentProvider.getID() + '_engrelease', 0)) + + if hasattr(curTorrentProvider, 'onlyspasearch'): + curTorrentProvider.onlyspasearch = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), + curTorrentProvider.getID() + '_onlyspasearch', 1)) + + if hasattr(curTorrentProvider, 'append_identifier'): + curTorrentProvider.append_identifier = check_setting_str(CFG, curTorrentProvider.getID().upper(), + curTorrentProvider.getID() + '_append_identifier', '[' + curTorrentProvider.name + ']') if hasattr(curTorrentProvider, 'sorting'): curTorrentProvider.sorting = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_sorting','seeders') + curTorrentProvider.getID() + '_sorting', 'seeders') if hasattr(curTorrentProvider, 'options'): curTorrentProvider.options = check_setting_str(CFG, curTorrentProvider.getID().upper(), curTorrentProvider.getID() + '_options', '') @@ -1300,10 +1301,10 @@ def initialize(consoleLogging=True): if hasattr(curTorrentProvider, 'cat'): curTorrentProvider.cat = check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_cat', 0) + curTorrentProvider.getID() + '_cat', 0) if hasattr(curTorrentProvider, 'subtitle'): curTorrentProvider.subtitle = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_subtitle', 0)) + curTorrentProvider.getID() + '_subtitle', 0)) for curNzbProvider in [curProvider for curProvider in 
providers.sortedProviderList() if curProvider.providerType == GenericProvider.NZB]: @@ -1365,8 +1366,8 @@ def initialize(consoleLogging=True): (METADATA_PS3, metadata.ps3), (METADATA_WDTV, metadata.wdtv), (METADATA_TIVO, metadata.tivo), - (METADATA_MEDE8ER, metadata.mede8er), - ]: + (METADATA_MEDE8ER, metadata.mede8er)]: + (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple tmp_provider = cur_metadata_class.metadata_class() tmp_provider.set_config(cur_metadata_config) @@ -1445,11 +1446,7 @@ def initialize(consoleLogging=True): def start(): - global __INITIALIZED__, backlogSearchScheduler, \ - showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \ - properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ - subtitlesFinderScheduler, USE_SUBTITLES, traktCheckerScheduler, \ - dailySearchScheduler, events, started + global started with INIT_LOCK: if __INITIALIZED__: @@ -1520,11 +1517,7 @@ def start(): def halt(): - global __INITIALIZED__, backlogSearchScheduler, \ - showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \ - properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ - subtitlesFinderScheduler, traktCheckerScheduler, \ - dailySearchScheduler, events, started + global __INITIALIZED__, started with INIT_LOCK: @@ -1628,8 +1621,6 @@ def sig_handler(signum=None, frame=None): def saveAll(): - global showList - # write all shows logger.log(u"Saving all shows to the database") for show in showList: @@ -1822,6 +1813,12 @@ def save_config(): if hasattr(curTorrentProvider, 'engrelease'): new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_engrelease'] = int( curTorrentProvider.engrelease) + if hasattr(curTorrentProvider, 'onlyspasearch'): + new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_onlyspasearch'] = int( + curTorrentProvider.onlyspasearch) + if hasattr(curTorrentProvider, 'append_identifier'): + new_config[curTorrentProvider.getID().upper()][ + curTorrentProvider.getID() + '_append_identifier'] = curTorrentProvider.append_identifier if hasattr(curTorrentProvider, 'sorting'): new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_sorting'] = curTorrentProvider.sorting if hasattr(curTorrentProvider, 'ratio'): diff --git a/sickbeard/metadata/__init__.py b/sickbeard/metadata/__init__.py index 3645c5b4ec35a32cf5cb3cd53dcf63aa81dadfa7..2e8f09157dd32d43218324298e337f19887d32ba 100644 --- a/sickbeard/metadata/__init__.py +++ b/sickbeard/metadata/__init__.py @@ -19,17 +19,16 @@ __all__ = ['generic', 'helpers', 'kodi', 'kodi_12plus', 'mediabrowser', 'ps3', 'wdtv', 'tivo', 'mede8er'] import sys -import kodi, kodi_12plus, mediabrowser, ps3, wdtv, tivo, mede8er +from sickbeard.metadata import kodi, kodi_12plus, mediabrowser, ps3, wdtv, tivo, mede8er, generic, helpers def available_generators(): - return filter(lambda x: x not in ('generic', 'helpers'), __all__) - + return [x for x in __all__ if x not in ['generic', 'helpers']] def _getMetadataModule(name): name = name.lower() prefix = "sickbeard.metadata." 
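
The available_generators() rewrite above is behaviour-preserving apart from always returning a list (the old filter() form already returns a list on Python 2). A quick standalone check, using the __all__ list declared at the top of sickbeard/metadata/__init__.py; this is also the list _getMetadataModule() now checks against instead of raw __all__:

    __all__ = ['generic', 'helpers', 'kodi', 'kodi_12plus', 'mediabrowser',
               'ps3', 'wdtv', 'tivo', 'mede8er']

    def available_generators():
        # same comprehension as the patched sickbeard.metadata.available_generators
        return [x for x in __all__ if x not in ['generic', 'helpers']]

    print(available_generators())
    # ['kodi', 'kodi_12plus', 'mediabrowser', 'ps3', 'wdtv', 'tivo', 'mede8er']
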
- if name in __all__ and prefix + name in sys.modules: + if name in available_generators() and prefix + name in sys.modules: return sys.modules[prefix + name] else: return None @@ -53,4 +52,3 @@ def get_metadata_generator_dict(): result[cur_generator.name] = cur_generator return result - diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py index fe060a7d88ea155fea959f9c7b9f008ba32545b7..6af12b5eb2928f2d18020250db2b20eaaaa1184e 100644 --- a/sickbeard/metadata/generic.py +++ b/sickbeard/metadata/generic.py @@ -40,7 +40,7 @@ import fanart from fanart.core import Request as fanartRequest -class GenericMetadata: +class GenericMetadata(object): """ Base class for all metadata providers. Default behavior is meant to mostly follow KODI 12+ metadata standards. Has support for: @@ -56,17 +56,10 @@ class GenericMetadata: - season all banner """ - def __init__(self, - show_metadata=False, - episode_metadata=False, - fanart=False, - poster=False, - banner=False, - episode_thumbnails=False, - season_posters=False, - season_banners=False, - season_all_poster=False, - season_all_banner=False): + def __init__(self, show_metadata=False, episode_metadata=False, fanart=False, + poster=False, banner=False, episode_thumbnails=False, + season_posters=False, season_banners=False, + season_all_poster=False, season_all_banner=False): self.name = "Generic" @@ -102,8 +95,8 @@ class GenericMetadata: @staticmethod def makeID(name): - name_id = re.sub("[+]", "plus", name) - name_id = re.sub("[^\w\d_]", "_", name_id).lower() + name_id = re.sub(r"[+]", "plus", name) + name_id = re.sub(r"[^\w\d_]", "_", name_id).lower() return name_id def set_config(self, string): @@ -181,7 +174,7 @@ class GenericMetadata: return ek(os.path.join, show_obj.location, self._show_metadata_filename) def get_episode_file_path(self, ep_obj): - return helpers.replaceExtension(ep_obj.location, self._ep_nfo_extension) + return ek(helpers.replaceExtension, ep_obj.location, self._ep_nfo_extension) def get_fanart_path(self, show_obj): return ek(os.path.join, show_obj.location, self.fanart_name) @@ -192,7 +185,8 @@ class GenericMetadata: def get_banner_path(self, show_obj): return ek(os.path.join, show_obj.location, self.banner_name) - def get_episode_thumb_path(self, ep_obj): + @staticmethod + def get_episode_thumb_path(ep_obj): """ Returns the path where the episode thumbnail should be stored. ep_obj: a TVEpisode instance for which to create the thumbnail @@ -210,7 +204,8 @@ class GenericMetadata: return tbn_filename - def get_season_poster_path(self, show_obj, season): + @staticmethod + def get_season_poster_path(show_obj, season): """ Returns the full path to the file for a given season poster. @@ -227,7 +222,8 @@ class GenericMetadata: return ek(os.path.join, show_obj.location, season_poster_filename + '-poster.jpg') - def get_season_banner_path(self, show_obj, season): + @staticmethod + def get_season_banner_path(show_obj, season): """ Returns the full path to the file for a given season banner. @@ -250,6 +246,7 @@ class GenericMetadata: def get_season_all_banner_path(self, show_obj): return ek(os.path.join, show_obj.location, self.season_all_banner_name) + # pylint: disable=W0613,R0201 def _show_data(self, show_obj): """ This should be overridden by the implementing class. It should @@ -257,6 +254,7 @@ class GenericMetadata: """ return None + # pylint: disable=W0613,R0201 def _ep_data(self, ep_obj): """ This should be overridden by the implementing class. 
It should @@ -285,7 +283,7 @@ class GenericMetadata: nfo_file_path = self.get_show_file_path(show_obj) try: - with ek(open, nfo_file_path, 'r') as xmlFileObj: + with open(nfo_file_path, 'r') as xmlFileObj: showXML = etree.ElementTree(file=xmlFileObj) indexerid = showXML.find('id') @@ -336,7 +334,7 @@ class GenericMetadata: def create_season_posters(self, show_obj): if self.season_posters and show_obj: result = [] - for season, episodes in show_obj.episodes.iteritems(): # @UnusedVariable + for season, _ in show_obj.episodes.iteritems(): # @UnusedVariable if not self._has_season_poster(show_obj, season): logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name, logger.DEBUG) @@ -347,7 +345,7 @@ class GenericMetadata: def create_season_banners(self, show_obj): if self.season_banners and show_obj: result = [] - for season, episodes in show_obj.episodes.iteritems(): # @UnusedVariable + for season, _ in show_obj.episodes.iteritems(): # @UnusedVariable if not self._has_season_banner(show_obj, season): logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name, logger.DEBUG) @@ -425,7 +423,7 @@ class GenericMetadata: logger.log(u"Writing show nfo file to " + nfo_file_path, logger.DEBUG) - nfo_file = ek(open, nfo_file_path, 'w') + nfo_file = open(nfo_file_path, 'w') data.write(nfo_file, encoding="utf-8") nfo_file.close() @@ -470,7 +468,7 @@ class GenericMetadata: logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG) - nfo_file = ek(open, nfo_file_path, 'w') + nfo_file = open(nfo_file_path, 'w') data.write(nfo_file, encoding="utf-8") nfo_file.close() @@ -598,7 +596,7 @@ class GenericMetadata: continue # Just grab whatever's there for now - art_id, season_url = cur_season_art.popitem() # @UnusedVariable + _, season_url = cur_season_art.popitem() # @UnusedVariable season_poster_file_path = self.get_season_poster_path(show_obj, cur_season) @@ -647,7 +645,7 @@ class GenericMetadata: continue # Just grab whatever's there for now - art_id, season_url = cur_season_art.popitem() # @UnusedVariable + _, season_url = cur_season_art.popitem() # @UnusedVariable season_banner_file_path = self.get_season_banner_path(show_obj, cur_season) @@ -695,7 +693,7 @@ class GenericMetadata: return self._write_image(banner_data, banner_path) - def _write_image(self, image_data, image_path, obj = None): + def _write_image(self, image_data, image_path, obj=None): """ Saves the data in image_data to the location image_path. Returns True/False to represent success or failure. 
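
The raw-string cleanup in GenericMetadata.makeID above does not change what the helper produces; it only avoids anomalous-backslash warnings from linters. For reference, a standalone sketch of the same two substitutions (the sample inputs are illustrative, not taken from the patch):

    import re

    def makeID(name):
        # mirrors GenericMetadata.makeID: '+' -> 'plus', any other
        # non-word character -> '_', then lowercase
        name_id = re.sub(r"[+]", "plus", name)
        name_id = re.sub(r"[^\w\d_]", "_", name_id).lower()
        return name_id

    print(makeID("KODI 12+"))  # kodi_12plus
    print(makeID("Mede8er"))   # mede8er
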
@@ -721,7 +719,7 @@ class GenericMetadata: ek(os.makedirs, image_dir) helpers.chmodAsParent(image_dir) - outFile = ek(open, image_path, 'wb') + outFile = open(image_path, 'wb') outFile.write(image_data) outFile.close() helpers.chmodAsParent(image_path) @@ -931,16 +929,11 @@ class GenericMetadata: logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG) try: - with ek(open, metadata_path, 'r') as xmlFileObj: + with open(metadata_path, 'r') as xmlFileObj: showXML = etree.ElementTree(file=xmlFileObj) - if showXML.findtext('title') == None \ - or (showXML.findtext('tvdbid') == None - and showXML.findtext('id') == None): - logger.log(u"Invalid info in tvshow.nfo (missing name or id):" \ - + str(showXML.findtext('title')) + " " \ - + str(showXML.findtext('tvdbid')) + " " \ - + str(showXML.findtext('id'))) + if showXML.findtext('title') == None or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None): + logger.log(u"Invalid info in tvshow.nfo (missing name or id): %s %s %s" % (showXML.findtext('title'), showXML.findtext('tvdbid'), showXML.findtext('id'))) return empty_return name = showXML.findtext('title') @@ -1002,17 +995,18 @@ class GenericMetadata: if types[type] and getattr(result, types[type]): return "{0}{1}{2}".format(base_url, max_size, result[types[type]]) - except Exception as e: + except Exception: pass logger.log(u"Could not find any " + type + " images on TMDB for " + show.name, logger.INFO) def _retrieve_show_images_from_fanart(self, show, type, thumb=False): - types = {'poster': fanart.TYPE.TV.POSTER, - 'banner': fanart.TYPE.TV.BANNER, - 'poster_thumb': fanart.TYPE.TV.POSTER, - 'banner_thumb': fanart.TYPE.TV.BANNER, - 'fanart': fanart.TYPE.TV.BACKGROUND, + types = { + 'poster': fanart.TYPE.TV.POSTER, + 'banner': fanart.TYPE.TV.BANNER, + 'poster_thumb': fanart.TYPE.TV.POSTER, + 'banner_thumb': fanart.TYPE.TV.BANNER, + 'fanart': fanart.TYPE.TV.BACKGROUND, } try: @@ -1032,7 +1026,7 @@ class GenericMetadata: if thumb: url = re.sub('/fanart/', '/preview/', url) return url - except Exception as e: + except Exception: pass logger.log(u"Could not find any " + type + " images on Fanart.tv for " + show.name, logger.INFO) diff --git a/sickbeard/metadata/kodi.py b/sickbeard/metadata/kodi.py index 7c902f2307084f850ac8d53bb1af627a1bf946c1..ac9e243129304fe7e30e8fc896fdb6e1e7ace333 100644 --- a/sickbeard/metadata/kodi.py +++ b/sickbeard/metadata/kodi.py @@ -16,11 +16,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. -import generic -import kodi_12plus - import os +from sickbeard.metadata import generic +from sickbeard.metadata import kodi_12plus from sickbeard import helpers from sickrage.helper.encoding import ek @@ -90,7 +89,8 @@ class KODIMetadata(kodi_12plus.KODI_12PlusMetadata): def create_season_all_banner(self, show_obj): pass - def get_episode_thumb_path(self, ep_obj): + @staticmethod + def get_episode_thumb_path(ep_obj): """ Returns the path where the episode thumbnail should be stored. Defaults to the same path as the episode file but with a .tbn extension. @@ -104,7 +104,8 @@ class KODIMetadata(kodi_12plus.KODI_12PlusMetadata): return tbn_filename - def get_season_poster_path(self, show_obj, season): + @staticmethod + def get_season_poster_path(show_obj, season): """ Returns the full path to the file for a given season poster. 
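
The fanart.tv lookup above derives thumbnail URLs by rewriting the full-size image URL in place. A minimal illustration of that rewrite, with a made-up URL (real responses come from the fanart library, which is not exercised here):

    import re

    url = 'http://assets.fanart.tv/fanart/tv/12345/tvposter/example-poster.jpg'
    # same substitution used in _retrieve_show_images_from_fanart when thumb=True
    thumb_url = re.sub('/fanart/', '/preview/', url)
    print(thumb_url)
    # http://assets.fanart.tv/preview/tv/12345/tvposter/example-poster.jpg
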
diff --git a/sickbeard/metadata/kodi_12plus.py b/sickbeard/metadata/kodi_12plus.py index a4372b162d2c2f4d5833f29c7fab21a3804dcc21..cdd254333058079bb39d481c3903d62e808bdecd 100644 --- a/sickbeard/metadata/kodi_12plus.py +++ b/sickbeard/metadata/kodi_12plus.py @@ -15,11 +15,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. -import generic import datetime import sickbeard - +from sickbeard.metadata import generic from sickbeard import logger, helpers from sickrage.helper.common import dateFormat from sickrage.helper.exceptions import ex, ShowNotFoundException @@ -131,7 +130,7 @@ class KODI_12PlusMetadata(generic.GenericMetadata): # check for title and id if not (getattr(myShow, 'seriesname', None) and getattr(myShow, 'id', None)): logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + sickbeard.indexerApi( - show_obj.indexer).name + ", skipping it", logger.ERROR) + show_obj.indexer).name + ", skipping it") return False title = etree.SubElement(tv_node, "title") @@ -147,7 +146,7 @@ class KODI_12PlusMetadata(generic.GenericMetadata): if year_text: year = etree.SubElement(tv_node, "year") year.text = year_text - except: + except Exception: pass if getattr(myShow, 'overview', None): @@ -246,9 +245,9 @@ class KODI_12PlusMetadata(generic.GenericMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( - curEpToWrite.episode) + " on " + sickbeard.indexerApi( - ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?") + logger.log(u"Unable to find episode %dx%d on %s... has it been removed? Should I delete from db?" 
% + (curEpToWrite.season, curEpToWrite.episode, sickbeard.indexerApi(ep_obj.show.indexer).name)) + return None if not getattr(myEp, 'firstaired', None): @@ -321,7 +320,7 @@ class KODI_12PlusMetadata(generic.GenericMetadata): rating = etree.SubElement(episode, "rating") rating.text = myEp['rating'] - if getattr(myEp, 'gueststars', None) and isinstance( myEp['gueststars'], basestring): + if getattr(myEp, 'gueststars', None) and isinstance(myEp['gueststars'], basestring): for actor in (x.strip() for x in myEp['gueststars'].split('|') if x.strip()): cur_actor = etree.SubElement(episode, "actor") cur_actor_name = etree.SubElement(cur_actor, "name") diff --git a/sickbeard/metadata/mede8er.py b/sickbeard/metadata/mede8er.py index 87ef802894fdd4c03f9ce0df58a2a85bba9170bc..c4bd685b73edc80e2eb779251647a2732c9d975e 100644 --- a/sickbeard/metadata/mede8er.py +++ b/sickbeard/metadata/mede8er.py @@ -61,17 +61,11 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): season_all_poster=False, season_all_banner=False): - mediabrowser.MediaBrowserMetadata.__init__(self, - show_metadata, - episode_metadata, - fanart, - poster, - banner, - episode_thumbnails, - season_posters, - season_banners, - season_all_poster, - season_all_banner) + mediabrowser.MediaBrowserMetadata.__init__( + self, show_metadata, episode_metadata, fanart, + poster, banner, episode_thumbnails, season_posters, + season_banners, season_all_poster, season_all_banner + ) self.name = "Mede8er" @@ -92,7 +86,8 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): def get_episode_file_path(self, ep_obj): return helpers.replaceExtension(ep_obj.location, self._ep_nfo_extension) - def get_episode_thumb_path(self, ep_obj): + @staticmethod + def get_episode_thumb_path(ep_obj): return helpers.replaceExtension(ep_obj.location, 'jpg') def _show_data(self, show_obj): @@ -135,7 +130,7 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): # check for title and id if not (getattr(myShow, 'seriesname', None) and getattr(myShow, 'id', None)): logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi( - show_obj.indexer).name + ", skipping it", logger.ERROR) + show_obj.indexer).name + ", skipping it") return False SeriesName = etree.SubElement(tv_node, "title") @@ -253,7 +248,8 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on tvdb... has it been removed? Should I delete from db?") + logger.log(u"Unable to find episode %dx%d on %s... has it been removed? Should I delete from db?" 
% + (curEpToWrite.season, curEpToWrite.episode, sickbeard.indexerApi(ep_obj.show.indexer).name)) return None if curEpToWrite == ep_obj: @@ -319,7 +315,7 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): if getattr(myShow, '_actors', None) or getattr(myEp, 'gueststars', None): cast = etree.SubElement(episode, "cast") - if getattr(myEp, 'gueststars', None) and isinstance( myEp['gueststars'], basestring): + if getattr(myEp, 'gueststars', None) and isinstance(myEp['gueststars'], basestring): for actor in (x.strip() for x in myEp['gueststars'].split('|') if x.strip()): cur_actor = etree.SubElement(cast, "actor") cur_actor.text = actor @@ -381,7 +377,7 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): logger.log(u"Writing show nfo file to " + nfo_file_path, logger.DEBUG) - nfo_file = ek(open, nfo_file_path, 'w') + nfo_file = open(nfo_file_path, 'w') data.write(nfo_file, encoding="UTF-8") nfo_file.close() @@ -426,7 +422,7 @@ class Mede8erMetadata(mediabrowser.MediaBrowserMetadata): logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG) - nfo_file = ek(open, nfo_file_path, 'w') + nfo_file = open(nfo_file_path, 'w') data.write(nfo_file, encoding="UTF-8") nfo_file.close() diff --git a/sickbeard/metadata/mediabrowser.py b/sickbeard/metadata/mediabrowser.py index 4ef0f0bef38578c0c40e53f015bb4c9bcf7e25c4..a9d608a20828ac8d62be6f6ccf995748308bd00b 100644 --- a/sickbeard/metadata/mediabrowser.py +++ b/sickbeard/metadata/mediabrowser.py @@ -22,7 +22,7 @@ import re import sickbeard -import generic +from sickbeard.metadata import generic from sickbeard import logger, helpers @@ -124,7 +124,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): return xml_file_path - def get_episode_thumb_path(self, ep_obj): + @staticmethod + def get_episode_thumb_path(ep_obj): """ Returns a full show dir/metadata/episode.jpg path for MediaBrowser episode thumbs. 
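
The gueststars handling in the KODI and Mede8er writers above splits the pipe-delimited indexer field and drops empty entries, the same pattern applied to subtitles_extra_scripts earlier in this diff. With some illustrative input:

    gueststars = 'Guest One | Guest Two|  |Guest Three'
    # generator form in the patch: (x.strip() for x in ... if x.strip())
    actors = [x.strip() for x in gueststars.split('|') if x.strip()]
    print(actors)
    # ['Guest One', 'Guest Two', 'Guest Three']
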
@@ -141,7 +142,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): return tbn_file_path - def get_season_poster_path(self, show_obj, season): + @staticmethod + def get_season_poster_path(show_obj, season): """ Season thumbs for MediaBrowser go in Show Dir/Season X/folder.jpg @@ -151,7 +153,7 @@ class MediaBrowserMetadata(generic.GenericMetadata): dir_list = [x for x in ek(os.listdir, show_obj.location) if ek(os.path.isdir, ek(os.path.join, show_obj.location, x))] - season_dir_regex = '^Season\s+(\d+)$' + season_dir_regex = r'^Season\s+(\d+)$' season_dir = None @@ -181,7 +183,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): return ek(os.path.join, show_obj.location, season_dir, 'folder.jpg') - def get_season_banner_path(self, show_obj, season): + @staticmethod + def get_season_banner_path(show_obj, season): """ Season thumbs for MediaBrowser go in Show Dir/Season X/banner.jpg @@ -191,7 +194,7 @@ class MediaBrowserMetadata(generic.GenericMetadata): dir_list = [x for x in ek(os.listdir, show_obj.location) if ek(os.path.isdir, ek(os.path.join, show_obj.location, x))] - season_dir_regex = '^Season\s+(\d+)$' + season_dir_regex = r'^Season\s+(\d+)$' season_dir = None @@ -262,7 +265,7 @@ class MediaBrowserMetadata(generic.GenericMetadata): # check for title and id if not (getattr(myShow, 'seriesname', None) and getattr(myShow, 'id', None)): logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi( - show_obj.indexer).name + ", skipping it", logger.ERROR) + show_obj.indexer).name + ", skipping it") return False if getattr(myShow, 'id', None): @@ -325,7 +328,7 @@ class MediaBrowserMetadata(generic.GenericMetadata): if year_text: ProductionYear = etree.SubElement(tv_node, "ProductionYear") ProductionYear.text = year_text - except: + except Exception: pass if getattr(myShow, 'runtime', None): @@ -434,9 +437,8 @@ class MediaBrowserMetadata(generic.GenericMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( - curEpToWrite.episode) + " on " + sickbeard.indexerApi( - ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?") + logger.log(u"Unable to find episode %dx%d on %s... has it been removed? Should I delete from db?" 
% + (curEpToWrite.season, curEpToWrite.episode, sickbeard.indexerApi(ep_obj.show.indexer).name)) return None if curEpToWrite == ep_obj: @@ -520,7 +522,7 @@ class MediaBrowserMetadata(generic.GenericMetadata): Language = etree.SubElement(episode, "Language") try: Language.text = myEp['language'] - except: + except Exception: Language.text = sickbeard.INDEXER_DEFAULT_LANGUAGE # tvrage api doesn't provide language so we must assume a value here thumb = etree.SubElement(episode, "filename") diff --git a/sickbeard/metadata/ps3.py b/sickbeard/metadata/ps3.py index eb0c0131c3ef3a6ebe73da5d368fa4b2722857cf..6dbeab00e73f0ae1752e62d0f7bd350c299eba7d 100644 --- a/sickbeard/metadata/ps3.py +++ b/sickbeard/metadata/ps3.py @@ -18,7 +18,7 @@ import os -import generic +from sickbeard.metadata import generic from sickrage.helper.encoding import ek @@ -79,7 +79,7 @@ class PS3Metadata(generic.GenericMetadata): # no show metadata generated, we abort this lookup function return (None, None, None) - def create_show_metadata(self, show_obj, force=False): + def create_show_metadata(self, show_obj): pass def update_show_indexer_metadata(self, show_obj): @@ -88,7 +88,7 @@ class PS3Metadata(generic.GenericMetadata): def get_show_file_path(self, show_obj): pass - def create_episode_metadata(self, ep_obj, force=False): + def create_episode_metadata(self, ep_obj): pass def create_fanart(self, show_obj): @@ -109,7 +109,8 @@ class PS3Metadata(generic.GenericMetadata): def create_season_all_banner(self, show_obj): pass - def get_episode_thumb_path(self, ep_obj): + @staticmethod + def get_episode_thumb_path(ep_obj): """ Returns the path where the episode thumbnail should be stored. Defaults to the same path as the episode file but with a .cover.jpg extension. diff --git a/sickbeard/metadata/tivo.py b/sickbeard/metadata/tivo.py index 383a77a2d3aba1407618f5af28c610bc23c6abef..228254a3f3165448dcaf20686fedb5ed89d58fb0 100644 --- a/sickbeard/metadata/tivo.py +++ b/sickbeard/metadata/tivo.py @@ -86,7 +86,7 @@ class TIVOMetadata(generic.GenericMetadata): # no show metadata generated, we abort this lookup function return (None, None, None) - def create_show_metadata(self, show_obj, force=False): + def create_show_metadata(self, show_obj): pass def update_show_indexer_metadata(self, show_obj): @@ -107,7 +107,8 @@ class TIVOMetadata(generic.GenericMetadata): def create_episode_thumb(self, ep_obj): pass - def get_episode_thumb_path(self, ep_obj): + @staticmethod + def get_episode_thumb_path(ep_obj): pass def create_season_posters(self, ep_obj): @@ -192,9 +193,8 @@ class TIVOMetadata(generic.GenericMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( - curEpToWrite.episode) + " on " + sickbeard.indexerApi( - ep_obj.show.indexer).name + "... has it been removed? Should I delete from db?") + logger.log(u"Unable to find episode %dx%d on %s... has it been removed? Should I delete from db?" % + (curEpToWrite.season, curEpToWrite.episode, sickbeard.indexerApi(ep_obj.show.indexer).name)) return None if ep_obj.season == 0 and not getattr(myEp, 'firstaired', None): @@ -320,7 +320,7 @@ class TIVOMetadata(generic.GenericMetadata): logger.log(u"Writing episode nfo file to " + nfo_file_path, logger.DEBUG) - with ek(open, nfo_file_path, 'w') as nfo_file: + with open(nfo_file_path, 'w') as nfo_file: # Calling encode directly, b/c often descriptions have wonky characters. 
nfo_file.write(data.encode("utf-8")) diff --git a/sickbeard/metadata/wdtv.py b/sickbeard/metadata/wdtv.py index 3f2745d8b5254e7a5b53aa91b490d21e3f7ac1c1..7dfa88f7dbad441a0cd53fab4a6435e89d8e833b 100644 --- a/sickbeard/metadata/wdtv.py +++ b/sickbeard/metadata/wdtv.py @@ -22,7 +22,7 @@ import re import sickbeard -import generic +from sickbeard.metadata import generic from sickbeard import logger, helpers from sickrage.helper.common import dateFormat @@ -95,7 +95,7 @@ class WDTVMetadata(generic.GenericMetadata): # no show metadata generated, we abort this lookup function return (None, None, None) - def create_show_metadata(self, show_obj, force=False): + def create_show_metadata(self, show_obj): pass def update_show_indexer_metadata(self, show_obj): @@ -119,7 +119,8 @@ class WDTVMetadata(generic.GenericMetadata): def create_season_all_banner(self, show_obj): pass - def get_episode_thumb_path(self, ep_obj): + @staticmethod + def get_episode_thumb_path(ep_obj): """ Returns the path where the episode thumbnail should be stored. Defaults to the same path as the episode file but with a .metathumb extension. @@ -133,7 +134,8 @@ class WDTVMetadata(generic.GenericMetadata): return tbn_filename - def get_season_poster_path(self, show_obj, season): + @staticmethod + def get_season_poster_path(show_obj, season): """ Season thumbs for WDTV go in Show Dir/Season X/folder.jpg @@ -143,7 +145,7 @@ class WDTVMetadata(generic.GenericMetadata): dir_list = [x for x in ek(os.listdir, show_obj.location) if ek(os.path.isdir, ek(os.path.join, show_obj.location, x))] - season_dir_regex = '^Season\s+(\d+)$' + season_dir_regex = r'^Season\s+(\d+)$' season_dir = None @@ -210,9 +212,8 @@ class WDTVMetadata(generic.GenericMetadata): try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): - logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str( - curEpToWrite.episode) + " on " + sickbeard.indexerApi( - ep_obj.show.indexer).name + "... has it been removed? Should I delete from db?") + logger.log(u"Unable to find episode %dx%d on %s... has it been removed? Should I delete from db?" % + (curEpToWrite.season, curEpToWrite.episode, sickbeard.indexerApi(ep_obj.show.indexer).name)) return None if ep_obj.season == 0 and not getattr(myEp, 'firstaired', None): @@ -258,7 +259,7 @@ class WDTVMetadata(generic.GenericMetadata): if year_text: year = etree.SubElement(episode, "year") year.text = year_text - except: + except Exception: pass if curEpToWrite.season != 0 and getattr(myShow, 'runtime', None): diff --git a/sickbeard/name_parser/regexes.py b/sickbeard/name_parser/regexes.py index ef51b164afd1d76588618f9160897085f1615ff2..432da8ed1250b800cd940f96debe32e74ed68125 100644 --- a/sickbeard/name_parser/regexes.py +++ b/sickbeard/name_parser/regexes.py @@ -115,6 +115,17 @@ normal_regexes = [ (?P<ep_num>\d{2})$ # 02 '''), + ('newpct', + # Example: Sobrenatural - Temporada 10 [HDTV][Cap.1023][Espanol Castellano] + r''' + (?P<series_name>.+?) # Showw_Name: "Sobrenatural" + (?:.-.+\d{1,2}.\[) # Separator and junk: " - Temporada 10 [" + (?P<extra_info>.+) # Quality: "HDTV" + (?:\]\[.+\.) # junk: "][Cap." 
+ (?P<season_num>\d{1,2}) # Season number: "10" + (?P<ep_num>\d{2})(?:]) # Episode number: "23" + '''), + ('verbose', # Show Name Season 1 Episode 2 Ep Name r''' diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index 11c2e9cb92b6a24c9258a1003e39d64091e69d02..3515aa1e6937c198c2b1ae0f7e7aafde70440511 100644 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -94,6 +94,8 @@ class PostProcessor(object): self.version = None + self.anidbEpisode = None + def _log(self, message, level=logger.INFO): """ A wrapper for the internal logger which also keeps track of messages and saves them to a string for later. @@ -155,7 +157,7 @@ class PostProcessor(object): """ def recursive_glob(treeroot, pattern): results = [] - for base, dirs, files in os.walk(treeroot): + for base, _, files in os.walk(treeroot): goodfiles = fnmatch.filter(files, pattern) results.extend(os.path.join(base, f) for f in goodfiles) return results @@ -164,7 +166,7 @@ class PostProcessor(object): return [] # don't confuse glob with chars we didn't mean to use - globbable_file_path = helpers.fixGlob(file_path) + globbable_file_path = ek(helpers.fixGlob, file_path) file_path_list = [] @@ -246,7 +248,7 @@ class PostProcessor(object): self._log('Read only mode on file ' + cur_file + ' Will try to make it writeable', logger.DEBUG) try: ek(os.chmod, cur_file, stat.S_IWRITE) - except: + except Exception: self._log(u'Cannot change permissions of ' + cur_file, logger.WARNING) ek(os.remove, cur_file) @@ -297,7 +299,7 @@ class PostProcessor(object): # check if file have subtitles language if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions: cur_lang = os.path.splitext(cur_extension)[0] - if cur_lang in subtitles.wantedLanguages(): + if cur_lang in sickbeard.subtitles.wantedLanguages(): cur_extension = cur_lang + os.path.splitext(cur_extension)[1] # replace .nfo with .nfo-orig to avoid conflicts @@ -391,7 +393,7 @@ class PostProcessor(object): self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR) raise - self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link) + self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link, subtitles=subtitles) def _moveAndSymlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False): """ @@ -414,7 +416,7 @@ class PostProcessor(object): raise self._combined_file_operation(file_path, new_path, new_base_name, associated_files, - action=_int_move_and_sym_link) + action=_int_move_and_sym_link, subtitles=subtitles) def _history_lookup(self): """ @@ -497,7 +499,7 @@ class PostProcessor(object): logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG) logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG) - def _analyze_name(self, name, file=True): + def _analyze_name(self, name): """ Takes a name and tries to figure out a show, season, and episode from it. 
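
The new 'newpct' entry added to normal_regexes above can be sanity-checked standalone against the release name quoted in its comment. The NameParser applies its own compile flags and post-processing, so this only demonstrates the raw pattern:

    import re

    newpct_re = re.compile(r'''
        (?P<series_name>.+?)     # Show_Name: "Sobrenatural"
        (?:.-.+\d{1,2}.\[)       # Separator and junk: " - Temporada 10 ["
        (?P<extra_info>.+)       # Quality: "HDTV"
        (?:\]\[.+\.)             # junk: "][Cap."
        (?P<season_num>\d{1,2})  # Season number: "10"
        (?P<ep_num>\d{2})(?:])   # Episode number: "23"
    ''', re.VERBOSE)

    m = newpct_re.match('Sobrenatural - Temporada 10 [HDTV][Cap.1023][Espanol Castellano]')
    print(m.group('series_name'), m.group('season_num'),
          m.group('ep_num'), m.group('extra_info'))
    # Sobrenatural 10 23 HDTV
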
@@ -517,7 +519,7 @@ class PostProcessor(object): name = helpers.remove_non_release_groups(helpers.remove_extension(name)) # parse the name to break it into show name, season, and episode - np = NameParser(file, tryIndexers=True) + np = NameParser(True, tryIndexers=True) parse_result = np.parse(name) # show object @@ -576,22 +578,23 @@ class PostProcessor(object): episodes = [] # try to look up the nzb in history - attempt_list = [self._history_lookup, + attempt_list = [ + self._history_lookup, - # try to analyze the nzb name - lambda: self._analyze_name(self.nzb_name), + # try to analyze the nzb name + lambda: self._analyze_name(self.nzb_name), - # try to analyze the file name - lambda: self._analyze_name(self.file_name), + # try to analyze the file name + lambda: self._analyze_name(self.file_name), - # try to analyze the dir name - lambda: self._analyze_name(self.folder_name), + # try to analyze the dir name + lambda: self._analyze_name(self.folder_name), - # try to analyze the file + dir names together - lambda: self._analyze_name(self.file_path), + # try to analyze the file + dir names together + lambda: self._analyze_name(self.file_path), - # try to analyze the dir + file name together as one name - lambda: self._analyze_name(self.folder_name + u' ' + self.file_name) + # try to analyze the dir + file name together as one name + lambda: self._analyze_name(self.folder_name + u' ' + self.file_name) ] # attempt every possible method to get our info @@ -644,8 +647,9 @@ class PostProcessor(object): season = int(sql_result[0][0]) episodes = [int(sql_result[0][1])] else: - self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str( - show.indexerid) + u", skipping", logger.DEBUG) + self._log( + u"Unable to find episode with date " + + str(episodes[0]) + u" for show " + str(show.indexerid) + u", skipping", logger.DEBUG) # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers episodes = [] continue @@ -714,7 +718,7 @@ class PostProcessor(object): # if there is a quality available in the status then we don't need to bother guessing from the filename if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: - oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable + _, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable if ep_quality != common.Quality.UNKNOWN: self._log( u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality], @@ -744,7 +748,7 @@ class PostProcessor(object): # Try getting quality from the episode (snatched) status if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: - oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable + _, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable if ep_quality != common.Quality.UNKNOWN: self._log( u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality], @@ -761,7 +765,6 @@ class PostProcessor(object): ep_quality] + ", using that", logger.DEBUG) return ep_quality - test = str(ep_quality) return ep_quality def _run_extra_scripts(self, ep_obj): @@ -785,7 +788,7 @@ class PostProcessor(object): try: p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR) - out, err = p.communicate() # 
@UnusedVariable + out, _ = p.communicate() # @UnusedVariable self._log(u"Script result: " + str(out), logger.DEBUG) except OSError, e: @@ -807,7 +810,7 @@ class PostProcessor(object): if self.is_priority: return True - old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) + _, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # if SR downloaded this on purpose we likely have a priority download if self.in_history or ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: @@ -878,7 +881,7 @@ class PostProcessor(object): # retrieve/create the corresponding TVEpisode objects ep_obj = self._get_ep_obj(show, season, episodes) - old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) + _, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # get the quality of the episode we're processing if quality and not common.Quality.qualityStrings[quality] == 'Unknown': @@ -1130,7 +1133,7 @@ class PostProcessor(object): # do the library update for Trakt notifiers.trakt_notifier.update_library(ep_obj) - except: + except Exception: logger.log(u"Some notifications could not be sent. Continuing with postProcessing...") self._run_extra_scripts(ep_obj) diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 09bea425e06b920121be80afe08f57f0b33e2153..781f76169ad50c6f293a05807607556434b592e4 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -19,7 +19,7 @@ from sickbeard.providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, xthor, torrentbytes, \ frenchtorrentdb, freshontv, titansoftv, libertalia, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ - scenetime, btdigg, strike, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace + scenetime, btdigg, strike, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct __all__ = ['womble', 'btn', @@ -66,7 +66,8 @@ __all__ = ['womble', 'torrentz', 'pretome', 'gftracker', - 'hdspace' + 'hdspace', + 'newpct' ] import sickbeard diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index c93f2f9dd4c0b506709af12a2c2c55c1320a3ef4..41fb6443c91aefc8dcf49362a0e739784c851fb2 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -383,7 +383,7 @@ class GenericProvider: # parse the file name try: - myParser = NameParser(False) + myParser = NameParser(False, showObj=show) parse_result = myParser.parse(title) except InvalidNameException: logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG) diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py new file mode 100644 index 0000000000000000000000000000000000000000..15e8447f7b8079c721f851dd658085e14ed14fe7 --- /dev/null +++ b/sickbeard/providers/newpct.py @@ -0,0 +1,192 @@ +# Author: CristianBB +# Greetings to Mr. Pine-apple +# +# URL: http://code.google.com/p/sickbeard/ +# +# This file is part of SickRage. +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. If not, see <http://www.gnu.org/licenses/>. + +import traceback +from six.moves import urllib + +from sickbeard import logger +from sickbeard import tvcache +from sickbeard.providers import generic +from sickbeard.common import USER_AGENT +from sickbeard.bs4_parser import BS4Parser + + +class newpctProvider(generic.TorrentProvider): + def __init__(self): + + generic.TorrentProvider.__init__(self, "Newpct") + + self.supportsBacklog = True + self.onlyspasearch = None + self.append_identifier = None + self.cache = newpctCache(self) + + self.urls = { + 'base_url': 'http://www.newpct.com', + 'search': 'http://www.newpct.com/buscar-descargas/' + } + + self.url = self.urls['base_url'] + self.headers.update({'User-Agent': USER_AGENT}) + + """ + Search query: + http://www.newpct.com/buscar-descargas/cID=0&tLang=0&oBy=0&oMode=0&category_=767&subcategory_=All&idioma_=1&calidad_=All&oByAux=0&oModeAux=0&size_=0&btnb=Filtrar+Busqueda&q=the+strain + + category_=767 => Category Shows + idioma_=1 => Language Spanish + calidad_=All=> Quality ALL + q => Search show + """ + + self.search_params = { + 'cID': 0, + 'tLang': 0, + 'oBy': 0, + 'oMode': 0, + 'category_': 767, + 'subcategory_': 'All', + 'idioma_': 1, + 'calidad_': 'All', + 'oByAux': 0, + 'oModeAux': 0, + 'size_': 0, + 'btnb': 'Filtrar+Busqueda', + 'q': '' + } + + + def isEnabled(self): + return self.enabled + + def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + + results = [] + items = {'Season': [], 'Episode': [], 'RSS': []} + + lang_info = '' if not epObj or not epObj.show else epObj.show.lang + + #Only search if user conditions are true + if self.onlyspasearch and lang_info != 'es': + logger.log(u"Show info is not spanish, skipping provider search", logger.DEBUG) + return results + + for mode in search_strings.keys(): + logger.log(u"Search Mode: %s" % mode, logger.DEBUG) + + for search_string in search_strings[mode]: + self.search_params.update({'q': search_string.strip()}) + + logger.log(u"Search URL: %s" % self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params), logger.DEBUG) + data = self.getURL(self.urls['search'], post_data=self.search_params, timeout=30) + if not data: + continue + + try: + with BS4Parser(data, features=["html5lib", "permissive"]) as html: + torrent_tbody = html.find('tbody') + + if len(torrent_tbody) < 1: + logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + continue + + torrent_table = torrent_tbody.findAll('tr') + num_results = len(torrent_table) - 1 + + iteration = 0 + for row in torrent_table: + try: + if iteration < num_results: + torrent_size = row.findAll('td')[2] + torrent_row = row.findAll('a')[1] + + download_url = torrent_row.get('href') + title_raw = torrent_row.get('title') + size = self._convertSize(torrent_size.text) + + title = self._processTitle(title_raw) + + item = title, download_url, size + logger.log(u"Found result: %s " % title, logger.DEBUG) + + items[mode].append(item) + iteration += 1 + + except (AttributeError, TypeError): + continue + + except Exception: + logger.log(u"Failed parsing provider. 
+                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING)
+
+            results += items[mode]
+
+        return results
+
+    @staticmethod
+    def _convertSize(size):
+        size, modifier = size.split(' ')
+        size = float(size)
+        if modifier == 'KB':
+            size = size * 1024
+        elif modifier == 'MB':
+            size = size * 1024**2
+        elif modifier == 'GB':
+            size = size * 1024**3
+        elif modifier == 'TB':
+            size = size * 1024**4
+        return int(size)
+
+    def _processTitle(self, title):
+
+        title = title.replace('Descargar ', '')
+
+        # Quality
+        title = title.replace('[HDTV]', '[720p HDTV x264]')
+        title = title.replace('[HDTV 720p AC3 5.1]', '[720p HDTV x264]')
+        title = title.replace('[HDTV 1080p AC3 5.1]', '[1080p HDTV x264]')
+        title = title.replace('[DVDRIP]', '[DVDrip x264]')
+        title = title.replace('[DVD Rip]', '[DVDrip x264]')
+        title = title.replace('[DVDrip]', '[DVDrip x264]')
+        title = title.replace('[BLuRayRip]', '[720p BlueRay x264]')
+        title = title.replace('[BRrip]', '[720p BlueRay x264]')
+        title = title.replace('[BDrip]', '[720p BlueRay x264]')
+        title = title.replace('[BluRay Rip]', '[720p BlueRay x264]')
+        title = title.replace('[BluRay 720p]', '[720p BlueRay x264]')
+        title = title.replace('[BluRay 1080p]', '[1080p BlueRay x264]')
+        title = title.replace('[BluRay MicroHD]', '[1080p BlueRay x264]')
+        title = title.replace('[MicroHD 1080p]', '[1080p BlueRay x264]')
+
+        # Append identifier (may be unset, so fall back to an empty string)
+        title = title + (self.append_identifier or '')
+
+        return title
+
+
+class newpctCache(tvcache.TVCache):
+    def __init__(self, provider_obj):
+
+        tvcache.TVCache.__init__(self, provider_obj)
+
+        self.minTime = 30
+
+
+provider = newpctProvider()
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index 8a0c68af6601b4ddd34be91291c7e465fe5df1cc..0220b2e729c0f4db0960d922fed162ddbbab6208 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -16,7 +16,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
- +# pylint: disable=W0223,E0202 import os import time @@ -40,7 +40,7 @@ from sickrage.show.Show import Show from sickrage.system.Restart import Restart from sickrage.system.Shutdown import Shutdown -from versionChecker import CheckVersion +from sickbeard.versionChecker import CheckVersion from sickbeard import db, logger, ui, helpers from sickbeard import search_queue from sickbeard import image_cache @@ -143,17 +143,19 @@ class ApiHandler(RequestHandler): else: outputCallback = outputCallbackDict['default'] - try:self.finish(outputCallback(outDict)) - except:pass + try: + self.finish(outputCallback(outDict)) + except Exception: + pass - def _out_as_image(self, dict): - self.set_header('Content-Type', dict['image'].get_media_type()) - return dict['image'].get_media() + def _out_as_image(self, _dict): + self.set_header('Content-Type', _dict['image'].get_media_type()) + return _dict['image'].get_media() - def _out_as_json(self, dict): + def _out_as_json(self, _dict): self.set_header("Content-Type", "application/json;charset=UTF-8") try: - out = json.dumps(dict, ensure_ascii=False, sort_keys=True) + out = json.dumps(_dict, ensure_ascii=False, sort_keys=True) callback = self.get_query_argument('callback', None) or self.get_query_argument('jsonp', None) if callback is not None: out = callback + '(' + out + ');' # wrap with JSONP call if requested @@ -298,26 +300,26 @@ class ApiCall(ApiHandler): except AttributeError: self._optionalParams = [] - for paramDict, type in [(self._requiredParams, "requiredParameters"), - (self._optionalParams, "optionalParameters")]: + for paramDict, paramType in [(self._requiredParams, "requiredParameters"), + (self._optionalParams, "optionalParameters")]: - if type in self._help: + if paramType in self._help: for paramName in paramDict: - if not paramName in self._help[type]: - self._help[type][paramName] = {} + if not paramName in self._help[paramType]: + self._help[paramType][paramName] = {} if paramDict[paramName]["allowedValues"]: - self._help[type][paramName]["allowedValues"] = paramDict[paramName]["allowedValues"] + self._help[paramType][paramName]["allowedValues"] = paramDict[paramName]["allowedValues"] else: - self._help[type][paramName]["allowedValues"] = "see desc" - self._help[type][paramName]["defaultValue"] = paramDict[paramName]["defaultValue"] - self._help[type][paramName]["type"] = paramDict[paramName]["type"] + self._help[paramType][paramName]["allowedValues"] = "see desc" + self._help[paramType][paramName]["defaultValue"] = paramDict[paramName]["defaultValue"] + self._help[paramType][paramName]["type"] = paramDict[paramName]["type"] elif paramDict: for paramName in paramDict: - self._help[type] = {} - self._help[type][paramName] = paramDict[paramName] + self._help[paramType] = {} + self._help[paramType][paramName] = paramDict[paramName] else: - self._help[type] = {} + self._help[paramType] = {} msg = "No description available" if "desc" in self._help: msg = self._help["desc"] @@ -330,10 +332,10 @@ class ApiCall(ApiHandler): msg = "The required parameters: '" + "','".join(self._missing) + "' where not set" return _responds(RESULT_ERROR, msg=msg) - def check_params(self, args, kwargs, key, default, required, type, allowedValues): - # TODO: explain this + def check_params(self, args, kwargs, key, default, required, arg_type, allowedValues): + """ function to check passed params for the shorthand wrapper - and to detect missing/required param + and to detect missing/required params """ # auto-select indexer @@ -346,7 +348,7 @@ class 
ApiCall(ApiHandler): missing = True orgDefault = default - if type == "bool": + if arg_type == "bool": allowedValues = [0, 1] if args: @@ -364,7 +366,7 @@ class ApiCall(ApiHandler): self._missing = [] self._requiredParams = {key: {"allowedValues": allowedValues, "defaultValue": orgDefault, - "type": type}} + "type": arg_type}} if missing and key not in self._missing: self._missing.append(key) @@ -372,25 +374,25 @@ class ApiCall(ApiHandler): try: self._optionalParams[key] = {"allowedValues": allowedValues, "defaultValue": orgDefault, - "type": type} + "type": arg_type} except AttributeError: self._optionalParams = {} self._optionalParams[key] = {"allowedValues": allowedValues, "defaultValue": orgDefault, - "type": type} + "type": arg_type} if default: - default = self._check_param_type(default, key, type) - if type == "bool": - type = [] + default = self._check_param_type(default, key, arg_type) + if arg_type == "bool": + arg_type = [] self._check_param_value(default, key, allowedValues) return default, args - def _check_param_type(self, value, name, type): - """ checks if value can be converted / parsed to type + def _check_param_type(self, value, name, arg_type): + """ checks if value can be converted / parsed to arg_type will raise an error on failure - or will convert it to type and return new converted value + or will convert it to arg_type and return new converted value can check for: - int: will be converted into int - bool: will be converted to False / True @@ -399,12 +401,12 @@ class ApiCall(ApiHandler): - ignore: will ignore it, just like "string" """ error = False - if type == "int": + if arg_type == "int": if _is_int(value): value = int(value) else: error = True - elif type == "bool": + elif arg_type == "bool": if value in ("0", "1"): value = bool(int(value)) elif value in ("true", "True", "TRUE"): @@ -413,19 +415,19 @@ class ApiCall(ApiHandler): value = False elif value not in (True, False): error = True - elif type == "list": + elif arg_type == "list": value = value.split("|") - elif type == "string": + elif arg_type == "string": pass - elif type == "ignore": + elif arg_type == "ignore": pass else: - logger.log(u'API :: Invalid param type: "%s" can not be checked. Ignoring it.' % str(type), logger.ERROR) + logger.log(u'API :: Invalid param type: "%s" can not be checked. Ignoring it.' % str(arg_type), logger.ERROR) if error: # this is a real ApiError !! 
raise ApiError(u'param "%s" with given value "%s" could not be parsed into "%s"' - % (str(name), str(value), str(type))) + % (str(name), str(value), str(arg_type))) return value @@ -488,13 +490,13 @@ def _is_int(data): return True -def _rename_element(dict, oldKey, newKey): +def _rename_element(dict_obj, oldKey, newKey): try: - dict[newKey] = dict[oldKey] - del dict[oldKey] + dict_obj[newKey] = dict_obj[oldKey] + del dict_obj[oldKey] except (ValueError, TypeError, NameError): pass - return dict + return dict_obj def _responds(result_type, data=None, msg=""): @@ -590,7 +592,7 @@ def _getRootDirs(): valid = 1 try: ek(os.listdir, root_dir) - except: + except Exception: valid = 0 default = 0 if root_dir is default_dir: @@ -967,7 +969,7 @@ class CMD_SubtitleSearch(ApiCall): try: subtitles = epObj.downloadSubtitles() - except: + except Exception: return _responds(RESULT_FAILURE, msg='Unable to find subtitles') # return the correct json value @@ -1209,7 +1211,7 @@ class CMD_Logs(ApiCall): with ek(codecs.open, *[logger.logFile, 'r', 'utf-8']) as f: data = f.readlines() - regex = "^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$" + regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$" finalData = [] @@ -1350,14 +1352,14 @@ class CMD_SickBeardAddRootDir(ApiCall): # clean up the list - replace %xx escapes by their single-character equivalent root_dirs = [urllib.unquote_plus(x) for x in root_dirs] for x in root_dirs: - if (x == self.location): + if x == self.location: location_matched = 1 - if (self.default == 1): + if self.default == 1: index = root_dirs.index(self.location) break - if (location_matched == 0): - if (self.default == 1): + if location_matched == 0: + if self.default == 1: root_dirs.insert(0, self.location) else: root_dirs.append(self.location) @@ -1505,7 +1507,7 @@ class CMD_SickBeardGetMessages(ApiCall): def run(self): messages = [] - for cur_notification in ui.notifications.get_notifications(self.rh.request.remote_ip): + for cur_notification in ui.notifications.get_notifications(self.request.remote_ip): messages.append({"title": cur_notification.title, "message": cur_notification.message, "type": cur_notification.type}) @@ -1707,6 +1709,8 @@ class CMD_SickBeardSearchTVRAGE(CMD_SickBeardSearchIndexers): } def __init__(self, args, kwargs): + # Leave this one as APICall so it doesnt try and search anything + # pylint: disable=W0233,W0231 ApiCall.__init__(self, args, kwargs) def run(self): @@ -1855,9 +1859,10 @@ class CMD_Show(ApiCall): if not showObj: return _responds(RESULT_FAILURE, msg="Show not found") - showDict = {} - showDict["season_list"] = CMD_ShowSeasonList((), {"indexerid": self.indexerid}).run()["data"] - showDict["cache"] = CMD_ShowCache((), {"indexerid": self.indexerid}).run()["data"] + showDict = { + "season_list": CMD_ShowSeasonList((), {"indexerid": self.indexerid}).run()["data"], + "cache": CMD_ShowCache((), {"indexerid": self.indexerid}).run()["data"] + } genreList = [] if showObj.genre: @@ -2118,9 +2123,10 @@ class CMD_ShowAddNew(ApiCall): if statusStrings[status].lower() == str(self.status).lower(): self.status = status break - # TODO: check if obsolete - if not self.status in statusStrings.statusStrings: + + if self.status not in statusStrings.statusStrings: raise ApiError("Invalid Status") + # only allow the status options we want if int(self.status) not in (WANTED, SKIPPED, IGNORED): return _responds(RESULT_FAILURE, msg="Status prohibited") @@ -2134,9 +2140,10 @@ class 
CMD_ShowAddNew(ApiCall):
                 if statusStrings[status].lower() == str(self.future_status).lower():
                     self.future_status = status
                     break
-            # TODO: check if obsolete
-            if not self.future_status in statusStrings.statusStrings:
+
+            if self.future_status not in statusStrings.statusStrings:
                 raise ApiError("Invalid Status")
+
             # only allow the status options we want
             if int(self.future_status) not in (WANTED, SKIPPED, IGNORED):
                 return _responds(RESULT_FAILURE, msg="Status prohibited")
@@ -2759,7 +2766,7 @@ class CMD_ShowUpdate(ApiCall):
             sickbeard.showQueueScheduler.action.updateShow(showObj, True) # @UndefinedVariable
             return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has queued to be updated")
         except CantUpdateShowException as e:
-            logger.log("API::Unable to update show: {0}".format(str(e)),logger.DEBUG)
+            logger.log("API::Unable to update show: {0}".format(str(e)), logger.DEBUG)
             return _responds(RESULT_FAILURE, msg="Unable to update " + str(showObj.name))
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 841bb9603a7496533094402aa329c19f7413c338..99074473958f0a8b0d16e524b08cac5fd919b1f2 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -4473,6 +4473,19 @@ class ConfigProviders(Config):
                 except Exception:
                     curTorrentProvider.engrelease = 0
 
+            if hasattr(curTorrentProvider, 'onlyspasearch'):
+                try:
+                    curTorrentProvider.onlyspasearch = config.checkbox_to_value(
+                        kwargs[curTorrentProvider.getID() + '_onlyspasearch'])
+                except Exception:
+                    curTorrentProvider.onlyspasearch = 0
+
+            if hasattr(curTorrentProvider, 'append_identifier'):
+                try:
+                    curTorrentProvider.append_identifier = str(kwargs[curTorrentProvider.getID() + '_append_identifier']).strip()
+                except Exception:
+                    curTorrentProvider.append_identifier = None
+
             if hasattr(curTorrentProvider, 'sorting'):
                 try:
                     curTorrentProvider.sorting = str(kwargs[curTorrentProvider.getID() + '_sorting']).strip()
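The two new provider options above are persisted with the same hasattr()/kwargs lookup-and-fallback pattern as the surrounding settings. As a minimal sketch only (the helper name _read_provider_option and its defaults are illustrative and not part of this patch; provider.getID(), kwargs and config.checkbox_to_value are taken from the code above), the repeated pattern could be expressed once like this:

    def _read_provider_option(provider, kwargs, suffix, convert=str, default=None):
        # Fetch '<providerID>_<suffix>' from the submitted form data and convert it;
        # fall back to `default` when the field is missing or malformed.
        try:
            return convert(kwargs[provider.getID() + '_' + suffix])
        except (KeyError, TypeError, ValueError):
            return default

    # Usage mirroring the checks above (illustrative only):
    # curTorrentProvider.onlyspasearch = _read_provider_option(
    #     curTorrentProvider, kwargs, 'onlyspasearch', config.checkbox_to_value, 0)
    # curTorrentProvider.append_identifier = _read_provider_option(
    #     curTorrentProvider, kwargs, 'append_identifier', lambda v: str(v).strip(), None)

Catching (KeyError, TypeError, ValueError) rather than using a bare except keeps unexpected errors visible while still tolerating an unchecked box or a missing form field.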