diff --git a/gui/slick/js/core.js b/gui/slick/js/core.js index bb8f4c274dbc6c5fb4da8e91ead037fb606c8830..ab4d37a3e534548f49d9295c99f69d06c4830a8d 100644 --- a/gui/slick/js/core.js +++ b/gui/slick/js/core.js @@ -2378,18 +2378,20 @@ var SICKRAGE = { var row = ''; row += '<tr class="good show-' + indexerId + '">'; row += '<td align="center"><input type="checkbox" class="' + indexerId + '-epcheck" name="' + indexerId + '-' + season + 'x' + episode + '"' + (checked ? ' checked' : '') + '></td>'; - row += '<td style="width: 1%;">' + season + 'x' + episode + '</td>'; - row += '<td>' + name + '</td>'; - if(subtitles.length > 0){ - row += '<td style="float: right;">'; + row += '<td style="width: 2%;">' + season + 'x' + episode + '</td>'; + if (subtitles.length > 0) { + row += '<td style="width: 8%;">'; subtitles = subtitles.split(','); for (var i in subtitles) { if (subtitles.hasOwnProperty(i)) { - row += '<img src="/images/subtitles/flags/' + subtitles[i] + '.png" width="16" height="11" alt="' + subtitles[i] + '" /> '; + row += '<img src="' + srRoot + '/images/subtitles/flags/' + subtitles[i] + '.png" width="16" height="11" alt="' + subtitles[i] + '" /> '; } } row += '</td>'; + } else { + row += '<td style="width: 8%;">None</td>'; } + row += '<td>' + name + '</td>'; row += '</tr>'; return row; diff --git a/gui/slick/js/core.min.js b/gui/slick/js/core.min.js index 5207d9e5d33ab9f73718ce4b573d75552de87823..fd0fb136dc61acbdbe7e384df326b3ae49cce5bd 100644 Binary files a/gui/slick/js/core.min.js and b/gui/slick/js/core.min.js differ diff --git a/gui/slick/views/config_anime.mako b/gui/slick/views/config_anime.mako index d1aa10189055911d7738293cab6d30397e16fc7e..d5dd49e3b85951980a59ec2206235f2156f5dff0 100644 --- a/gui/slick/views/config_anime.mako +++ b/gui/slick/views/config_anime.mako @@ -14,11 +14,11 @@ <div id="config-components"> <ul> - <li><a href="#core-component-group1">AnimeDB Settings</a></li> - <li><a href="#core-component-group2">Look & Feel</a></li> + <li><a 
href="#animedb-settings">AnimeDB Settings</a></li> + <li><a href="#anime-look-feel">Look & Feel</a></li> </ul> - <div id="core-component-group1" class="tab-pane active component-group"> + <div id="animedb-settings" class="tab-pane active component-group"> <div class="component-group-desc"> <img class="notifier-icon" src="${srRoot}/images/anidb24.png" alt="AniDB" title="AniDB" width="24" height="24" /> <h3><a href="${anon_url('http://anidb.info')}" onclick="window.open(this.href, '_blank'); return false;">AniDB</a></h3> @@ -69,7 +69,7 @@ </div><!-- /component-group //--> - <div id="core-component-group2" class="tab-pane component-group"> + <div id="anime-look-feel" class="tab-pane component-group"> <div class="component-group-desc"> <h3>Look and Feel</h3> diff --git a/gui/slick/views/config_backuprestore.mako b/gui/slick/views/config_backuprestore.mako index c4b9569e8d576dd067bb63f63c782d0ea1ff95ee..e70a7a32f58709c01b39009ba5650703951f26b0 100644 --- a/gui/slick/views/config_backuprestore.mako +++ b/gui/slick/views/config_backuprestore.mako @@ -27,11 +27,11 @@ <form name="configForm" method="post" action="backuprestore"> <div id="config-components"> <ul> - <li><a href="#core-component-group1">Backup</a></li> - <li><a href="#core-component-group2">Restore</a></li> + <li><a href="#backup">Backup</a></li> + <li><a href="#restore">Restore</a></li> </ul> - <div id="core-component-group1" class="component-group clearfix"> + <div id="backup" class="component-group clearfix"> <div class="component-group-desc"> <h3>Backup</h3> <p><b>Backup your main database file and config.</b></p> @@ -54,7 +54,7 @@ </div><!-- /component-group1 //--> - <div id="core-component-group2" class="component-group clearfix"> + <div id="restore" class="component-group clearfix"> <div class="component-group-desc"> <h3>Restore</h3> <p><b>Restore your main database file and config.</b></p> diff --git a/gui/slick/views/config_general.mako b/gui/slick/views/config_general.mako index 
f369fcce3a37c313daf47f2ada7de6494f0d47df..cc61635fd7a1fd912c8547b2fcf2ddaa70444675 100644 --- a/gui/slick/views/config_general.mako +++ b/gui/slick/views/config_general.mako @@ -33,12 +33,12 @@ <div id="config-components"> <ul> - <li><a href="#core-component-group1">Misc</a></li> - <li><a href="#core-component-group2">Interface</a></li> - <li><a href="#core-component-group3">Advanced Settings</a></li> + <li><a href="#misc">Misc</a></li> + <li><a href="#interface">Interface</a></li> + <li><a href="#advanced-settings">Advanced Settings</a></li> </ul> - <div id="core-component-group1"> + <div id="misc"> <div class="component-group"> <div class="component-group-desc"> @@ -86,8 +86,8 @@ <label for="showupdate_hour"> <span class="component-title">When to update shows</span> <span class="component-desc"> - <input type="text" name="showupdate_hour" id="showupdate_hour" value="${sickbeard.SHOWUPDATE_HOUR}" class="form-control input-sm input75" autocapitalize="off" /> - <p>with information such as next air dates, show ended, etc. Use 15 for 3pm, 4 for 4am etc. Anything over 23 or under 0 will be set to 0 (12am)</p> + <input type="number" min="0" max="23" step="1" name="showupdate_hour" id="showupdate_hour" value="${sickbeard.SHOWUPDATE_HOUR}" class="form-control input-sm input75" autocapitalize="off" /> + <p>with information such as next air dates, show ended, etc. 
Use 15 for 3pm, 4 for 4am etc.</p> </span> </label> </div> @@ -120,7 +120,7 @@ <label for="log_nr"> <span class="component-title">Number of Log files saved</span> <span class="component-desc"> - <input type="text" name="log_nr" id="log_nr" value="${sickbeard.LOG_NR}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="1" step="1" name="log_nr" id="log_nr" value="${sickbeard.LOG_NR}" class="form-control input-sm input75" autocapitalize="off" /> <p>number of log files saved when rotating logs (default: 5) (REQUIRES RESTART)</p> </span> </label> @@ -130,7 +130,7 @@ <label for="log_size"> <span class="component-title">Size of Log files saved</span> <span class="component-desc"> - <input type="text" name="log_size" id="log_size" value="${sickbeard.LOG_SIZE}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="1048576" step="1048576" name="log_size" id="log_size" value="${sickbeard.LOG_SIZE}" class="form-control input-sm input75" autocapitalize="off" /> <p>maximum size of a log file saved (default: 1048576 (1MB)) (REQUIRES RESTART)</p> </span> </label> @@ -155,7 +155,7 @@ <label for="indexer_timeout"> <span class="component-title">Timeout show indexer at</span> <span class="component-desc"> - <input type="text" name="indexer_timeout" id="indexer_timeout" value="${sickbeard.INDEXER_TIMEOUT}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="10" step="1" name="indexer_timeout" id="indexer_timeout" value="${sickbeard.INDEXER_TIMEOUT}" class="form-control input-sm input75" autocapitalize="off" /> <p>seconds of inactivity when finding new shows (default:10)</p> </span> </label> @@ -208,7 +208,7 @@ <label> <span class="component-title">Check the server every*</span> <span class="component-desc"> - <input type="text" name="update_frequency" id="update_frequency" value="${sickbeard.UPDATE_FREQUENCY}" class="form-control input-sm input75" autocapitalize="off" /> + 
<input type="number" min="1" step="1" name="update_frequency" id="update_frequency" value="${sickbeard.UPDATE_FREQUENCY}" class="form-control input-sm input75" autocapitalize="off" /> <p>hours for software updates (default:12)</p> </span> </label> @@ -231,7 +231,7 @@ </div><!-- /component-group1 //--> - <div id="core-component-group2"> + <div id="interface"> <div class="component-group"> <div class="component-group-desc"> @@ -425,12 +425,22 @@ <label for="web_port"> <span class="component-title">HTTP port</span> <span class="component-desc"> - <input type="text" name="web_port" id="web_port" value="${sickbeard.WEB_PORT}" class="form-control input-sm input100" autocapitalize="off" /> + <input type="number" min="1" step="1" name="web_port" id="web_port" value="${sickbeard.WEB_PORT}" class="form-control input-sm input100" autocapitalize="off" /> <p>web port to browse and access SickRage (default:8081)</p> </span> </label> </div> + <div class="field-pair"> + <label for="notify_on_login"> + <span class="component-title">Notify on login</span> + <span class="component-desc"> + <input type="checkbox" name="notify_on_login" class="enabler" id="notify_on_login" ${('', 'checked="checked"')[bool(sickbeard.NOTIFY_ON_LOGIN)]}/> + <p>enable to be notified when a new login happens in webserver</p> + </span> + </label> + </div> + <div class="field-pair"> <label for="web_ipv6"> <span class="component-title">Listen on IPv6</span> @@ -489,7 +499,7 @@ </div> - <div id="core-component-group3" class="component-group"> + <div id="advanced-settings" class="component-group"> <div class="component-group"> diff --git a/gui/slick/views/config_notifications.mako b/gui/slick/views/config_notifications.mako index 2573c4fe09e7822146345f19465a32ccdcca7585..98953e2c31c6b8b46a51c2eec0dc53a8f644fbb4 100644 --- a/gui/slick/views/config_notifications.mako +++ b/gui/slick/views/config_notifications.mako @@ -1530,7 +1530,7 @@ <div class="field-pair"> <label for="trakt_timeout"> <span 
class="component-title">API Timeout</span> - <input type="text" name="trakt_timeout" id="trakt_timeout" value="${sickbeard.TRAKT_TIMEOUT}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="10" step="1" name="trakt_timeout" id="trakt_timeout" value="${sickbeard.TRAKT_TIMEOUT}" class="form-control input-sm input75" autocapitalize="off" /> </label> <p> <span class="component-desc"> @@ -1707,7 +1707,7 @@ <div class="field-pair"> <label for="email_port"> <span class="component-title">SMTP port</span> - <input type="text" name="email_port" id="email_port" value="${sickbeard.EMAIL_PORT}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="1" step="1" name="email_port" id="email_port" value="${sickbeard.EMAIL_PORT}" class="form-control input-sm input75" autocapitalize="off" /> </label> <label> <span class="component-title"> </span> diff --git a/gui/slick/views/config_postProcessing.mako b/gui/slick/views/config_postProcessing.mako index 9e4f659c6660601ac5f766c46be0af3cb9798bff..e45eca4df6e591acb703f1687cebde1868dafa00 100644 --- a/gui/slick/views/config_postProcessing.mako +++ b/gui/slick/views/config_postProcessing.mako @@ -23,11 +23,11 @@ <form id="configForm" action="savePostProcessing" method="post"> <div id="config-components"> <ul> - <li><a href="#core-component-group1">Post-Processing</a></li> - <li><a href="#core-component-group2">Episode Naming</a></li> - <li><a href="#core-component-group3">Metadata</a></li> + <li><a href="#post-processing">Post-Processing</a></li> + <li><a href="#episode-naming">Episode Naming</a></li> + <li><a href="#metadata">Metadata</a></li> </ul> - <div id="core-component-group1" class="component-group"> + <div id="post-processing" class="component-group"> <div class="component-group-desc"> <h3>Post-Processing</h3> <p>Settings that dictate how SickRage should process completed downloads.</p> @@ -41,7 +41,7 @@ </label> <label class="nocheck" 
for="process_automatically"> <span class="component-title"> </span> - <span class="component-desc"><b>NOTE:</b> Do not use if you use an external PostProcessing script</span> + <span class="component-desc"><b>NOTE:</b> Do not use if you use an external Post Processing script</span> </label> </div> <div class="field-pair"> @@ -82,7 +82,7 @@ <div class="field-pair"> <label class="nocheck"> <span class="component-title">Auto Post-Processing Frequency</span> - <input type="number" min="10" name="autopostprocesser_frequency" id="autopostprocesser_frequency" value="${sickbeard.AUTOPOSTPROCESSER_FREQUENCY}" class="form-control input-sm input75" /> + <input type="number" min="10" step="1" name="autopostprocesser_frequency" id="autopostprocesser_frequency" value="${sickbeard.AUTOPOSTPROCESSER_FREQUENCY}" class="form-control input-sm input75" /> </label> <label class="nocheck"> <span class="component-title"> </span> @@ -138,18 +138,18 @@ <div class="field-pair"> <input type="checkbox" name="move_associated_files" id="move_associated_files" ${('', 'checked="checked"')[bool(sickbeard.MOVE_ASSOCIATED_FILES)]}/> <label for="move_associated_files"> - <span class="component-title">Move Associated Files</span> - <span class="component-desc">Move srr/sfv/etc files with the episode when processed?</span> + <span class="component-title">Delete associated files</span> + <span class="component-desc">Delete srt/srr/sfv/etc files while post processing?</span> </label> </div> <div class="field-pair"> <label class="nocheck"> - <span class="component-title">Allowed associated file extensions</span> + <span class="component-title">Keep associated file extensions</span> <input type="text" name="allowed_extensions" id="allowed_extensions" value="${sickbeard.ALLOWED_EXTENSIONS}" class="form-control input-sm input350" autocapitalize="off" /> </label> <label class="nocheck"> <span class="component-title"> </span> - <span class="component-desc">Comma seperated list of associated file extensions 
SickRage should move while Post Processing. Leaving it empty means all extensions will be allowed</span> + <span class="component-desc">Comma separated list of associated file extensions SickRage should keep while post processing. Leaving it empty means all associated files will be deleted</span> </label> </div> <div class="field-pair"> @@ -251,7 +251,7 @@ <input type="submit" class="btn config_submitter" value="Save Changes" /><br> </fieldset> </div><!-- /component-group1 //--> - <div id="core-component-group2" class="component-group"> + <div id="episode-naming" class="component-group"> <div class="component-group-desc"> <h3>Episode Naming</h3> @@ -1090,7 +1090,7 @@ </fieldset> </div><!-- /component-group2 //--> - <div id="core-component-group3" class="component-group"> + <div id="metadata" class="component-group"> <div class="component-group-desc"> <h3>Metadata</h3> <p>The data associated to the data. These are files associated to a TV show in the form of images and text that, when supported, will enhance the viewing experience.</p> diff --git a/gui/slick/views/config_providers.mako b/gui/slick/views/config_providers.mako index 7f923e2cccb6e147937b7f2288a9976c8c39b9ea..acc2ed43f46f72a517ad256b5bd862d7b7730e45 100644 --- a/gui/slick/views/config_providers.mako +++ b/gui/slick/views/config_providers.mako @@ -1,9 +1,8 @@ <%inherit file="/layouts/main.mako"/> <%! 
import sickbeard - from sickbeard.providers.generic import GenericProvider - from sickbeard.providers import thepiratebay from sickbeard.helpers import anon_url + from sickrage.providers.GenericProvider import GenericProvider %> <%block name="scripts"> <script type="text/javascript" src="${srRoot}/js/configProviders.js"></script> @@ -12,12 +11,12 @@ $(document).ready(function(){ % if sickbeard.USE_NZBS: var show_nzb_providers = ${("false", "true")[bool(sickbeard.USE_NZBS)]}; % for curNewznabProvider in sickbeard.newznabProviderList: - $(this).addProvider('${curNewznabProvider.getID()}', '${curNewznabProvider.name}', '${curNewznabProvider.url}', '${curNewznabProvider.key}', '${curNewznabProvider.catIDs}', ${int(curNewznabProvider.default)}, show_nzb_providers); + $(this).addProvider('${curNewznabProvider.get_id()}', '${curNewznabProvider.name}', '${curNewznabProvider.url}', '${curNewznabProvider.key}', '${curNewznabProvider.catIDs}', ${int(curNewznabProvider.default)}, show_nzb_providers); % endfor % endif % if sickbeard.USE_TORRENTS: % for curTorrentRssProvider in sickbeard.torrentRssProviderList: - $(this).addTorrentRssProvider('${curTorrentRssProvider.getID()}', '${curTorrentRssProvider.name}', '${curTorrentRssProvider.url}', '${curTorrentRssProvider.cookies}', '${curTorrentRssProvider.titleTAG}'); + $(this).addTorrentRssProvider('${curTorrentRssProvider.get_id()}', '${curTorrentRssProvider.name}', '${curTorrentRssProvider.url}', '${curTorrentRssProvider.cookies}', '${curTorrentRssProvider.titleTAG}'); % endfor % endif }); @@ -37,17 +36,17 @@ $('#config-components').tabs(); <div id="config-components"> <ul> - <li><a href="#core-component-group1">Provider Priorities</a></li> - <li><a href="#core-component-group2">Provider Options</a></li> + <li><a href="#provider-priorities">Provider Priorities</a></li> + <li><a href="#provider-options">Provider Options</a></li> % if sickbeard.USE_NZBS: - <li><a href="#core-component-group3">Configure Custom Newznab 
Providers</a></li> + <li><a href="#custom-newznab">Configure Custom Newznab Providers</a></li> % endif % if sickbeard.USE_TORRENTS: - <li><a href="#core-component-group4">Configure Custom Torrent Providers</a></li> + <li><a href="#custom-torrent">Configure Custom Torrent Providers</a></li> % endif </ul> - <div id="core-component-group1" class="component-group" style='min-height: 550px;'> + <div id="provider-priorities" class="component-group" style='min-height: 550px;'> <div class="component-group-desc"> <h3>Provider Priorities</h3> @@ -70,33 +69,33 @@ $('#config-components').tabs(); <ul id="provider_order_list"> % for curProvider in sickbeard.providers.sortedProviderList(): <% - if curProvider.providerType == GenericProvider.NZB and not sickbeard.USE_NZBS: + if curProvider.provider_type == GenericProvider.NZB and not sickbeard.USE_NZBS: continue - elif curProvider.providerType == GenericProvider.TORRENT and not sickbeard.USE_TORRENTS: + elif curProvider.provider_type == GenericProvider.TORRENT and not sickbeard.USE_TORRENTS: continue - curName = curProvider.getID() + curName = curProvider.get_id() if hasattr(curProvider, 'custom_url'): curURL = curProvider.custom_url or curProvider.url else: curURL = curProvider.url %> - <li class="ui-state-default ${('nzb-provider', 'torrent-provider')[bool(curProvider.providerType == "torrent")]}" id="${curName}"> - <input type="checkbox" id="enable_${curName}" class="provider_enabler" ${('', 'checked="checked"')[curProvider.isEnabled() is True]}/> - <a href="${anon_url(curURL)}" class="imgLink" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;"><img src="${srRoot}/images/providers/${curProvider.imageName()}" alt="${curProvider.name}" title="${curProvider.name}" width="16" height="16" style="vertical-align:middle;"/></a> + <li class="ui-state-default ${('nzb-provider', 'torrent-provider')[bool(curProvider.provider_type == GenericProvider.TORRENT)]}" id="${curName}"> + <input type="checkbox" 
id="enable_${curName}" class="provider_enabler" ${('', 'checked="checked"')[curProvider.is_enabled() is True]}/> + <a href="${anon_url(curURL)}" class="imgLink" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;"><img src="${srRoot}/images/providers/${curProvider.image_name()}" alt="${curProvider.name}" title="${curProvider.name}" width="16" height="16" style="vertical-align:middle;"/></a> <span style="vertical-align:middle;">${curProvider.name}</span> - ${('*', '')[bool(curProvider.supportsBacklog)]} + ${('*', '')[bool(curProvider.supports_backlog)]} <span class="ui-icon ui-icon-arrowthick-2-n-s pull-right" style="vertical-align:middle;"></span> <span class="ui-icon ${('ui-icon-locked','ui-icon-unlocked')[bool(curProvider.public)]} pull-right" style="vertical-align:middle;"></span> </li> % endfor </ul> - <input type="hidden" name="provider_order" id="provider_order" value="${" ".join([x.getID()+':'+str(int(x.isEnabled())) for x in sickbeard.providers.sortedProviderList()])}"/> + <input type="hidden" name="provider_order" id="provider_order" value="${" ".join([x.get_id()+':'+str(int(x.is_enabled())) for x in sickbeard.providers.sortedProviderList()])}"/> <br><input type="submit" class="btn config_submitter" value="Save Changes" /><br> </fieldset> </div><!-- /component-group1 //--> - <div id="core-component-group2" class="component-group"> + <div id="provider-options" class="component-group"> <div class="component-group-desc"> <h3>Provider Options</h3> @@ -112,16 +111,16 @@ $('#config-components').tabs(); <% provider_config_list = [] for curProvider in sickbeard.providers.sortedProviderList(): - if curProvider.providerType == GenericProvider.NZB and (not sickbeard.USE_NZBS or not curProvider.isEnabled()): + if curProvider.provider_type == GenericProvider.NZB and (not sickbeard.USE_NZBS or not curProvider.is_enabled()): continue - elif curProvider.providerType == GenericProvider.TORRENT and ( not sickbeard.USE_TORRENTS or not 
curProvider.isEnabled()): + elif curProvider.provider_type == GenericProvider.TORRENT and ( not sickbeard.USE_TORRENTS or not curProvider.is_enabled()): continue provider_config_list.append(curProvider) %> % if provider_config_list: <select id="editAProvider" class="form-control input-sm"> % for cur_provider in provider_config_list: - <option value="${cur_provider.getID()}">${cur_provider.name}</option> + <option value="${cur_provider.get_id()}">${cur_provider.name}</option> % endfor </select> % else: @@ -134,21 +133,21 @@ $('#config-components').tabs(); <!-- start div for editing providers //--> % for curNewznabProvider in [curProvider for curProvider in sickbeard.newznabProviderList]: - <div class="providerDiv" id="${curNewznabProvider.getID()}Div"> + <div class="providerDiv" id="${curNewznabProvider.get_id()}Div"> % if curNewznabProvider.default and curNewznabProvider.needs_auth: <div class="field-pair"> - <label for="${curNewznabProvider.getID()}_url"> + <label for="${curNewznabProvider.get_id()}_url"> <span class="component-title">URL:</span> <span class="component-desc"> - <input type="text" id="${curNewznabProvider.getID()}_url" value="${curNewznabProvider.url}" class="form-control input-sm input350" disabled autocapitalize="off" /> + <input type="text" id="${curNewznabProvider.get_id()}_url" value="${curNewznabProvider.url}" class="form-control input-sm input350" disabled autocapitalize="off" /> </span> </label> </div> <div class="field-pair"> - <label for="${curNewznabProvider.getID()}_hash"> + <label for="${curNewznabProvider.get_id()}_hash"> <span class="component-title">API key:</span> <span class="component-desc"> - <input type="text" id="${curNewznabProvider.getID()}_hash" value="${curNewznabProvider.key}" newznab_name="${curNewznabProvider.getID()}_hash" class="newznab_key form-control input-sm input350" autocapitalize="off" /> + <input type="text" id="${curNewznabProvider.get_id()}_hash" value="${curNewznabProvider.key}" 
newznab_name="${curNewznabProvider.get_id()}_hash" class="newznab_key form-control input-sm input350" autocapitalize="off" /> </span> </label> </div> @@ -156,10 +155,10 @@ $('#config-components').tabs(); % if hasattr(curNewznabProvider, 'enable_daily'): <div class="field-pair"> - <label for="${curNewznabProvider.getID()}_enable_daily"> + <label for="${curNewznabProvider.get_id()}_enable_daily"> <span class="component-title">Enable daily searches</span> <span class="component-desc"> - <input type="checkbox" name="${curNewznabProvider.getID()}_enable_daily" id="${curNewznabProvider.getID()}_enable_daily" ${('', 'checked="checked"')[bool(curNewznabProvider.enable_daily)]}/> + <input type="checkbox" name="${curNewznabProvider.get_id()}_enable_daily" id="${curNewznabProvider.get_id()}_enable_daily" ${('', 'checked="checked"')[bool(curNewznabProvider.enable_daily)]}/> <p>enable provider to perform daily searches.</p> </span> </label> @@ -167,11 +166,11 @@ $('#config-components').tabs(); % endif % if hasattr(curNewznabProvider, 'enable_backlog'): - <div class="field-pair${(' hidden', '')[curNewznabProvider.supportsBacklog]}"> - <label for="${curNewznabProvider.getID()}_enable_backlog"> + <div class="field-pair${(' hidden', '')[curNewznabProvider.supports_backlog]}"> + <label for="${curNewznabProvider.get_id()}_enable_backlog"> <span class="component-title">Enable backlog searches</span> <span class="component-desc"> - <input type="checkbox" name="${curNewznabProvider.getID()}_enable_backlog" id="${curNewznabProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curNewznabProvider.enable_backlog and curNewznabProvider.supportsBacklog)]}/> + <input type="checkbox" name="${curNewznabProvider.get_id()}_enable_backlog" id="${curNewznabProvider.get_id()}_enable_backlog" ${('', 'checked="checked"')[bool(curNewznabProvider.enable_backlog and curNewznabProvider.supports_backlog)]}/> <p>enable provider to perform backlog searches.</p> </span> </label> @@ -180,10 +179,10 
@@ $('#config-components').tabs(); % if hasattr(curNewznabProvider, 'search_fallback'): <div class="field-pair"> - <label for="${curNewznabProvider.getID()}_search_fallback"> + <label for="${curNewznabProvider.get_id()}_search_fallback"> <span class="component-title">Season search fallback</span> <span class="component-desc"> - <input type="checkbox" name="${curNewznabProvider.getID()}_search_fallback" id="${curNewznabProvider.getID()}_search_fallback" ${('', 'checked="checked"')[bool(curNewznabProvider.search_fallback)]}/> + <input type="checkbox" name="${curNewznabProvider.get_id()}_search_fallback" id="${curNewznabProvider.get_id()}_search_fallback" ${('', 'checked="checked"')[bool(curNewznabProvider.search_fallback)]}/> <p>when searching for a complete season depending on search mode you may return no results, this helps by restarting the search using the opposite search mode.</p> </span> </label> @@ -201,13 +200,13 @@ $('#config-components').tabs(); <label> <span class="component-title"></span> <span class="component-desc"> - <input type="radio" name="${curNewznabProvider.getID()}_search_mode" id="${curNewznabProvider.getID()}_search_mode_sponly" value="sponly" ${('', 'checked="checked"')[curNewznabProvider.search_mode=="sponly"]}/>season packs only. + <input type="radio" name="${curNewznabProvider.get_id()}_search_mode" id="${curNewznabProvider.get_id()}_search_mode_sponly" value="sponly" ${('', 'checked="checked"')[curNewznabProvider.search_mode=="sponly"]}/>season packs only. </span> </label> <label> <span class="component-title"></span> <span class="component-desc"> - <input type="radio" name="${curNewznabProvider.getID()}_search_mode" id="${curNewznabProvider.getID()}_search_mode_eponly" value="eponly" ${('', 'checked="checked"')[curNewznabProvider.search_mode=="eponly"]}/>episodes only. 
+ <input type="radio" name="${curNewznabProvider.get_id()}_search_mode" id="${curNewznabProvider.get_id()}_search_mode_eponly" value="eponly" ${('', 'checked="checked"')[curNewznabProvider.search_mode=="eponly"]}/>episodes only. </span> </label> </div> @@ -216,14 +215,14 @@ $('#config-components').tabs(); </div> % endfor - % for curNzbProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if curProvider.providerType == GenericProvider.NZB and curProvider not in sickbeard.newznabProviderList]: - <div class="providerDiv" id="${curNzbProvider.getID()}Div"> + % for curNzbProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if curProvider.provider_type == GenericProvider.NZB and curProvider not in sickbeard.newznabProviderList]: + <div class="providerDiv" id="${curNzbProvider.get_id()}Div"> % if hasattr(curNzbProvider, 'username'): <div class="field-pair"> - <label for="${curNzbProvider.getID()}_username"> + <label for="${curNzbProvider.get_id()}_username"> <span class="component-title">Username:</span> <span class="component-desc"> - <input type="text" name="${curNzbProvider.getID()}_username" value="${curNzbProvider.username}" class="form-control input-sm input350" autocapitalize="off" autocomplete="no" /> + <input type="text" name="${curNzbProvider.get_id()}_username" value="${curNzbProvider.username}" class="form-control input-sm input350" autocapitalize="off" autocomplete="no" /> </span> </label> </div> @@ -231,10 +230,10 @@ $('#config-components').tabs(); % if hasattr(curNzbProvider, 'api_key'): <div class="field-pair"> - <label for="${curNzbProvider.getID()}_api_key"> + <label for="${curNzbProvider.get_id()}_api_key"> <span class="component-title">API key:</span> <span class="component-desc"> - <input type="text" name="${curNzbProvider.getID()}_api_key" value="${curNzbProvider.api_key}" class="form-control input-sm input350" autocapitalize="off" /> + <input type="text" 
name="${curNzbProvider.get_id()}_api_key" value="${curNzbProvider.api_key}" class="form-control input-sm input350" autocapitalize="off" /> </span> </label> </div> @@ -243,10 +242,10 @@ $('#config-components').tabs(); % if hasattr(curNzbProvider, 'enable_daily'): <div class="field-pair"> - <label for="${curNzbProvider.getID()}_enable_daily"> + <label for="${curNzbProvider.get_id()}_enable_daily"> <span class="component-title">Enable daily searches</span> <span class="component-desc"> - <input type="checkbox" name="${curNzbProvider.getID()}_enable_daily" id="${curNzbProvider.getID()}_enable_daily" ${('', 'checked="checked"')[bool(curNzbProvider.enable_daily)]}/> + <input type="checkbox" name="${curNzbProvider.get_id()}_enable_daily" id="${curNzbProvider.get_id()}_enable_daily" ${('', 'checked="checked"')[bool(curNzbProvider.enable_daily)]}/> <p>enable provider to perform daily searches.</p> </span> </label> @@ -254,11 +253,11 @@ $('#config-components').tabs(); % endif % if hasattr(curNzbProvider, 'enable_backlog'): - <div class="field-pair${(' hidden', '')[curNzbProvider.supportsBacklog]}"> - <label for="${curNzbProvider.getID()}_enable_backlog"> + <div class="field-pair${(' hidden', '')[curNzbProvider.supports_backlog]}"> + <label for="${curNzbProvider.get_id()}_enable_backlog"> <span class="component-title">Enable backlog searches</span> <span class="component-desc"> - <input type="checkbox" name="${curNzbProvider.getID()}_enable_backlog" id="${curNzbProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curNzbProvider.enable_backlog and curNzbProvider.supportsBacklog)]}/> + <input type="checkbox" name="${curNzbProvider.get_id()}_enable_backlog" id="${curNzbProvider.get_id()}_enable_backlog" ${('', 'checked="checked"')[bool(curNzbProvider.enable_backlog and curNzbProvider.supports_backlog)]}/> <p>enable provider to perform backlog searches.</p> </span> </label> @@ -267,10 +266,10 @@ $('#config-components').tabs(); % if hasattr(curNzbProvider, 
'search_fallback'): <div class="field-pair"> - <label for="${curNzbProvider.getID()}_search_fallback"> + <label for="${curNzbProvider.get_id()}_search_fallback"> <span class="component-title">Season search fallback</span> <span class="component-desc"> - <input type="checkbox" name="${curNzbProvider.getID()}_search_fallback" id="${curNzbProvider.getID()}_search_fallback" ${('', 'checked="checked"')[bool(curNzbProvider.search_fallback)]}/> + <input type="checkbox" name="${curNzbProvider.get_id()}_search_fallback" id="${curNzbProvider.get_id()}_search_fallback" ${('', 'checked="checked"')[bool(curNzbProvider.search_fallback)]}/> <p>when searching for a complete season depending on search mode you may return no results, this helps by restarting the search using the opposite search mode.</p> </span> </label> @@ -288,13 +287,13 @@ $('#config-components').tabs(); <label> <span class="component-title"></span> <span class="component-desc"> - <input type="radio" name="${curNzbProvider.getID()}_search_mode" id="${curNzbProvider.getID()}_search_mode_sponly" value="sponly" ${('', 'checked="checked"')[curNzbProvider.search_mode=="sponly"]}/>season packs only. + <input type="radio" name="${curNzbProvider.get_id()}_search_mode" id="${curNzbProvider.get_id()}_search_mode_sponly" value="sponly" ${('', 'checked="checked"')[curNzbProvider.search_mode=="sponly"]}/>season packs only. </span> </label> <label> <span class="component-title"></span> <span class="component-desc"> - <input type="radio" name="${curNzbProvider.getID()}_search_mode" id="${curNzbProvider.getID()}_search_mode_eponly" value="eponly" ${('', 'checked="checked"')[curNzbProvider.search_mode=="eponly"]}/>episodes only. + <input type="radio" name="${curNzbProvider.get_id()}_search_mode" id="${curNzbProvider.get_id()}_search_mode_eponly" value="eponly" ${('', 'checked="checked"')[curNzbProvider.search_mode=="eponly"]}/>episodes only. 
</span> </label> </div> @@ -303,15 +302,15 @@ $('#config-components').tabs(); </div> % endfor - % for curTorrentProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if curProvider.providerType == GenericProvider.TORRENT]: - <div class="providerDiv" id="${curTorrentProvider.getID()}Div"> + % for curTorrentProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if curProvider.provider_type == GenericProvider.TORRENT]: + <div class="providerDiv" id="${curTorrentProvider.get_id()}Div"> % if hasattr(curTorrentProvider, 'custom_url'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_custom_url"> + <label for="${curTorrentProvider.get_id()}_custom_url"> <span class="component-title">Custom URL:</span> <span class="component-desc"> - <input type="text" name="${curTorrentProvider.getID()}_custom_url" id="${curTorrentProvider.getID()}_custom_url" value="${curTorrentProvider.custom_url}" class="form-control input-sm input350" autocapitalize="off" /> + <input type="text" name="${curTorrentProvider.get_id()}_custom_url" id="${curTorrentProvider.get_id()}_custom_url" value="${curTorrentProvider.custom_url}" class="form-control input-sm input350" autocapitalize="off" /> </span> </label> <label> @@ -325,10 +324,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'api_key'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_api_key"> + <label for="${curTorrentProvider.get_id()}_api_key"> <span class="component-title">Api key:</span> <span class="component-desc"> - <input type="text" name="${curTorrentProvider.getID()}_api_key" id="${curTorrentProvider.getID()}_api_key" value="${curTorrentProvider.api_key}" class="form-control input-sm input350" autocapitalize="off" /> + <input type="text" name="${curTorrentProvider.get_id()}_api_key" id="${curTorrentProvider.get_id()}_api_key" value="${curTorrentProvider.api_key}" class="form-control input-sm input350" 
autocapitalize="off" /> </span> </label> </div> @@ -336,10 +335,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'digest'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_digest"> + <label for="${curTorrentProvider.get_id()}_digest"> <span class="component-title">Digest:</span> <span class="component-desc"> - <input type="text" name="${curTorrentProvider.getID()}_digest" id="${curTorrentProvider.getID()}_digest" value="${curTorrentProvider.digest}" class="form-control input-sm input350" autocapitalize="off" /> + <input type="text" name="${curTorrentProvider.get_id()}_digest" id="${curTorrentProvider.get_id()}_digest" value="${curTorrentProvider.digest}" class="form-control input-sm input350" autocapitalize="off" /> </span> </label> </div> @@ -347,10 +346,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'hash'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_hash"> + <label for="${curTorrentProvider.get_id()}_hash"> <span class="component-title">Hash:</span> <span class="component-desc"> - <input type="text" name="${curTorrentProvider.getID()}_hash" id="${curTorrentProvider.getID()}_hash" value="${curTorrentProvider.hash}" class="form-control input-sm input350" autocapitalize="off" /> + <input type="text" name="${curTorrentProvider.get_id()}_hash" id="${curTorrentProvider.get_id()}_hash" value="${curTorrentProvider.hash}" class="form-control input-sm input350" autocapitalize="off" /> </span> </label> </div> @@ -358,10 +357,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'username'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_username"> + <label for="${curTorrentProvider.get_id()}_username"> <span class="component-title">Username:</span> <span class="component-desc"> - <input type="text" name="${curTorrentProvider.getID()}_username" id="${curTorrentProvider.getID()}_username" value="${curTorrentProvider.username}" 
class="form-control input-sm input350" autocapitalize="off" autocomplete="no" /> + <input type="text" name="${curTorrentProvider.get_id()}_username" id="${curTorrentProvider.get_id()}_username" value="${curTorrentProvider.username}" class="form-control input-sm input350" autocapitalize="off" autocomplete="no" /> </span> </label> </div> @@ -369,10 +368,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'password'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_password"> + <label for="${curTorrentProvider.get_id()}_password"> <span class="component-title">Password:</span> <span class="component-desc"> - <input type="password" name="${curTorrentProvider.getID()}_password" id="${curTorrentProvider.getID()}_password" value="${curTorrentProvider.password}" class="form-control input-sm input350" autocomplete="no" autocapitalize="off" /> + <input type="password" name="${curTorrentProvider.get_id()}_password" id="${curTorrentProvider.get_id()}_password" value="${curTorrentProvider.password}" class="form-control input-sm input350" autocomplete="no" autocapitalize="off" /> </span> </label> </div> @@ -380,10 +379,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'passkey'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_passkey"> + <label for="${curTorrentProvider.get_id()}_passkey"> <span class="component-title">Passkey:</span> <span class="component-desc"> - <input type="text" name="${curTorrentProvider.getID()}_passkey" id="${curTorrentProvider.getID()}_passkey" value="${curTorrentProvider.passkey}" class="form-control input-sm input350" autocapitalize="off" /> + <input type="text" name="${curTorrentProvider.get_id()}_passkey" id="${curTorrentProvider.get_id()}_passkey" value="${curTorrentProvider.passkey}" class="form-control input-sm input350" autocapitalize="off" /> </span> </label> </div> @@ -391,10 +390,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'pin'): 
<div class="field-pair"> - <label for="${curTorrentProvider.getID()}_pin"> + <label for="${curTorrentProvider.get_id()}_pin"> <span class="component-title">Pin:</span> <span class="component-desc"> - <input type="password" name="${curTorrentProvider.getID()}_pin" id="${curTorrentProvider.getID()}_pin" value="${curTorrentProvider.pin}" class="form-control input-sm input100" autocomplete="no" autocapitalize="off" /> + <input type="password" name="${curTorrentProvider.get_id()}_pin" id="${curTorrentProvider.get_id()}_pin" value="${curTorrentProvider.pin}" class="form-control input-sm input100" autocomplete="no" autocapitalize="off" /> </span> </label> </div> @@ -402,10 +401,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'ratio'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_ratio"> - <span class="component-title" id="${curTorrentProvider.getID()}_ratio_desc">Seed ratio:</span> + <label for="${curTorrentProvider.get_id()}_ratio"> + <span class="component-title" id="${curTorrentProvider.get_id()}_ratio_desc">Seed ratio:</span> <span class="component-desc"> - <input type="number" step="0.1" name="${curTorrentProvider.getID()}_ratio" id="${curTorrentProvider.getID()}_ratio" value="${curTorrentProvider.ratio}" class="form-control input-sm input75" /> + <input type="number" min="-1" step="0.1" name="${curTorrentProvider.get_id()}_ratio" id="${curTorrentProvider.get_id()}_ratio" value="${curTorrentProvider.ratio}" class="form-control input-sm input75" /> </span> </label> <label> @@ -419,10 +418,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'minseed'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_minseed"> - <span class="component-title" id="${curTorrentProvider.getID()}_minseed_desc">Minimum seeders:</span> + <label for="${curTorrentProvider.get_id()}_minseed"> + <span class="component-title" id="${curTorrentProvider.get_id()}_minseed_desc">Minimum seeders:</span> <span 
class="component-desc"> - <input type="number" name="${curTorrentProvider.getID()}_minseed" id="${curTorrentProvider.getID()}_minseed" value="${curTorrentProvider.minseed}" class="form-control input-sm input75" /> + <input type="number" min="0" step="1" name="${curTorrentProvider.get_id()}_minseed" id="${curTorrentProvider.get_id()}_minseed" value="${curTorrentProvider.minseed}" class="form-control input-sm input75" /> </span> </label> </div> @@ -430,10 +429,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'minleech'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_minleech"> - <span class="component-title" id="${curTorrentProvider.getID()}_minleech_desc">Minimum leechers:</span> + <label for="${curTorrentProvider.get_id()}_minleech"> + <span class="component-title" id="${curTorrentProvider.get_id()}_minleech_desc">Minimum leechers:</span> <span class="component-desc"> - <input type="number" name="${curTorrentProvider.getID()}_minleech" id="${curTorrentProvider.getID()}_minleech" value="${curTorrentProvider.minleech}" class="form-control input-sm input75" /> + <input type="number" min="0" step="1" name="${curTorrentProvider.get_id()}_minleech" id="${curTorrentProvider.get_id()}_minleech" value="${curTorrentProvider.minleech}" class="form-control input-sm input75" /> </span> </label> </div> @@ -441,10 +440,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'confirmed'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_confirmed"> + <label for="${curTorrentProvider.get_id()}_confirmed"> <span class="component-title">Confirmed download</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_confirmed" id="${curTorrentProvider.getID()}_confirmed" ${('', 'checked="checked"')[bool(curTorrentProvider.confirmed)]}/> + <input type="checkbox" name="${curTorrentProvider.get_id()}_confirmed" id="${curTorrentProvider.get_id()}_confirmed" ${('', 
'checked="checked"')[bool(curTorrentProvider.confirmed)]}/> <p>only download torrents from trusted or verified uploaders ?</p> </span> </label> @@ -453,11 +452,11 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'ranked'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_ranked"> + <label for="${curTorrentProvider.get_id()}_ranked"> <span class="component-title">Ranked torrents</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_ranked" id="${curTorrentProvider.getID()}_ranked" ${('', 'checked="checked"')[bool(curTorrentProvider.ranked)]} /> - <p>only download ranked torrents (internal releases)</p> + <input type="checkbox" name="${curTorrentProvider.get_id()}_ranked" id="${curTorrentProvider.get_id()}_ranked" ${('', 'checked="checked"')[bool(curTorrentProvider.ranked)]} /> + <p>only download ranked torrents (trusted releases)</p> </span> </label> </div> @@ -465,11 +464,11 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'engrelease'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_engrelease"> + <label for="${curTorrentProvider.get_id()}_engrelease"> <span class="component-title">English torrents</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_engrelease" id="${curTorrentProvider.getID()}_engrelease" ${('', 'checked="checked"')[bool(curTorrentProvider.engrelease)]} /> - <p>only download english torrents ,or torrents containing english subtitles</p> + <input type="checkbox" name="${curTorrentProvider.get_id()}_engrelease" id="${curTorrentProvider.get_id()}_engrelease" ${('', 'checked="checked"')[bool(curTorrentProvider.engrelease)]} /> + <p>only download english torrents, or torrents containing english subtitles</p> </span> </label> </div> @@ -477,10 +476,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'onlyspasearch'): <div class="field-pair"> - <label 
for="${curTorrentProvider.getID()}_onlyspasearch"> + <label for="${curTorrentProvider.get_id()}_onlyspasearch"> <span class="component-title">For Spanish torrents</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_onlyspasearch" id="${curTorrentProvider.getID()}_onlyspasearch" ${('', 'checked="checked"')[bool(curTorrentProvider.onlyspasearch)]} /> + <input type="checkbox" name="${curTorrentProvider.get_id()}_onlyspasearch" id="${curTorrentProvider.get_id()}_onlyspasearch" ${('', 'checked="checked"')[bool(curTorrentProvider.onlyspasearch)]} /> <p>ONLY search on this provider if show info is defined as "Spanish" (avoid provider's use for VOS shows)</p> </span> </label> @@ -489,10 +488,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'sorting'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_sorting"> + <label for="${curTorrentProvider.get_id()}_sorting"> <span class="component-title">Sorting results by</span> <span class="component-desc"> - <select name="${curTorrentProvider.getID()}_sorting" id="${curTorrentProvider.getID()}_sorting" class="form-control input-sm"> + <select name="${curTorrentProvider.get_id()}_sorting" id="${curTorrentProvider.get_id()}_sorting" class="form-control input-sm"> % for curAction in ('last', 'seeders', 'leechers'): <option value="${curAction}" ${('', 'selected="selected"')[curAction == curTorrentProvider.sorting]}>${curAction}</option> % endfor @@ -504,11 +503,11 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'freeleech'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_freeleech"> + <label for="${curTorrentProvider.get_id()}_freeleech"> <span class="component-title">Freeleech</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_freeleech" id="${curTorrentProvider.getID()}_freeleech" ${('', 'checked="checked"')[bool(curTorrentProvider.freeleech)]}/> - <p>only 
download <b>[FreeLeech]</b> torrents.</p> + <input type="checkbox" name="${curTorrentProvider.get_id()}_freeleech" id="${curTorrentProvider.get_id()}_freeleech" ${('', 'checked="checked"')[bool(curTorrentProvider.freeleech)]}/> + <p>only download <b>"FreeLeech"</b> torrents.</p> </span> </label> </div> @@ -516,10 +515,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'enable_daily'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_enable_daily"> + <label for="${curTorrentProvider.get_id()}_enable_daily"> <span class="component-title">Enable daily searches</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_enable_daily" id="${curTorrentProvider.getID()}_enable_daily" ${('', 'checked="checked"')[bool(curTorrentProvider.enable_daily)]}/> + <input type="checkbox" name="${curTorrentProvider.get_id()}_enable_daily" id="${curTorrentProvider.get_id()}_enable_daily" ${('', 'checked="checked"')[bool(curTorrentProvider.enable_daily)]}/> <p>enable provider to perform daily searches.</p> </span> </label> @@ -527,11 +526,11 @@ $('#config-components').tabs(); % endif % if hasattr(curTorrentProvider, 'enable_backlog'): - <div class="field-pair${(' hidden', '')[curTorrentProvider.supportsBacklog]}"> - <label for="${curTorrentProvider.getID()}_enable_backlog"> + <div class="field-pair${(' hidden', '')[curTorrentProvider.supports_backlog]}"> + <label for="${curTorrentProvider.get_id()}_enable_backlog"> <span class="component-title">Enable backlog searches</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_enable_backlog" id="${curTorrentProvider.getID()}_enable_backlog" ${('', 'checked="checked"')[bool(curTorrentProvider.enable_backlog and curTorrentProvider.supportsBacklog)]}/> + <input type="checkbox" name="${curTorrentProvider.get_id()}_enable_backlog" id="${curTorrentProvider.get_id()}_enable_backlog" ${('', 
'checked="checked"')[bool(curTorrentProvider.enable_backlog and curTorrentProvider.supports_backlog)]}/> <p>enable provider to perform backlog searches.</p> </span> </label> @@ -540,10 +539,10 @@ $('#config-components').tabs(); % if hasattr(curTorrentProvider, 'search_fallback'): <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_search_fallback"> + <label for="${curTorrentProvider.get_id()}_search_fallback"> <span class="component-title">Season search fallback</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_search_fallback" id="${curTorrentProvider.getID()}_search_fallback" ${('', 'checked="checked"')[bool(curTorrentProvider.search_fallback)]}/> + <input type="checkbox" name="${curTorrentProvider.get_id()}_search_fallback" id="${curTorrentProvider.get_id()}_search_fallback" ${('', 'checked="checked"')[bool(curTorrentProvider.search_fallback)]}/> <p>when searching for a complete season depending on search mode you may return no results, this helps by restarting the search using the opposite search mode.</p> </span> </label> @@ -561,24 +560,24 @@ $('#config-components').tabs(); <label> <span class="component-title"></span> <span class="component-desc"> - <input type="radio" name="${curTorrentProvider.getID()}_search_mode" id="${curTorrentProvider.getID()}_search_mode_sponly" value="sponly" ${('', 'checked="checked"')[curTorrentProvider.search_mode=="sponly"]}/>season packs only. + <input type="radio" name="${curTorrentProvider.get_id()}_search_mode" id="${curTorrentProvider.get_id()}_search_mode_sponly" value="sponly" ${('', 'checked="checked"')[curTorrentProvider.search_mode=="sponly"]}/>season packs only. 
</span> </label> <label> <span class="component-title"></span> <span class="component-desc"> - <input type="radio" name="${curTorrentProvider.getID()}_search_mode" id="${curTorrentProvider.getID()}_search_mode_eponly" value="eponly" ${('', 'checked="checked"')[curTorrentProvider.search_mode=="eponly"]}/>episodes only. + <input type="radio" name="${curTorrentProvider.get_id()}_search_mode" id="${curTorrentProvider.get_id()}_search_mode_eponly" value="eponly" ${('', 'checked="checked"')[curTorrentProvider.search_mode=="eponly"]}/>episodes only. </span> </label> </div> % endif - % if hasattr(curTorrentProvider, 'cat') and curTorrentProvider.getID() == 'tntvillage': + % if hasattr(curTorrentProvider, 'cat') and curTorrentProvider.get_id() == 'tntvillage': <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_cat"> + <label for="${curTorrentProvider.get_id()}_cat"> <span class="component-title">Category:</span> <span class="component-desc"> - <select name="${curTorrentProvider.getID()}_cat" id="${curTorrentProvider.getID()}_cat" class="form-control input-sm"> + <select name="${curTorrentProvider.get_id()}_cat" id="${curTorrentProvider.get_id()}_cat" class="form-control input-sm"> % for i in curTorrentProvider.category_dict.keys(): <option value="${curTorrentProvider.category_dict[i]}" ${('', 'selected="selected"')[curTorrentProvider.category_dict[i] == curTorrentProvider.cat]}>${i}</option> % endfor @@ -588,12 +587,12 @@ $('#config-components').tabs(); </div> % endif - % if hasattr(curTorrentProvider, 'subtitle') and curTorrentProvider.getID() == 'tntvillage': + % if hasattr(curTorrentProvider, 'subtitle') and curTorrentProvider.get_id() == 'tntvillage': <div class="field-pair"> - <label for="${curTorrentProvider.getID()}_subtitle"> + <label for="${curTorrentProvider.get_id()}_subtitle"> <span class="component-title">Subtitled</span> <span class="component-desc"> - <input type="checkbox" name="${curTorrentProvider.getID()}_subtitle" 
id="${curTorrentProvider.getID()}_subtitle" ${('', 'checked="checked"')[bool(curTorrentProvider.subtitle)]}/> + <input type="checkbox" name="${curTorrentProvider.get_id()}_subtitle" id="${curTorrentProvider.get_id()}_subtitle" ${('', 'checked="checked"')[bool(curTorrentProvider.subtitle)]}/> <p>select torrent with Italian subtitle</p> </span> </label> @@ -612,7 +611,7 @@ $('#config-components').tabs(); </div><!-- /component-group2 //--> % if sickbeard.USE_NZBS: - <div id="core-component-group3" class="component-group"> + <div id="custom-newznab" class="component-group"> <div class="component-group-desc"> <h3>Configure Custom<br>Newznab Providers</h3> @@ -688,7 +687,7 @@ $('#config-components').tabs(); % if sickbeard.USE_TORRENTS: - <div id="core-component-group4" class="component-group"> + <div id="custom-torrent" class="component-group"> <div class="component-group-desc"> <h3>Configure Custom Torrent Providers</h3> diff --git a/gui/slick/views/config_search.mako b/gui/slick/views/config_search.mako index 7df3b52c8eca25d423147d69b80fffa68ebe6610..6fce79b211d20566e8b6dd9711ccabb931524541 100644 --- a/gui/slick/views/config_search.mako +++ b/gui/slick/views/config_search.mako @@ -15,12 +15,12 @@ <form id="configForm" action="saveSearch" method="post"> <div id="config-components"> <ul> - <li><a href="#core-component-group1">Episode Search</a></li> - <li><a href="#core-component-group2">NZB Search</a></li> - <li><a href="#core-component-group3">Torrent Search</a></li> + <li><a href="#episode-search">Episode Search</a></li> + <li><a href="#nzb-search">NZB Search</a></li> + <li><a href="#torrent-search">Torrent Search</a></li> </ul> - <div id="core-component-group1" class="component-group"> + <div id="episode-search" class="component-group"> <div class="component-group-desc"> <h3>Episode Search</h3> <p>How to manage searching with <a href="${srRoot}/config/providers">providers</a>.</p> @@ -65,7 +65,7 @@ <label> <span class="component-title">Backlog search day(s)</span> 
<span class="component-desc"> - <input type="text" name="backlog_days" value="${sickbeard.BACKLOG_DAYS}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="1" step="1" name="backlog_days" value="${sickbeard.BACKLOG_DAYS}" class="form-control input-sm input75" autocapitalize="off" /> <p>number of day(s) that the "Forced Backlog Search" will cover (e.g. 7 Days)</p> </span> </label> @@ -75,7 +75,7 @@ <label> <span class="component-title">Backlog search frequency</span> <span class="component-desc"> - <input type="text" name="backlog_frequency" value="${sickbeard.BACKLOG_FREQUENCY}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="720" step="60" name="backlog_frequency" value="${sickbeard.BACKLOG_FREQUENCY}" class="form-control input-sm input75" autocapitalize="off" /> <p>time in minutes between searches (min. ${sickbeard.MIN_BACKLOG_FREQUENCY})</p> </span> </label> @@ -85,7 +85,7 @@ <label> <span class="component-title">Daily search frequency</span> <span class="component-desc"> - <input type="text" name="dailysearch_frequency" value="${sickbeard.DAILYSEARCH_FREQUENCY}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="10" step="1" name="dailysearch_frequency" value="${sickbeard.DAILYSEARCH_FREQUENCY}" class="form-control input-sm input75" autocapitalize="off" /> <p>time in minutes between searches (min. ${sickbeard.MIN_DAILYSEARCH_FREQUENCY})</p> </span> </label> @@ -95,7 +95,7 @@ <label> <span class="component-title">Usenet retention</span> <span class="component-desc"> - <input type="text" name="usenet_retention" value="${sickbeard.USENET_RETENTION}" class="form-control input-sm input75" autocapitalize="off" /> + <input type="number" min="1" step="1" name="usenet_retention" value="${sickbeard.USENET_RETENTION}" class="form-control input-sm input75" autocapitalize="off" /> <p>age limit in days for usenet articles to be used (e.g. 
500)</p> </span> </label> @@ -191,7 +191,7 @@ </fieldset> </div><!-- /component-group1 //--> - <div id="core-component-group2" class="component-group"> + <div id="nzb-search" class="component-group"> <div class="component-group-desc"> <h3>NZB Search</h3> @@ -440,7 +440,7 @@ </fieldset> </div><!-- /component-group2 //--> - <div id="core-component-group3" class="component-group"> + <div id="torrent-search" class="component-group"> <div class="component-group-desc"> <h3>Torrent Search</h3> <p>How to handle Torrent search results.</p> diff --git a/gui/slick/views/config_subtitles.mako b/gui/slick/views/config_subtitles.mako index 9c77e555da9c90e967c2ef99a28831e808ef9de3..df6c0246bc93ccd0f37616548a54ef0536a5d716 100644 --- a/gui/slick/views/config_subtitles.mako +++ b/gui/slick/views/config_subtitles.mako @@ -34,12 +34,12 @@ $('#subtitles_dir').fileBrowser({ title: 'Select Subtitles Download Directory' } <div id="config-components"> <ul> - <li><a href="#core-component-group1">Subtitles Search</a></li> - <li><a href="#core-component-group2">Subtitles Plugin</a></li> - <li><a href="#core-component-group3">Plugin Settings</a></li> + <li><a href="#subtitles-search">Subtitles Search</a></li> + <li><a href="#subtitles-plugin">Subtitles Plugin</a></li> + <li><a href="#plugin-settings">Plugin Settings</a></li> </ul> - <div id="core-component-group1" class="component-group"> + <div id="subtitles-search" class="component-group"> <div class="component-group-desc"> <h3>Subtitles Search</h3> @@ -156,7 +156,7 @@ $('#subtitles_dir').fileBrowser({ title: 'Select Subtitles Download Directory' } </fieldset> </div><!-- /component-group1 //--> - <div id="core-component-group2" class="component-group"> + <div id="subtitles-plugin" class="component-group"> <div class="component-group-desc"> <h3>Subtitle Plugins</h3> @@ -183,7 +183,7 @@ $('#subtitles_dir').fileBrowser({ title: 'Select Subtitles Download Directory' } <br><input type="submit" class="btn config_submitter" value="Save Changes" 
/><br> </fieldset> </div><!-- /component-group2 //--> - <div id="core-component-group3" class="component-group"> + <div id="plugin-settings" class="component-group"> <div class="component-group-desc"> <h3>Subtitle Settings</h3> <p>Set user and password for each provider</p> diff --git a/gui/slick/views/displayShow.mako b/gui/slick/views/displayShow.mako index 0ae2d1c255690f5d4bf045e828cda2d44507ae06..8e78f7332e10a33aa9301dbcbdcb566d434e2e23 100644 --- a/gui/slick/views/displayShow.mako +++ b/gui/slick/views/displayShow.mako @@ -519,7 +519,7 @@ <a class="epSearch" id="${str(show.indexerid)}x${str(epResult["season"])}x${str(epResult["episode"])}" name="${str(show.indexerid)}x${str(epResult["season"])}x${str(epResult["episode"])}" href="searchEpisode?show=${show.indexerid}&season=${epResult["season"]}&episode=${epResult["episode"]}"><img src="${srRoot}/images/search16.png" width="16" height="16" alt="search" title="Manual Search" /></a> % endif % endif - % if sickbeard.USE_SUBTITLES and show.subtitles and epResult["location"] and subtitles.needs_subtitles(epResult['subtitles']): + % if sickbeard.USE_SUBTITLES and show.subtitles and epResult["location"] and (len(epResult["subtitles"]) == 0 or subtitles.needs_subtitles(epResult['subtitles'])): <a class="epSubtitlesSearch" href="searchEpisodeSubtitles?show=${show.indexerid}&season=${epResult["season"]}&episode=${epResult["episode"]}"><img src="${srRoot}/images/closed_captioning.png" height="16" alt="search subtitles" title="Search Subtitles" /></a> % endif </td> diff --git a/gui/slick/views/history.mako b/gui/slick/views/history.mako index 385f707b1da2f7ecca9a41bd26f11774d04f574b..b915c350a9ce2b1f09501977abc78890946f6108 100644 --- a/gui/slick/views/history.mako +++ b/gui/slick/views/history.mako @@ -8,13 +8,13 @@ from sickbeard import providers from sickbeard import sbdatetime - from sickbeard.providers import generic from sickbeard.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, 
SNATCHED_BEST, FAILED, DOWNLOADED, SUBTITLED from sickbeard.common import Quality, statusStrings, Overview from sickrage.show.History import History from sickrage.helper.encoding import ek + from sickrage.providers.GenericProvider import GenericProvider %> <%block name="content"> <%namespace file="/inc_defs.mako" import="renderQualityPill"/> @@ -87,9 +87,9 @@ % else: % if hItem["provider"] > 0: % if curStatus in [SNATCHED, FAILED]: - <% provider = providers.getProviderClass(generic.GenericProvider.makeID(hItem["provider"])) %> + <% provider = providers.getProviderClass(GenericProvider.make_id(hItem["provider"])) %> % if provider is not None: - <img src="${srRoot}/images/providers/${provider.imageName()}" width="16" height="16" style="vertical-align:middle;" /> <span style="vertical-align:middle;">${provider.name}</span> + <img src="${srRoot}/images/providers/${provider.image_name()}" width="16" height="16" style="vertical-align:middle;" /> <span style="vertical-align:middle;">${provider.name}</span> % else: <img src="${srRoot}/images/providers/missing.png" width="16" height="16" style="vertical-align:middle;" title="missing provider"/> <span style="vertical-align:middle;">Missing Provider</span> % endif @@ -142,9 +142,9 @@ % for action in sorted(hItem["actions"]): <% curStatus, curQuality = Quality.splitCompositeStatus(int(action["action"])) %> % if curStatus in [SNATCHED, FAILED]: - <% provider = providers.getProviderClass(generic.GenericProvider.makeID(action["provider"])) %> + <% provider = providers.getProviderClass(GenericProvider.make_id(action["provider"])) %> % if provider is not None: - <img src="${srRoot}/images/providers/${provider.imageName()}" width="16" height="16" style="vertical-align:middle;" alt="${provider.name}" style="cursor: help;" title="${provider.name}: ${ek(os.path.basename, action["resource"])}"/> + <img src="${srRoot}/images/providers/${provider.image_name()}" width="16" height="16" style="vertical-align:middle;" alt="${provider.name}" 
style="cursor: help;" title="${provider.name}: ${ek(os.path.basename, action["resource"])}"/> % else: <img src="${srRoot}/images/providers/missing.png" width="16" height="16" style="vertical-align:middle;" alt="missing provider" title="missing provider"/> % endif diff --git a/gui/slick/views/manage_failedDownloads.mako b/gui/slick/views/manage_failedDownloads.mako index 920e04f8a3054b4f2b94905664d58b7079af0821..bb7a4808cdcd56b8dcf743c940eda5e1fc41955b 100644 --- a/gui/slick/views/manage_failedDownloads.mako +++ b/gui/slick/views/manage_failedDownloads.mako @@ -5,9 +5,9 @@ import datetime import re from sickbeard import providers - from sickbeard.providers import generic from sickbeard.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED from sickbeard.common import Quality, qualityPresets, qualityPresetStrings, statusStrings, Overview + from sickrage.providers.GenericProvider import GenericProvider %> <%block name="content"> % if not header is UNDEFINED: @@ -53,9 +53,9 @@ % endif </td> <td align="center"> - <% provider = providers.getProviderClass(generic.GenericProvider.makeID(hItem["provider"])) %> + <% provider = providers.getProviderClass(GenericProvider.make_id(hItem["provider"])) %> % if provider is not None: - <img src="${srRoot}/images/providers/${provider.imageName()}" width="16" height="16" alt="${provider.name}" title="${provider.name}"/> + <img src="${srRoot}/images/providers/${provider.image_name()}" width="16" height="16" alt="${provider.name}" title="${provider.name}"/> % else: <img src="${srRoot}/images/providers/missing.png" width="16" height="16" alt="missing provider" title="missing provider"/> % endif diff --git a/lib/bs4/__init__.py b/lib/bs4/__init__.py index 7ba34269af71fbfbb2b53272866275684f08170f..f3dd75573ad383b891da4104792c369295a17709 100644 --- a/lib/bs4/__init__.py +++ b/lib/bs4/__init__.py @@ -17,8 +17,8 @@ http://www.crummy.com/software/BeautifulSoup/bs4/doc/ """ __author__ = 
"Leonard Richardson (leonardr@segfault.org)" -__version__ = "4.3.2" -__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson" +__version__ = "4.4.1" +__copyright__ = "Copyright (c) 2004-2015 Leonard Richardson" __license__ = "MIT" __all__ = ['BeautifulSoup'] @@ -45,7 +45,7 @@ from .element import ( # The very first thing we do is give a useful error if someone is # running this code under Python 3 without converting it. -syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).' +'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'<>'You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).' class BeautifulSoup(Tag): """ @@ -77,8 +77,11 @@ class BeautifulSoup(Tag): ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' + NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nTo get rid of this warning, change this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n" + def __init__(self, markup="", features=None, builder=None, - parse_only=None, from_encoding=None, **kwargs): + parse_only=None, from_encoding=None, exclude_encodings=None, + **kwargs): """The Soup object is initialized as the 'root tag', and the provided markup (which can be a string or a file-like object) is fed into the underlying parser.""" @@ -114,9 +117,9 @@ class BeautifulSoup(Tag): del kwargs['isHTML'] warnings.warn( "BS4 does not respect the isHTML argument to the " - "BeautifulSoup constructor. 
You can pass in features='html' " - "or features='xml' to get a builder capable of handling " - "one or the other.") + "BeautifulSoup constructor. Suggest you use " + "features='lxml' for HTML and features='lxml-xml' for " + "XML.") def deprecated_argument(old_name, new_name): if old_name in kwargs: @@ -140,6 +143,7 @@ class BeautifulSoup(Tag): "__init__() got an unexpected keyword argument '%s'" % arg) if builder is None: + original_features = features if isinstance(features, basestring): features = [features] if features is None or len(features) == 0: @@ -151,6 +155,16 @@ class BeautifulSoup(Tag): "requested: %s. Do you need to install a parser library?" % ",".join(features)) builder = builder_class() + if not (original_features == builder.NAME or + original_features in builder.ALTERNATE_NAMES): + if builder.is_xml: + markup_type = "XML" + else: + markup_type = "HTML" + warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict( + parser=builder.NAME, + markup_type=markup_type)) + self.builder = builder self.is_xml = builder.is_xml self.builder.soup = self @@ -178,6 +192,8 @@ class BeautifulSoup(Tag): # system. Just let it go. pass if is_file: + if isinstance(markup, unicode): + markup = markup.encode("utf8") warnings.warn( '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup) if markup[:5] == "http:" or markup[:6] == "https:": @@ -185,12 +201,15 @@ class BeautifulSoup(Tag): # Python 3 otherwise. if ((isinstance(markup, bytes) and not b' ' in markup) or (isinstance(markup, unicode) and not u' ' in markup)): + if isinstance(markup, unicode): + markup = markup.encode("utf8") warnings.warn( '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' 
% markup) for (self.markup, self.original_encoding, self.declared_html_encoding, self.contains_replacement_characters) in ( - self.builder.prepare_markup(markup, from_encoding)): + self.builder.prepare_markup( + markup, from_encoding, exclude_encodings=exclude_encodings)): self.reset() try: self._feed() @@ -203,6 +222,16 @@ class BeautifulSoup(Tag): self.markup = None self.builder.soup = None + def __copy__(self): + return type(self)(self.encode(), builder=self.builder) + + def __getstate__(self): + # Frequently a tree builder can't be pickled. + d = dict(self.__dict__) + if 'builder' in d and not self.builder.picklable: + del d['builder'] + return d + def _feed(self): # Convert the document to Unicode. self.builder.reset() @@ -229,9 +258,7 @@ class BeautifulSoup(Tag): def new_string(self, s, subclass=NavigableString): """Create a new NavigableString associated with this soup.""" - navigable = subclass(s) - navigable.setup() - return navigable + return subclass(s) def insert_before(self, successor): raise NotImplementedError("BeautifulSoup objects don't support insert_before().") @@ -290,14 +317,49 @@ class BeautifulSoup(Tag): def object_was_parsed(self, o, parent=None, most_recent_element=None): """Add an object to the parse tree.""" parent = parent or self.currentTag - most_recent_element = most_recent_element or self._most_recent_element - o.setup(parent, most_recent_element) + previous_element = most_recent_element or self._most_recent_element + + next_element = previous_sibling = next_sibling = None + if isinstance(o, Tag): + next_element = o.next_element + next_sibling = o.next_sibling + previous_sibling = o.previous_sibling + if not previous_element: + previous_element = o.previous_element + + o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) - if most_recent_element is not None: - most_recent_element.next_element = o self._most_recent_element = o parent.contents.append(o) + if parent.next_sibling: + # This node is being 
inserted into an element that has + # already been parsed. Deal with any dangling references. + index = parent.contents.index(o) + if index == 0: + previous_element = parent + previous_sibling = None + else: + previous_element = previous_sibling = parent.contents[index-1] + if index == len(parent.contents)-1: + next_element = parent.next_sibling + next_sibling = None + else: + next_element = next_sibling = parent.contents[index+1] + + o.previous_element = previous_element + if previous_element: + previous_element.next_element = o + o.next_element = next_element + if next_element: + next_element.previous_element = o + o.next_sibling = next_sibling + if next_sibling: + next_sibling.previous_sibling = o + o.previous_sibling = previous_sibling + if previous_sibling: + previous_sibling.next_sibling = o + def _popToTag(self, name, nsprefix=None, inclusivePop=True): """Pops the tag stack up to and including the most recent instance of the given tag. If inclusivePop is false, pops the tag diff --git a/lib/bs4/builder/__init__.py b/lib/bs4/builder/__init__.py index 740f5f29cd72f82820b67eb19e9a2178129733e1..f8fce5681c27aea87ce74fb11a978231886f3269 100644 --- a/lib/bs4/builder/__init__.py +++ b/lib/bs4/builder/__init__.py @@ -80,9 +80,12 @@ builder_registry = TreeBuilderRegistry() class TreeBuilder(object): """Turn a document into a Beautiful Soup object tree.""" + NAME = "[Unknown tree builder]" + ALTERNATE_NAMES = [] features = [] is_xml = False + picklable = False preserve_whitespace_tags = set() empty_element_tags = None # A tag will be considered an empty-element # tag when and only when it has no contents. 
diff --git a/lib/bs4/builder/_html5lib.py b/lib/bs4/builder/_html5lib.py index 7de36ae75e41e90fdef54ddc5016f54317663722..8725a65885b28f3973f098a61afa11623d3aa987 100644 --- a/lib/bs4/builder/_html5lib.py +++ b/lib/bs4/builder/_html5lib.py @@ -2,6 +2,7 @@ __all__ = [ 'HTML5TreeBuilder', ] +from pdb import set_trace import warnings from bs4.builder import ( PERMISSIVE, @@ -9,7 +10,10 @@ from bs4.builder import ( HTML_5, HTMLTreeBuilder, ) -from bs4.element import NamespacedAttribute +from bs4.element import ( + NamespacedAttribute, + whitespace_re, +) import html5lib from html5lib.constants import namespaces from bs4.element import ( @@ -22,11 +26,20 @@ from bs4.element import ( class HTML5TreeBuilder(HTMLTreeBuilder): """Use html5lib to build a tree.""" - features = ['html5lib', PERMISSIVE, HTML_5, HTML] + NAME = "html5lib" + + features = [NAME, PERMISSIVE, HTML_5, HTML] - def prepare_markup(self, markup, user_specified_encoding): + def prepare_markup(self, markup, user_specified_encoding, + document_declared_encoding=None, exclude_encodings=None): # Store the user-specified encoding for use later on. self.user_specified_encoding = user_specified_encoding + + # document_declared_encoding and exclude_encodings aren't used + # ATM because the html5lib TreeBuilder doesn't use + # UnicodeDammit. + if exclude_encodings: + warnings.warn("You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.") yield (markup, None, None, False) # These methods are defined by Beautiful Soup. @@ -101,7 +114,16 @@ class AttrList(object): def __iter__(self): return list(self.attrs.items()).__iter__() def __setitem__(self, name, value): - "set attr", name, value + # If this attribute is a multi-valued attribute for this element, + # turn its value into a list. 
+ list_attr = HTML5TreeBuilder.cdata_list_attributes + if (name in list_attr['*'] + or (self.element.name in list_attr + and name in list_attr[self.element.name])): + # A node that is being cloned may have already undergone + # this procedure. + if not isinstance(value, list): + value = whitespace_re.split(value) self.element[name] = value def items(self): return list(self.attrs.items()) @@ -161,6 +183,12 @@ class Element(html5lib.treebuilders._base.Node): # immediately after the parent, if it has no children.) if self.element.contents: most_recent_element = self.element._last_descendant(False) + elif self.element.next_element is not None: + # Something from further ahead in the parse tree is + # being inserted into this earlier element. This is + # very annoying because it means an expensive search + # for the last element in the tree. + most_recent_element = self.soup._last_descendant() else: most_recent_element = self.element @@ -172,6 +200,7 @@ class Element(html5lib.treebuilders._base.Node): return AttrList(self.element) def setAttributes(self, attributes): + if attributes is not None and len(attributes) > 0: converted_attributes = [] @@ -218,6 +247,9 @@ class Element(html5lib.treebuilders._base.Node): def reparentChildren(self, new_parent): """Move all of this tag's children into another tag.""" + # print "MOVE", self.element.contents + # print "FROM", self.element + # print "TO", new_parent.element element = self.element new_parent_element = new_parent.element # Determine what this tag's next_element will be once all the children @@ -236,17 +268,28 @@ class Element(html5lib.treebuilders._base.Node): new_parents_last_descendant_next_element = new_parent_element.next_element to_append = element.contents - append_after = new_parent.element.contents + append_after = new_parent_element.contents if len(to_append) > 0: # Set the first child's previous_element and previous_sibling # to elements within the new parent first_child = to_append[0] - 
first_child.previous_element = new_parents_last_descendant + if new_parents_last_descendant: + first_child.previous_element = new_parents_last_descendant + else: + first_child.previous_element = new_parent_element first_child.previous_sibling = new_parents_last_child + if new_parents_last_descendant: + new_parents_last_descendant.next_element = first_child + else: + new_parent_element.next_element = first_child + if new_parents_last_child: + new_parents_last_child.next_sibling = first_child # Fix the last child's next_element and next_sibling last_child = to_append[-1] last_child.next_element = new_parents_last_descendant_next_element + if new_parents_last_descendant_next_element: + new_parents_last_descendant_next_element.previous_element = last_child last_child.next_sibling = None for child in to_append: @@ -257,6 +300,10 @@ class Element(html5lib.treebuilders._base.Node): element.contents = [] element.next_element = final_next_element + # print "DONE WITH MOVE" + # print "FROM", self.element + # print "TO", new_parent_element + def cloneNode(self): tag = self.soup.new_tag(self.element.name, self.namespace) node = Element(tag, self.soup, self.namespace) diff --git a/lib/bs4/builder/_htmlparser.py b/lib/bs4/builder/_htmlparser.py index ca8d8b892bf3689117befadb0359fd2f3ca859c6..0101d647bdd99674422c1edeab205f01060cd341 100644 --- a/lib/bs4/builder/_htmlparser.py +++ b/lib/bs4/builder/_htmlparser.py @@ -4,10 +4,16 @@ __all__ = [ 'HTMLParserTreeBuilder', ] -from HTMLParser import ( - HTMLParser, - HTMLParseError, - ) +from HTMLParser import HTMLParser + +try: + from HTMLParser import HTMLParseError +except ImportError, e: + # HTMLParseError is removed in Python 3.5. Since it can never be + # thrown in 3.5, we can just define our own class as a placeholder. + class HTMLParseError(Exception): + pass + import sys import warnings @@ -19,10 +25,10 @@ import warnings # At the end of this file, we monkeypatch HTMLParser so that # strict=True works well on Python 3.2.2. 
major, minor, release = sys.version_info[:3] -CONSTRUCTOR_TAKES_STRICT = ( - major > 3 - or (major == 3 and minor > 2) - or (major == 3 and minor == 2 and release >= 3)) +CONSTRUCTOR_TAKES_STRICT = major == 3 and minor == 2 and release >= 3 +CONSTRUCTOR_STRICT_IS_DEPRECATED = major == 3 and minor == 3 +CONSTRUCTOR_TAKES_CONVERT_CHARREFS = major == 3 and minor >= 4 + from bs4.element import ( CData, @@ -63,7 +69,8 @@ class BeautifulSoupHTMLParser(HTMLParser): def handle_charref(self, name): # XXX workaround for a bug in HTMLParser. Remove this once - # it's fixed. + # it's fixed in all supported versions. + # http://bugs.python.org/issue13633 if name.startswith('x'): real_name = int(name.lstrip('x'), 16) elif name.startswith('X'): @@ -113,14 +120,6 @@ class BeautifulSoupHTMLParser(HTMLParser): def handle_pi(self, data): self.soup.endData() - if data.endswith("?") and data.lower().startswith("xml"): - # "An XHTML processing instruction using the trailing '?' - # will cause the '?' to be included in data." - HTMLParser - # docs. - # - # Strip the question mark so we don't end up with two - # question marks. 
- data = data[:-1] self.soup.handle_data(data) self.soup.endData(ProcessingInstruction) @@ -128,15 +127,19 @@ class BeautifulSoupHTMLParser(HTMLParser): class HTMLParserTreeBuilder(HTMLTreeBuilder): is_xml = False - features = [HTML, STRICT, HTMLPARSER] + picklable = True + NAME = HTMLPARSER + features = [NAME, HTML, STRICT] def __init__(self, *args, **kwargs): - if CONSTRUCTOR_TAKES_STRICT: + if CONSTRUCTOR_TAKES_STRICT and not CONSTRUCTOR_STRICT_IS_DEPRECATED: kwargs['strict'] = False + if CONSTRUCTOR_TAKES_CONVERT_CHARREFS: + kwargs['convert_charrefs'] = False self.parser_args = (args, kwargs) def prepare_markup(self, markup, user_specified_encoding=None, - document_declared_encoding=None): + document_declared_encoding=None, exclude_encodings=None): """ :return: A 4-tuple (markup, original encoding, encoding declared within markup, whether any characters had to be @@ -147,7 +150,8 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder): return try_encodings = [user_specified_encoding, document_declared_encoding] - dammit = UnicodeDammit(markup, try_encodings, is_html=True) + dammit = UnicodeDammit(markup, try_encodings, is_html=True, + exclude_encodings=exclude_encodings) yield (dammit.markup, dammit.original_encoding, dammit.declared_html_encoding, dammit.contains_replacement_characters) diff --git a/lib/bs4/builder/_lxml.py b/lib/bs4/builder/_lxml.py index fa5d49875eae7829bf5c3fd1b6c3b6e8869f7e28..9e8f88fb56e682da0e3c35a4c8b3bdeda1233654 100644 --- a/lib/bs4/builder/_lxml.py +++ b/lib/bs4/builder/_lxml.py @@ -7,7 +7,12 @@ from io import BytesIO from StringIO import StringIO import collections from lxml import etree -from bs4.element import Comment, Doctype, NamespacedAttribute +from bs4.element import ( + Comment, + Doctype, + NamespacedAttribute, + ProcessingInstruction, +) from bs4.builder import ( FAST, HTML, @@ -25,8 +30,11 @@ class LXMLTreeBuilderForXML(TreeBuilder): is_xml = True + NAME = "lxml-xml" + ALTERNATE_NAMES = ["xml"] + # Well, it's permissive by XML 
parser standards. - features = [LXML, XML, FAST, PERMISSIVE] + features = [NAME, LXML, XML, FAST, PERMISSIVE] CHUNK_SIZE = 512 @@ -70,6 +78,7 @@ class LXMLTreeBuilderForXML(TreeBuilder): return (None, tag) def prepare_markup(self, markup, user_specified_encoding=None, + exclude_encodings=None, document_declared_encoding=None): """ :yield: A series of 4-tuples. @@ -95,7 +104,8 @@ class LXMLTreeBuilderForXML(TreeBuilder): # the document as each one in turn. is_html = not self.is_xml try_encodings = [user_specified_encoding, document_declared_encoding] - detector = EncodingDetector(markup, try_encodings, is_html) + detector = EncodingDetector( + markup, try_encodings, is_html, exclude_encodings) for encoding in detector.encodings: yield (detector.markup, encoding, document_declared_encoding, False) @@ -189,7 +199,9 @@ class LXMLTreeBuilderForXML(TreeBuilder): self.nsmaps.pop() def pi(self, target, data): - pass + self.soup.endData() + self.soup.handle_data(target + ' ' + data) + self.soup.endData(ProcessingInstruction) def data(self, content): self.soup.handle_data(content) @@ -212,7 +224,10 @@ class LXMLTreeBuilderForXML(TreeBuilder): class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): - features = [LXML, HTML, FAST, PERMISSIVE] + NAME = LXML + ALTERNATE_NAMES = ["lxml-html"] + + features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE] is_xml = False def default_parser(self, encoding): diff --git a/lib/bs4/dammit.py b/lib/bs4/dammit.py index 59640b7ce3a0f1386fdca863cd7eb95a3942a3ee..636f81b4c00bac992c037560d02b694b56906366 100644 --- a/lib/bs4/dammit.py +++ b/lib/bs4/dammit.py @@ -3,10 +3,12 @@ This library converts a bytestream to Unicode through any means necessary. It is heavily based on code from Mark Pilgrim's Universal -Feed Parser. It works best on XML and XML, but it does not rewrite the +Feed Parser. It works best on XML and HTML, but it does not rewrite the XML or HTML to reflect a new encoding; that's the tree builder's job. 
""" +__license__ = "MIT" +from pdb import set_trace import codecs from htmlentitydefs import codepoint2name import re @@ -212,8 +214,11 @@ class EncodingDetector: 5. Windows-1252. """ - def __init__(self, markup, override_encodings=None, is_html=False): + def __init__(self, markup, override_encodings=None, is_html=False, + exclude_encodings=None): self.override_encodings = override_encodings or [] + exclude_encodings = exclude_encodings or [] + self.exclude_encodings = set([x.lower() for x in exclude_encodings]) self.chardet_encoding = None self.is_html = is_html self.declared_encoding = None @@ -224,6 +229,8 @@ class EncodingDetector: def _usable(self, encoding, tried): if encoding is not None: encoding = encoding.lower() + if encoding in self.exclude_encodings: + return False if encoding not in tried: tried.add(encoding) return True @@ -266,6 +273,9 @@ class EncodingDetector: def strip_byte_order_mark(cls, data): """If a byte-order mark is present, strip it and return the encoding it implies.""" encoding = None + if isinstance(data, unicode): + # Unicode data cannot have a byte-order mark. 
+ return data, encoding if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \ and (data[2:4] != '\x00\x00'): encoding = 'utf-16be' @@ -306,7 +316,7 @@ class EncodingDetector: declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos) if declared_encoding_match is not None: declared_encoding = declared_encoding_match.groups()[0].decode( - 'ascii') + 'ascii', 'replace') if declared_encoding: return declared_encoding.lower() return None @@ -331,13 +341,14 @@ class UnicodeDammit: ] def __init__(self, markup, override_encodings=[], - smart_quotes_to=None, is_html=False): + smart_quotes_to=None, is_html=False, exclude_encodings=[]): self.smart_quotes_to = smart_quotes_to self.tried_encodings = [] self.contains_replacement_characters = False self.is_html = is_html - self.detector = EncodingDetector(markup, override_encodings, is_html) + self.detector = EncodingDetector( + markup, override_encodings, is_html, exclude_encodings) # Short-circuit if the data is in Unicode to begin with. 
if isinstance(markup, unicode) or markup == '': diff --git a/lib/bs4/diagnose.py b/lib/bs4/diagnose.py index 4d0b00afaddf9ff6b005f3755ae28e0922d52a86..c04d23c3592b3b4db2d103f702b2737cc29cf609 100644 --- a/lib/bs4/diagnose.py +++ b/lib/bs4/diagnose.py @@ -1,4 +1,7 @@ """Diagnostic functions, mainly for use when doing tech support.""" + +__license__ = "MIT" + import cProfile from StringIO import StringIO from HTMLParser import HTMLParser @@ -33,12 +36,21 @@ def diagnose(data): if 'lxml' in basic_parsers: basic_parsers.append(["lxml", "xml"]) - from lxml import etree - print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION)) + try: + from lxml import etree + print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION)) + except ImportError, e: + print ( + "lxml is not installed or couldn't be imported.") + if 'html5lib' in basic_parsers: - import html5lib - print "Found html5lib version %s" % html5lib.__version__ + try: + import html5lib + print "Found html5lib version %s" % html5lib.__version__ + except ImportError, e: + print ( + "html5lib is not installed or couldn't be imported.") if hasattr(data, 'read'): data = data.read() diff --git a/lib/bs4/element.py b/lib/bs4/element.py index da9afdf48ec0b05cf8e970cd906425ce80b343cb..ecf2b2804d1caba3a71c6405dc8decfcff094723 100644 --- a/lib/bs4/element.py +++ b/lib/bs4/element.py @@ -1,3 +1,6 @@ +__license__ = "MIT" + +from pdb import set_trace import collections import re import sys @@ -185,24 +188,40 @@ class PageElement(object): return self.HTML_FORMATTERS.get( name, HTMLAwareEntitySubstitution.substitute_xml) - def setup(self, parent=None, previous_element=None): + def setup(self, parent=None, previous_element=None, next_element=None, + previous_sibling=None, next_sibling=None): """Sets up the initial relations between this element and other elements.""" self.parent = parent + self.previous_element = previous_element if previous_element is not None: self.previous_element.next_element = self - 
self.next_element = None - self.previous_sibling = None - self.next_sibling = None - if self.parent is not None and self.parent.contents: - self.previous_sibling = self.parent.contents[-1] + + self.next_element = next_element + if self.next_element: + self.next_element.previous_element = self + + self.next_sibling = next_sibling + if self.next_sibling: + self.next_sibling.previous_sibling = self + + if (not previous_sibling + and self.parent is not None and self.parent.contents): + previous_sibling = self.parent.contents[-1] + + self.previous_sibling = previous_sibling + if previous_sibling: self.previous_sibling.next_sibling = self nextSibling = _alias("next_sibling") # BS3 previousSibling = _alias("previous_sibling") # BS3 def replace_with(self, replace_with): + if not self.parent: + raise ValueError( + "Cannot replace one element with another when the" + "element to be replaced is not part of a tree.") if replace_with is self: return if replace_with is self.parent: @@ -216,6 +235,10 @@ class PageElement(object): def unwrap(self): my_parent = self.parent + if not self.parent: + raise ValueError( + "Cannot replace an element with its contents when that" + "element is not part of a tree.") my_index = self.parent.index(self) self.extract() for child in reversed(self.contents[:]): @@ -240,17 +263,20 @@ class PageElement(object): last_child = self._last_descendant() next_element = last_child.next_element - if self.previous_element is not None: + if (self.previous_element is not None and + self.previous_element is not next_element): self.previous_element.next_element = next_element - if next_element is not None: + if next_element is not None and next_element is not self.previous_element: next_element.previous_element = self.previous_element self.previous_element = None last_child.next_element = None self.parent = None - if self.previous_sibling is not None: + if (self.previous_sibling is not None + and self.previous_sibling is not self.next_sibling): 
self.previous_sibling.next_sibling = self.next_sibling - if self.next_sibling is not None: + if (self.next_sibling is not None + and self.next_sibling is not self.previous_sibling): self.next_sibling.previous_sibling = self.previous_sibling self.previous_sibling = self.next_sibling = None return self @@ -263,13 +289,15 @@ class PageElement(object): last_child = self while isinstance(last_child, Tag) and last_child.contents: last_child = last_child.contents[-1] - if not accept_self and last_child == self: + if not accept_self and last_child is self: last_child = None return last_child # BS3: Not part of the API! _lastRecursiveChild = _last_descendant def insert(self, position, new_child): + if new_child is None: + raise ValueError("Cannot insert None into a tag.") if new_child is self: raise ValueError("Cannot insert a tag into itself.") if (isinstance(new_child, basestring) @@ -478,6 +506,10 @@ class PageElement(object): def _find_all(self, name, attrs, text, limit, generator, **kwargs): "Iterates over a generator looking for things that match." + if text is None and 'string' in kwargs: + text = kwargs['string'] + del kwargs['string'] + if isinstance(name, SoupStrainer): strainer = name else: @@ -548,17 +580,17 @@ class PageElement(object): # Methods for supporting CSS selectors. 
- tag_name_re = re.compile('^[a-z0-9]+$') + tag_name_re = re.compile('^[a-zA-Z0-9][-.a-zA-Z0-9:_]*$') - # /^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/ - # \---/ \---/\-------------/ \-------/ - # | | | | - # | | | The value - # | | ~,|,^,$,* or = - # | Attribute + # /^([a-zA-Z0-9][-.a-zA-Z0-9:_]*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/ + # \---------------------------/ \---/\-------------/ \-------/ + # | | | | + # | | | The value + # | | ~,|,^,$,* or = + # | Attribute # Tag attribselect_re = re.compile( - r'^(?P<tag>\w+)?\[(?P<attribute>\w+)(?P<operator>[=~\|\^\$\*]?)' + + r'^(?P<tag>[a-zA-Z0-9][-.a-zA-Z0-9:_]*)?\[(?P<attribute>[\w-]+)(?P<operator>[=~\|\^\$\*]?)' + r'=?"?(?P<value>[^\]"]*)"?\]$' ) @@ -654,11 +686,17 @@ class NavigableString(unicode, PageElement): how to handle non-ASCII characters. """ if isinstance(value, unicode): - return unicode.__new__(cls, value) - return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING) + u = unicode.__new__(cls, value) + else: + u = unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING) + u.setup() + return u def __copy__(self): - return self + """A copy of a NavigableString has the same contents and class + as the original, but it is not connected to the parse tree. + """ + return type(self)(self) def __getnewargs__(self): return (unicode(self),) @@ -707,7 +745,7 @@ class CData(PreformattedString): class ProcessingInstruction(PreformattedString): PREFIX = u'<?' - SUFFIX = u'?>' + SUFFIX = u'>' class Comment(PreformattedString): @@ -716,8 +754,8 @@ class Comment(PreformattedString): class Declaration(PreformattedString): - PREFIX = u'<!' - SUFFIX = u'!>' + PREFIX = u'<?' 
+ SUFFIX = u'?>' class Doctype(PreformattedString): @@ -759,9 +797,12 @@ class Tag(PageElement): self.prefix = prefix if attrs is None: attrs = {} - elif attrs and builder.cdata_list_attributes: - attrs = builder._replace_cdata_list_attribute_values( - self.name, attrs) + elif attrs: + if builder is not None and builder.cdata_list_attributes: + attrs = builder._replace_cdata_list_attribute_values( + self.name, attrs) + else: + attrs = dict(attrs) else: attrs = dict(attrs) self.attrs = attrs @@ -778,6 +819,18 @@ class Tag(PageElement): parserClass = _alias("parser_class") # BS3 + def __copy__(self): + """A copy of a Tag is a new Tag, unconnected to the parse tree. + Its contents are a copy of the old Tag's contents. + """ + clone = type(self)(None, self.builder, self.name, self.namespace, + self.nsprefix, self.attrs) + for attr in ('can_be_empty_element', 'hidden'): + setattr(clone, attr, getattr(self, attr)) + for child in self.contents: + clone.append(child.__copy__()) + return clone + @property def is_empty_element(self): """Is this tag an empty-element tag? (aka a self-closing tag) @@ -971,15 +1024,25 @@ class Tag(PageElement): as defined in __eq__.""" return not self == other - def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING): + def __repr__(self, encoding="unicode-escape"): """Renders this tag as a string.""" - return self.encode(encoding) + if PY3K: + # "The return value must be a string object", i.e. Unicode + return self.decode() + else: + # "The return value must be a string object", i.e. a bytestring. + # By convention, the return value of __repr__ should also be + # an ASCII string. + return self.encode(encoding) def __unicode__(self): return self.decode() def __str__(self): - return self.encode() + if PY3K: + return self.decode() + else: + return self.encode() if PY3K: __str__ = __repr__ = __unicode__ @@ -1103,12 +1166,18 @@ class Tag(PageElement): formatter="minimal"): """Renders the contents of this tag as a Unicode string. 
+ :param indent_level: Each line of the rendering will be + indented this many spaces. + :param eventual_encoding: The tag is destined to be encoded into this encoding. This method is _not_ responsible for performing that encoding. This information is passed in so that it can be substituted in if the document contains a <META> tag that mentions the document's encoding. + + :param formatter: The output formatter responsible for converting + entities to Unicode characters. """ # First off, turn a string formatter into a function. This # will stop the lookup from happening over and over again. @@ -1137,7 +1206,17 @@ class Tag(PageElement): def encode_contents( self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING, formatter="minimal"): - """Renders the contents of this tag as a bytestring.""" + """Renders the contents of this tag as a bytestring. + + :param indent_level: Each line of the rendering will be + indented this many spaces. + + :param eventual_encoding: The bytestring will be in this encoding. + + :param formatter: The output formatter responsible for converting + entities to Unicode characters. 
+ """ + contents = self.decode_contents(indent_level, encoding, formatter) return contents.encode(encoding) @@ -1201,26 +1280,57 @@ class Tag(PageElement): _selector_combinators = ['>', '+', '~'] _select_debug = False - def select(self, selector, _candidate_generator=None): + def select_one(self, selector): """Perform a CSS selection operation on the current element.""" + value = self.select(selector, limit=1) + if value: + return value[0] + return None + + def select(self, selector, _candidate_generator=None, limit=None): + """Perform a CSS selection operation on the current element.""" + + # Handle grouping selectors if ',' exists, ie: p,a + if ',' in selector: + context = [] + for partial_selector in selector.split(','): + partial_selector = partial_selector.strip() + if partial_selector == '': + raise ValueError('Invalid group selection syntax: %s' % selector) + candidates = self.select(partial_selector, limit=limit) + for candidate in candidates: + if candidate not in context: + context.append(candidate) + + if limit and len(context) >= limit: + break + return context + tokens = selector.split() current_context = [self] if tokens[-1] in self._selector_combinators: raise ValueError( 'Final combinator "%s" is missing an argument.' % tokens[-1]) + if self._select_debug: print 'Running CSS selector "%s"' % selector + for index, token in enumerate(tokens): - if self._select_debug: - print ' Considering token "%s"' % token - recursive_candidate_generator = None - tag_name = None + new_context = [] + new_context_ids = set([]) + if tokens[index-1] in self._selector_combinators: # This token was consumed by the previous combinator. Skip it. if self._select_debug: print ' Token was consumed by the previous combinator.' 
continue + + if self._select_debug: + print ' Considering token "%s"' % token + recursive_candidate_generator = None + tag_name = None + # Each operation corresponds to a checker function, a rule # for determining whether a candidate matches the # selector. Candidates are generated by the active @@ -1256,35 +1366,38 @@ class Tag(PageElement): "A pseudo-class must be prefixed with a tag name.") pseudo_attributes = re.match('([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo) found = [] - if pseudo_attributes is not None: + if pseudo_attributes is None: + pseudo_type = pseudo + pseudo_value = None + else: pseudo_type, pseudo_value = pseudo_attributes.groups() - if pseudo_type == 'nth-of-type': - try: - pseudo_value = int(pseudo_value) - except: - raise NotImplementedError( - 'Only numeric values are currently supported for the nth-of-type pseudo-class.') - if pseudo_value < 1: - raise ValueError( - 'nth-of-type pseudo-class value must be at least 1.') - class Counter(object): - def __init__(self, destination): - self.count = 0 - self.destination = destination - - def nth_child_of_type(self, tag): - self.count += 1 - if self.count == self.destination: - return True - if self.count > self.destination: - # Stop the generator that's sending us - # these things. 
- raise StopIteration() - return False - checker = Counter(pseudo_value).nth_child_of_type - else: + if pseudo_type == 'nth-of-type': + try: + pseudo_value = int(pseudo_value) + except: raise NotImplementedError( - 'Only the following pseudo-classes are implemented: nth-of-type.') + 'Only numeric values are currently supported for the nth-of-type pseudo-class.') + if pseudo_value < 1: + raise ValueError( + 'nth-of-type pseudo-class value must be at least 1.') + class Counter(object): + def __init__(self, destination): + self.count = 0 + self.destination = destination + + def nth_child_of_type(self, tag): + self.count += 1 + if self.count == self.destination: + return True + if self.count > self.destination: + # Stop the generator that's sending us + # these things. + raise StopIteration() + return False + checker = Counter(pseudo_value).nth_child_of_type + else: + raise NotImplementedError( + 'Only the following pseudo-classes are implemented: nth-of-type.') elif token == '*': # Star selector -- matches everything @@ -1311,7 +1424,6 @@ class Tag(PageElement): else: raise ValueError( 'Unsupported or invalid CSS selector: "%s"' % token) - if recursive_candidate_generator: # This happens when the selector looks like "> foo". # @@ -1361,8 +1473,7 @@ class Tag(PageElement): else: _use_candidate_generator = _candidate_generator - new_context = [] - new_context_ids = set([]) + count = 0 for tag in current_context: if self._select_debug: print " Running candidate generator on %s %s" % ( @@ -1387,9 +1498,12 @@ class Tag(PageElement): # don't include it in the context more than once. 
new_context.append(candidate) new_context_ids.add(id(candidate)) + if limit and len(new_context) >= limit: + break elif self._select_debug: print " FAILURE %s %s" % (candidate.name, repr(candidate.attrs)) + current_context = new_context if self._select_debug: diff --git a/lib/bs4/testing.py b/lib/bs4/testing.py index fd4495ac58c92f21a29d24c520738d55a908e0a2..7ba54ab39589dad6e1ab3ba45528ecac45ef2a4d 100644 --- a/lib/bs4/testing.py +++ b/lib/bs4/testing.py @@ -1,5 +1,8 @@ """Helper classes for tests.""" +__license__ = "MIT" + +import pickle import copy import functools import unittest @@ -43,6 +46,16 @@ class SoupTest(unittest.TestCase): self.assertEqual(obj.decode(), self.document_for(compare_parsed_to)) + def assertConnectedness(self, element): + """Ensure that next_element and previous_element are properly + set for all descendants of the given element. + """ + earlier = None + for e in element.descendants: + if earlier: + self.assertEqual(e, earlier.next_element) + self.assertEqual(earlier, e.previous_element) + earlier = e class HTMLTreeBuilderSmokeTest(object): @@ -54,6 +67,15 @@ class HTMLTreeBuilderSmokeTest(object): markup in these tests, there's not much room for interpretation. """ + def test_pickle_and_unpickle_identity(self): + # Pickling a tree, then unpickling it, yields a tree identical + # to the original. 
+ tree = self.soup("<a><b>foo</a>") + dumped = pickle.dumps(tree, 2) + loaded = pickle.loads(dumped) + self.assertEqual(loaded.__class__, BeautifulSoup) + self.assertEqual(loaded.decode(), tree.decode()) + def assertDoctypeHandled(self, doctype_fragment): """Assert that a given doctype string is handled correctly.""" doctype_str, soup = self._document_with_doctype(doctype_fragment) @@ -114,6 +136,11 @@ class HTMLTreeBuilderSmokeTest(object): soup.encode("utf-8").replace(b"\n", b""), markup.replace(b"\n", b"")) + def test_processing_instruction(self): + markup = b"""<?PITarget PIContent?>""" + soup = self.soup(markup) + self.assertEqual(markup, soup.encode("utf8")) + def test_deepcopy(self): """Make sure you can copy the tree builder. @@ -155,6 +182,23 @@ class HTMLTreeBuilderSmokeTest(object): def test_nested_formatting_elements(self): self.assertSoupEquals("<em><em></em></em>") + def test_double_head(self): + html = '''<!DOCTYPE html> +<html> +<head> +<title>Ordinary HEAD element test</title> +</head> +<script type="text/javascript"> +alert("Help!"); +</script> +<body> +Hello, world! +</body> +</html> +''' + soup = self.soup(html) + self.assertEqual("text/javascript", soup.find('script')['type']) + def test_comment(self): # Comments are represented as Comment objects. markup = "<p>foo<!--foobar-->baz</p>" @@ -221,6 +265,14 @@ class HTMLTreeBuilderSmokeTest(object): soup = self.soup(markup) self.assertEqual(["css"], soup.div.div['class']) + def test_multivalued_attribute_on_html(self): + # html5lib uses a different API to set the attributes ot the + # <html> tag. This has caused problems with multivalued + # attributes. 
+ markup = '<html class="a b"></html>' + soup = self.soup(markup) + self.assertEqual(["a", "b"], soup.html['class']) + def test_angle_brackets_in_attribute_values_are_escaped(self): self.assertSoupEquals('<a b="<a>"></a>', '<a b="<a>"></a>') @@ -253,6 +305,35 @@ class HTMLTreeBuilderSmokeTest(object): soup = self.soup("<html><h2>\nfoo</h2><p></p></html>") self.assertEqual("p", soup.h2.string.next_element.name) self.assertEqual("p", soup.p.name) + self.assertConnectedness(soup) + + def test_head_tag_between_head_and_body(self): + "Prevent recurrence of a bug in the html5lib treebuilder." + content = """<html><head></head> + <link></link> + <body>foo</body> +</html> +""" + soup = self.soup(content) + self.assertNotEqual(None, soup.html.body) + self.assertConnectedness(soup) + + def test_multiple_copies_of_a_tag(self): + "Prevent recurrence of a bug in the html5lib treebuilder." + content = """<!DOCTYPE html> +<html> + <body> + <article id="a" > + <div><a href="1"></div> + <footer> + <a href="2"></a> + </footer> + </article> + </body> +</html> +""" + soup = self.soup(content) + self.assertConnectedness(soup.article) def test_basic_namespaces(self): """Parsers don't need to *understand* namespaces, but at the @@ -463,11 +544,25 @@ class HTMLTreeBuilderSmokeTest(object): class XMLTreeBuilderSmokeTest(object): + def test_pickle_and_unpickle_identity(self): + # Pickling a tree, then unpickling it, yields a tree identical + # to the original. 
+ tree = self.soup("<a><b>foo</a>") + dumped = pickle.dumps(tree, 2) + loaded = pickle.loads(dumped) + self.assertEqual(loaded.__class__, BeautifulSoup) + self.assertEqual(loaded.decode(), tree.decode()) + def test_docstring_generated(self): soup = self.soup("<root/>") self.assertEqual( soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>') + def test_xml_declaration(self): + markup = b"""<?xml version="1.0" encoding="utf8"?>\n<foo/>""" + soup = self.soup(markup) + self.assertEqual(markup, soup.encode("utf8")) + def test_real_xhtml_document(self): """A real XHTML document should come out *exactly* the same as it went in.""" markup = b"""<?xml version="1.0" encoding="utf-8"?> @@ -485,7 +580,7 @@ class XMLTreeBuilderSmokeTest(object): <script type="text/javascript"> </script> """ - soup = BeautifulSoup(doc, "xml") + soup = BeautifulSoup(doc, "lxml-xml") # lxml would have stripped this while parsing, but we can add # it later. soup.script.string = 'console.log("< < hey > > ");' diff --git a/lib/bs4/tests/test_builder_registry.py b/lib/bs4/tests/test_builder_registry.py index 92ad10fb044b3d4c68f2b09c09f88b53659e842a..90cad829334f677a497519bb5abdaa997ff3d7d1 100644 --- a/lib/bs4/tests/test_builder_registry.py +++ b/lib/bs4/tests/test_builder_registry.py @@ -1,6 +1,7 @@ """Tests of the builder registry.""" import unittest +import warnings from bs4 import BeautifulSoup from bs4.builder import ( @@ -67,10 +68,15 @@ class BuiltInRegistryTest(unittest.TestCase): HTMLParserTreeBuilder) def test_beautifulsoup_constructor_does_lookup(self): - # You can pass in a string. - BeautifulSoup("", features="html") - # Or a list of strings. - BeautifulSoup("", features=["html", "fast"]) + + with warnings.catch_warnings(record=True) as w: + # This will create a warning about not explicitly + # specifying a parser, but we'll ignore it. + + # You can pass in a string. + BeautifulSoup("", features="html") + # Or a list of strings. 
+ BeautifulSoup("", features=["html", "fast"]) # You'll get an exception if BS can't find an appropriate # builder. diff --git a/lib/bs4/tests/test_html5lib.py b/lib/bs4/tests/test_html5lib.py index 594c3e1f26045b1c6bee259d6c0b28e10ac17b20..65536c2a2482b4e35e459e984f29c7fb2672dae3 100644 --- a/lib/bs4/tests/test_html5lib.py +++ b/lib/bs4/tests/test_html5lib.py @@ -83,3 +83,16 @@ class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest): soup = self.soup(markup) self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode()) self.assertEqual(2, len(soup.find_all('p'))) + + def test_processing_instruction(self): + """Processing instructions become comments.""" + markup = b"""<?PITarget PIContent?>""" + soup = self.soup(markup) + assert str(soup).startswith("<!--?PITarget PIContent?-->") + + def test_cloned_multivalue_node(self): + markup = b"""<a class="my_class"><p></a>""" + soup = self.soup(markup) + a1, a2 = soup.find_all('a') + self.assertEqual(a1, a2) + assert a1 is not a2 diff --git a/lib/bs4/tests/test_htmlparser.py b/lib/bs4/tests/test_htmlparser.py index bcb5ed232f99d4f687df95aeb77a650c0585724e..b45e35f9998bcdee334e5515adb5a0dbe0c79ccf 100644 --- a/lib/bs4/tests/test_htmlparser.py +++ b/lib/bs4/tests/test_htmlparser.py @@ -1,6 +1,8 @@ """Tests to ensure that the html.parser tree builder generates good trees.""" +from pdb import set_trace +import pickle from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest from bs4.builder import HTMLParserTreeBuilder @@ -17,3 +19,14 @@ class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest): def test_namespaced_public_doctype(self): # html.parser can't handle namespaced doctypes, so skip this one. pass + + def test_builder_is_pickled(self): + """Unlike most tree builders, HTMLParserTreeBuilder and will + be restored after pickling. 
+ """ + tree = self.soup("<a><b>foo</a>") + dumped = pickle.dumps(tree, 2) + loaded = pickle.loads(dumped) + self.assertTrue(isinstance(loaded.builder, type(tree.builder))) + + diff --git a/lib/bs4/tests/test_lxml.py b/lib/bs4/tests/test_lxml.py index 2b2e9b7e780723e35f66cb4bc032c350f1838f80..a05870b912ef1d7fa33ecd8435666fa1e42f4704 100644 --- a/lib/bs4/tests/test_lxml.py +++ b/lib/bs4/tests/test_lxml.py @@ -65,21 +65,6 @@ class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest): self.assertEqual(u"<b/>", unicode(soup.b)) self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message)) - def test_real_xhtml_document(self): - """lxml strips the XML definition from an XHTML doc, which is fine.""" - markup = b"""<?xml version="1.0" encoding="utf-8"?> -<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"> -<html xmlns="http://www.w3.org/1999/xhtml"> -<head><title>Hello.</title></head> -<body>Goodbye.</body> -</html>""" - soup = self.soup(markup) - self.assertEqual( - soup.encode("utf-8").replace(b"\n", b''), - markup.replace(b'\n', b'').replace( - b'<?xml version="1.0" encoding="utf-8"?>', b'')) - - @skipIf( not LXML_PRESENT, "lxml seems not to be present, not testing its XML tree builder.") diff --git a/lib/bs4/tests/test_soup.py b/lib/bs4/tests/test_soup.py index 47ac245f99a9501beff97c376c6c787b3d99f00b..1238af22fb8faf2909ec7337cb544cc282181f25 100644 --- a/lib/bs4/tests/test_soup.py +++ b/lib/bs4/tests/test_soup.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- """Tests of Beautiful Soup as a whole.""" +from pdb import set_trace import logging import unittest import sys @@ -20,6 +21,7 @@ import bs4.dammit from bs4.dammit import ( EntitySubstitution, UnicodeDammit, + EncodingDetector, ) from bs4.testing import ( SoupTest, @@ -48,8 +50,34 @@ class TestConstructor(SoupTest): soup = self.soup(data) self.assertEqual(u"foo\0bar", soup.h1.string) + def test_exclude_encodings(self): + utf8_data = u"Räksmörgås".encode("utf-8") + soup = 
self.soup(utf8_data, exclude_encodings=["utf-8"]) + self.assertEqual("windows-1252", soup.original_encoding) + + +class TestWarnings(SoupTest): + + def _no_parser_specified(self, s, is_there=True): + v = s.startswith(BeautifulSoup.NO_PARSER_SPECIFIED_WARNING[:80]) + self.assertTrue(v) + + def test_warning_if_no_parser_specified(self): + with warnings.catch_warnings(record=True) as w: + soup = self.soup("<a><b></b></a>") + msg = str(w[0].message) + self._assert_no_parser_specified(msg) -class TestDeprecatedConstructorArguments(SoupTest): + def test_warning_if_parser_specified_too_vague(self): + with warnings.catch_warnings(record=True) as w: + soup = self.soup("<a><b></b></a>", "html") + msg = str(w[0].message) + self._assert_no_parser_specified(msg) + + def test_no_warning_if_explicit_parser_specified(self): + with warnings.catch_warnings(record=True) as w: + soup = self.soup("<a><b></b></a>", "html.parser") + self.assertEquals([], w) def test_parseOnlyThese_renamed_to_parse_only(self): with warnings.catch_warnings(record=True) as w: @@ -271,10 +299,11 @@ class TestUnicodeDammit(unittest.TestCase): dammit.unicode_markup, """<foo>''""</foo>""") def test_detect_utf8(self): - utf8 = b"\xc3\xa9" + utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83" dammit = UnicodeDammit(utf8) - self.assertEqual(dammit.unicode_markup, u'\xe9') self.assertEqual(dammit.original_encoding.lower(), 'utf-8') + self.assertEqual(dammit.unicode_markup, u'Sacr\xe9 bleu! \N{SNOWMAN}') + def test_convert_hebrew(self): hebrew = b"\xed\xe5\xec\xf9" @@ -299,6 +328,26 @@ class TestUnicodeDammit(unittest.TestCase): dammit = UnicodeDammit(utf8_data, [bad_encoding]) self.assertEqual(dammit.original_encoding.lower(), 'utf-8') + def test_exclude_encodings(self): + # This is UTF-8. + utf8_data = u"Räksmörgås".encode("utf-8") + + # But if we exclude UTF-8 from consideration, the guess is + # Windows-1252. 
+ dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"]) + self.assertEqual(dammit.original_encoding.lower(), 'windows-1252') + + # And if we exclude that, there is no valid guess at all. + dammit = UnicodeDammit( + utf8_data, exclude_encodings=["utf-8", "windows-1252"]) + self.assertEqual(dammit.original_encoding, None) + + def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self): + detected = EncodingDetector( + b'<?xml version="1.0" encoding="UTF-\xdb" ?>') + encodings = list(detected.encodings) + assert u'utf-\N{REPLACEMENT CHARACTER}' in encodings + def test_detect_html5_style_meta_tag(self): for data in ( diff --git a/lib/bs4/tests/test_tree.py b/lib/bs4/tests/test_tree.py index f8515c0ea1c06056471949c1fdf5a2dbb5dd12fd..6b2a1239a91a7ad501e20c1aec8939b2cb1ad5b6 100644 --- a/lib/bs4/tests/test_tree.py +++ b/lib/bs4/tests/test_tree.py @@ -9,6 +9,7 @@ same markup, but all Beautiful Soup trees can be traversed with the methods tested here. """ +from pdb import set_trace import copy import pickle import re @@ -19,8 +20,10 @@ from bs4.builder import ( HTMLParserTreeBuilder, ) from bs4.element import ( + PY3K, CData, Comment, + Declaration, Doctype, NavigableString, SoupStrainer, @@ -68,7 +71,13 @@ class TestFind(TreeTest): def test_unicode_text_find(self): soup = self.soup(u'<h1>Räksmörgås</h1>') - self.assertEqual(soup.find(text=u'Räksmörgås'), u'Räksmörgås') + self.assertEqual(soup.find(string=u'Räksmörgås'), u'Räksmörgås') + + def test_unicode_attribute_find(self): + soup = self.soup(u'<h1 id="Räksmörgås">here it is</h1>') + str(soup) + self.assertEqual("here it is", soup.find(id=u'Räksmörgås').text) + def test_find_everything(self): """Test an optimization that finds all tags.""" @@ -87,6 +96,7 @@ class TestFindAll(TreeTest): """You can search the tree for text nodes.""" soup = self.soup("<html>Foo<b>bar</b>\xbb</html>") # Exact match. 
+ self.assertEqual(soup.find_all(string="bar"), [u"bar"]) self.assertEqual(soup.find_all(text="bar"), [u"bar"]) # Match any of a number of strings. self.assertEqual( @@ -688,7 +698,7 @@ class TestTagCreation(SoupTest): def test_tag_inherits_self_closing_rules_from_builder(self): if XML_BUILDER_PRESENT: - xml_soup = BeautifulSoup("", "xml") + xml_soup = BeautifulSoup("", "lxml-xml") xml_br = xml_soup.new_tag("br") xml_p = xml_soup.new_tag("p") @@ -697,7 +707,7 @@ class TestTagCreation(SoupTest): self.assertEqual(b"<br/>", xml_br.encode()) self.assertEqual(b"<p/>", xml_p.encode()) - html_soup = BeautifulSoup("", "html") + html_soup = BeautifulSoup("", "html.parser") html_br = html_soup.new_tag("br") html_p = html_soup.new_tag("p") @@ -773,6 +783,14 @@ class TestTreeModification(SoupTest): new_a = a.unwrap() self.assertEqual(a, new_a) + def test_replace_with_and_unwrap_give_useful_exception_when_tag_has_no_parent(self): + soup = self.soup("<a><b>Foo</b></a><c>Bar</c>") + a = soup.a + a.extract() + self.assertEqual(None, a.parent) + self.assertRaises(ValueError, a.unwrap) + self.assertRaises(ValueError, a.replace_with, soup.c) + def test_replace_tag_with_itself(self): text = "<a><b></b><c>Foo<d></d></c></a><a><e></e></a>" soup = self.soup(text) @@ -1067,6 +1085,31 @@ class TestTreeModification(SoupTest): self.assertEqual(foo_2, soup.a.string) self.assertEqual(bar_2, soup.b.string) + def test_extract_multiples_of_same_tag(self): + soup = self.soup(""" +<html> +<head> +<script>foo</script> +</head> +<body> + <script>bar</script> + <a></a> +</body> +<script>baz</script> +</html>""") + [soup.script.extract() for i in soup.find_all("script")] + self.assertEqual("<body>\n\n<a></a>\n</body>", unicode(soup.body)) + + + def test_extract_works_when_element_is_surrounded_by_identical_strings(self): + soup = self.soup( + '<html>\n' + '<body>hi</body>\n' + '</html>') + soup.find('body').extract() + self.assertEqual(None, soup.find('body')) + + def test_clear(self): 
"""Tag.clear()""" soup = self.soup("<p><a>String <em>Italicized</em></a> and another</p>") @@ -1293,6 +1336,51 @@ class TestPersistence(SoupTest): loaded = pickle.loads(dumped) self.assertEqual(loaded.decode(), soup.decode()) + def test_copy_navigablestring_is_not_attached_to_tree(self): + html = u"<b>Foo<a></a></b><b>Bar</b>" + soup = self.soup(html) + s1 = soup.find(string="Foo") + s2 = copy.copy(s1) + self.assertEqual(s1, s2) + self.assertEqual(None, s2.parent) + self.assertEqual(None, s2.next_element) + self.assertNotEqual(None, s1.next_sibling) + self.assertEqual(None, s2.next_sibling) + self.assertEqual(None, s2.previous_element) + + def test_copy_navigablestring_subclass_has_same_type(self): + html = u"<b><!--Foo--></b>" + soup = self.soup(html) + s1 = soup.string + s2 = copy.copy(s1) + self.assertEqual(s1, s2) + self.assertTrue(isinstance(s2, Comment)) + + def test_copy_entire_soup(self): + html = u"<div><b>Foo<a></a></b><b>Bar</b></div>end" + soup = self.soup(html) + soup_copy = copy.copy(soup) + self.assertEqual(soup, soup_copy) + + def test_copy_tag_copies_contents(self): + html = u"<div><b>Foo<a></a></b><b>Bar</b></div>end" + soup = self.soup(html) + div = soup.div + div_copy = copy.copy(div) + + # The two tags look the same, and evaluate to equal. + self.assertEqual(unicode(div), unicode(div_copy)) + self.assertEqual(div, div_copy) + + # But they're not the same object. + self.assertFalse(div is div_copy) + + # And they don't have the same relation to the parse tree. The + # copy is not associated with a parse tree at all. 
+ self.assertEqual(None, div_copy.parent) + self.assertEqual(None, div_copy.previous_element) + self.assertEqual(None, div_copy.find(string='Bar').next_element) + self.assertNotEqual(None, div.find(string='Bar').next_element) class TestSubstitutions(SoupTest): @@ -1366,7 +1454,7 @@ class TestSubstitutions(SoupTest): console.log("< < hey > > "); </script> """ - encoded = BeautifulSoup(doc).encode() + encoded = BeautifulSoup(doc, 'html.parser').encode() self.assertTrue(b"< < hey > >" in encoded) def test_formatter_skips_style_tag_for_html_documents(self): @@ -1375,7 +1463,7 @@ class TestSubstitutions(SoupTest): console.log("< < hey > > "); </style> """ - encoded = BeautifulSoup(doc).encode() + encoded = BeautifulSoup(doc, 'html.parser').encode() self.assertTrue(b"< < hey > >" in encoded) def test_prettify_leaves_preformatted_text_alone(self): @@ -1387,7 +1475,7 @@ class TestSubstitutions(SoupTest): soup.div.prettify()) def test_prettify_accepts_formatter(self): - soup = BeautifulSoup("<html><body>foo</body></html>") + soup = BeautifulSoup("<html><body>foo</body></html>", 'html.parser') pretty = soup.prettify(formatter = lambda x: x.upper()) self.assertTrue("FOO" in pretty) @@ -1484,6 +1572,14 @@ class TestEncoding(SoupTest): self.assertEqual( u"\N{SNOWMAN}".encode("utf8"), soup.b.renderContents()) + def test_repr(self): + html = u"<b>\N{SNOWMAN}</b>" + soup = self.soup(html) + if PY3K: + self.assertEqual(html, repr(soup)) + else: + self.assertEqual(b'<b>\\u2603</b>', repr(soup)) + class TestNavigableStringSubclasses(SoupTest): def test_cdata(self): @@ -1522,6 +1618,9 @@ class TestNavigableStringSubclasses(SoupTest): soup.insert(1, doctype) self.assertEqual(soup.encode(), b"<!DOCTYPE foo>\n") + def test_declaration(self): + d = Declaration("foo") + self.assertEqual("<?foo?>", d.output_ready()) class TestSoupSelector(TreeTest): @@ -1534,7 +1633,7 @@ class TestSoupSelector(TreeTest): <link rel="stylesheet" href="blah.css" type="text/css" id="l1"> </head> <body> - 
+<custom-dashed-tag class="dashed" id="dash1">Hello there.</custom-dashed-tag> <div id="main" class="fancy"> <div id="inner"> <h1 id="header1">An H1</h1> @@ -1552,8 +1651,18 @@ class TestSoupSelector(TreeTest): <a href="#" id="s2a1">span2a1</a> </span> <span class="span3"></span> +<custom-dashed-tag class="dashed" id="dash2"/> +<div data-tag="dashedvalue" id="data1"/> </span> </div> +<x id="xid"> +<z id="zida"/> +<z id="zidab"/> +<z id="zidac"/> +</x> +<y id="yid"> +<z id="zidb"/> +</y> <p lang="en" id="lang-en">English</p> <p lang="en-gb" id="lang-en-gb">English UK</p> <p lang="en-us" id="lang-en-us">English US</p> @@ -1565,7 +1674,7 @@ class TestSoupSelector(TreeTest): """ def setUp(self): - self.soup = BeautifulSoup(self.HTML) + self.soup = BeautifulSoup(self.HTML, 'html.parser') def assertSelects(self, selector, expected_ids): el_ids = [el['id'] for el in self.soup.select(selector)] @@ -1591,17 +1700,25 @@ class TestSoupSelector(TreeTest): def test_one_tag_many(self): els = self.soup.select('div') - self.assertEqual(len(els), 3) + self.assertEqual(len(els), 4) for div in els: self.assertEqual(div.name, 'div') + el = self.soup.select_one('div') + self.assertEqual('main', el['id']) + + def test_select_one_returns_none_if_no_match(self): + match = self.soup.select_one('nonexistenttag') + self.assertEqual(None, match) + + def test_tag_in_tag_one(self): els = self.soup.select('div div') - self.assertSelects('div div', ['inner']) + self.assertSelects('div div', ['inner', 'data1']) def test_tag_in_tag_many(self): for selector in ('html div', 'html body div', 'body div'): - self.assertSelects(selector, ['main', 'inner', 'footer']) + self.assertSelects(selector, ['data1', 'main', 'inner', 'footer']) def test_tag_no_match(self): self.assertEqual(len(self.soup.select('del')), 0) @@ -1609,6 +1726,20 @@ class TestSoupSelector(TreeTest): def test_invalid_tag(self): self.assertRaises(ValueError, self.soup.select, 'tag%t') + def test_select_dashed_tag_ids(self): + 
self.assertSelects('custom-dashed-tag', ['dash1', 'dash2']) + + def test_select_dashed_by_id(self): + dashed = self.soup.select('custom-dashed-tag[id=\"dash2\"]') + self.assertEqual(dashed[0].name, 'custom-dashed-tag') + self.assertEqual(dashed[0]['id'], 'dash2') + + def test_dashed_tag_text(self): + self.assertEqual(self.soup.select('body > custom-dashed-tag')[0].text, u'Hello there.') + + def test_select_dashed_matches_find_all(self): + self.assertEqual(self.soup.select('custom-dashed-tag'), self.soup.find_all('custom-dashed-tag')) + def test_header_tags(self): self.assertSelectMultiple( ('h1', ['header1']), @@ -1709,6 +1840,7 @@ class TestSoupSelector(TreeTest): ('[id^="m"]', ['me', 'main']), ('div[id^="m"]', ['main']), ('a[id^="m"]', ['me']), + ('div[data-tag^="dashed"]', ['data1']) ) def test_attribute_endswith(self): @@ -1716,8 +1848,8 @@ class TestSoupSelector(TreeTest): ('[href$=".css"]', ['l1']), ('link[href$=".css"]', ['l1']), ('link[id$="1"]', ['l1']), - ('[id$="1"]', ['l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1']), - ('div[id$="1"]', []), + ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']), + ('div[id$="1"]', ['data1']), ('[id$="noending"]', []), ) @@ -1730,7 +1862,6 @@ class TestSoupSelector(TreeTest): ('[rel*="notstyle"]', []), ('link[rel*="notstyle"]', []), ('link[href*="bla"]', ['l1']), - ('a[href*="http://"]', ['bob', 'me']), ('[href*="http://"]', ['bob', 'me']), ('[id*="p"]', ['pmulti', 'p1']), ('div[id*="m"]', ['main']), @@ -1739,8 +1870,8 @@ class TestSoupSelector(TreeTest): ('[href*=".css"]', ['l1']), ('link[href*=".css"]', ['l1']), ('link[id*="1"]', ['l1']), - ('[id*="1"]', ['l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1']), - ('div[id*="1"]', []), + ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']), + ('div[id*="1"]', ['data1']), ('[id*="noending"]', []), # New for this test ('[href*="."]', ['bob', 'me', 'l1']), @@ -1748,6 +1879,7 @@ class 
TestSoupSelector(TreeTest): ('link[href*="."]', ['l1']), ('div[id*="n"]', ['main', 'inner']), ('div[id*="nn"]', ['inner']), + ('div[data-tag*="edval"]', ['data1']) ) def test_attribute_exact_or_hypen(self): @@ -1767,8 +1899,17 @@ class TestSoupSelector(TreeTest): ('p[class]', ['p1', 'pmulti']), ('[blah]', []), ('p[blah]', []), + ('div[data-tag]', ['data1']) ) + def test_unsupported_pseudoclass(self): + self.assertRaises( + NotImplementedError, self.soup.select, "a:no-such-pseudoclass") + + self.assertRaises( + NotImplementedError, self.soup.select, "a:nth-of-type(a)") + + def test_nth_of_type(self): # Try to select first paragraph els = self.soup.select('div#inner p:nth-of-type(1)') @@ -1803,7 +1944,7 @@ class TestSoupSelector(TreeTest): selected = inner.select("div") # The <div id="inner"> tag was selected. The <div id="footer"> # tag was not. - self.assertSelectsIDs(selected, ['inner']) + self.assertSelectsIDs(selected, ['inner', 'data1']) def test_overspecified_child_id(self): self.assertSelects(".fancy #inner", ['inner']) @@ -1827,3 +1968,44 @@ class TestSoupSelector(TreeTest): def test_sibling_combinator_wont_select_same_tag_twice(self): self.assertSelects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr']) + + # Test the selector grouping operator (the comma) + def test_multiple_select(self): + self.assertSelects('x, y', ['xid', 'yid']) + + def test_multiple_select_with_no_space(self): + self.assertSelects('x,y', ['xid', 'yid']) + + def test_multiple_select_with_more_space(self): + self.assertSelects('x, y', ['xid', 'yid']) + + def test_multiple_select_duplicated(self): + self.assertSelects('x, x', ['xid']) + + def test_multiple_select_sibling(self): + self.assertSelects('x, y ~ p[lang=fr]', ['xid', 'lang-fr']) + + def test_multiple_select_tag_and_direct_descendant(self): + self.assertSelects('x, y > z', ['xid', 'zidb']) + + def test_multiple_select_direct_descendant_and_tags(self): + self.assertSelects('div > x, y, z', ['xid', 'yid', 'zida', 'zidb', 
'zidab', 'zidac']) + + def test_multiple_select_indirect_descendant(self): + self.assertSelects('div x,y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac']) + + def test_invalid_multiple_select(self): + self.assertRaises(ValueError, self.soup.select, ',x, y') + self.assertRaises(ValueError, self.soup.select, 'x,,y') + + def test_multiple_select_attrs(self): + self.assertSelects('p[lang=en], p[lang=en-gb]', ['lang-en', 'lang-en-gb']) + + def test_multiple_select_ids(self): + self.assertSelects('x, y > z[id=zida], z[id=zidab], z[id=zidb]', ['xid', 'zidb', 'zidab']) + + def test_multiple_select_nested(self): + self.assertSelects('body > div > x, y > z', ['xid', 'zidb']) + + + diff --git a/lib/requests/__init__.py b/lib/requests/__init__.py index 446500bfd4fc15eb24144a37d565df34cb1f34ab..f7924dc895dbb2acb3e8af88c3d7272f5e8c2bb0 100644 --- a/lib/requests/__init__.py +++ b/lib/requests/__init__.py @@ -6,7 +6,7 @@ # / """ -requests HTTP library +Requests HTTP library ~~~~~~~~~~~~~~~~~~~~~ Requests is an HTTP library, written in Python, for human beings. Basic GET @@ -42,8 +42,8 @@ is at <http://python-requests.org>. """ __title__ = 'requests' -__version__ = '2.6.0' -__build__ = 0x020503 +__version__ = '2.8.1' +__build__ = 0x020801 __author__ = 'Kenneth Reitz' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2015 Kenneth Reitz' @@ -62,7 +62,8 @@ from .sessions import session, Session from .status_codes import codes from .exceptions import ( RequestException, Timeout, URLRequired, - TooManyRedirects, HTTPError, ConnectionError + TooManyRedirects, HTTPError, ConnectionError, + FileModeWarning, ) # Set default logging handler to avoid "No handler found" warnings. @@ -75,3 +76,8 @@ except ImportError: pass logging.getLogger(__name__).addHandler(NullHandler()) + +import warnings + +# FileModeWarnings go off per the default. 
+warnings.simplefilter('default', FileModeWarning, append=True) diff --git a/lib/requests/adapters.py b/lib/requests/adapters.py index 02e0dd1f1d169bb57d5b3b8996b1868108d93523..6266d5be305a30fd412bb38cfe092fd2a8b8941c 100644 --- a/lib/requests/adapters.py +++ b/lib/requests/adapters.py @@ -8,6 +8,7 @@ This module contains the transport adapters that Requests uses to define and maintain connections. """ +import os.path import socket from .models import Response @@ -17,11 +18,14 @@ from .packages.urllib3.util import Timeout as TimeoutSauce from .packages.urllib3.util.retry import Retry from .compat import urlparse, basestring from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, - prepend_scheme_if_needed, get_auth_from_url, urldefragauth) + prepend_scheme_if_needed, get_auth_from_url, urldefragauth, + select_proxy) from .structures import CaseInsensitiveDict +from .packages.urllib3.exceptions import ClosedPoolError from .packages.urllib3.exceptions import ConnectTimeoutError from .packages.urllib3.exceptions import HTTPError as _HTTPError from .packages.urllib3.exceptions import MaxRetryError +from .packages.urllib3.exceptions import NewConnectionError from .packages.urllib3.exceptions import ProxyError as _ProxyError from .packages.urllib3.exceptions import ProtocolError from .packages.urllib3.exceptions import ReadTimeoutError @@ -35,6 +39,7 @@ from .auth import _basic_auth_str DEFAULT_POOLBLOCK = False DEFAULT_POOLSIZE = 10 DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None class BaseAdapter(object): @@ -103,7 +108,7 @@ class HTTPAdapter(BaseAdapter): def __setstate__(self, state): # Can't handle by adding 'proxy_manager' to self.__attrs__ because - # because self.poolmanager uses a lambda function, which isn't pickleable. + # self.poolmanager uses a lambda function, which isn't pickleable. 
self.proxy_manager = {} self.config = {} @@ -181,10 +186,15 @@ class HTTPAdapter(BaseAdapter): raise Exception("Could not find a suitable SSL CA certificate bundle.") conn.cert_reqs = 'CERT_REQUIRED' - conn.ca_certs = cert_loc + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc else: conn.cert_reqs = 'CERT_NONE' conn.ca_certs = None + conn.ca_cert_dir = None if cert: if not isinstance(cert, basestring): @@ -237,8 +247,7 @@ class HTTPAdapter(BaseAdapter): :param url: The URL to connect to. :param proxies: (optional) A Requests-style dictionary of proxies used on this request. """ - proxies = proxies or {} - proxy = proxies.get(urlparse(url.lower()).scheme) + proxy = select_proxy(url, proxies) if proxy: proxy = prepend_scheme_if_needed(proxy, 'http') @@ -271,12 +280,10 @@ class HTTPAdapter(BaseAdapter): :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. - :param proxies: A dictionary of schemes to proxy URLs. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. """ - proxies = proxies or {} + proxy = select_proxy(request.url, proxies) scheme = urlparse(request.url).scheme - proxy = proxies.get(scheme) - if proxy and scheme != 'https': url = urldefragauth(request.url) else: @@ -309,7 +316,6 @@ class HTTPAdapter(BaseAdapter): :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param proxies: The url of the proxy being used for this request. - :param kwargs: Optional additional keyword arguments. """ headers = {} username, password = get_auth_from_url(proxy) @@ -326,8 +332,8 @@ class HTTPAdapter(BaseAdapter): :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param stream: (optional) Whether to stream the request content. 
:param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a (`connect timeout, read - timeout <user/advanced.html#timeouts>`_) tuple. + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. :type timeout: float or tuple :param verify: (optional) Whether to verify SSL certificates. :param cert: (optional) Any user-provided SSL certificate to be trusted. @@ -375,7 +381,7 @@ class HTTPAdapter(BaseAdapter): if hasattr(conn, 'proxy_pool'): conn = conn.proxy_pool - low_conn = conn._get_conn(timeout=timeout) + low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) try: low_conn.putrequest(request.method, @@ -394,7 +400,15 @@ class HTTPAdapter(BaseAdapter): low_conn.send(b'\r\n') low_conn.send(b'0\r\n\r\n') - r = low_conn.getresponse() + # Receive the response from the server + try: + # For Python 2.7+ versions, use buffering of HTTP + # responses + r = low_conn.getresponse(buffering=True) + except TypeError: + # For compatibility with Python 2.6 versions and back + r = low_conn.getresponse() + resp = HTTPResponse.from_httplib( r, pool=conn, @@ -407,22 +421,24 @@ class HTTPAdapter(BaseAdapter): # Then, reraise so that we can handle the actual exception. low_conn.close() raise - else: - # All is well, return the connection to the pool. 
- conn._put_conn(low_conn) except (ProtocolError, socket.error) as err: raise ConnectionError(err, request=request) except MaxRetryError as e: if isinstance(e.reason, ConnectTimeoutError): - raise ConnectTimeout(e, request=request) + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) if isinstance(e.reason, ResponseError): raise RetryError(e, request=request) raise ConnectionError(e, request=request) + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + except _ProxyError as e: raise ProxyError(e) diff --git a/lib/requests/api.py b/lib/requests/api.py index 98c92298eb02122e6be3aa132a9d06106aa6d657..09ec731b06afa320f42a30bbd5f960301319c3c5 100644 --- a/lib/requests/api.py +++ b/lib/requests/api.py @@ -27,8 +27,8 @@ def request(method, url, **kwargs): :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data - before giving up, as a float, or a (`connect timeout, read timeout - <user/advanced.html#timeouts>`_) tuple. + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) <timeouts>` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. :type allow_redirects: bool @@ -46,26 +46,25 @@ def request(method, url, **kwargs): <Response [200]> """ - session = sessions.Session() - response = session.request(method=method, url=url, **kwargs) - # By explicitly closing the session, we avoid leaving sockets open which - # can trigger a ResourceWarning in some cases, and look like a memory leak - # in others. 
- session.close() - return response + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) -def get(url, **kwargs): +def get(url, params=None, **kwargs): """Sends a GET request. :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) - return request('get', url, **kwargs) + return request('get', url, params=params, **kwargs) def options(url, **kwargs): diff --git a/lib/requests/auth.py b/lib/requests/auth.py index d1c482517667e511a16cfb0208b98a3260caf95e..2af55fb5e60070e2fa2557a7784943f12838de2f 100644 --- a/lib/requests/auth.py +++ b/lib/requests/auth.py @@ -11,6 +11,7 @@ import os import re import time import hashlib +import threading from base64 import b64encode @@ -63,19 +64,26 @@ class HTTPDigestAuth(AuthBase): def __init__(self, username, password): self.username = username self.password = password - self.last_nonce = '' - self.nonce_count = 0 - self.chal = {} - self.pos = None - self.num_401_calls = 1 + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, 'init'): + self._thread_local.init = True + self._thread_local.last_nonce = '' + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None def build_digest_header(self, method, url): - realm = self.chal['realm'] - nonce = self.chal['nonce'] - qop = 
self.chal.get('qop') - algorithm = self.chal.get('algorithm') - opaque = self.chal.get('opaque') + realm = self._thread_local.chal['realm'] + nonce = self._thread_local.chal['nonce'] + qop = self._thread_local.chal.get('qop') + algorithm = self._thread_local.chal.get('algorithm') + opaque = self._thread_local.chal.get('opaque') if algorithm is None: _algorithm = 'MD5' @@ -103,7 +111,8 @@ class HTTPDigestAuth(AuthBase): # XXX not implemented yet entdig = None p_parsed = urlparse(url) - path = p_parsed.path + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" if p_parsed.query: path += '?' + p_parsed.query @@ -113,12 +122,12 @@ class HTTPDigestAuth(AuthBase): HA1 = hash_utf8(A1) HA2 = hash_utf8(A2) - if nonce == self.last_nonce: - self.nonce_count += 1 + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 else: - self.nonce_count = 1 - ncvalue = '%08x' % self.nonce_count - s = str(self.nonce_count).encode('utf-8') + self._thread_local.nonce_count = 1 + ncvalue = '%08x' % self._thread_local.nonce_count + s = str(self._thread_local.nonce_count).encode('utf-8') s += nonce.encode('utf-8') s += time.ctime().encode('utf-8') s += os.urandom(8) @@ -127,7 +136,7 @@ class HTTPDigestAuth(AuthBase): if _algorithm == 'MD5-SESS': HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) - if qop is None: + if not qop: respdig = KD(HA1, "%s:%s" % (nonce, HA2)) elif qop == 'auth' or 'auth' in qop.split(','): noncebit = "%s:%s:%s:%s:%s" % ( @@ -138,7 +147,7 @@ class HTTPDigestAuth(AuthBase): # XXX handle auth-int. return None - self.last_nonce = nonce + self._thread_local.last_nonce = nonce # XXX should the partial digests be encoded too? 
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ @@ -157,28 +166,27 @@ class HTTPDigestAuth(AuthBase): def handle_redirect(self, r, **kwargs): """Reset num_401_calls counter on redirects.""" if r.is_redirect: - self.num_401_calls = 1 + self._thread_local.num_401_calls = 1 def handle_401(self, r, **kwargs): """Takes the given response and tries digest-auth, if needed.""" - if self.pos is not None: + if self._thread_local.pos is not None: # Rewind the file position indicator of the body to where # it was to resend the request. - r.request.body.seek(self.pos) - num_401_calls = getattr(self, 'num_401_calls', 1) + r.request.body.seek(self._thread_local.pos) s_auth = r.headers.get('www-authenticate', '') - if 'digest' in s_auth.lower() and num_401_calls < 2: + if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: - self.num_401_calls += 1 + self._thread_local.num_401_calls += 1 pat = re.compile(r'digest ', flags=re.IGNORECASE) - self.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) # Consume content and release the original connection # to allow our new request to reuse the same one. 
r.content - r.raw.release_conn() + r.close() prep = r.request.copy() extract_cookies_to_jar(prep._cookies, r.request, r.raw) prep.prepare_cookies(prep._cookies) @@ -191,21 +199,25 @@ class HTTPDigestAuth(AuthBase): return _r - self.num_401_calls = 1 + self._thread_local.num_401_calls = 1 return r def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() # If we have a saved nonce, skip the 401 - if self.last_nonce: + if self._thread_local.last_nonce: r.headers['Authorization'] = self.build_digest_header(r.method, r.url) try: - self.pos = r.body.tell() + self._thread_local.pos = r.body.tell() except AttributeError: # In the case of HTTPDigestAuth being reused and the body of # the previous request was a file-like object, pos has the # file position of the previous body. Ensure it's set to # None. - self.pos = None + self._thread_local.pos = None r.register_hook('response', self.handle_401) r.register_hook('response', self.handle_redirect) + self._thread_local.num_401_calls = 1 + return r diff --git a/lib/requests/cacert.pem b/lib/requests/cacert.pem index 729fe15d400436f3158ace67af4b7517e5386c6f..6a66daa99796dd0e23fb914dc8ff54f152fd0819 100644 --- a/lib/requests/cacert.pem +++ b/lib/requests/cacert.pem @@ -1,83 +1,3 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. 
-# Label: "GTE CyberTrust Global Root" -# Serial: 421 -# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db -# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 -# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 ------BEGIN CERTIFICATE----- -MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD -VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv -bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv -b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV -UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU -cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds -b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH -iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS -r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 -04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r -GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 -3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P -lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ ------END CERTIFICATE----- - -# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Server CA" -# Serial: 1 -# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d -# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c -# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 ------BEGIN CERTIFICATE----- -MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm 
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx -MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 -dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl -cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 -DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD -gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 -yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX -L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj -EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG -7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e -QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ -qdq5snUb9kLy78fyGPmJvKP/iiMucEc= ------END CERTIFICATE----- - -# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Premium Server CA" -# Serial: 1 -# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a -# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a -# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 ------BEGIN CERTIFICATE----- -MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy -dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t -MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB -MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG -A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp -b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl 
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv -bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE -VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ -ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR -uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG -9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI -hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM -pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== ------END CERTIFICATE----- # Issuer: O=Equifax OU=Equifax Secure Certificate Authority # Subject: O=Equifax OU=Equifax Secure Certificate Authority @@ -106,55 +26,6 @@ A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y 1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 -----END CERTIFICATE----- -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Label: "Verisign Class 3 Public Primary Certification Authority" -# Serial: 149843929435818692848040365716851702463 -# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 -# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 -# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz -cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE -BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is -I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G 
-CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do -lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc -AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Label: "Verisign Class 3 Public Primary Certification Authority - G2" -# Serial: 167285380242319648451154478808036881606 -# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 -# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f -# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 -MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 -pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 -13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk -U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i 
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY -oJ2daZH9 ------END CERTIFICATE----- - # Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA # Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA # Label: "GlobalSign Root CA" @@ -214,84 +85,6 @@ AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== -----END CERTIFICATE----- -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Label: "ValiCert Class 1 VA" -# Serial: 1 -# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb -# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e -# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy -NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y -LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ -TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y -TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 -LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW -I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw 
-nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority -# Label: "ValiCert Class 2 VA" -# Serial: 1 -# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 -# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 -# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy -NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY -dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 -WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS -v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v -UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu -IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC -W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority -# Label: "RSA Root Certificate 1" -# Serial: 1 -# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 -# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb -# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy -NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD -cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs -2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY -JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE -Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ -n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A -PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu ------END CERTIFICATE----- - # Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only # Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only # Label: "Verisign Class 3 Public Primary Certification Authority - G3" @@ -356,42 +149,6 @@ fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== -----END CERTIFICATE----- -# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Secure Server CA" -# Serial: 927650371 -# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee -# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 -# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 ------BEGIN CERTIFICATE----- -MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC -VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u -ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc -KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u -ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 -MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE -ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j -b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF -bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg -U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ -I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 -wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC -AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb -oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 
-BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p -dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk -MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp -b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu -dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 -MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi -E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa -MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI -hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN -95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd -2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= ------END CERTIFICATE----- - # Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Label: "Entrust.net Premium 2048 Secure Server CA" @@ -454,61 +211,13 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp -----END CERTIFICATE----- -# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. 
-# Label: "Equifax Secure Global eBusiness CA" +# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Low-Value Services Root" # Serial: 1 -# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc -# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 -# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 ------BEGIN CERTIFICATE----- -MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT -ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw -MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj -dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l -c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC -UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc -58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ -o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr -aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA -A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA -Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv -8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. 
-# Label: "Equifax Secure eBusiness CA 1" -# Serial: 4 -# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d -# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 -# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 ------BEGIN CERTIFICATE----- -MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT -ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw -MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j -LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ -KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo -RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu -WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw -Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK -eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM -zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ -WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN -/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Low-Value Services Root" -# Serial: 1 -# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc -# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d -# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 +# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc +# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d +# SHA256 Fingerprint: 
8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 -----BEGIN CERTIFICATE----- MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 @@ -831,77 +540,6 @@ OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS -----END CERTIFICATE----- -# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc. -# Label: "America Online Root Certification Authority 1" -# Serial: 1 -# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e -# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a -# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3 ------BEGIN CERTIFICATE----- -MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk -hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym -1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW -OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb -2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko -O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU -AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF 
-Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb -LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir -oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C -MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds -sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 ------END CERTIFICATE----- - -# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc. -# Label: "America Online Root Certification Authority 2" -# Serial: 1 -# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf -# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84 -# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd ------BEGIN CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC -206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci -KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2 -JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9 -BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e -Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B -PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67 -Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq -Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ -o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3 -+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj 
-YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj -FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn -xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2 -LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc -obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8 -CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe -IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA -DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F -AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX -Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb -AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl -Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw -RY8mkaKO/qk= ------END CERTIFICATE----- - # Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association # Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association # Label: "Visa eCommerce Root" @@ -1272,39 +910,6 @@ u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw== -----END CERTIFICATE----- -# Issuer: O=TDC Internet OU=TDC Internet Root CA -# Subject: O=TDC Internet OU=TDC Internet Root CA -# Label: "TDC Internet Root CA" -# Serial: 986490188 -# MD5 Fingerprint: 91:f4:03:55:20:a1:f8:63:2c:62:de:ac:fb:61:1c:8e -# SHA1 Fingerprint: 21:fc:bd:8e:7f:6c:af:05:1b:d1:b3:43:ec:a8:e7:61:47:f2:0f:8a -# SHA256 Fingerprint: 48:98:c6:88:8c:0c:ff:b0:d3:e3:1a:ca:8a:37:d4:e3:51:5f:f7:46:d0:26:35:d8:66:46:cf:a0:a3:18:5a:e7 ------BEGIN CERTIFICATE----- -MIIEKzCCAxOgAwIBAgIEOsylTDANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJE -SzEVMBMGA1UEChMMVERDIEludGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQg -Um9vdCBDQTAeFw0wMTA0MDUxNjMzMTdaFw0yMTA0MDUxNzAzMTdaMEMxCzAJBgNV -BAYTAkRLMRUwEwYDVQQKEwxUREMgSW50ZXJuZXQxHTAbBgNVBAsTFFREQyBJbnRl 
-cm5ldCBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxLhA -vJHVYx/XmaCLDEAedLdInUaMArLgJF/wGROnN4NrXceO+YQwzho7+vvOi20jxsNu -Zp+Jpd/gQlBn+h9sHvTQBda/ytZO5GhgbEaqHF1j4QeGDmUApy6mcca8uYGoOn0a -0vnRrEvLznWv3Hv6gXPU/Lq9QYjUdLP5Xjg6PEOo0pVOd20TDJ2PeAG3WiAfAzc1 -4izbSysseLlJ28TQx5yc5IogCSEWVmb/Bexb4/DPqyQkXsN/cHoSxNK1EKC2IeGN -eGlVRGn1ypYcNIUXJXfi9i8nmHj9eQY6otZaQ8H/7AQ77hPv01ha/5Lr7K7a8jcD -R0G2l8ktCkEiu7vmpwIDAQABo4IBJTCCASEwEQYJYIZIAYb4QgEBBAQDAgAHMGUG -A1UdHwReMFwwWqBYoFakVDBSMQswCQYDVQQGEwJESzEVMBMGA1UEChMMVERDIElu -dGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQgUm9vdCBDQTENMAsGA1UEAxME -Q1JMMTArBgNVHRAEJDAigA8yMDAxMDQwNTE2MzMxN1qBDzIwMjEwNDA1MTcwMzE3 -WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUbGQBx/2FbazI2p5QCIUItTxWqFAw -HQYDVR0OBBYEFGxkAcf9hW2syNqeUAiFCLU8VqhQMAwGA1UdEwQFMAMBAf8wHQYJ -KoZIhvZ9B0EABBAwDhsIVjUuMDo0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4IBAQBO -Q8zR3R0QGwZ/t6T609lN+yOfI1Rb5osvBCiLtSdtiaHsmGnc540mgwV5dOy0uaOX -wTUA/RXaOYE6lTGQ3pfphqiZdwzlWqCE/xIWrG64jcN7ksKsLtB9KOy282A4aW8+ -2ARVPp7MVdK6/rtHBNcK2RYKNCn1WBPVT8+PVkuzHu7TmHnaCB4Mb7j4Fifvwm89 -9qNLPg7kbWzbO0ESm70NRyN/PErQr8Cv9u8btRXE64PECV90i9kR+8JWsTz4cMo0 -jUNAE4z9mQNUecYu6oah9jrUCbz0vGbMPVjQV0kK7iXiQe4T+Zs4NNEA9X7nlB38 -aQNiuJkFBT1reBK9sG9l ------END CERTIFICATE----- - # Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com # Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com # Label: "UTN DATACorp SGC Root CA" @@ -1490,84 +1095,6 @@ f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK 8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI -----END CERTIFICATE----- -# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. 
OU=Tanusitvanykiadok -# Label: "NetLock Business (Class B) Root" -# Serial: 105 -# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6 -# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af -# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12 ------BEGIN CERTIFICATE----- -MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx -ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 -b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD -EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05 -OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G -A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh -Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l -dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG -SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK -gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX -iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc -Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E -BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G -SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu -b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh -bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv -Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln -aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0 -IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh -c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph -biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo -ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP -UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj -YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo 
-dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA -bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06 -sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa -n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS -NitjrFgBazMpUIaD8QFI ------END CERTIFICATE----- - -# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Label: "NetLock Express (Class C) Root" -# Serial: 104 -# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4 -# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b -# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f ------BEGIN CERTIFICATE----- -MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx -ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 -b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD -EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X -DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw -DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u -c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr -TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN -BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA -OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC -2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW -RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P -AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW -ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0 -YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz -b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO 
-ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB -IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs -b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs -ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s -YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg -a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g -SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0 -aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg -YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg -Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY -ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g -pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4 -Fp1hBWeAyNDYpQcCNJgEjTME1A== ------END CERTIFICATE----- - # Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Label: "XRamp Global CA Root" @@ -1757,40 +1284,6 @@ LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl pYYsfPQS -----END CERTIFICATE----- -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Firmaprofesional Root CA" -# Serial: 1 -# MD5 Fingerprint: 11:92:79:40:3c:b1:83:40:e5:ab:66:4a:67:92:80:df -# SHA1 Fingerprint: a9:62:8f:4b:98:a9:1b:48:35:ba:d2:c1:46:32:86:bb:66:64:6a:8c -# SHA256 Fingerprint: c1:cf:0b:52:09:64:35:e3:f1:b7:1d:aa:ec:45:5a:23:11:c8:40:4f:55:83:a9:e2:13:c6:9d:85:7d:94:33:05 ------BEGIN CERTIFICATE----- -MIIEVzCCAz+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBnTELMAkGA1UEBhMCRVMx -IjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1 -dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2 -MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20w 
-HhcNMDExMDI0MjIwMDAwWhcNMTMxMDI0MjIwMDAwWjCBnTELMAkGA1UEBhMCRVMx -IjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1 -dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2 -MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20w -ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDnIwNvbyOlXnjOlSztlB5u -Cp4Bx+ow0Syd3Tfom5h5VtP8c9/Qit5Vj1H5WuretXDE7aTt/6MNbg9kUDGvASdY -rv5sp0ovFy3Tc9UTHI9ZpTQsHVQERc1ouKDAA6XPhUJHlShbz++AbOCQl4oBPB3z -hxAwJkh91/zpnZFx/0GaqUC1N5wpIE8fUuOgfRNtVLcK3ulqTgesrBlf3H5idPay -BQC6haD9HThuy1q7hryUZzM1gywfI834yJFxzJeL764P3CkDG8A563DtwW4O2GcL -iam8NeTvtjS0pbbELaW+0MOUJEjb35bTALVmGotmBQ/dPz/LP6pemkr4tErvlTcb -AgMBAAGjgZ8wgZwwKgYDVR0RBCMwIYYfaHR0cDovL3d3dy5maXJtYXByb2Zlc2lv -bmFsLmNvbTASBgNVHRMBAf8ECDAGAQH/AgEBMCsGA1UdEAQkMCKADzIwMDExMDI0 -MjIwMDAwWoEPMjAxMzEwMjQyMjAwMDBaMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E -FgQUMwugZtHq2s7eYpMEKFK1FH84aLcwDQYJKoZIhvcNAQEFBQADggEBAEdz/o0n -VPD11HecJ3lXV7cVVuzH2Fi3AQL0M+2TUIiefEaxvT8Ub/GzR0iLjJcG1+p+o1wq -u00vR+L4OQbJnC4xGgN49Lw4xiKLMzHwFgQEffl25EvXwOaD7FnMP97/T2u3Z36m -hoEyIwOdyPdfwUpgpZKpsaSgYMN4h7Mi8yrrW6ntBas3D7Hi05V2Y1Z0jFhyGzfl -ZKG+TQyTmAyX9odtsz/ny4Cm7YjHX1BiAuiZdBbQ5rQ58SfLyEDW44YQqSMSkuBp -QWOnryULwMWSyx6Yo1q6xTMPoJcB3X/ge9YGVM+h4k0460tQtcsm9MracEpqoeJ5 -quGnM/b9Sh/22WA= ------END CERTIFICATE----- - # Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services # Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services # Label: "Swisscom Root CA 1" @@ -2014,38 +1507,6 @@ rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2 9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis= -----END CERTIFICATE----- -# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. -# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. 
-# Label: "TURKTRUST Certificate Services Provider Root 1" -# Serial: 1 -# MD5 Fingerprint: f1:6a:22:18:c9:cd:df:ce:82:1d:1d:b7:78:5c:a9:a5 -# SHA1 Fingerprint: 79:98:a3:08:e1:4d:65:85:e6:c2:1e:15:3a:71:9f:ba:5a:d3:4a:d9 -# SHA256 Fingerprint: 44:04:e3:3b:5e:14:0d:cf:99:80:51:fd:fc:80:28:c7:c8:16:15:c5:ee:73:7b:11:1b:58:82:33:a9:b5:35:a0 ------BEGIN CERTIFICATE----- -MIID+zCCAuOgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBtzE/MD0GA1UEAww2VMOc -UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMQswCQYDVQQGDAJUUjEPMA0GA1UEBwwGQU5LQVJBMVYwVAYDVQQKDE0oYykg -MjAwNSBUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 -dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjAeFw0wNTA1MTMxMDI3MTdaFw0xNTAz -MjIxMDI3MTdaMIG3MT8wPQYDVQQDDDZUw5xSS1RSVVNUIEVsZWt0cm9uaWsgU2Vy -dGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLExCzAJBgNVBAYMAlRSMQ8wDQYD -VQQHDAZBTktBUkExVjBUBgNVBAoMTShjKSAyMDA1IFTDnFJLVFJVU1QgQmlsZ2kg -xLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEu -xZ4uMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAylIF1mMD2Bxf3dJ7 -XfIMYGFbazt0K3gNfUW9InTojAPBxhEqPZW8qZSwu5GXyGl8hMW0kWxsE2qkVa2k -heiVfrMArwDCBRj1cJ02i67L5BuBf5OI+2pVu32Fks66WJ/bMsW9Xe8iSi9BB35J -YbOG7E6mQW6EvAPs9TscyB/C7qju6hJKjRTP8wrgUDn5CDX4EVmt5yLqS8oUBt5C -urKZ8y1UiBAG6uEaPj1nH/vO+3yC6BFdSsG5FOpU2WabfIl9BJpiyelSPJ6c79L1 -JuTm5Rh8i27fbMx4W09ysstcP4wFjdFMjK2Sx+F4f2VsSQZQLJ4ywtdKxnWKWU51 -b0dewQIDAQABoxAwDjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAV -9VX/N5aAWSGk/KEVTCD21F/aAyT8z5Aa9CEKmu46sWrv7/hg0Uw2ZkUd82YCdAR7 -kjCo3gp2D++Vbr3JN+YaDayJSFvMgzbC9UZcWYJWtNX+I7TYVBxEq8Sn5RTOPEFh -fEPmzcSBCYsk+1Ql1haolgxnB2+zUEfjHCQo3SqYpGH+2+oSN7wBGjSFvW5P55Fy -B0SFHljKVETd96y5y4khctuPwGkplyqjrhgjlxxBKot8KsF8kOipKMDTkcatKIdA -aLX/7KfS0zgYnNN9aV3wxqUeJBujR/xpB2jn5Jq07Q+hh4cCzofSSE7hvP/L8XKS -RGQDJereW26fyfJOrN3H ------END CERTIFICATE----- - # Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. 
(c) Kasım 2005 # Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005 # Label: "TURKTRUST Certificate Services Provider Root 2" @@ -2617,152 +2078,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== -----END CERTIFICATE----- -# Issuer: CN=AC Raíz Certicámara S.A. O=Sociedad Cameral de Certificación Digital - Certicámara S.A. -# Subject: CN=AC Raíz Certicámara S.A. O=Sociedad Cameral de Certificación Digital - Certicámara S.A. -# Label: "AC Ra\xC3\xADz Certic\xC3\xA1mara S.A." -# Serial: 38908203973182606954752843738508300 -# MD5 Fingerprint: 93:2a:3e:f6:fd:23:69:0d:71:20:d4:2b:47:99:2b:a6 -# SHA1 Fingerprint: cb:a1:c5:f8:b0:e3:5e:b8:b9:45:12:d3:f9:34:a2:e9:06:10:d3:36 -# SHA256 Fingerprint: a6:c5:1e:0d:a5:ca:0a:93:09:d2:e4:c0:e4:0c:2a:f9:10:7a:ae:82:03:85:7f:e1:98:e3:e7:69:e3:43:08:5c ------BEGIN CERTIFICATE----- -MIIGZjCCBE6gAwIBAgIPB35Sk3vgFeNX8GmMy+wMMA0GCSqGSIb3DQEBBQUAMHsx -CzAJBgNVBAYTAkNPMUcwRQYDVQQKDD5Tb2NpZWRhZCBDYW1lcmFsIGRlIENlcnRp -ZmljYWNpw7NuIERpZ2l0YWwgLSBDZXJ0aWPDoW1hcmEgUy5BLjEjMCEGA1UEAwwa -QUMgUmHDrXogQ2VydGljw6FtYXJhIFMuQS4wHhcNMDYxMTI3MjA0NjI5WhcNMzAw -NDAyMjE0MjAyWjB7MQswCQYDVQQGEwJDTzFHMEUGA1UECgw+U29jaWVkYWQgQ2Ft -ZXJhbCBkZSBDZXJ0aWZpY2FjacOzbiBEaWdpdGFsIC0gQ2VydGljw6FtYXJhIFMu -QS4xIzAhBgNVBAMMGkFDIFJhw616IENlcnRpY8OhbWFyYSBTLkEuMIICIjANBgkq -hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq2uJo1PMSCMI+8PPUZYILrgIem08kBeG -qentLhM0R7LQcNzJPNCNyu5LF6vQhbCnIwTLqKL85XXbQMpiiY9QngE9JlsYhBzL -fDe3fezTf3MZsGqy2IiKLUV0qPezuMDU2s0iiXRNWhU5cxh0T7XrmafBHoi0wpOQ -Y5fzp6cSsgkiBzPZkc0OnB8OIMfuuzONj8LSWKdf/WU34ojC2I+GdV75LaeHM/J4 -Ny+LvB2GNzmxlPLYvEqcgxhaBvzz1NS6jBUJJfD5to0EfhcSM2tXSExP2yYe68yQ -54v5aHxwD6Mq0Do43zeX4lvegGHTgNiRg0JaTASJaBE8rF9ogEHMYELODVoqDA+b -MMCm8Ibbq0nXl21Ii/kDwFJnmxL3wvIumGVC2daa49AZMQyth9VXAnow6IYm+48j -ilSH5L887uvDdUhfHjlvgWJsxS3EF1QZtzeNnDeRyPYL1epjb4OsOMLzP96a++Ej 
-YfDIJss2yKHzMI+ko6Kh3VOz3vCaMh+DkXkwwakfU5tTohVTP92dsxA7SH2JD/zt -A/X7JWR1DhcZDY8AFmd5ekD8LVkH2ZD6mq093ICK5lw1omdMEWux+IBkAC1vImHF -rEsm5VoQgpukg3s0956JkSCXjrdCx2bD0Omk1vUgjcTDlaxECp1bczwmPS9KvqfJ -pxAe+59QafMCAwEAAaOB5jCB4zAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjAdBgNVHQ4EFgQU0QnQ6dfOeXRU+Tows/RtLAMDG2gwgaAGA1UdIASBmDCB -lTCBkgYEVR0gADCBiTArBggrBgEFBQcCARYfaHR0cDovL3d3dy5jZXJ0aWNhbWFy -YS5jb20vZHBjLzBaBggrBgEFBQcCAjBOGkxMaW1pdGFjaW9uZXMgZGUgZ2FyYW50 -7WFzIGRlIGVzdGUgY2VydGlmaWNhZG8gc2UgcHVlZGVuIGVuY29udHJhciBlbiBs -YSBEUEMuMA0GCSqGSIb3DQEBBQUAA4ICAQBclLW4RZFNjmEfAygPU3zmpFmps4p6 -xbD/CHwso3EcIRNnoZUSQDWDg4902zNc8El2CoFS3UnUmjIz75uny3XlesuXEpBc -unvFm9+7OSPI/5jOCk0iAUgHforA1SBClETvv3eiiWdIG0ADBaGJ7M9i4z0ldma/ -Jre7Ir5v/zlXdLp6yQGVwZVR6Kss+LGGIOk/yzVb0hfpKv6DExdA7ohiZVvVO2Dp -ezy4ydV/NgIlqmjCMRW3MGXrfx1IebHPOeJCgBbT9ZMj/EyXyVo3bHwi2ErN0o42 -gzmRkBDI8ck1fj+404HGIGQatlDCIaR43NAvO2STdPCWkPHv+wlaNECW8DYSwaN0 -jJN+Qd53i+yG2dIPPy3RzECiiWZIHiCznCNZc6lEc7wkeZBWN7PGKX6jD/EpOe9+ -XCgycDWs2rjIdWb8m0w5R44bb5tNAlQiM+9hup4phO9OSzNHdpdqy35f/RWmnkJD -W2ZaiogN9xa5P1FlK2Zqi9E4UqLWRhH6/JocdJ6PlwsCT2TG9WjTSy3/pDceiz+/ -RL5hRqGEPQgnTIEgd4kI6mdAXmwIUV80WoyWaM3X94nCHNMyAK9Sy9NgWyo6R35r -MDOhYil/SrnhLecUIw4OGEfhefwVVdCx/CVxY3UzHCMrr1zZ7Ud3YA47Dx7SwNxk -BYn8eNZcLCZDqQ== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Label: "TC TrustCenter Class 2 CA II" -# Serial: 941389028203453866782103406992443 -# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 -# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e -# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL 
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf -tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg -uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J -XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK -8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 -5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 -kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS -GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt -ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 -au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV -hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI -dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Label: "TC TrustCenter Class 3 CA II" -# Serial: 1506523511417715638772220530020799 -# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e -# SHA1 Fingerprint: 
80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5 -# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW -Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q -Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2 -1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq -ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1 -Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX -XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN -irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8 -TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6 -g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB -95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj -S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Subject: 
CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Label: "TC TrustCenter Universal CA I" -# Serial: 601024842042189035295619584734726 -# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c -# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 -# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV -BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 -c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx -MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg -R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD -VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR -JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T -fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu -jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z -wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ -fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD -VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G -CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 -7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn -8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs -ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT -ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ -2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY ------END CERTIFICATE----- - # Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center # Subject: CN=Deutsche Telekom Root CA 2 
O=Deutsche Telekom AG OU=T-TeleSec Trust Center # Label: "Deutsche Telekom Root CA 2" @@ -2793,36 +2108,6 @@ xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU Cm26OWMohpLzGITY+9HPBVZkVw== -----END CERTIFICATE----- -# Issuer: CN=ComSign Secured CA O=ComSign -# Subject: CN=ComSign Secured CA O=ComSign -# Label: "ComSign Secured CA" -# Serial: 264725503855295744117309814499492384489 -# MD5 Fingerprint: 40:01:25:06:8d:21:43:6a:0e:43:00:9c:e7:43:f3:d5 -# SHA1 Fingerprint: f9:cd:0e:2c:da:76:24:c1:8f:bd:f0:f0:ab:b6:45:b8:f7:fe:d5:7a -# SHA256 Fingerprint: 50:79:41:c7:44:60:a0:b4:70:86:22:0d:4e:99:32:57:2a:b5:d1:b5:bb:cb:89:80:ab:1c:b1:76:51:a8:44:d2 ------BEGIN CERTIFICATE----- -MIIDqzCCApOgAwIBAgIRAMcoRwmzuGxFjB36JPU2TukwDQYJKoZIhvcNAQEFBQAw -PDEbMBkGA1UEAxMSQ29tU2lnbiBTZWN1cmVkIENBMRAwDgYDVQQKEwdDb21TaWdu -MQswCQYDVQQGEwJJTDAeFw0wNDAzMjQxMTM3MjBaFw0yOTAzMTYxNTA0NTZaMDwx -GzAZBgNVBAMTEkNvbVNpZ24gU2VjdXJlZCBDQTEQMA4GA1UEChMHQ29tU2lnbjEL -MAkGA1UEBhMCSUwwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGtWhf -HZQVw6QIVS3joFd67+l0Kru5fFdJGhFeTymHDEjWaueP1H5XJLkGieQcPOqs49oh -gHMhCu95mGwfCP+hUH3ymBvJVG8+pSjsIQQPRbsHPaHA+iqYHU4Gk/v1iDurX8sW -v+bznkqH7Rnqwp9D5PGBpX8QTz7RSmKtUxvLg/8HZaWSLWapW7ha9B20IZFKF3ue -Mv5WJDmyVIRD9YTC2LxBkMyd1mja6YJQqTtoz7VdApRgFrFD2UNd3V2Hbuq7s8lr -9gOUCXDeFhF6K+h2j0kQmHe5Y1yLM5d19guMsqtb3nQgJT/j8xH5h2iGNXHDHYwt -6+UarA9z1YJZQIDTAgMBAAGjgacwgaQwDAYDVR0TBAUwAwEB/zBEBgNVHR8EPTA7 -MDmgN6A1hjNodHRwOi8vZmVkaXIuY29tc2lnbi5jby5pbC9jcmwvQ29tU2lnblNl -Y3VyZWRDQS5jcmwwDgYDVR0PAQH/BAQDAgGGMB8GA1UdIwQYMBaAFMFL7XC29z58 -ADsAj8c+DkWfHl3sMB0GA1UdDgQWBBTBS+1wtvc+fAA7AI/HPg5Fnx5d7DANBgkq -hkiG9w0BAQUFAAOCAQEAFs/ukhNQq3sUnjO2QiBq1BW9Cav8cujvR3qQrFHBZE7p -iL1DRYHjZiM/EoZNGeQFsOY3wo3aBijJD4mkU6l1P7CW+6tMM1X5eCZGbxs2mPtC -dsGCuY7e+0X5YxtiOzkGynd6qDwJz2w2PQ8KRUtpFhpFfTMDZflScZAmlaxMDPWL -kz/MdXSFmLr/YnpNH4n+rr2UAJm/EaXc4HnFFgt9AmEd6oX5AhVP51qJThRv4zdL -hfXBPGHg/QVBspJ/wx2g0K5SZGBrGMYmnNj1ZOQ2GmKfig8+/21OGVZOIJFsnzQz -OjRXUDpvgV4GxvU+fE6OK85lBi5d0ipTdF7Tbieejw== 
------END CERTIFICATE----- - # Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc # Subject: CN=Cybertrust Global Root O=Cybertrust, Inc # Label: "Cybertrust Global Root" @@ -2960,34 +2245,6 @@ h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho -----END CERTIFICATE----- -# Issuer: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327 -# Label: "Buypass Class 3 CA 1" -# Serial: 2 -# MD5 Fingerprint: df:3c:73:59:81:e7:39:50:81:04:4c:34:a2:cb:b3:7b -# SHA1 Fingerprint: 61:57:3a:11:df:0e:d8:7e:d5:92:65:22:ea:d0:56:d7:44:b3:23:71 -# SHA256 Fingerprint: b7:b1:2b:17:1f:82:1d:aa:99:0c:d0:fe:50:87:b1:28:44:8b:a8:e5:18:4f:84:c5:1e:02:b5:c8:fb:96:2b:24 ------BEGIN CERTIFICATE----- -MIIDUzCCAjugAwIBAgIBAjANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg -Q2xhc3MgMyBDQSAxMB4XDTA1MDUwOTE0MTMwM1oXDTE1MDUwOTE0MTMwM1owSzEL -MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD -VQQDDBRCdXlwYXNzIENsYXNzIDMgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAKSO13TZKWTeXx+HgJHqTjnmGcZEC4DVC69TB4sSveZn8AKxifZg -isRbsELRwCGoy+Gb72RRtqfPFfV0gGgEkKBYouZ0plNTVUhjP5JW3SROjvi6K//z -NIqeKNc0n6wv1g/xpC+9UrJJhW05NfBEMJNGJPO251P7vGGvqaMU+8IXF4Rs4HyI -+MkcVyzwPX6UvCWThOiaAJpFBUJXgPROztmuOfbIUxAMZTpHe2DC1vqRycZxbL2R -hzyRhkmr8w+gbCZ2Xhysm3HljbybIR6c1jh+JIAVMYKWsUnTYjdbiAwKYjT+p0h+ -mbEwi5A3lRyoH6UsjfRVyNvdWQrCrXig9IsCAwEAAaNCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUOBTmyPCppAP0Tj4io1vy1uCtQHQwDgYDVR0PAQH/BAQD -AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQABZ6OMySU9E2NdFm/soT4JXJEVKirZgCFP -Bdy7pYmrEzMqnji3jG8CcmPHc3ceCQa6Oyh7pEfJYWsICCD8igWKH7y6xsL+z27s -EzNxZy5p+qksP2bAEllNC1QCkoS72xLvg3BweMhT+t/Gxv/ciC8HwEmdMldg0/L2 -mSlf56oBzKwzqBwKu5HEA6BvtjT5htOzdlSY9EqBs1OdTUDs5XcTRa9bqh/YL0yC -e/4qxFi7T/ye/QNlGioOw6UgFpRreaaiErS7GqQjel/wroQk5PMr+4okoyeYZdow -dXb8GZHo2+ubPzK/QJcHJrrM85SFSnonk8+QQtS4Wxam58tAA915 ------END 
CERTIFICATE----- - # Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. # Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. # Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1" @@ -3535,28 +2792,6 @@ r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK Z05phkOTOPu220+DkdRgfks+KzgHVZhepA== -----END CERTIFICATE----- -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Label: "Verisign Class 3 Public Primary Certification Authority" -# Serial: 80507572722862485515306429940691309246 -# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 -# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b -# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz -cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE -BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is -I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G -CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i -2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ -2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ ------END CERTIFICATE----- - # Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. # Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
# Label: "Microsec e-Szigno Root CA 2009" @@ -3589,36 +2824,6 @@ tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW -----END CERTIFICATE----- -# Issuer: CN=e-Guven Kok Elektronik Sertifika Hizmet Saglayicisi O=Elektronik Bilgi Guvenligi A.S. -# Subject: CN=e-Guven Kok Elektronik Sertifika Hizmet Saglayicisi O=Elektronik Bilgi Guvenligi A.S. -# Label: "E-Guven Kok Elektronik Sertifika Hizmet Saglayicisi" -# Serial: 91184789765598910059173000485363494069 -# MD5 Fingerprint: 3d:41:29:cb:1e:aa:11:74:cd:5d:b0:62:af:b0:43:5b -# SHA1 Fingerprint: dd:e1:d2:a9:01:80:2e:1d:87:5e:84:b3:80:7e:4b:b1:fd:99:41:34 -# SHA256 Fingerprint: e6:09:07:84:65:a4:19:78:0c:b6:ac:4c:1c:0b:fb:46:53:d9:d9:cc:6e:b3:94:6e:b7:f3:d6:99:97:ba:d5:98 ------BEGIN CERTIFICATE----- -MIIDtjCCAp6gAwIBAgIQRJmNPMADJ72cdpW56tustTANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxp -Z2kgQS5TLjE8MDoGA1UEAxMzZS1HdXZlbiBLb2sgRWxla3Ryb25payBTZXJ0aWZp -a2EgSGl6bWV0IFNhZ2xheWljaXNpMB4XDTA3MDEwNDExMzI0OFoXDTE3MDEwNDEx -MzI0OFowdTELMAkGA1UEBhMCVFIxKDAmBgNVBAoTH0VsZWt0cm9uaWsgQmlsZ2kg -R3V2ZW5saWdpIEEuUy4xPDA6BgNVBAMTM2UtR3V2ZW4gS29rIEVsZWt0cm9uaWsg -U2VydGlmaWthIEhpem1ldCBTYWdsYXlpY2lzaTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMMSIJ6wXgBljU5Gu4Bc6SwGl9XzcslwuedLZYDBS75+PNdU -MZTe1RK6UxYC6lhj71vY8+0qGqpxSKPcEC1fX+tcS5yWCEIlKBHMilpiAVDV6wlT -L/jDj/6z/P2douNffb7tC+Bg62nsM+3YjfsSSYMAyYuXjDtzKjKzEve5TfL0TW3H -5tYmNwjy2f1rXKPlSFxYvEK+A1qBuhw1DADT9SN+cTAIJjjcJRFHLfO6IxClv7wC -90Nex/6wN1CZew+TzuZDLMN+DfIcQ2Zgy2ExR4ejT669VmxMvLz4Bcpk9Ok0oSy1 -c+HCPujIyTQlCFzz7abHlJ+tiEMl1+E5YP6sOVkCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJ/uRLOU1fqRTy7ZVZoE -VtstxNulMA0GCSqGSIb3DQEBBQUAA4IBAQB/X7lTW2M9dTLn+sR0GstG30ZpHFLP -qk/CaOv/gKlR6D1id4k9CnU58W5dF4dvaAXBlGzZXd/aslnLpRCKysw5zZ/rTt5S -/wzw9JKp8mxTq5vSR6AfdPebmvEvFZ96ZDAYBzwqD2fK/A+JYZ1lpTzlvBNbCNvj -/+27BrtqBrF6T2XGgv0enIu1De5Iu7i9qgi0+6N8y5/NkHZchpZ4Vwpm+Vganf2X 
-KWDeEaaQHBkc7gGWIjQ0LpH5t8Qn0Xvmv/uARFoW5evg1Ao4vOSR49XrXMGs3xtq -fJ7lddK2l4fbzIcrQzqECK+rPNv3PGYxhrCdU3nt+CPeQuMtgvEP5fqX ------END CERTIFICATE----- - # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 # Label: "GlobalSign Root CA - R3" @@ -5024,3 +4229,1388 @@ wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy KwbQBM0= -----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ 
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. 
OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR 
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq 
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt 
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ 
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc 
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# 
Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: 
f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG 
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t 
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited +# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited +# Label: "WoSign" +# Serial: 125491772294754854453622855443212256657 +# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d +# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb +# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08 +-----BEGIN CERTIFICATE----- +MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV +BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw +MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX +b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN +rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U +fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc +f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2 
+ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M +x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR +aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch +zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar +uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K +mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA +Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv +HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H +EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1 +LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ +MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e +JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN +g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp +dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab +R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ +PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce +xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+ +J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl +OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT +ee5Ehr7XHuQe+w== +-----END CERTIFICATE----- + +# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited +# Subject: CN=CA 沃通根证书 O=WoSign CA Limited +# Label: "WoSign China" +# Serial: 106921963437422998931660691310149453965 +# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93 +# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6 +# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54 +-----BEGIN CERTIFICATE----- +MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV 
+BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw +MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl +ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r +D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1 +9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf +v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk +UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L +NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb ++gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V +qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K +yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G +AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK +J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC +AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4 +WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6 +yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj +/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6 +jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2 +ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX +X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n +FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D +u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l +O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le +ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1 +2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 
101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ 
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT 
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX +kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC 
Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv 
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt +XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw +0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 
Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ 
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, 
Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: 
e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy +P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe 
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. +# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. +# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5" +# Serial: 156233699172481 +# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e +# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb +# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78 +-----BEGIN CERTIFICATE----- +MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE +BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn +aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg +QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg +SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0 +MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD +VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 +dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF +bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom +/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR +Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3 +4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z +5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0 +hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID +AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX 
+SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l +VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq +URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf +peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF +Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW ++qtB4Uu2NQvAmxU= +-----END CERTIFICATE----- + +# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. +# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. +# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6" +# Serial: 138134509972618 +# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46 +# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0 +# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00 +-----BEGIN CERTIFICATE----- +MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQG +EwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdp +IMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBB +LsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBI +aXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIx +NjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNV +BAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2 +ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVs +ZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1x +eHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9 ++bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faA +z1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0p +u5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6p 
+lVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMB +AAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0Oq +FlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsC +QC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsy +o4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKID +gI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2c6RPuY/ATTMHKm +9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsG +tAuYSyher4hYyw== +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Root CA" +# Serial: 1 +# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f +# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 +# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 +-----BEGIN CERTIFICATE----- +MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET +MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb +BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz +MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx +FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g +Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 +fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl +LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV +WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF +TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb +5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc +CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri +wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ +wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG 
+m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 +F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng +WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 +2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF +AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ +0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw +F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS +g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj +qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN +h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ +ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V +btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj +Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ +8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW +gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= +-----END CERTIFICATE----- +# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Secure Server CA" +# Serial: 927650371 +# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee +# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 +# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p +dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk +MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN 
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Label: "ValiCert Class 2 VA" +# Serial: 1 +# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 +# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 +# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. 
OU=Tanusitvanykiadok +# Label: "NetLock Express (Class C) Root" +# Serial: 104 +# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4 +# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b +# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f +-----BEGIN CERTIFICATE----- +MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx +ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 +b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD +EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X +DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw +DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u +c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr +TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN +BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA +OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC +2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW +RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P +AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW +ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0 +YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz +b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO +ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB +IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs +b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs +ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s +YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg +a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g +SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0 +aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg 
+YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg +Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY +ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g +pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4 +Fp1hBWeAyNDYpQcCNJgEjTME1A== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Label: "NetLock Business (Class B) Root" +# Serial: 105 +# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6 +# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af +# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12 +-----BEGIN CERTIFICATE----- +MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx +ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 +b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD +EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05 +OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G +A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh +Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l +dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG +SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK +gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX +iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc +Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E +BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G +SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu +b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh +bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv 
+Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln +aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0 +IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh +c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph +biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo +ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP +UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj +YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo +dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA +bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06 +sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa +n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS +NitjrFgBazMpUIaD8QFI +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority +# Label: "RSA Root Certificate 1" +# Serial: 1 +# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 +# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb +# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 1 Policy Validation Authority +# Label: "ValiCert Class 1 VA" +# Serial: 1 +# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb +# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e +# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy +NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y +LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ +TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y +TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 +LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW +I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw +nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. 
+# Label: "Equifax Secure eBusiness CA 1" +# Serial: 4 +# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d +# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 +# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. 
+# Label: "Equifax Secure Global eBusiness CA" +# Serial: 1 +# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc +# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 +# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA +A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Premium Server CA" +# Serial: 1 +# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a +# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a +# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD 
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy +dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl +cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Server CA" +# Serial: 1 +# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d +# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c +# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm +MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 149843929435818692848040365716851702463 +# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 +# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 +# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do +lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc 
+AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 80507572722862485515306429940691309246 +# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 +# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b +# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i +2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ +2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network +# Label: "Verisign Class 3 Public Primary Certification Authority - G2" +# Serial: 167285380242319648451154478808036881606 +# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 +# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f +# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. 
+# Label: "GTE CyberTrust Global Root" +# Serial: 421 +# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db +# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 +# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 +-----BEGIN CERTIFICATE----- +MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD +VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv +bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv +b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV +UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU +cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds +b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH +iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS +r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 +04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r +GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 +3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P +lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ +-----END CERTIFICATE----- diff --git a/lib/requests/cookies.py b/lib/requests/cookies.py index 6969fe5cc4e37fd687e064e42b3d7eeb4cc7b3b5..d61ec2daa9ef4b894b6ec38105bf12eea7cff2bc 100644 --- a/lib/requests/cookies.py +++ b/lib/requests/cookies.py @@ -6,6 +6,7 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests. requests.utils imports from here, so be careful with imports. 
""" +import copy import time import collections from .compat import cookielib, urlparse, urlunparse, Morsel @@ -142,10 +143,13 @@ def remove_cookie_by_name(cookiejar, name, domain=None, path=None): """ clearables = [] for cookie in cookiejar: - if cookie.name == name: - if domain is None or domain == cookie.domain: - if path is None or path == cookie.path: - clearables.append((cookie.domain, cookie.path, cookie.name)) + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) for domain, path, name in clearables: cookiejar.clear(domain, path, name) @@ -302,7 +306,7 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): """Updates this jar with cookies from another CookieJar or dict-like""" if isinstance(other, cookielib.CookieJar): for cookie in other: - self.set_cookie(cookie) + self.set_cookie(copy.copy(cookie)) else: super(RequestsCookieJar, self).update(other) @@ -359,6 +363,21 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): return new_cj +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, 'copy'): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + def create_cookie(name, value, **kwargs): """Make a cookie from underspecified parameters. 
@@ -399,11 +418,14 @@ def morsel_to_cookie(morsel): expires = None if morsel['max-age']: - expires = time.time() + morsel['max-age'] + try: + expires = int(time.time() + int(morsel['max-age'])) + except ValueError: + raise TypeError('max-age: %s must be integer' % morsel['max-age']) elif morsel['expires']: time_template = '%a, %d-%b-%Y %H:%M:%S GMT' - expires = time.mktime( - time.strptime(morsel['expires'], time_template)) - time.timezone + expires = int(time.mktime( + time.strptime(morsel['expires'], time_template)) - time.timezone) return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), diff --git a/lib/requests/exceptions.py b/lib/requests/exceptions.py index 89135a802eb1a87e15aa5d3e8a94ed0fce50273b..ba0b910e316cde8f256a29a9620a3942fe8b499d 100644 --- a/lib/requests/exceptions.py +++ b/lib/requests/exceptions.py @@ -97,3 +97,18 @@ class StreamConsumedError(RequestException, TypeError): class RetryError(RequestException): """Custom retries logic failed""" + + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + pass + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """ + A file was opened in text mode, but Requests determined its binary length. + """ + pass diff --git a/lib/requests/hooks.py b/lib/requests/hooks.py index 5dfaf6b68018885d099b2f6e8c7bb876a9cf29d3..9da94366d7e4d11b3f66743d604b7320cdc8cc6d 100644 --- a/lib/requests/hooks.py +++ b/lib/requests/hooks.py @@ -12,34 +12,23 @@ Available hooks: The response generated from a Request. 
""" - - HOOKS = ['response'] - def default_hooks(): - hooks = {} - for event in HOOKS: - hooks[event] = [] - return hooks + return dict((event, []) for event in HOOKS) # TODO: response is the only one def dispatch_hook(key, hooks, hook_data, **kwargs): """Dispatches a hook dictionary on a given piece of data.""" - hooks = hooks or dict() - - if key in hooks: - hooks = hooks.get(key) - + hooks = hooks.get(key) + if hooks: if hasattr(hooks, '__call__'): hooks = [hooks] - for hook in hooks: _hook_data = hook(hook_data, **kwargs) if _hook_data is not None: hook_data = _hook_data - return hook_data diff --git a/lib/requests/models.py b/lib/requests/models.py index 419cf0a8b5ee8bd7cbec48daddaf928943974968..9c624d3c112ef99e7a4acfc38dca2bed8c7840fb 100644 --- a/lib/requests/models.py +++ b/lib/requests/models.py @@ -15,7 +15,7 @@ from .hooks import default_hooks from .structures import CaseInsensitiveDict from .auth import HTTPBasicAuth -from .cookies import cookiejar_from_dict, get_cookie_header +from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar from .packages.urllib3.fields import RequestField from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.util import parse_url @@ -30,7 +30,8 @@ from .utils import ( iter_slices, guess_json_utf, super_len, to_native_string) from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, - is_py2, chardet, json, builtin_str, basestring) + is_py2, chardet, builtin_str, basestring) +from .compat import json as complexjson from .status_codes import codes #: The set of HTTP status codes that indicate an automatically @@ -42,12 +43,11 @@ REDIRECT_STATI = ( codes.temporary_redirect, # 307 codes.permanent_redirect, # 308 ) + DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 -json_dumps = json.dumps - class RequestEncodingMixin(object): @property @@ -81,7 +81,7 @@ class RequestEncodingMixin(object): """ if isinstance(data, 
(str, bytes)): - return data + return to_native_string(data) elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): @@ -149,8 +149,7 @@ class RequestEncodingMixin(object): else: fdata = fp.read() - rf = RequestField(name=k, data=fdata, - filename=fn, headers=fh) + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) @@ -193,7 +192,7 @@ class Request(RequestHooksMixin): :param headers: dictionary of headers to send. :param files: dictionary of {filename: fileobject} files to multipart upload. :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. - :param json: json for the body to attach to the request (if data is not specified). + :param json: json for the body to attach to the request (if files or data is not specified). :param params: dictionary of URL parameters to append to the URL. :param auth: Auth handler or (user, pass) tuple. :param cookies: dictionary or CookieJar of cookies to attach to this request. @@ -207,17 +206,8 @@ class Request(RequestHooksMixin): <PreparedRequest [GET]> """ - def __init__(self, - method=None, - url=None, - headers=None, - files=None, - data=None, - params=None, - auth=None, - cookies=None, - hooks=None, - json=None): + def __init__(self, method=None, url=None, headers=None, files=None, + data=None, params=None, auth=None, cookies=None, hooks=None, json=None): # Default empty dicts for dict params. 
data = [] if data is None else data @@ -296,8 +286,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.hooks = default_hooks() def prepare(self, method=None, url=None, headers=None, files=None, - data=None, params=None, auth=None, cookies=None, hooks=None, - json=None): + data=None, params=None, auth=None, cookies=None, hooks=None, json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) @@ -306,6 +295,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) + # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. @@ -320,7 +310,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): p.method = self.method p.url = self.url p.headers = self.headers.copy() if self.headers is not None else None - p._cookies = self._cookies.copy() if self._cookies is not None else None + p._cookies = _copy_cookie_jar(self._cookies) p.body = self.body p.hooks = self.hooks return p @@ -329,12 +319,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): """Prepares the given HTTP method.""" self.method = method if self.method is not None: - self.method = self.method.upper() + self.method = to_native_string(self.method.upper()) def prepare_url(self, url, params): """Prepares the given HTTP URL.""" #: Accept objects that have string representations. - #: We're unable to blindy call unicode/str functions + #: We're unable to blindly call unicode/str functions #: as this will include the bytestring indicator (b'') #: on python 3.x. #: https://github.com/kennethreitz/requests/pull/2238 @@ -357,8 +347,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): raise InvalidURL(*e.args) if not scheme: - raise MissingSchema("Invalid URL {0!r}: No schema supplied. 
" - "Perhaps you meant http://{0}?".format(url)) + error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") + error = error.format(to_native_string(url, 'utf8')) + + raise MissingSchema(error) if not host: raise InvalidURL("Invalid URL %r: No host supplied" % url) @@ -422,9 +414,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): content_type = None length = None - if json is not None: + if not data and json is not None: content_type = 'application/json' - body = json_dumps(json) + body = complexjson.dumps(json) is_stream = all([ hasattr(data, '__iter__'), @@ -442,7 +434,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if files: raise NotImplementedError('Streamed bodies and files are mutually exclusive.') - if length is not None: + if length: self.headers['Content-Length'] = builtin_str(length) else: self.headers['Transfer-Encoding'] = 'chunked' @@ -451,7 +443,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if files: (body, content_type) = self._encode_files(files, data) else: - if data and json is None: + if data: body = self._encode_params(data) if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None @@ -501,7 +493,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_content_length(self.body) def prepare_cookies(self, cookies): - """Prepares the given HTTP cookie data.""" + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest <PreparedRequest>` object. 
Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand.""" if isinstance(cookies, cookielib.CookieJar): self._cookies = cookies @@ -514,6 +514,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): def prepare_hooks(self, hooks): """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] for event in hooks: self.register_hook(event, hooks[event]) @@ -524,16 +528,8 @@ class Response(object): """ __attrs__ = [ - '_content', - 'status_code', - 'headers', - 'url', - 'history', - 'encoding', - 'reason', - 'cookies', - 'elapsed', - 'request', + '_content', 'status_code', 'headers', 'url', 'history', + 'encoding', 'reason', 'cookies', 'elapsed', 'request' ] def __init__(self): @@ -573,7 +569,11 @@ class Response(object): self.cookies = cookiejar_from_dict({}) #: The amount of time elapsed between sending the request - #: and the arrival of the response (as a timedelta) + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. 
self.elapsed = datetime.timedelta(0) #: The :class:`PreparedRequest <PreparedRequest>` object to which this @@ -631,7 +631,7 @@ class Response(object): @property def is_permanent_redirect(self): - """True if this Response one of the permanant versions of redirect""" + """True if this Response one of the permanent versions of redirect""" return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) @property @@ -649,9 +649,10 @@ class Response(object): If decode_unicode is True, content will be decoded using the best available encoding based on the response. """ + def generate(): - try: - # Special case for urllib3. + # Special case for urllib3. + if hasattr(self.raw, 'stream'): try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk @@ -661,7 +662,7 @@ class Response(object): raise ContentDecodingError(e) except ReadTimeoutError as e: raise ConnectionError(e) - except AttributeError: + else: # Standard file-like object. while True: chunk = self.raw.read(chunk_size) @@ -792,14 +793,16 @@ class Response(object): encoding = guess_json_utf(self.content) if encoding is not None: try: - return json.loads(self.content.decode(encoding), **kwargs) + return complexjson.loads( + self.content.decode(encoding), **kwargs + ) except UnicodeDecodeError: # Wrong UTF codec detected; usually because it's not UTF-8 # but some other 8-bit codec. This is an RFC violation, # and the server didn't bother to tell us what codec *was* # used. 
pass - return json.loads(self.text, **kwargs) + return complexjson.loads(self.text, **kwargs) @property def links(self): @@ -825,10 +828,10 @@ class Response(object): http_error_msg = '' if 400 <= self.status_code < 500: - http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason) + http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url) elif 500 <= self.status_code < 600: - http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason) + http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url) if http_error_msg: raise HTTPError(http_error_msg, response=self) @@ -839,4 +842,7 @@ class Response(object): *Note: Should not normally need to be called explicitly.* """ + if not self._content_consumed: + return self.raw.close() + return self.raw.release_conn() diff --git a/lib/requests/packages/README.rst b/lib/requests/packages/README.rst new file mode 100644 index 0000000000000000000000000000000000000000..83e0c6258dc6d80e35e46d6dfad0c27fc0e84829 --- /dev/null +++ b/lib/requests/packages/README.rst @@ -0,0 +1,11 @@ +If you are planning to submit a pull request to requests with any changes in +this library do not go any further. These are independent libraries which we +vendor into requests. Any changes necessary to these libraries must be made in +them and submitted as separate pull requests to those libraries. + +urllib3 pull requests go here: https://github.com/shazow/urllib3 + +chardet pull requests go here: https://github.com/chardet/chardet + +See https://github.com/kennethreitz/requests/pull/1812#issuecomment-30854316 +for the reasoning behind this. 
diff --git a/lib/requests/packages/__init__.py b/lib/requests/packages/__init__.py index 4dcf870f3ba7de37b6c3ac328ac5e06f83b781e8..971c2ad024d24b8425bad8eef7bb08a8e935fe1c 100644 --- a/lib/requests/packages/__init__.py +++ b/lib/requests/packages/__init__.py @@ -1,107 +1,36 @@ -""" -Copyright (c) Donald Stufft, pip, and individual contributors +''' +Debian and other distributions "unbundle" requests' vendored dependencies, and +rewrite all imports to use the global versions of ``urllib3`` and ``chardet``. +The problem with this is that not only requests itself imports those +dependencies, but third-party code outside of the distros' control too. + +In reaction to these problems, the distro maintainers replaced +``requests.packages`` with a magical "stub module" that imports the correct +modules. The implementations were varying in quality and all had severe +problems. For example, a symlink (or hardlink) that links the correct modules +into place introduces problems regarding object identity, since you now have +two modules in `sys.modules` with the same API, but different identities:: + + requests.packages.urllib3 is not urllib3 + +With version ``2.5.2``, requests started to maintain its own stub, so that +distro-specific breakage would be reduced to a minimum, even though the whole +issue is not requests' fault in the first place. See +https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull +request. 
+''' -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""" from __future__ import absolute_import - import sys - -class VendorAlias(object): - - def __init__(self, package_names): - self._package_names = package_names - self._vendor_name = __name__ - self._vendor_pkg = self._vendor_name + "." - self._vendor_pkgs = [ - self._vendor_pkg + name for name in self._package_names - ] - - def find_module(self, fullname, path=None): - if fullname.startswith(self._vendor_pkg): - return self - - def load_module(self, name): - # Ensure that this only works for the vendored name - if not name.startswith(self._vendor_pkg): - raise ImportError( - "Cannot import %s, must be a subpackage of '%s'." % ( - name, self._vendor_name, - ) - ) - - if not (name == self._vendor_name or - any(name.startswith(pkg) for pkg in self._vendor_pkgs)): - raise ImportError( - "Cannot import %s, must be one of %s." 
% ( - name, self._vendor_pkgs - ) - ) - - # Check to see if we already have this item in sys.modules, if we do - # then simply return that. - if name in sys.modules: - return sys.modules[name] - - # Check to see if we can import the vendor name - try: - # We do this dance here because we want to try and import this - # module without hitting a recursion error because of a bunch of - # VendorAlias instances on sys.meta_path - real_meta_path = sys.meta_path[:] - try: - sys.meta_path = [ - m for m in sys.meta_path - if not isinstance(m, VendorAlias) - ] - __import__(name) - module = sys.modules[name] - finally: - # Re-add any additions to sys.meta_path that were made while - # during the import we just did, otherwise things like - # requests.packages.urllib3.poolmanager will fail. - for m in sys.meta_path: - if m not in real_meta_path: - real_meta_path.append(m) - - # Restore sys.meta_path with any new items. - sys.meta_path = real_meta_path - except ImportError: - # We can't import the vendor name, so we'll try to import the - # "real" name. - real_name = name[len(self._vendor_pkg):] - try: - __import__(real_name) - module = sys.modules[real_name] - except ImportError: - raise ImportError("No module named '%s'" % (name,)) - - # If we've gotten here we've found the module we're looking for, either - # as part of our vendored package, or as the real name, so we'll add - # it to sys.modules as the vendored name so that we don't have to do - # the lookup again. - sys.modules[name] = module - - # Finally, return the loaded module - return module - - -sys.meta_path.append(VendorAlias(["urllib3", "chardet"])) +try: + from . import urllib3 +except ImportError: + import urllib3 + sys.modules['%s.urllib3' % __name__] = urllib3 + +try: + from . 
import chardet +except ImportError: + import chardet + sys.modules['%s.chardet' % __name__] = chardet diff --git a/lib/requests/packages/chardet/chardetect.py b/lib/requests/packages/chardet/chardetect.py old mode 100644 new mode 100755 diff --git a/lib/requests/packages/urllib3/__init__.py b/lib/requests/packages/urllib3/__init__.py index 0660b9c83a593749284f8444db1c6fc726889ea3..86bb71d23ffdd6d178047fdcb70ce8ef443aa10b 100644 --- a/lib/requests/packages/urllib3/__init__.py +++ b/lib/requests/packages/urllib3/__init__.py @@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using. __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' -__version__ = '1.10.2' +__version__ = '1.12' from .connectionpool import ( @@ -55,9 +55,14 @@ def add_stderr_logger(level=logging.DEBUG): del NullHandler -# Set security warning to always go off by default. import warnings -warnings.simplefilter('always', exceptions.SecurityWarning) +# SecurityWarning's always go off by default. +warnings.simplefilter('always', exceptions.SecurityWarning, append=True) +# SubjectAltNameWarning's should go off once per host +warnings.simplefilter('default', exceptions.SubjectAltNameWarning) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. 
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning, + append=True) def disable_warnings(category=exceptions.HTTPWarning): """ diff --git a/lib/requests/packages/urllib3/_collections.py b/lib/requests/packages/urllib3/_collections.py index cc424de0f45534ff0a5297be39c2e0d096ebebb1..b68b9a59bfdd397f8a6a3a05c0adf7ec3db1330d 100644 --- a/lib/requests/packages/urllib3/_collections.py +++ b/lib/requests/packages/urllib3/_collections.py @@ -97,14 +97,7 @@ class RecentlyUsedContainer(MutableMapping): return list(iterkeys(self._container)) -_dict_setitem = dict.__setitem__ -_dict_getitem = dict.__getitem__ -_dict_delitem = dict.__delitem__ -_dict_contains = dict.__contains__ -_dict_setdefault = dict.setdefault - - -class HTTPHeaderDict(dict): +class HTTPHeaderDict(MutableMapping): """ :param headers: An iterable of field-value pairs. Must not contain multiple field names @@ -139,7 +132,8 @@ class HTTPHeaderDict(dict): """ def __init__(self, headers=None, **kwargs): - dict.__init__(self) + super(HTTPHeaderDict, self).__init__() + self._container = {} if headers is not None: if isinstance(headers, HTTPHeaderDict): self._copy_from(headers) @@ -149,38 +143,44 @@ class HTTPHeaderDict(dict): self.extend(kwargs) def __setitem__(self, key, val): - return _dict_setitem(self, key.lower(), (key, val)) + self._container[key.lower()] = (key, val) + return self._container[key.lower()] def __getitem__(self, key): - val = _dict_getitem(self, key.lower()) + val = self._container[key.lower()] return ', '.join(val[1:]) def __delitem__(self, key): - return _dict_delitem(self, key.lower()) + del self._container[key.lower()] def __contains__(self, key): - return _dict_contains(self, key.lower()) + return key.lower() in self._container def __eq__(self, other): if not isinstance(other, Mapping) and not hasattr(other, 'keys'): return False if not isinstance(other, type(self)): other = type(self)(other) - return dict((k1, self[k1]) for k1 in self) == dict((k2, other[k2]) for k2 in 
other) + return (dict((k.lower(), v) for k, v in self.itermerged()) == + dict((k.lower(), v) for k, v in other.itermerged())) def __ne__(self, other): return not self.__eq__(other) - values = MutableMapping.values - get = MutableMapping.get - update = MutableMapping.update - if not PY3: # Python 2 iterkeys = MutableMapping.iterkeys itervalues = MutableMapping.itervalues __marker = object() + def __len__(self): + return len(self._container) + + def __iter__(self): + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + def pop(self, key, default=__marker): '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. @@ -216,7 +216,7 @@ class HTTPHeaderDict(dict): key_lower = key.lower() new_vals = key, val # Keep the common case aka no item present as fast as possible - vals = _dict_setdefault(self, key_lower, new_vals) + vals = self._container.setdefault(key_lower, new_vals) if new_vals is not vals: # new_vals was not inserted, as there was a previous one if isinstance(vals, list): @@ -225,22 +225,22 @@ class HTTPHeaderDict(dict): else: # vals should be a tuple then, i.e. only one item so far # Need to convert the tuple to list for further extension - _dict_setitem(self, key_lower, [vals[0], vals[1], val]) + self._container[key_lower] = [vals[0], vals[1], val] - def extend(*args, **kwargs): + def extend(self, *args, **kwargs): """Generic import function for any type of header-like object. 
Adapted version of MutableMapping.update in order to insert items with self.add instead of self.__setitem__ """ - if len(args) > 2: - raise TypeError("update() takes at most 2 positional " + if len(args) > 1: + raise TypeError("extend() takes at most 1 positional " "arguments ({} given)".format(len(args))) - elif not args: - raise TypeError("update() takes at least 1 argument (0 given)") - self = args[0] - other = args[1] if len(args) >= 2 else () - - if isinstance(other, Mapping): + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, Mapping): for key in other: self.add(key, other[key]) elif hasattr(other, "keys"): @@ -257,7 +257,7 @@ class HTTPHeaderDict(dict): """Returns a list of all the values for the named field. Returns an empty list if the key doesn't exist.""" try: - vals = _dict_getitem(self, key.lower()) + vals = self._container[key.lower()] except KeyError: return [] else: @@ -276,11 +276,11 @@ class HTTPHeaderDict(dict): def _copy_from(self, other): for key in other: - val = _dict_getitem(other, key) + val = other.getlist(key) if isinstance(val, list): # Don't need to convert tuples val = list(val) - _dict_setitem(self, key, val) + self._container[key.lower()] = [key] + val def copy(self): clone = type(self)() @@ -290,31 +290,34 @@ class HTTPHeaderDict(dict): def iteritems(self): """Iterate over all header lines, including duplicate ones.""" for key in self: - vals = _dict_getitem(self, key) + vals = self._container[key.lower()] for val in vals[1:]: yield vals[0], val def itermerged(self): """Iterate over all headers, merging duplicate ones together.""" for key in self: - val = _dict_getitem(self, key) + val = self._container[key.lower()] yield val[0], ', '.join(val[1:]) def items(self): return list(self.iteritems()) @classmethod - def from_httplib(cls, message, duplicates=('set-cookie',)): # Python 2 + def from_httplib(cls, message): # 
Python 2 """Read headers from a Python 2 httplib message object.""" - ret = cls(message.items()) - # ret now contains only the last header line for each duplicate. - # Importing with all duplicates would be nice, but this would - # mean to repeat most of the raw parsing already done, when the - # message object was created. Extracting only the headers of interest - # separately, the cookies, should be faster and requires less - # extra code. - for key in duplicates: - ret.discard(key) - for val in message.getheaders(key): - ret.add(key, val) - return ret + # python2.7 does not expose a proper API for exporting multiheaders + # efficiently. This function re-reads raw lines from the message + # object and extracts the multiheaders properly. + headers = [] + + for line in message.headers: + if line.startswith((' ', '\t')): + key, value = headers[-1] + headers[-1] = (key, value + '\r\n' + line.rstrip()) + continue + + key, value = line.split(':', 1) + headers.append((key, value.strip())) + + return cls(headers) diff --git a/lib/requests/packages/urllib3/connection.py b/lib/requests/packages/urllib3/connection.py index e5de769d8c501aaced9e174574efdc67188a3fff..3eab1e28190ec1afc4e484876b4f567b3e9286ec 100644 --- a/lib/requests/packages/urllib3/connection.py +++ b/lib/requests/packages/urllib3/connection.py @@ -1,7 +1,7 @@ import datetime import sys import socket -from socket import timeout as SocketTimeout +from socket import error as SocketError, timeout as SocketTimeout import warnings from .packages import six @@ -36,9 +36,10 @@ except NameError: # Python 2: from .exceptions import ( + NewConnectionError, ConnectTimeoutError, + SubjectAltNameWarning, SystemTimeWarning, - SecurityWarning, ) from .packages.ssl_match_hostname import match_hostname @@ -133,11 +134,15 @@ class HTTPConnection(_HTTPConnection, object): conn = connection.create_connection( (self.host, self.port), self.timeout, **extra_kw) - except SocketTimeout: + except SocketTimeout as e: raise 
ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) + except SocketError as e: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e) + return conn def _prepare_conn(self, conn): @@ -185,17 +190,23 @@ class VerifiedHTTPSConnection(HTTPSConnection): """ cert_reqs = None ca_certs = None + ca_cert_dir = None ssl_version = None assert_fingerprint = None def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, - assert_hostname=None, assert_fingerprint=None): + assert_hostname=None, assert_fingerprint=None, + ca_cert_dir=None): + + if (ca_certs or ca_cert_dir) and cert_reqs is None: + cert_reqs = 'CERT_REQUIRED' self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint @@ -234,6 +245,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file, cert_reqs=resolved_cert_reqs, ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, server_hostname=hostname, ssl_version=resolved_ssl_version) @@ -245,10 +257,11 @@ class VerifiedHTTPSConnection(HTTPSConnection): cert = self.sock.getpeercert() if not cert.get('subjectAltName', ()): warnings.warn(( - 'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. ' - 'This feature is being removed by major browsers and deprecated by RFC 2818. ' - '(See https://github.com/shazow/urllib3/issues/497 for details.)'), - SecurityWarning + 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' + '`commonName` for now. This feature is being removed by major browsers and ' + 'deprecated by RFC 2818. 
(See https://github.com/shazow/urllib3/issues/497 ' + 'for details.)'.format(hostname)), + SubjectAltNameWarning ) match_hostname(cert, self.assert_hostname or hostname) @@ -260,3 +273,5 @@ if ssl: # Make a copy for testing. UnverifiedHTTPSConnection = HTTPSConnection HTTPSConnection = VerifiedHTTPSConnection +else: + HTTPSConnection = DummyConnection diff --git a/lib/requests/packages/urllib3/connectionpool.py b/lib/requests/packages/urllib3/connectionpool.py index 0085345c43d0f84b63f08c643d44b4e6e70c2093..b38ac68d7b1b81e82e1dc3b855c2746634fbe330 100644 --- a/lib/requests/packages/urllib3/connectionpool.py +++ b/lib/requests/packages/urllib3/connectionpool.py @@ -17,14 +17,17 @@ from .exceptions import ( ClosedPoolError, ProtocolError, EmptyPoolError, + HeaderParsingError, HostChangedError, LocationValueError, MaxRetryError, ProxyError, + ConnectTimeoutError, ReadTimeoutError, SSLError, TimeoutError, InsecureRequestWarning, + NewConnectionError, ) from .packages.ssl_match_hostname import CertificateError from .packages import six @@ -38,9 +41,10 @@ from .request import RequestMethods from .response import HTTPResponse from .util.connection import is_connection_dropped +from .util.response import assert_header_parsing from .util.retry import Retry from .util.timeout import Timeout -from .util.url import get_host +from .util.url import get_host, Url xrange = six.moves.xrange @@ -120,7 +124,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param maxsize: Number of connections to save that can be reused. More than 1 is useful - in multithreaded situations. If ``block`` is set to false, more + in multithreaded situations. If ``block`` is set to False, more connections will be created but they will not be saved once they've been used. 
@@ -381,8 +385,19 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): log.debug("\"%s %s %s\" %s %s" % (method, url, http_version, httplib_response.status, httplib_response.length)) + + try: + assert_header_parsing(httplib_response.msg) + except HeaderParsingError as hpe: # Platform-specific: Python 3 + log.warning( + 'Failed to parse headers (url=%s): %s', + self._absolute_url(url), hpe, exc_info=True) + return httplib_response + def _absolute_url(self, path): + return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url + def close(self): """ Close all pooled connections and disable the pool. @@ -568,27 +583,24 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Close the connection. If a connection is reused on which there # was a Certificate error, the next request will certainly raise # another Certificate error. - if conn: - conn.close() - conn = None + conn = conn and conn.close() + release_conn = True raise SSLError(e) except SSLError: # Treat SSLError separately from BaseSSLError to preserve # traceback. - if conn: - conn.close() - conn = None + conn = conn and conn.close() + release_conn = True raise - except (TimeoutError, HTTPException, SocketError, ConnectionError) as e: - if conn: - # Discard the connection for these exceptions. It will be - # be replaced during the next _get_conn() call. - conn.close() - conn = None + except (TimeoutError, HTTPException, SocketError, ProtocolError) as e: + # Discard the connection for these exceptions. It will be + # be replaced during the next _get_conn() call. 
+ conn = conn and conn.close() + release_conn = True - if isinstance(e, SocketError) and self.proxy: + if isinstance(e, (SocketError, NewConnectionError)) and self.proxy: e = ProxyError('Cannot connect to proxy.', e) elif isinstance(e, (SocketError, HTTPException)): e = ProtocolError('Connection aborted.', e) @@ -626,6 +638,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_redirect: + # Release the connection for this response, since we're not + # returning it to be released manually. + response.release_conn() raise return response @@ -662,10 +677,10 @@ class HTTPSConnectionPool(HTTPConnectionPool): ``assert_hostname`` and ``host`` in this order to verify connections. If ``assert_hostname`` is False, no verification is done. - The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and - ``ssl_version`` are only used if :mod:`ssl` is available and are fed into - :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket - into an SSL socket. + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is + available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. 
""" scheme = 'https' @@ -678,15 +693,20 @@ class HTTPSConnectionPool(HTTPConnectionPool): key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, ssl_version=None, assert_hostname=None, assert_fingerprint=None, - **conn_kw): + ca_cert_dir=None, **conn_kw): HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, block, headers, retries, _proxy, _proxy_headers, **conn_kw) + + if ca_certs and cert_reqs is None: + cert_reqs = 'CERT_REQUIRED' + self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir self.ssl_version = ssl_version self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint @@ -702,6 +722,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): cert_file=self.cert_file, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, assert_hostname=self.assert_hostname, assert_fingerprint=self.assert_fingerprint) conn.ssl_version = self.ssl_version @@ -735,7 +756,6 @@ class HTTPSConnectionPool(HTTPConnectionPool): % (self.num_connections, self.host)) if not self.ConnectionCls or self.ConnectionCls is DummyConnection: - # Platform-specific: Python without ssl raise SSLError("Can't connect to HTTPS URL because the SSL " "module is not available.") diff --git a/lib/requests/packages/urllib3/contrib/appengine.py b/lib/requests/packages/urllib3/contrib/appengine.py new file mode 100644 index 0000000000000000000000000000000000000000..ed9d8b816cab393ea71ad3c14a1a6a0d2dff790b --- /dev/null +++ b/lib/requests/packages/urllib3/contrib/appengine.py @@ -0,0 +1,222 @@ +import logging +import os +import warnings + +from ..exceptions import ( + HTTPError, + HTTPWarning, + MaxRetryError, + ProtocolError, + TimeoutError, + SSLError +) + +from ..packages.six import BytesIO +from ..request import RequestMethods +from ..response import HTTPResponse +from ..util.timeout import Timeout +from ..util.retry import Retry + +try: + from 
google.appengine.api import urlfetch +except ImportError: + urlfetch = None + + +log = logging.getLogger(__name__) + + +class AppEnginePlatformWarning(HTTPWarning): + pass + + +class AppEnginePlatformError(HTTPError): + pass + + +class AppEngineManager(RequestMethods): + """ + Connection manager for Google App Engine sandbox applications. + + This manager uses the URLFetch service directly instead of using the + emulated httplib, and is subject to URLFetch limitations as described in + the App Engine documentation here: + + https://cloud.google.com/appengine/docs/python/urlfetch + + Notably it will raise an AppEnginePlatformError if: + * URLFetch is not available. + * If you attempt to use this on GAEv2 (Managed VMs), as full socket + support is available. + * If a request size is more than 10 megabytes. + * If a response size is more than 32 megabtyes. + * If you use an unsupported request method such as OPTIONS. + + Beyond those cases, it will raise normal urllib3 errors. + """ + + def __init__(self, headers=None, retries=None, validate_certificate=True): + if not urlfetch: + raise AppEnginePlatformError( + "URLFetch is not available in this environment.") + + if is_prod_appengine_v2(): + raise AppEnginePlatformError( + "Use normal urllib3.PoolManager instead of AppEngineManager" + "on Managed VMs, as using URLFetch is not necessary in " + "this environment.") + + warnings.warn( + "urllib3 is using URLFetch on Google App Engine sandbox instead " + "of sockets. 
To use sockets directly instead of URLFetch see " + "https://urllib3.readthedocs.org/en/latest/contrib.html.", + AppEnginePlatformWarning) + + RequestMethods.__init__(self, headers) + self.validate_certificate = validate_certificate + + self.retries = retries or Retry.DEFAULT + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Return False to re-raise any potential exceptions + return False + + def urlopen(self, method, url, body=None, headers=None, + retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw): + + retries = self._get_retries(retries, redirect) + + try: + response = urlfetch.fetch( + url, + payload=body, + method=method, + headers=headers or {}, + allow_truncated=False, + follow_redirects=( + redirect and + retries.redirect != 0 and + retries.total), + deadline=self._get_absolute_timeout(timeout), + validate_certificate=self.validate_certificate, + ) + except urlfetch.DeadlineExceededError as e: + raise TimeoutError(self, e) + + except urlfetch.InvalidURLError as e: + if 'too large' in e.message: + raise AppEnginePlatformError( + "URLFetch request too large, URLFetch only " + "supports requests up to 10mb in size.", e) + raise ProtocolError(e) + + except urlfetch.DownloadError as e: + if 'Too many redirects' in e.message: + raise MaxRetryError(self, url, reason=e) + raise ProtocolError(e) + + except urlfetch.ResponseTooLargeError as e: + raise AppEnginePlatformError( + "URLFetch response too large, URLFetch only supports" + "responses up to 32mb in size.", e) + + except urlfetch.SSLCertificateError as e: + raise SSLError(e) + + except urlfetch.InvalidMethodError as e: + raise AppEnginePlatformError( + "URLFetch does not support method: %s" % method, e) + + http_response = self._urlfetch_response_to_http_response( + response, **response_kw) + + # Check for redirect response + if (http_response.get_redirect_location() and + retries.raise_on_redirect and redirect): + raise 
MaxRetryError(self, url, "too many redirects") + + # Check if we should retry the HTTP response. + if retries.is_forced_retry(method, status_code=http_response.status): + retries = retries.increment( + method, url, response=http_response, _pool=self) + log.info("Forced retry: %s" % url) + retries.sleep() + return self.urlopen( + method, url, + body=body, headers=headers, + retries=retries, redirect=redirect, + timeout=timeout, **response_kw) + + return http_response + + def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): + + if is_prod_appengine_v1(): + # Production GAE handles deflate encoding automatically, but does + # not remove the encoding header. + content_encoding = urlfetch_resp.headers.get('content-encoding') + + if content_encoding == 'deflate': + del urlfetch_resp.headers['content-encoding'] + + return HTTPResponse( + # In order for decoding to work, we must present the content as + # a file-like object. + body=BytesIO(urlfetch_resp.content), + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + **response_kw + ) + + def _get_absolute_timeout(self, timeout): + if timeout is Timeout.DEFAULT_TIMEOUT: + return 5 # 5s is the default timeout for URLFetch. 
+ if isinstance(timeout, Timeout): + if not timeout.read is timeout.connect: + warnings.warn( + "URLFetch does not support granular timeout settings, " + "reverting to total timeout.", AppEnginePlatformWarning) + return timeout.total + return timeout + + def _get_retries(self, retries, redirect): + if not isinstance(retries, Retry): + retries = Retry.from_int( + retries, redirect=redirect, default=self.retries) + + if retries.connect or retries.read or retries.redirect: + warnings.warn( + "URLFetch only supports total retries and does not " + "recognize connect, read, or redirect retry parameters.", + AppEnginePlatformWarning) + + return retries + + +def is_appengine(): + return (is_local_appengine() or + is_prod_appengine_v1() or + is_prod_appengine_v2()) + + +def is_appengine_sandbox(): + return is_appengine() and not is_prod_appengine_v2() + + +def is_local_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) + + +def is_prod_appengine_v1(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and + not is_prod_appengine_v2()) + + +def is_prod_appengine_v2(): + return os.environ.get('GAE_VM', False) == 'true' diff --git a/lib/requests/packages/urllib3/contrib/pyopenssl.py b/lib/requests/packages/urllib3/contrib/pyopenssl.py index ee657fb3f2905560165e005280d9af9b5b58628d..c20ae46d538c3744165040cb5cc5e235a72be086 100644 --- a/lib/requests/packages/urllib3/contrib/pyopenssl.py +++ b/lib/requests/packages/urllib3/contrib/pyopenssl.py @@ -38,8 +38,6 @@ Module Variables ---------------- :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites. - Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES: - ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS`` .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. 
_crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) @@ -85,23 +83,16 @@ _openssl_verify = { + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } -# A secure default. -# Sources for more information on TLS ciphers: -# -# - https://wiki.mozilla.org/Security/Server_Side_TLS -# - https://www.ssllabs.com/projects/best-practices/index.html -# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ -# -# The general intent is: -# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), -# - prefer ECDHE over DHE for better performance, -# - prefer any AES-GCM over any AES-CBC for better performance and security, -# - use 3DES as fallback which is secure but slow, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. -DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \ - "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \ - "!aNULL:!MD5:!DSS" +DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS +# OpenSSL will only write 16K at a time +SSL_WRITE_BLOCKSIZE = 16384 + +try: + _ = memoryview + has_memoryview = True +except NameError: + has_memoryview = False orig_util_HAS_SNI = util.HAS_SNI orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket @@ -221,13 +212,21 @@ class WrappedSocket(object): continue def sendall(self, data): - while len(data): - sent = self._send_until_done(data) - data = data[sent:] + if has_memoryview and not isinstance(data, memoryview): + data = memoryview(data) + + total_sent = 0 + while total_sent < len(data): + sent = self._send_until_done(data[total_sent:total_sent+SSL_WRITE_BLOCKSIZE]) + total_sent += sent + + def shutdown(self): + # FIXME rethrow compatible exceptions should we ever use this + self.connection.shutdown() def close(self): if self._makefile_refs < 1: - return self.connection.shutdown() + return self.connection.close() else: self._makefile_refs -= 1 @@ -268,7 +267,7 @@ def _verify_callback(cnx, x509, err_no, err_depth, 
return_code): def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, - ssl_version=None): + ssl_version=None, ca_cert_dir=None): ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version]) if certfile: keyfile = keyfile or certfile # Match behaviour of the normal python ssl library @@ -277,9 +276,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ctx.use_privatekey_file(keyfile) if cert_reqs != ssl.CERT_NONE: ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback) - if ca_certs: + if ca_certs or ca_cert_dir: try: - ctx.load_verify_locations(ca_certs, None) + ctx.load_verify_locations(ca_certs, ca_cert_dir) except OpenSSL.SSL.Error as e: raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) else: @@ -299,10 +298,12 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, try: cnx.do_handshake() except OpenSSL.SSL.WantReadError: - select.select([sock], [], []) + rd, _, _ = select.select([sock], [], [], sock.gettimeout()) + if not rd: + raise timeout('select timed out') continue except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad handshake', e) + raise ssl.SSLError('bad handshake: %r' % e) break return WrappedSocket(cnx, sock) diff --git a/lib/requests/packages/urllib3/exceptions.py b/lib/requests/packages/urllib3/exceptions.py index 5d52301122e42dcd44f266be1257abceaf36f829..9607d65f32d07a0f0502eb01683ce97cdaf6606e 100644 --- a/lib/requests/packages/urllib3/exceptions.py +++ b/lib/requests/packages/urllib3/exceptions.py @@ -112,6 +112,9 @@ class ConnectTimeoutError(TimeoutError): "Raised when a socket timeout occurs while connecting to a server" pass +class NewConnectionError(ConnectTimeoutError, PoolError): + "Raised when we fail to establish a new connection. Usually ECONNREFUSED." + pass class EmptyPoolError(PoolError): "Raised when a pool runs out of connections and no more are allowed." 
@@ -149,6 +152,11 @@ class SecurityWarning(HTTPWarning): pass +class SubjectAltNameWarning(SecurityWarning): + "Warned when connecting to a host with a certificate missing a SAN." + pass + + class InsecureRequestWarning(SecurityWarning): "Warned when making an unverified HTTPS request." pass @@ -162,3 +170,24 @@ class SystemTimeWarning(SecurityWarning): class InsecurePlatformWarning(SecurityWarning): "Warned when certain SSL configuration is not available on a platform." pass + + +class ResponseNotChunked(ProtocolError, ValueError): + "Response needs to be chunked in order to read it as chunks." + pass + + +class ProxySchemeUnknown(AssertionError, ValueError): + "ProxyManager does not support the supplied scheme" + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. + + def __init__(self, scheme): + message = "Not supported proxy scheme %s" % scheme + super(ProxySchemeUnknown, self).__init__(message) + + +class HeaderParsingError(HTTPError): + "Raised by assert_header_parsing, but we convert it to a log.warning statement." 
+ def __init__(self, defects, unparsed_data): + message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) + super(HeaderParsingError, self).__init__(message) diff --git a/lib/requests/packages/urllib3/poolmanager.py b/lib/requests/packages/urllib3/poolmanager.py index b8d1e745d14997f084d44edf0c17fc646ca8d7bc..76b6a129aec393e323e7423091180f93fb9375b8 100644 --- a/lib/requests/packages/urllib3/poolmanager.py +++ b/lib/requests/packages/urllib3/poolmanager.py @@ -8,7 +8,7 @@ except ImportError: from ._collections import RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme -from .exceptions import LocationValueError, MaxRetryError +from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown from .request import RequestMethods from .util.url import parse_url from .util.retry import Retry @@ -227,8 +227,8 @@ class ProxyManager(PoolManager): port = port_by_scheme.get(proxy.scheme, 80) proxy = proxy._replace(port=port) - assert proxy.scheme in ("http", "https"), \ - 'Not supported proxy scheme %s' % proxy.scheme + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) self.proxy = proxy self.proxy_headers = proxy_headers or {} diff --git a/lib/requests/packages/urllib3/request.py b/lib/requests/packages/urllib3/request.py index b08d6c92746a0d9952b6d9e3875dd125b7416af8..a1a12bc5b3d2fcac5bcecf7d9285c35e06382538 100644 --- a/lib/requests/packages/urllib3/request.py +++ b/lib/requests/packages/urllib3/request.py @@ -71,14 +71,22 @@ class RequestMethods(object): headers=headers, **urlopen_kw) - def request_encode_url(self, method, url, fields=None, **urlopen_kw): + def request_encode_url(self, method, url, fields=None, headers=None, + **urlopen_kw): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc. 
""" + if headers is None: + headers = self.headers + + extra_kw = {'headers': headers} + extra_kw.update(urlopen_kw) + if fields: url += '?' + urlencode(fields) - return self.urlopen(method, url, **urlopen_kw) + + return self.urlopen(method, url, **extra_kw) def request_encode_body(self, method, url, fields=None, headers=None, encode_multipart=True, multipart_boundary=None, diff --git a/lib/requests/packages/urllib3/response.py b/lib/requests/packages/urllib3/response.py index 34cd3d7057fc71855db223b4e5b8fb939211a700..788eb6cacda77d60d25953ea8ba969732dee2301 100644 --- a/lib/requests/packages/urllib3/response.py +++ b/lib/requests/packages/urllib3/response.py @@ -1,12 +1,16 @@ +from contextlib import contextmanager import zlib import io from socket import timeout as SocketTimeout from ._collections import HTTPHeaderDict -from .exceptions import ProtocolError, DecodeError, ReadTimeoutError +from .exceptions import ( + ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked +) from .packages.six import string_types as basestring, binary_type, PY3 +from .packages.six.moves import http_client as httplib from .connection import HTTPException, BaseSSLError -from .util.response import is_fp_closed +from .util.response import is_fp_closed, is_response_to_head class DeflateDecoder(object): @@ -117,7 +121,17 @@ class HTTPResponse(io.IOBase): if hasattr(body, 'read'): self._fp = body - if preload_content and not self._body: + # Are we using the chunked-style of transfer encoding? + self.chunked = False + self.chunk_left = None + tr_enc = self.headers.get('transfer-encoding', '').lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + # We certainly don't want to preload content when the response is chunked. 
+ if not self.chunked and preload_content and not self._body: self._body = self.read(decode_content=decode_content) def get_redirect_location(self): @@ -157,6 +171,76 @@ class HTTPResponse(io.IOBase): """ return self._fp_bytes_read + def _init_decoder(self): + """ + Set-up the _decoder attribute if necessar. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get('content-encoding', '').lower() + if self._decoder is None and content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + + def _decode(self, data, decode_content, flush_decoder): + """ + Decode the data passed in and potentially flush the decoder. + """ + try: + if decode_content and self._decoder: + data = self._decoder.decompress(data) + except (IOError, zlib.error) as e: + content_encoding = self.headers.get('content-encoding', '').lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, e) + + if flush_decoder and decode_content and self._decoder: + buf = self._decoder.decompress(binary_type()) + data += buf + self._decoder.flush() + + return data + + @contextmanager + def _error_catcher(self): + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. + """ + try: + try: + yield + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? + if 'read operation timed out' not in str(e): # Defensive: + # This shouldn't happen but just in case we're missing an edge + # case, let's avoid swallowing SSL errors. 
+ raise + + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except HTTPException as e: + # This includes IncompleteRead. + raise ProtocolError('Connection broken: %r' % e, e) + except Exception: + # The response may not be closed but we're not going to use it anymore + # so close it now to ensure that the connection is released back to the pool. + if self._original_response and not self._original_response.isclosed(): + self._original_response.close() + + raise + finally: + if self._original_response and self._original_response.isclosed(): + self.release_conn() + def read(self, amt=None, decode_content=None, cache_content=False): """ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional @@ -178,12 +262,7 @@ class HTTPResponse(io.IOBase): after having ``.read()`` the file object. (Overridden if ``amt`` is set.) """ - # Note: content-encoding value should be case-insensitive, per RFC 7230 - # Section 3.2 - content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None: - if content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) + self._init_decoder() if decode_content is None: decode_content = self.decode_content @@ -191,67 +270,37 @@ class HTTPResponse(io.IOBase): return flush_decoder = False - - try: - try: - if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() + data = None + + with self._error_catcher(): + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() + flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) + if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. 
There is + # no harm in redundantly calling close. + self._fp.close() flush_decoder = True - else: - cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. - # Close the connection when no data is returned - # - # This is redundant to what httplib/http.client _should_ - # already do. However, versions of python released before - # December 15, 2012 (http://bugs.python.org/issue16298) do - # not properly close the connection in all cases. There is - # no harm in redundantly calling close. - self._fp.close() - flush_decoder = True - - except SocketTimeout: - # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but - # there is yet no clean way to get at it from this context. - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except BaseSSLError as e: - # FIXME: Is there a better way to differentiate between SSLErrors? - if 'read operation timed out' not in str(e): # Defensive: - # This shouldn't happen but just in case we're missing an edge - # case, let's avoid swallowing SSL errors. - raise - - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except HTTPException as e: - # This includes IncompleteRead. - raise ProtocolError('Connection broken: %r' % e, e) + if data: self._fp_bytes_read += len(data) - try: - if decode_content and self._decoder: - data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: - raise DecodeError( - "Received response with content-encoding: %s, but " - "failed to decode it." 
% content_encoding, e) - - if flush_decoder and decode_content and self._decoder: - buf = self._decoder.decompress(binary_type()) - data += buf + self._decoder.flush() + data = self._decode(data, decode_content, flush_decoder) if cache_content: self._body = data - return data + return data - finally: - if self._original_response and self._original_response.isclosed(): - self.release_conn() def stream(self, amt=2**16, decode_content=None): """ @@ -269,11 +318,15 @@ class HTTPResponse(io.IOBase): If True, will attempt to decode the body based on the 'content-encoding' header. """ - while not is_fp_closed(self._fp): - data = self.read(amt=amt, decode_content=decode_content) + if self.chunked: + for line in self.read_chunked(amt, decode_content=decode_content): + yield line + else: + while not is_fp_closed(self._fp): + data = self.read(amt=amt, decode_content=decode_content) - if data: - yield data + if data: + yield data @classmethod def from_httplib(ResponseCls, r, **response_kw): @@ -285,6 +338,7 @@ class HTTPResponse(io.IOBase): with ``original_response=r``. """ headers = r.msg + if not isinstance(headers, HTTPHeaderDict): if PY3: # Python 3 headers = HTTPHeaderDict(headers.items()) @@ -351,3 +405,81 @@ class HTTPResponse(io.IOBase): else: b[:len(temp)] = temp return len(temp) + + def _update_chunk_length(self): + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return + line = self._fp.fp.readline() + line = line.split(b';', 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + # Invalid chunked protocol response, abort. + self.close() + raise httplib.IncompleteRead(line) + + def _handle_chunk(self, amt): + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) + returned_chunk = chunk + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. 
+ self.chunk_left = None + elif amt < self.chunk_left: + value = self._fp._safe_read(amt) + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + return returned_chunk + + def read_chunked(self, amt=None, decode_content=None): + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked("Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing.") + + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return + + with self._error_catcher(): + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + yield self._decode(chunk, decode_content=decode_content, + flush_decoder=True) + + # Chunk content ends with \r\n: discard it. + while True: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b'\r\n': + break + + # We read everything; close the "file". 
+ if self._original_response: + self._original_response.close() diff --git a/lib/requests/packages/urllib3/util/connection.py b/lib/requests/packages/urllib3/util/connection.py index 859aec6ee6be0f6e2e59753ad7fb41c9370a3073..4f2f0f185f7bca2bcd184fd2dadedca44fb95bb4 100644 --- a/lib/requests/packages/urllib3/util/connection.py +++ b/lib/requests/packages/urllib3/util/connection.py @@ -60,6 +60,8 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, """ host, port = address + if host.startswith('['): + host = host.strip('[]') err = None for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res @@ -78,16 +80,16 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, sock.connect(sa) return sock - except socket.error as _: - err = _ + except socket.error as e: + err = e if sock is not None: sock.close() sock = None if err is not None: raise err - else: - raise socket.error("getaddrinfo returns an empty list") + + raise socket.error("getaddrinfo returns an empty list") def _set_socket_options(sock, options): diff --git a/lib/requests/packages/urllib3/util/response.py b/lib/requests/packages/urllib3/util/response.py index 45fff55246e591cc3337f731d989c37133847850..2c1de154a42d342183d318977cd956febc86e7f1 100644 --- a/lib/requests/packages/urllib3/util/response.py +++ b/lib/requests/packages/urllib3/util/response.py @@ -1,3 +1,8 @@ +from ..packages.six.moves import http_client as httplib + +from ..exceptions import HeaderParsingError + + def is_fp_closed(obj): """ Checks whether a given file-like object is closed. @@ -20,3 +25,49 @@ def is_fp_closed(obj): pass raise ValueError("Unable to determine whether fp is closed.") + + +def assert_header_parsing(headers): + """ + Asserts whether all headers have been successfully parsed. + Extracts encountered errors from the result of parsing headers. + + Only works on Python 3. + + :param headers: Headers to verify. 
+ :type headers: `httplib.HTTPMessage`. + + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier add an explicit check. + if not isinstance(headers, httplib.HTTPMessage): + raise TypeError('expected httplib.Message, got {}.'.format( + type(headers))) + + defects = getattr(headers, 'defects', None) + get_payload = getattr(headers, 'get_payload', None) + + unparsed_data = None + if get_payload: # Platform-specific: Python 3. + unparsed_data = get_payload() + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response): + """ + Checks, wether a the request of a response has been a HEAD-request. + Handles the quirks of AppEngine. + + :param conn: + :type conn: :class:`httplib.HTTPResponse` + """ + # FIXME: Can we do this somehow without accessing private httplib _method? + method = response._method + if isinstance(method, int): # Platform-specific: Appengine + return method == 3 + return method.upper() == 'HEAD' diff --git a/lib/requests/packages/urllib3/util/retry.py b/lib/requests/packages/urllib3/util/retry.py index 7e0959df37c1be566c2ff83bf1bbf84a11467a45..1fb1f23baaa8e958757dc7da052fcf65bd1bd467 100644 --- a/lib/requests/packages/urllib3/util/retry.py +++ b/lib/requests/packages/urllib3/util/retry.py @@ -94,7 +94,7 @@ class Retry(object): seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer - than :attr:`Retry.MAX_BACKOFF`. + than :attr:`Retry.BACKOFF_MAX`. By default, backoff is disabled (set to 0). 
diff --git a/lib/requests/packages/urllib3/util/ssl_.py b/lib/requests/packages/urllib3/util/ssl_.py index e7e7dfae1881d7303e584fb61fbbd1fc62300ae8..47b817e31bb248cac25fae27334f95948a0786f0 100644 --- a/lib/requests/packages/urllib3/util/ssl_.py +++ b/lib/requests/packages/urllib3/util/ssl_.py @@ -8,11 +8,18 @@ SSLContext = None HAS_SNI = False create_default_context = None +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = { + 32: md5, + 40: sha1, + 64: sha256, +} + import errno -import ssl import warnings try: # Test for SSL features + import ssl from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 from ssl import HAS_SNI # Has SNI? except ImportError: @@ -25,14 +32,24 @@ except ImportError: OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_COMPRESSION = 0x20000 -try: - from ssl import _DEFAULT_CIPHERS -except ImportError: - _DEFAULT_CIPHERS = ( - 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' - 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' - '!eNULL:!MD5' - ) +# A secure default. +# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM over any AES-CBC for better performance and security, +# - use 3DES as fallback which is secure but slow, +# - disable NULL authentication, MD5 MACs and DSS for security reasons. +DEFAULT_CIPHERS = ( + 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' + 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' + '!eNULL:!MD5' +) try: from ssl import SSLContext # Modern SSL? 
@@ -40,7 +57,8 @@ except ImportError: import sys class SSLContext(object): # Platform-specific: Python 2 & 3.1 - supports_set_ciphers = sys.version_info >= (2, 7) + supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or + (3, 2) <= sys.version_info) def __init__(self, protocol_version): self.protocol = protocol_version @@ -57,8 +75,11 @@ except ImportError: self.certfile = certfile self.keyfile = keyfile - def load_verify_locations(self, location): - self.ca_certs = location + def load_verify_locations(self, cafile=None, capath=None): + self.ca_certs = cafile + + if capath is not None: + raise SSLError("CA directories not supported in older Pythons") def set_ciphers(self, cipher_suite): if not self.supports_set_ciphers: @@ -101,31 +122,21 @@ def assert_fingerprint(cert, fingerprint): Fingerprint as string of hexdigits, can be interspersed by colons. """ - # Maps the length of a digest to a possible hash function producing - # this digest. - hashfunc_map = { - 16: md5, - 20: sha1, - 32: sha256, - } - fingerprint = fingerprint.replace(':', '').lower() - digest_length, odd = divmod(len(fingerprint), 2) - - if odd or digest_length not in hashfunc_map: - raise SSLError('Fingerprint is of invalid length.') + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError( + 'Fingerprint of invalid length: {0}'.format(fingerprint)) # We need encode() here for py32; works on py2 and p33. fingerprint_bytes = unhexlify(fingerprint.encode()) - hashfunc = hashfunc_map[digest_length] - cert_digest = hashfunc(cert).digest() - if not cert_digest == fingerprint_bytes: + if cert_digest != fingerprint_bytes: raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' 
- .format(hexlify(fingerprint_bytes), - hexlify(cert_digest))) + .format(fingerprint, hexlify(cert_digest))) def resolve_cert_reqs(candidate): @@ -167,7 +178,7 @@ def resolve_ssl_version(candidate): return candidate -def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, +def create_urllib3_context(ssl_version=None, cert_reqs=None, options=None, ciphers=None): """All arguments have the same meaning as ``ssl_wrap_socket``. @@ -204,6 +215,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, """ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + if options is None: options = 0 # SSLv2 is easily broken and is considered harmful and dangerous @@ -217,7 +231,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, context.options |= options if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 - context.set_ciphers(ciphers or _DEFAULT_CIPHERS) + context.set_ciphers(ciphers or DEFAULT_CIPHERS) context.verify_mode = cert_reqs if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 @@ -229,10 +243,11 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, - ssl_version=None, ciphers=None, ssl_context=None): + ssl_version=None, ciphers=None, ssl_context=None, + ca_cert_dir=None): """ - All arguments except for server_hostname and ssl_context have the same - meaning as they do when using :func:`ssl.wrap_socket`. + All arguments except for server_hostname, ssl_context, and ca_cert_dir have + the same meaning as they do when using :func:`ssl.wrap_socket`. 
:param server_hostname: When SNI is supported, the expected hostname of the certificate @@ -242,15 +257,19 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, :param ciphers: A string of ciphers we wish the client to support. This is not supported on Python 2.6 as the ssl module does not support it. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). """ context = ssl_context if context is None: context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) - if ca_certs: + if ca_certs or ca_cert_dir: try: - context.load_verify_locations(ca_certs) + context.load_verify_locations(ca_certs, ca_cert_dir) except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 raise SSLError(e) # Py33 raises FileNotFoundError which subclasses OSError @@ -259,6 +278,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, if e.errno == errno.ENOENT: raise SSLError(e) raise + if certfile: context.load_cert_chain(certfile, keyfile) if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI diff --git a/lib/requests/packages/urllib3/util/url.py b/lib/requests/packages/urllib3/util/url.py index b2ec834fe721a55195c25d4495b48c1bdaefcd5f..e58050cd753da66a1939691bacdb4b76f91766e8 100644 --- a/lib/requests/packages/urllib3/util/url.py +++ b/lib/requests/packages/urllib3/util/url.py @@ -15,6 +15,8 @@ class Url(namedtuple('Url', url_attrs)): def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): + if path and not path.startswith('/'): + path = '/' + path return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) diff --git a/lib/requests/sessions.py b/lib/requests/sessions.py index ef3f22bc5c5b7075301d5a162de00061041f2417..6d60745919fb9817865cc220403521cc9fab2919 100644 --- a/lib/requests/sessions.py +++ 
b/lib/requests/sessions.py @@ -62,12 +62,11 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict): merged_setting = dict_class(to_key_val_list(session_setting)) merged_setting.update(to_key_val_list(request_setting)) - # Remove keys that are set to None. - for (k, v) in request_setting.items(): - if v is None: - del merged_setting[k] - - merged_setting = dict((k, v) for (k, v) in merged_setting.items() if v is not None) + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] return merged_setting @@ -90,7 +89,7 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): class SessionRedirectMixin(object): def resolve_redirects(self, resp, req, stream=False, timeout=None, - verify=True, cert=None, proxies=None): + verify=True, cert=None, proxies=None, **adapter_kwargs): """Receives a Response. Returns a generator of Responses.""" i = 0 @@ -193,6 +192,7 @@ class SessionRedirectMixin(object): cert=cert, proxies=proxies, allow_redirects=False, + **adapter_kwargs ) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) @@ -273,7 +273,13 @@ class Session(SessionRedirectMixin): >>> import requests >>> s = requests.Session() >>> s.get('http://httpbin.org/get') - 200 + <Response [200]> + + Or as a context manager:: + + >>> with requests.Session() as s: + >>> s.get('http://httpbin.org/get') + <Response [200]> """ __attrs__ = [ @@ -293,9 +299,9 @@ class Session(SessionRedirectMixin): #: :class:`Request <Request>`. self.auth = None - #: Dictionary mapping protocol to the URL of the proxy (e.g. - #: {'http': 'foo.bar:3128'}) to be used on each - #: :class:`Request <Request>`. + #: Dictionary mapping protocol or protocol and host to the URL of the proxy + #: (e.g. 
{'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to + #: be used on each :class:`Request <Request>`. self.proxies = {} #: Event-handling hooks. @@ -319,7 +325,8 @@ class Session(SessionRedirectMixin): #: limit, a :class:`TooManyRedirects` exception is raised. self.max_redirects = DEFAULT_REDIRECT_LIMIT - #: Should we trust the environment? + #: Trust environment settings for proxy configuration, default + #: authentication and similar. self.trust_env = True #: A CookieJar containing all currently outstanding cookies set on this @@ -404,8 +411,8 @@ class Session(SessionRedirectMixin): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. - :param data: (optional) Dictionary or bytes to send in the body of the - :class:`Request`. + :param data: (optional) Dictionary, bytes, or file-like object to send + in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the @@ -417,13 +424,13 @@ class Session(SessionRedirectMixin): :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a (`connect timeout, read - timeout <user/advanced.html#timeouts>`_) tuple. + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Set to True by default. :type allow_redirects: bool - :param proxies: (optional) Dictionary mapping protocol to the URL of - the proxy. + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. :param stream: (optional) whether to immediately download the response content. Defaults to ``False``. 
:param verify: (optional) if ``True``, the SSL cert will be verified. @@ -431,9 +438,6 @@ class Session(SessionRedirectMixin): :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. """ - - method = to_native_string(method) - # Create the Request. req = Request( method = method.upper(), @@ -560,10 +564,6 @@ class Session(SessionRedirectMixin): # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') - timeout = kwargs.get('timeout') - verify = kwargs.get('verify') - cert = kwargs.get('cert') - proxies = kwargs.get('proxies') hooks = request.hooks # Get the appropriate adapter to use @@ -591,12 +591,7 @@ class Session(SessionRedirectMixin): extract_cookies_to_jar(self.cookies, request, r.raw) # Redirect resolving generator. - gen = self.resolve_redirects(r, request, - stream=stream, - timeout=timeout, - verify=verify, - cert=cert, - proxies=proxies) + gen = self.resolve_redirects(r, request, **kwargs) # Resolve redirects if allowed. 
history = [resp for resp in gen] if allow_redirects else [] @@ -639,7 +634,7 @@ class Session(SessionRedirectMixin): 'cert': cert} def get_adapter(self, url): - """Returns the appropriate connnection adapter for the given URL.""" + """Returns the appropriate connection adapter for the given URL.""" for (prefix, adapter) in self.adapters.items(): if url.lower().startswith(prefix): diff --git a/lib/requests/status_codes.py b/lib/requests/status_codes.py index e0887f210a716d620bde38056db40fe987af9bbe..a852574a455e509ac3c69bce42f59d15fa860776 100644 --- a/lib/requests/status_codes.py +++ b/lib/requests/status_codes.py @@ -78,11 +78,12 @@ _codes = { 507: ('insufficient_storage',), 509: ('bandwidth_limit_exceeded', 'bandwidth'), 510: ('not_extended',), + 511: ('network_authentication_required', 'network_auth', 'network_authentication'), } codes = LookupDict(name='status_codes') -for (code, titles) in list(_codes.items()): +for code, titles in _codes.items(): for title in titles: setattr(codes, title, code) if not title.startswith('\\'): diff --git a/lib/requests/utils.py b/lib/requests/utils.py index 8fba62dd82aec65e9339f8f25ea19e68a403d7b5..132cd2b5e3838af105c11fcdc0ce3b0198670ddb 100644 --- a/lib/requests/utils.py +++ b/lib/requests/utils.py @@ -29,7 +29,7 @@ from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2, basestring) from .cookies import RequestsCookieJar, cookiejar_from_dict from .structures import CaseInsensitiveDict -from .exceptions import InvalidURL +from .exceptions import InvalidURL, FileModeWarning _hush_pyflakes = (RequestsCookieJar,) @@ -48,26 +48,47 @@ def dict_to_sequence(d): def super_len(o): + total_length = 0 + current_position = 0 + if hasattr(o, '__len__'): - return len(o) + total_length = len(o) + + elif hasattr(o, 'len'): + total_length = o.len - if hasattr(o, 'len'): - return o.len + elif hasattr(o, 'getvalue'): + # e.g. 
BytesIO, cStringIO.StringIO + total_length = len(o.getvalue()) - if hasattr(o, 'fileno'): + elif hasattr(o, 'fileno'): try: fileno = o.fileno() except io.UnsupportedOperation: pass else: - return os.fstat(fileno).st_size + total_length = os.fstat(fileno).st_size - if hasattr(o, 'getvalue'): - # e.g. BytesIO, cStringIO.StringIO - return len(o.getvalue()) + # Having used fstat to determine the file length, we need to + # confirm that this file was opened up in binary mode. + if 'b' not in o.mode: + warnings.warn(( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. In Requests 3.0, support will be removed " + "for files in text mode."), + FileModeWarning + ) + + if hasattr(o, 'tell'): + current_position = o.tell() + + return max(0, total_length - current_position) -def get_netrc_auth(url): +def get_netrc_auth(url, raise_errors=False): """Returns the Requests tuple auth for a given url from netrc.""" try: @@ -105,8 +126,9 @@ def get_netrc_auth(url): return (_netrc[login_i], _netrc[2]) except (NetrcParseError, IOError): # If there was a parsing error or a permissions issue reading the file, - # we'll just skip netrc auth - pass + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise # AppEngine hackiness. except (ImportError, AttributeError): @@ -498,7 +520,9 @@ def should_bypass_proxies(url): if no_proxy: # We need to check whether we match here. We need to see if we match # the end of the netloc, both with and without the port. 
- no_proxy = no_proxy.replace(' ', '').split(',') + no_proxy = ( + host for host in no_proxy.replace(' ', '').split(',') if host + ) ip = netloc.split(':')[0] if is_ipv4_address(ip): @@ -536,36 +560,22 @@ def get_environ_proxies(url): else: return getproxies() +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname) + if proxy is None: + proxy = proxies.get(urlparts.scheme) + return proxy def default_user_agent(name="python-requests"): """Return a string representing the default user agent.""" - _implementation = platform.python_implementation() - - if _implementation == 'CPython': - _implementation_version = platform.python_version() - elif _implementation == 'PyPy': - _implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, - sys.pypy_version_info.minor, - sys.pypy_version_info.micro) - if sys.pypy_version_info.releaselevel != 'final': - _implementation_version = ''.join([_implementation_version, sys.pypy_version_info.releaselevel]) - elif _implementation == 'Jython': - _implementation_version = platform.python_version() # Complete Guess - elif _implementation == 'IronPython': - _implementation_version = platform.python_version() # Complete Guess - else: - _implementation_version = 'Unknown' - - try: - p_system = platform.system() - p_release = platform.release() - except IOError: - p_system = 'Unknown' - p_release = 'Unknown' - - return " ".join(['%s/%s' % (name, __version__), - '%s/%s' % (_implementation, _implementation_version), - '%s/%s' % (p_system, p_release)]) + return '%s/%s' % (name, __version__) def default_headers(): diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index e78c7521abbfdbc7e7327dfa50f9649810c67f0e..8cd96b162a23a70ae3ea6a43ee265096bdce6767 100644 --- 
a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -16,7 +16,6 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. - import webbrowser import datetime import socket @@ -35,7 +34,6 @@ from github import Github from sickbeard import metadata from sickbeard import providers -from sickbeard.providers.generic import GenericProvider from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \ naming_ep_type from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \ @@ -58,6 +56,7 @@ from sickbeard.databases import mainDB, cache_db, failed_db from sickrage.helper.encoding import ek from sickrage.helper.exceptions import ex +from sickrage.providers.GenericProvider import GenericProvider from sickrage.show.Show import Show from sickrage.system.Shutdown import Shutdown @@ -181,6 +180,7 @@ API_KEY = None API_ROOT = None ENABLE_HTTPS = False +NOTIFY_ON_LOGIN = False HTTPS_CERT = None HTTPS_KEY = None @@ -283,7 +283,7 @@ NFO_RENAME = True TV_DOWNLOAD_DIR = None UNPACK = False SKIP_REMOVED_FILES = False -ALLOWED_EXTENSIONS = "nfo,srr,sfv" +ALLOWED_EXTENSIONS = "srt,nfo,srr,sfv" NZBS = False NZBS_UID = None @@ -576,7 +576,7 @@ def initialize(consoleLogging=True): with INIT_LOCK: global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, GIT_NEWVER, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, ENCRYPTION_SECRET, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, WEB_COOKIE_SECRET, WEB_USE_GZIP, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \ - HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, SAB_FORCED, TORRENT_METHOD, \ + HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, 
RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, SAB_FORCED, TORRENT_METHOD, NOTIFY_ON_LOGIN, \ SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_CATEGORY_BACKLOG, SAB_CATEGORY_ANIME, SAB_CATEGORY_ANIME_BACKLOG, SAB_HOST, \ NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_CATEGORY_BACKLOG, NZBGET_CATEGORY_ANIME, NZBGET_CATEGORY_ANIME_BACKLOG, NZBGET_PRIORITY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \ TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_LABEL_ANIME, TORRENT_VERIFY_CERT, TORRENT_RPCURL, TORRENT_AUTH_TYPE, \ @@ -816,6 +816,8 @@ def initialize(consoleLogging=True): ENABLE_HTTPS = bool(check_setting_int(CFG, 'General', 'enable_https', 0)) + NOTIFY_ON_LOGIN = bool(check_setting_int(CFG, 'General', 'notify_on_login', 0)) + HTTPS_CERT = check_setting_str(CFG, 'General', 'https_cert', 'server.crt') HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', 'server.key') @@ -1227,119 +1229,119 @@ def initialize(consoleLogging=True): # dynamically load provider settings for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if - curProvider.providerType == GenericProvider.TORRENT]: - curTorrentProvider.enabled = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID(), 0)) + curProvider.provider_type == GenericProvider.TORRENT]: + curTorrentProvider.enabled = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id(), 0)) if hasattr(curTorrentProvider, 'custom_url'): - curTorrentProvider.custom_url = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_custom_url', '', censor_log=True) + curTorrentProvider.custom_url = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_custom_url', '', censor_log=True) if hasattr(curTorrentProvider, 
'api_key'): - curTorrentProvider.api_key = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_api_key', '', censor_log=True) + curTorrentProvider.api_key = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_api_key', '', censor_log=True) if hasattr(curTorrentProvider, 'hash'): - curTorrentProvider.hash = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_hash', '', censor_log=True) + curTorrentProvider.hash = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_hash', '', censor_log=True) if hasattr(curTorrentProvider, 'digest'): - curTorrentProvider.digest = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_digest', '', censor_log=True) + curTorrentProvider.digest = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_digest', '', censor_log=True) if hasattr(curTorrentProvider, 'username'): - curTorrentProvider.username = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_username', '', censor_log=True) + curTorrentProvider.username = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_username', '', censor_log=True) if hasattr(curTorrentProvider, 'password'): - curTorrentProvider.password = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_password', '', censor_log=True) + curTorrentProvider.password = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_password', '', censor_log=True) if hasattr(curTorrentProvider, 'passkey'): - curTorrentProvider.passkey = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_passkey', '', censor_log=True) + curTorrentProvider.passkey = check_setting_str(CFG, 
curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_passkey', '', censor_log=True) if hasattr(curTorrentProvider, 'pin'): - curTorrentProvider.pin = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_pin', '', censor_log=True) + curTorrentProvider.pin = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_pin', '', censor_log=True) if hasattr(curTorrentProvider, 'confirmed'): - curTorrentProvider.confirmed = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_confirmed', 1)) + curTorrentProvider.confirmed = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_confirmed', 1)) if hasattr(curTorrentProvider, 'ranked'): - curTorrentProvider.ranked = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_ranked', 1)) + curTorrentProvider.ranked = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_ranked', 1)) if hasattr(curTorrentProvider, 'engrelease'): - curTorrentProvider.engrelease = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_engrelease', 0)) + curTorrentProvider.engrelease = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_engrelease', 0)) if hasattr(curTorrentProvider, 'onlyspasearch'): - curTorrentProvider.onlyspasearch = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_onlyspasearch', 0)) + curTorrentProvider.onlyspasearch = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_onlyspasearch', 0)) if hasattr(curTorrentProvider, 'sorting'): - curTorrentProvider.sorting = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_sorting', 'seeders') + 
curTorrentProvider.sorting = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_sorting', 'seeders') if hasattr(curTorrentProvider, 'options'): - curTorrentProvider.options = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_options', '') + curTorrentProvider.options = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_options', '') if hasattr(curTorrentProvider, 'ratio'): - curTorrentProvider.ratio = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_ratio', '') + curTorrentProvider.ratio = check_setting_str(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_ratio', '') if hasattr(curTorrentProvider, 'minseed'): - curTorrentProvider.minseed = check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_minseed', 1) + curTorrentProvider.minseed = check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_minseed', 1) if hasattr(curTorrentProvider, 'minleech'): - curTorrentProvider.minleech = check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_minleech', 0) + curTorrentProvider.minleech = check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_minleech', 0) if hasattr(curTorrentProvider, 'freeleech'): - curTorrentProvider.freeleech = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_freeleech', 0)) + curTorrentProvider.freeleech = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_freeleech', 0)) if hasattr(curTorrentProvider, 'search_mode'): - curTorrentProvider.search_mode = check_setting_str(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_search_mode', + curTorrentProvider.search_mode = check_setting_str(CFG, 
curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_search_mode', 'eponly') if hasattr(curTorrentProvider, 'search_fallback'): - curTorrentProvider.search_fallback = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_search_fallback', + curTorrentProvider.search_fallback = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_search_fallback', 0)) if hasattr(curTorrentProvider, 'enable_daily'): - curTorrentProvider.enable_daily = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_enable_daily', + curTorrentProvider.enable_daily = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_enable_daily', 1)) if hasattr(curTorrentProvider, 'enable_backlog'): - curTorrentProvider.enable_backlog = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_enable_backlog', - curTorrentProvider.supportsBacklog)) + curTorrentProvider.enable_backlog = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_enable_backlog', + curTorrentProvider.supports_backlog)) if hasattr(curTorrentProvider, 'cat'): - curTorrentProvider.cat = check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_cat', 0) + curTorrentProvider.cat = check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_cat', 0) if hasattr(curTorrentProvider, 'subtitle'): - curTorrentProvider.subtitle = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_subtitle', 0)) + curTorrentProvider.subtitle = bool(check_setting_int(CFG, curTorrentProvider.get_id().upper(), + curTorrentProvider.get_id() + '_subtitle', 0)) for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if - curProvider.providerType == 
GenericProvider.NZB]: + curProvider.provider_type == GenericProvider.NZB]: curNzbProvider.enabled = bool( - check_setting_int(CFG, curNzbProvider.getID().upper(), curNzbProvider.getID(), 0)) + check_setting_int(CFG, curNzbProvider.get_id().upper(), curNzbProvider.get_id(), 0)) if hasattr(curNzbProvider, 'api_key'): - curNzbProvider.api_key = check_setting_str(CFG, curNzbProvider.getID().upper(), - curNzbProvider.getID() + '_api_key', '', censor_log=True) + curNzbProvider.api_key = check_setting_str(CFG, curNzbProvider.get_id().upper(), + curNzbProvider.get_id() + '_api_key', '', censor_log=True) if hasattr(curNzbProvider, 'username'): - curNzbProvider.username = check_setting_str(CFG, curNzbProvider.getID().upper(), - curNzbProvider.getID() + '_username', '', censor_log=True) + curNzbProvider.username = check_setting_str(CFG, curNzbProvider.get_id().upper(), + curNzbProvider.get_id() + '_username', '', censor_log=True) if hasattr(curNzbProvider, 'search_mode'): - curNzbProvider.search_mode = check_setting_str(CFG, curNzbProvider.getID().upper(), - curNzbProvider.getID() + '_search_mode', + curNzbProvider.search_mode = check_setting_str(CFG, curNzbProvider.get_id().upper(), + curNzbProvider.get_id() + '_search_mode', 'eponly') if hasattr(curNzbProvider, 'search_fallback'): - curNzbProvider.search_fallback = bool(check_setting_int(CFG, curNzbProvider.getID().upper(), - curNzbProvider.getID() + '_search_fallback', + curNzbProvider.search_fallback = bool(check_setting_int(CFG, curNzbProvider.get_id().upper(), + curNzbProvider.get_id() + '_search_fallback', 0)) if hasattr(curNzbProvider, 'enable_daily'): - curNzbProvider.enable_daily = bool(check_setting_int(CFG, curNzbProvider.getID().upper(), - curNzbProvider.getID() + '_enable_daily', + curNzbProvider.enable_daily = bool(check_setting_int(CFG, curNzbProvider.get_id().upper(), + curNzbProvider.get_id() + '_enable_daily', 1)) if hasattr(curNzbProvider, 'enable_backlog'): - curNzbProvider.enable_backlog = 
bool(check_setting_int(CFG, curNzbProvider.getID().upper(), - curNzbProvider.getID() + '_enable_backlog', - curNzbProvider.supportsBacklog)) + curNzbProvider.enable_backlog = bool(check_setting_int(CFG, curNzbProvider.get_id().upper(), + curNzbProvider.get_id() + '_enable_backlog', + curNzbProvider.supports_backlog)) if not ek(os.path.isfile, CONFIG_FILE): logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG) @@ -1689,6 +1691,7 @@ def save_config(): new_config['General']['debug'] = int(DEBUG) new_config['General']['default_page'] = DEFAULT_PAGE new_config['General']['enable_https'] = int(ENABLE_HTTPS) + new_config['General']['notify_on_login'] = int(NOTIFY_ON_LOGIN) new_config['General']['https_cert'] = HTTPS_CERT new_config['General']['https_key'] = HTTPS_KEY new_config['General']['handle_reverse_proxy'] = int(HANDLE_REVERSE_PROXY) @@ -1791,103 +1794,103 @@ def save_config(): # dynamically save provider settings for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if - curProvider.providerType == GenericProvider.TORRENT]: - new_config[curTorrentProvider.getID().upper()] = {} - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID()] = int(curTorrentProvider.enabled) + curProvider.provider_type == GenericProvider.TORRENT]: + new_config[curTorrentProvider.get_id().upper()] = {} + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id()] = int(curTorrentProvider.enabled) if hasattr(curTorrentProvider, 'custom_url'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_custom_url'] = curTorrentProvider.custom_url + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_custom_url'] = curTorrentProvider.custom_url if hasattr(curTorrentProvider, 'digest'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_digest'] = curTorrentProvider.digest + 
new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_digest'] = curTorrentProvider.digest if hasattr(curTorrentProvider, 'hash'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_hash'] = curTorrentProvider.hash + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_hash'] = curTorrentProvider.hash if hasattr(curTorrentProvider, 'api_key'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_api_key'] = curTorrentProvider.api_key + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_api_key'] = curTorrentProvider.api_key if hasattr(curTorrentProvider, 'username'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_username'] = curTorrentProvider.username + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_username'] = curTorrentProvider.username if hasattr(curTorrentProvider, 'password'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_password'] = helpers.encrypt( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_password'] = helpers.encrypt( curTorrentProvider.password, ENCRYPTION_VERSION) if hasattr(curTorrentProvider, 'passkey'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_passkey'] = curTorrentProvider.passkey + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_passkey'] = curTorrentProvider.passkey if hasattr(curTorrentProvider, 'pin'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_pin'] = curTorrentProvider.pin + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_pin'] = curTorrentProvider.pin if hasattr(curTorrentProvider, 'confirmed'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_confirmed'] = int( + 
new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_confirmed'] = int( curTorrentProvider.confirmed) if hasattr(curTorrentProvider, 'ranked'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_ranked'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_ranked'] = int( curTorrentProvider.ranked) if hasattr(curTorrentProvider, 'engrelease'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_engrelease'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_engrelease'] = int( curTorrentProvider.engrelease) if hasattr(curTorrentProvider, 'onlyspasearch'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_onlyspasearch'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_onlyspasearch'] = int( curTorrentProvider.onlyspasearch) if hasattr(curTorrentProvider, 'sorting'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_sorting'] = curTorrentProvider.sorting + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_sorting'] = curTorrentProvider.sorting if hasattr(curTorrentProvider, 'ratio'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_ratio'] = curTorrentProvider.ratio + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_ratio'] = curTorrentProvider.ratio if hasattr(curTorrentProvider, 'minseed'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_minseed'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_minseed'] = int( curTorrentProvider.minseed) if hasattr(curTorrentProvider, 'minleech'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_minleech'] = int( + 
new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_minleech'] = int( curTorrentProvider.minleech) if hasattr(curTorrentProvider, 'options'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_options'] = curTorrentProvider.options + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_options'] = curTorrentProvider.options if hasattr(curTorrentProvider, 'freeleech'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_freeleech'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_freeleech'] = int( curTorrentProvider.freeleech) if hasattr(curTorrentProvider, 'search_mode'): - new_config[curTorrentProvider.getID().upper()][ - curTorrentProvider.getID() + '_search_mode'] = curTorrentProvider.search_mode + new_config[curTorrentProvider.get_id().upper()][ + curTorrentProvider.get_id() + '_search_mode'] = curTorrentProvider.search_mode if hasattr(curTorrentProvider, 'search_fallback'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_search_fallback'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_search_fallback'] = int( curTorrentProvider.search_fallback) if hasattr(curTorrentProvider, 'enable_daily'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_daily'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_enable_daily'] = int( curTorrentProvider.enable_daily) if hasattr(curTorrentProvider, 'enable_backlog'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_backlog'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_enable_backlog'] = int( curTorrentProvider.enable_backlog) if hasattr(curTorrentProvider, 'cat'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + 
'_cat'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_cat'] = int( curTorrentProvider.cat) if hasattr(curTorrentProvider, 'subtitle'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_subtitle'] = int( + new_config[curTorrentProvider.get_id().upper()][curTorrentProvider.get_id() + '_subtitle'] = int( curTorrentProvider.subtitle) for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if - curProvider.providerType == GenericProvider.NZB]: - new_config[curNzbProvider.getID().upper()] = {} - new_config[curNzbProvider.getID().upper()][curNzbProvider.getID()] = int(curNzbProvider.enabled) + curProvider.provider_type == GenericProvider.NZB]: + new_config[curNzbProvider.get_id().upper()] = {} + new_config[curNzbProvider.get_id().upper()][curNzbProvider.get_id()] = int(curNzbProvider.enabled) if hasattr(curNzbProvider, 'api_key'): - new_config[curNzbProvider.getID().upper()][ - curNzbProvider.getID() + '_api_key'] = curNzbProvider.api_key + new_config[curNzbProvider.get_id().upper()][ + curNzbProvider.get_id() + '_api_key'] = curNzbProvider.api_key if hasattr(curNzbProvider, 'username'): - new_config[curNzbProvider.getID().upper()][ - curNzbProvider.getID() + '_username'] = curNzbProvider.username + new_config[curNzbProvider.get_id().upper()][ + curNzbProvider.get_id() + '_username'] = curNzbProvider.username if hasattr(curNzbProvider, 'search_mode'): - new_config[curNzbProvider.getID().upper()][ - curNzbProvider.getID() + '_search_mode'] = curNzbProvider.search_mode + new_config[curNzbProvider.get_id().upper()][ + curNzbProvider.get_id() + '_search_mode'] = curNzbProvider.search_mode if hasattr(curNzbProvider, 'search_fallback'): - new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_search_fallback'] = int( + new_config[curNzbProvider.get_id().upper()][curNzbProvider.get_id() + '_search_fallback'] = int( curNzbProvider.search_fallback) if 
hasattr(curNzbProvider, 'enable_daily'): - new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_daily'] = int( + new_config[curNzbProvider.get_id().upper()][curNzbProvider.get_id() + '_enable_daily'] = int( curNzbProvider.enable_daily) if hasattr(curNzbProvider, 'enable_backlog'): - new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_backlog'] = int( + new_config[curNzbProvider.get_id().upper()][curNzbProvider.get_id() + '_enable_backlog'] = int( curNzbProvider.enable_backlog) new_config['NZBs'] = {} diff --git a/sickbeard/classes.py b/sickbeard/classes.py index 3c226bae7bc9b11c841215d166be0cec8dc82f5b..ee3d116631ca8758ca81533dd26976c68c7349b0 100644 --- a/sickbeard/classes.py +++ b/sickbeard/classes.py @@ -272,6 +272,7 @@ class ErrorViewer(object): @staticmethod def add(error): + ErrorViewer.errors = [e for e in ErrorViewer.errors if e.message != error.message] ErrorViewer.errors.append(error) @staticmethod @@ -296,6 +297,7 @@ class WarningViewer(object): @staticmethod def add(error): + WarningViewer.errors = [e for e in WarningViewer.errors if e.message != error.message] WarningViewer.errors.append(error) @staticmethod diff --git a/sickbeard/clients/generic.py b/sickbeard/clients/generic.py index 2109da630a568984ea4cb3c5c78d92541ee165b3..ca74035d81149482073f0585d87d4cc1fa635937 100644 --- a/sickbeard/clients/generic.py +++ b/sickbeard/clients/generic.py @@ -176,7 +176,7 @@ class GenericClient(object): try: # Sets per provider seed ratio - result.ratio = result.provider.seedRatio() + result.ratio = result.provider.seed_ratio() # lazy fix for now, I'm sure we already do this somewhere else too result = self._get_torrent_hash(result) diff --git a/sickbeard/common.py b/sickbeard/common.py index 731aaa84896b266d875e8d8e09217072db5e7e74..3af49f8ce9b90fdd20e242bf125a79fdeb900255 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -78,13 +78,17 @@ NOTIFY_DOWNLOAD = 2 NOTIFY_SUBTITLE_DOWNLOAD = 3 NOTIFY_GIT_UPDATE 
= 4 NOTIFY_GIT_UPDATE_TEXT = 5 +NOTIFY_LOGIN = 6 +NOTIFY_LOGIN_TEXT = 7 notifyStrings = NumDict({ NOTIFY_SNATCH: "Started Download", NOTIFY_DOWNLOAD: "Download Finished", NOTIFY_SUBTITLE_DOWNLOAD: "Subtitle Download Finished", NOTIFY_GIT_UPDATE: "SickRage Updated", - NOTIFY_GIT_UPDATE_TEXT: "SickRage Updated To Commit#: " + NOTIFY_GIT_UPDATE_TEXT: "SickRage Updated To Commit#: ", + NOTIFY_LOGIN : "SickRage new login", + NOTIFY_LOGIN_TEXT : "New login from IP: {0}. http://geomaplookup.net/?ip={0}" }) # Episode statuses diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 2aef0b46b4504fdb34eb4fe447bf69715f5e15f5..4d86695a3450e09029aa603aaf8d3f43c9ed2814 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -120,6 +120,7 @@ def remove_non_release_groups(name): r'\[Seedbox\]$': 'searchre', r'\[PublicHD\]$': 'searchre', r'\[AndroidTwoU\]$': 'searchre', + r'\[brassetv]\]$': 'searchre', r'\.\[BT\]$': 'searchre', r' \[1044\]$': 'searchre', r'\.RiPSaLoT$': 'searchre', @@ -1408,7 +1409,7 @@ def _setUpSession(session, headers): return session -def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False, needBytes=False): +def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False, need_bytes=False): """ Returns a byte-string retrieved from the url provider. 
""" @@ -1461,7 +1462,7 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N logger.log(traceback.format_exc(), logger.WARNING) return None - return (resp.text, resp.content)[needBytes] if not json else resp.json() + return (resp.text, resp.content)[need_bytes] if not json else resp.json() def download_file(url, filename, session=None, headers=None): @@ -1722,12 +1723,14 @@ def getDiskSpaceUsage(diskPath=None): def getTVDBFromID(indexer_id, indexer): + + session = requests.Session() tvdb_id = '' if indexer == 'IMDB': - url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s" % (indexer_id) - data = urllib.urlopen(url) + url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s" % indexer_id + data = getURL(url, session=session) try: - tree = ET.parse(data) + tree = ET.fromstring(data) for show in tree.getiterator("Series"): tvdb_id = show.findtext("seriesid") @@ -1736,10 +1739,10 @@ def getTVDBFromID(indexer_id, indexer): return tvdb_id elif indexer == 'ZAP2IT': - url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?zap2it=%s" % (indexer_id) - data = urllib.urlopen(url) + url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?zap2it=%s" % indexer_id + data = getURL(url, session=session) try: - tree = ET.parse(data) + tree = ET.fromstring(data) for show in tree.getiterator("Series"): tvdb_id = show.findtext("seriesid") @@ -1748,10 +1751,16 @@ def getTVDBFromID(indexer_id, indexer): return tvdb_id elif indexer == 'TVMAZE': - url = "http://api.tvmaze.com/shows/%s" % (indexer_id) - response = urllib2.urlopen(url) - data = json.load(response) + url = "http://api.tvmaze.com/shows/%s" % indexer_id + data = getURL(url, session=session, json=True) tvdb_id = data['externals']['thetvdb'] return tvdb_id else: return tvdb_id + +def is_ip_private(ip): + priv_lo = re.compile("^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$") + priv_24 = re.compile("^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$") + priv_20 = re.compile("^192\.168\.\d{1,3}.\d{1,3}$") 
+ priv_16 = re.compile("^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$") + return priv_lo.match(ip) or priv_24.match(ip) or priv_20.match(ip) or priv_16.match(ip) diff --git a/sickbeard/imdbPopular.py b/sickbeard/imdbPopular.py index 540e3eee57e226513ae258c0d60323f4e850d8a0..475f4c315c0ace16969989dbc7e7769b537484a3 100644 --- a/sickbeard/imdbPopular.py +++ b/sickbeard/imdbPopular.py @@ -8,7 +8,6 @@ import sickbeard from sickbeard import helpers from sickrage.helper.encoding import ek - class imdbPopular(object): def __init__(self): """Gets a list of most popular TV series from imdb""" @@ -34,7 +33,7 @@ class imdbPopular(object): if not data: return None - soup = BeautifulSoup(data, 'html.parser') + soup = BeautifulSoup(data, 'html5lib') results = soup.find("table", {"class": "results"}) rows = results.find_all("tr") diff --git a/sickbeard/metadata/helpers.py b/sickbeard/metadata/helpers.py index c952c6644ef54580b37a88404223c274746ec2f1..31ad1d509ebceea3467d85a4f12482315429da20 100644 --- a/sickbeard/metadata/helpers.py +++ b/sickbeard/metadata/helpers.py @@ -37,7 +37,7 @@ def getShowImage(url, imgNum=None): logger.log(u"Fetching image from " + tempURL, logger.DEBUG) - image_data = helpers.getURL(tempURL, session=meta_session, needBytes=True) + image_data = helpers.getURL(tempURL, session=meta_session, need_bytes=True) if image_data is None: logger.log(u"There was an error trying to retrieve the image, aborting", logger.WARNING) return diff --git a/sickbeard/notifiers/__init__.py b/sickbeard/notifiers/__init__.py index 4ed59e1f7c62e9933dd781af5811876840d012fc..ab4472ec4acc4b3d791af006584f468f8dbb7867 100644 --- a/sickbeard/notifiers/__init__.py +++ b/sickbeard/notifiers/__init__.py @@ -107,3 +107,8 @@ def notify_snatch(ep_name): def notify_git_update(new_version=""): for n in notifiers: n.notify_git_update(new_version) + + +def notify_login(ipaddress): + for n in notifiers: + n.notify_login(ipaddress) diff --git a/sickbeard/notifiers/boxcar2.py 
b/sickbeard/notifiers/boxcar2.py index 5bb79cd7490b93b79eeed2c5f87c1094d73a5506..28fadc43eade167bfc260bf47748d60e4abb7272 100644 --- a/sickbeard/notifiers/boxcar2.py +++ b/sickbeard/notifiers/boxcar2.py @@ -104,6 +104,12 @@ class Boxcar2Notifier(object): title = notifyStrings[NOTIFY_GIT_UPDATE] self._notifyBoxcar2(title, update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_BOXCAR2: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._notifyBoxcar2(title, update_text.format(ipaddress)) + def _notifyBoxcar2(self, title, message, accesstoken=None): """ Sends a boxcar2 notification based on the provided info or SB config diff --git a/sickbeard/notifiers/emailnotify.py b/sickbeard/notifiers/emailnotify.py index 34249c92de064129a725b9fcab8a7d0e73b216ab..ce8d4ead9d3e3935946f8e9dcbb20cf63b963be1 100644 --- a/sickbeard/notifiers/emailnotify.py +++ b/sickbeard/notifiers/emailnotify.py @@ -170,6 +170,9 @@ class EmailNotifier(object): def notify_git_update(self, new_version="??"): pass + def notify_login(self, ipaddress=""): + pass + def _generate_recipients(self, show): addrs = [] myDB = db.DBConnection() diff --git a/sickbeard/notifiers/freemobile.py b/sickbeard/notifiers/freemobile.py index dc040d819291291f3f54d8ba86ec3b74484ad209..bf9ac00b4bb0ae5a85aaad1c0adb145ec60a281d 100644 --- a/sickbeard/notifiers/freemobile.py +++ b/sickbeard/notifiers/freemobile.py @@ -102,6 +102,12 @@ class FreeMobileNotifier(object): title = notifyStrings[NOTIFY_GIT_UPDATE] self._notifyFreeMobile(title, update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_FREEMOBILE: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._notifyFreeMobile(title, update_text.format(ipaddress)) + def _notifyFreeMobile(self, title, message, cust_id=None, apiKey=None, force=False): """ Sends a SMS notification diff 
--git a/sickbeard/notifiers/growl.py b/sickbeard/notifiers/growl.py index a1a234c4442c59e3622d34d3b686b253136be8af..aa86c8aa6ec2672083679d0a1471d0920427f08a 100644 --- a/sickbeard/notifiers/growl.py +++ b/sickbeard/notifiers/growl.py @@ -52,6 +52,12 @@ class GrowlNotifier(object): title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._sendGrowl(title, update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_GROWL: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._sendGrowl(title, update_text.format(ipaddress)) + def _send_growl(self, options, message=None): # Send Notification diff --git a/sickbeard/notifiers/kodi.py b/sickbeard/notifiers/kodi.py index aeb35c7a872a497a25d6c0280a0ecd20fdc2a150..2e9f36b022b508a27d5c2d6e43b1e88865ec5a13 100644 --- a/sickbeard/notifiers/kodi.py +++ b/sickbeard/notifiers/kodi.py @@ -531,6 +531,12 @@ class KODINotifier(object): title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._notify_kodi(update_text + new_version, title) + def notify_login(self, ipaddress=""): + if sickbeard.USE_KODI: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._notify_kodi(update_text.format(ipaddress), title) + def test_notify(self, host, username, password): return self._notify_kodi("Testing KODI notifications from SickRage", "Test Notification", host, username, password, force=True) diff --git a/sickbeard/notifiers/libnotify.py b/sickbeard/notifiers/libnotify.py index ab4efff673a9e51ba30a22db6e920d880d56381a..40a6c1c69ba63425362a224bcc35fec518412fd2 100644 --- a/sickbeard/notifiers/libnotify.py +++ b/sickbeard/notifiers/libnotify.py @@ -102,6 +102,12 @@ class LibnotifyNotifier(object): title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._notify(title, update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_LIBNOTIFY: + update_text = 
common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._notify(title, update_text.format(ipaddress)) + def test_notify(self): return self._notify('Test notification', "This is a test notification from SickRage", force=True) diff --git a/sickbeard/notifiers/nma.py b/sickbeard/notifiers/nma.py index d34e11089a139663901d93a400f925ac72fa9037..ad5ea24d4cfdcd898d544d47065864d318aecd68 100644 --- a/sickbeard/notifiers/nma.py +++ b/sickbeard/notifiers/nma.py @@ -32,6 +32,14 @@ class NMA_Notifier(object): title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._sendNMA(nma_api=None, nma_priority=None, event=title, message=update_text + new_version) + + def notify_login(self, ipaddress=""): + if sickbeard.USE_NMA: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._sendNMA(nma_api=None, nma_priority=None, event=title, message=update_text.format(ipaddress)) + + def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False): title = 'SickRage' diff --git a/sickbeard/notifiers/nmj.py b/sickbeard/notifiers/nmj.py index 8b8be514480d0b8fb88c74c0f13c0eedc46e414d..47e8309f1b927d96704f6edc19861ad5802f958e 100644 --- a/sickbeard/notifiers/nmj.py +++ b/sickbeard/notifiers/nmj.py @@ -101,6 +101,9 @@ class NMJNotifier(object): return False # Not implemented, no reason to start scanner. + def notify_login(self, ipaddress=""): + return False + def test_notify(self, host, database, mount): return self._sendNMJ(host, database, mount) diff --git a/sickbeard/notifiers/nmjv2.py b/sickbeard/notifiers/nmjv2.py index a99b8c92fc9fb7d8700eac05d0037d64d95b4871..9a4d57fddefb1db3d5d630cb4acb4f8faee31954 100644 --- a/sickbeard/notifiers/nmjv2.py +++ b/sickbeard/notifiers/nmjv2.py @@ -47,6 +47,9 @@ class NMJv2Notifier(object): return False # Not implemented, no reason to start scanner. 
+ def notify_login(self, ipaddress=""): + return False + def test_notify(self, host): return self._sendNMJ(host) diff --git a/sickbeard/notifiers/plex.py b/sickbeard/notifiers/plex.py index 941b8223c309f1905310e365b14003e7bec31d51..e0e0987b182e3d554b5dd2faeef01e9e085e9b1d 100644 --- a/sickbeard/notifiers/plex.py +++ b/sickbeard/notifiers/plex.py @@ -156,6 +156,13 @@ class PLEXNotifier(object): if update_text and title and new_version: self._notify_pmc(update_text + new_version, title) + def notify_login(self, ipaddress=""): + if sickbeard.USE_PLEX: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + if update_text and title and new_version: + self._notify_pmc(update_text.format(ipaddress), title) + def test_notify_pmc(self, host, username, password): return self._notify_pmc('This is a test notification from SickRage', 'Test Notification', host, username, password, force=True) diff --git a/sickbeard/notifiers/prowl.py b/sickbeard/notifiers/prowl.py index 8f1bde7513000c7c962459a29398ba97c36bfb4c..fd47d5a6387c875c17c8ed619d8ec0953d8dcdc4 100644 --- a/sickbeard/notifiers/prowl.py +++ b/sickbeard/notifiers/prowl.py @@ -86,6 +86,12 @@ class ProwlNotifier(object): self._send_prowl(prowl_api=None, prowl_priority=None, event=title, message=update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_PROWL: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._send_prowl(prowl_api=None, prowl_priority=None, + event=title, message=update_text.format(ipaddress)) @staticmethod def _generate_recipients(show=None): diff --git a/sickbeard/notifiers/pushalot.py b/sickbeard/notifiers/pushalot.py index 77c0ea71488c139c4542624a242e298341002cea..6a3c82b02a0c457172d6ea3cbee246f93e18c0f6 100644 --- a/sickbeard/notifiers/pushalot.py +++ b/sickbeard/notifiers/pushalot.py @@ -57,6 +57,14 @@ class PushalotNotifier(object): event=title, 
message=update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_PUSHALOT: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._sendPushalot(pushalot_authorizationtoken=None, + event=title, + message=update_text.format(ipaddress)) + def _sendPushalot(self, pushalot_authorizationtoken=None, event=None, message=None, force=False): if not sickbeard.USE_PUSHALOT and not force: diff --git a/sickbeard/notifiers/pushbullet.py b/sickbeard/notifiers/pushbullet.py index 0e633b81eacbcce37ae3c80cb32b5b5c2ed380dc..2a218ba8661791b4f8c1dfe76472fac613b0fdb3 100644 --- a/sickbeard/notifiers/pushbullet.py +++ b/sickbeard/notifiers/pushbullet.py @@ -63,6 +63,10 @@ class PushbulletNotifier(object): if sickbeard.USE_PUSHBULLET: self._sendPushbullet(pushbullet_api=None, event=notifyStrings[NOTIFY_GIT_UPDATE], message=notifyStrings[NOTIFY_GIT_UPDATE_TEXT] + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_PUSHBULLET: + self._sendPushbullet(pushbullet_api=None, event=notifyStrings[NOTIFY_LOGIN], message=notifyStrings[NOTIFY_LOGIN_TEXT].format(ipaddress)) + def _sendPushbullet(self, pushbullet_api=None, pushbullet_device=None, event=None, message=None): if not (sickbeard.USE_PUSHBULLET or event == 'Test' or event is None): diff --git a/sickbeard/notifiers/pushover.py b/sickbeard/notifiers/pushover.py index 3d6ca648ccca34c78f88c5729502d2e9cc3c7eb1..a0914e7dd4b392738c51899a0f3d6b1c0952d70a 100644 --- a/sickbeard/notifiers/pushover.py +++ b/sickbeard/notifiers/pushover.py @@ -151,6 +151,12 @@ class PushoverNotifier(object): title = notifyStrings[NOTIFY_GIT_UPDATE] self._notifyPushover(title, update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_PUSHOVER: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._notifyPushover(title, update_text.format(ipaddress)) + def 
_notifyPushover(self, title, message, sound=None, userKey=None, apiKey=None, force=False): """ Sends a pushover notification based on the provided info or SR config diff --git a/sickbeard/notifiers/pytivo.py b/sickbeard/notifiers/pytivo.py index dfdc8ab8991d663b9ae8ce2398ef156aefc65d96..c6fd68d6151ee4757a02edfc4cef7b58e9483d92 100644 --- a/sickbeard/notifiers/pytivo.py +++ b/sickbeard/notifiers/pytivo.py @@ -42,6 +42,9 @@ class pyTivoNotifier(object): def notify_git_update(self, new_version): pass + def notify_login(self, ipaddress=""): + pass + def update_library(self, ep_obj): # Values from config diff --git a/sickbeard/notifiers/synoindex.py b/sickbeard/notifiers/synoindex.py index b1275ce95b942699ec03eef9e121b5e6ef10261d..30c844feb5af6443a79466015a5bc64c986412ee 100644 --- a/sickbeard/notifiers/synoindex.py +++ b/sickbeard/notifiers/synoindex.py @@ -41,6 +41,9 @@ class synoIndexNotifier(object): def notify_git_update(self, new_version): pass + def notify_login(self, ipaddress=""): + pass + def moveFolder(self, old_path, new_path): self.moveObject(old_path, new_path) diff --git a/sickbeard/notifiers/synologynotifier.py b/sickbeard/notifiers/synologynotifier.py index 2f3f62d26f8d7871f782e421f844e628ec1110f6..cadac287d19021cbd7402695b9fc88fac1d9a191 100644 --- a/sickbeard/notifiers/synologynotifier.py +++ b/sickbeard/notifiers/synologynotifier.py @@ -47,6 +47,12 @@ class synologyNotifier(object): title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._send_synologyNotifier(update_text + new_version, title) + def notify_login(self, ipaddress=""): + if sickbeard.USE_SYNOLOGYNOTIFIER: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._send_synologyNotifier(update_text.format(ipaddress), title) + def _send_synologyNotifier(self, message, title): synodsmnotify_cmd = ["/usr/syno/bin/synodsmnotify", "@administrators", title, message] logger.log(u"Executing command " + str(synodsmnotify_cmd)) 
diff --git a/sickbeard/notifiers/trakt.py b/sickbeard/notifiers/trakt.py index 6733c5d1b38eb0e360a55fb88f9b4a89752e9b25..33c4a2873bdebb2bc3a5328ab56ae5dc8e04ed31 100644 --- a/sickbeard/notifiers/trakt.py +++ b/sickbeard/notifiers/trakt.py @@ -43,6 +43,9 @@ class TraktNotifier(object): def notify_git_update(self, new_version): pass + def notify_login(self, ipaddress=""): + pass + def update_library(self, ep_obj): """ Sends a request to trakt indicating that the given episode is part of our library. diff --git a/sickbeard/notifiers/tweet.py b/sickbeard/notifiers/tweet.py index 99f6554c098cbbe06431197eb0d2870b09bef144..0f114758019cda43527390b51191f5e54a28bfe3 100644 --- a/sickbeard/notifiers/tweet.py +++ b/sickbeard/notifiers/tweet.py @@ -60,6 +60,12 @@ class TwitterNotifier(object): title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._notifyTwitter(title + " - " + update_text + new_version) + def notify_login(self, ipaddress=""): + if sickbeard.USE_TWITTER: + update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] + title = common.notifyStrings[common.NOTIFY_LOGIN] + self._notifyTwitter(title + " - " + update_text.format(ipaddress)) + def test_notify(self): return self._notifyTwitter("This is a test notification from SickRage", force=True) diff --git a/sickbeard/nzbSplitter.py b/sickbeard/nzbSplitter.py index 4374a76d74d09176792ed0d6cb5d57222ad7a71c..efacaeed9edfabdc4f722ed6ae17958f158fdf67 100644 --- a/sickbeard/nzbSplitter.py +++ b/sickbeard/nzbSplitter.py @@ -151,7 +151,7 @@ def split_result(obj): :param obj: to search for results :return: a list of episode objects or an empty list """ - url_data = helpers.getURL(obj.url, session=requests.Session(), needBytes=True) + url_data = helpers.getURL(obj.url, session=requests.Session(), need_bytes=True) if url_data is None: logger.log(u"Unable to load url " + obj.url + ", can't download season NZB", logger.ERROR) # pylint: disable=no-member return [] diff --git a/sickbeard/nzbget.py b/sickbeard/nzbget.py 
index d5ea16b16663353cbf76284df0966f9d94e473f1..2f14ad625dc350ab887b9152563af9dc012e2f93 100644 --- a/sickbeard/nzbget.py +++ b/sickbeard/nzbget.py @@ -23,10 +23,10 @@ from base64 import standard_b64encode import xmlrpclib import sickbeard -from sickbeard.providers.generic import GenericProvider from sickbeard import logger from sickbeard.common import Quality from sickrage.helper.common import try_int +from sickrage.providers.GenericProvider import GenericProvider def sendNZB(nzb, proper=False): @@ -115,7 +115,7 @@ def sendNZB(nzb, proper=False): else: if nzb.resultType == "nzb": genProvider = GenericProvider("") - data = genProvider.getURL(nzb.url) + data = genProvider.get_url(nzb.url) if data is None: return False nzbcontent64 = standard_b64encode(data) diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index b440afbf9c11d603a59cf03a34699f4f080b8b57..470fdbc697c7f2ca623c1563e82465e5e723d2a8 100644 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -186,13 +186,18 @@ class PostProcessor(object): if not base_name: return [] - if subfolders: # subfolders are only checked in show folder, so names will always be exactly alike - filelist = recursive_glob(ek(os.path.dirname, globbable_file_path), base_name + '*') # just create the list of all files starting with the basename - else: # this is called when PP, so we need to do the filename check case-insensitive + # subfolders are only checked in show folder, so names will always be exactly alike + if subfolders: + # just create the list of all files starting with the basename + filelist = recursive_glob(ek(os.path.dirname, globbable_file_path), base_name + '*') + # this is called when PP, so we need to do the filename check case-insensitive + else: filelist = [] - checklist = glob.glob(ek(os.path.join, ek(os.path.dirname, globbable_file_path), '*')) # get a list of all the files in the folder - for filefound in checklist: # loop through all the files in the folder, and check if they 
are the same name even when the cases don't match + # get a list of all the files in the folder + checklist = glob.glob(ek(os.path.join, ek(os.path.dirname, globbable_file_path), '*')) + # loop through all the files in the folder, and check if they are the same name even when the cases don't match + for filefound in checklist: file_name = filefound.rpartition('.')[0] file_extension = filefound.rpartition('.')[2] @@ -214,26 +219,27 @@ class PostProcessor(object): filelist.append(filefound) elif file_name.lower().endswith('pt-br') and len(filefound.rsplit('.', 2)[1]) == 5: filelist.append(filefound) - elif new_file_name.lower() == base_name.lower().replace('[[]', '[').replace('[]]', ']'): # if there's no difference in the filename add it to the filelist + # if there's no difference in the filename add it to the filelist + elif new_file_name.lower() == base_name.lower().replace('[[]', '[').replace('[]]', ']'): filelist.append(filefound) for associated_file_path in filelist: - # only add associated to list + # Exclude the video file we are post-processing if associated_file_path == file_path: continue - # only list it if the only non-shared part is the extension or if it is a subtitle - if subtitles_only and not associated_file_path[len(associated_file_path) - 3:] in subtitle_extensions: - continue + # Exlude non-subtitle files with the 'only subtitles' option (not implemented yet) + # if subtitles_only and not associated_file_path[-3:] in subtitle_extensions: + # continue # Exclude .rar files from associated list if re.search(r'(^.+\.(rar|r\d+)$)', associated_file_path): continue # Add the extensions that the user doesn't allow to the 'extensions_to_delete' list - if sickbeard.MOVE_ASSOCIATED_FILES and sickbeard.ALLOWED_EXTENSIONS: + if sickbeard.MOVE_ASSOCIATED_FILES: allowed_extensions = sickbeard.ALLOWED_EXTENSIONS.split(",") - if not associated_file_path[-3:] in allowed_extensions and not associated_file_path[-3:] in subtitle_extensions: + if not 
associated_file_path[-3:] in allowed_extensions: if ek(os.path.isfile, associated_file_path): extensions_to_delete.append(associated_file_path) @@ -414,7 +420,6 @@ class PostProcessor(object): self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy, subtitles=subtitles) - def _hardlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False): """ Hardlink file and set proper permissions @@ -789,7 +794,7 @@ class PostProcessor(object): return ep_quality # Try getting quality from the episode (snatched) status - if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: + if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: _, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable if ep_quality != common.Quality.UNKNOWN: self._log( @@ -988,7 +993,6 @@ class PostProcessor(object): else: self._log("Unable to determine needed filespace as the source file is locked for access") - # delete the existing file (and company) for cur_ep in [ep_obj] + ep_obj.relatedEps: try: diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index 3d0dd8560d84107f1a98ddd4fd3efd0e863e5406..183608d342b36ca87156f1c796686ead2cf961ea 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -79,14 +79,14 @@ class ProperFinder: # for each provider get a list of the origThreadName = threading.currentThread().name - providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive()] + providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active()] for curProvider in providers: threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]" logger.log(u"Searching for any new PROPER releases from " + curProvider.name) try: - curPropers = 
curProvider.findPropers(search_date) + curPropers = curProvider.find_propers(search_date) except AuthException, e: logger.log(u"Authentication error: " + ex(e), logger.DEBUG) continue @@ -98,7 +98,7 @@ class ProperFinder: # if they haven't been added by a different provider than add the proper to the list for x in curPropers: if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I): - logger.log(u'findPropers returned a non-proper, we have caught and skipped it.', logger.DEBUG) + logger.log(u'find_propers returned a non-proper, we have caught and skipped it.', logger.DEBUG) continue name = self._genericName(x.name) @@ -249,7 +249,7 @@ class ProperFinder: epObj = curProper.show.getEpisode(curProper.season, curProper.episode) # make the result object - result = curProper.provider.getResult([epObj]) + result = curProper.provider.get_result([epObj]) result.show = curProper.show result.url = curProper.url result.name = curProper.name diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 1d838ce81f302b50738ec923c46b9bcb68bfed77..1b447b6d896116b49e3c7102071382662b077e71 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -24,25 +24,25 @@ from random import shuffle import sickbeard from sickbeard import logger from sickbeard.providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \ - omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, xthor, torrentbytes, \ - freshontv, titansoftv, libertalia, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ - scenetime, btdigg, strike, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop + omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, xthor, 
torrentbytes, \ + freshontv, titansoftv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ + scenetime, btdigg, strike, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits __all__ = [ 'womble', 'btn', 'thepiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents', - 'torrentday', 'hdbits', 'hounddawgs', 'iptorrents', 'omgwtfnzbs', 'nextgen', + 'torrentday', 'hdbits', 'hounddawgs', 'iptorrents', 'omgwtfnzbs', 'speedcd', 'nyaatorrents', 'animenzb', 'torrentbytes', 'freshontv', 'titansoftv', - 'libertalia', 'morethantv', 'bitsoup', 't411', 'tokyotoshokan', 'alpharatio', + 'morethantv', 'bitsoup', 't411', 'tokyotoshokan', 'alpharatio', 'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers', 'cpasbien', 'fnt', 'xthor', 'scenetime', 'btdigg', 'strike', 'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker', - 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop' + 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits' ] def sortedProviderList(randomize=False): initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList - providerDict = dict(zip([x.getID() for x in initialList], initialList)) + providerDict = dict(zip([x.get_id() for x in initialList], initialList)) newList = [] @@ -53,7 +53,7 @@ def sortedProviderList(randomize=False): # add all enabled providers first for curModule in providerDict: - if providerDict[curModule] not in newList and providerDict[curModule].isEnabled(): + if providerDict[curModule] not in newList and providerDict[curModule].is_enabled(): newList.append(providerDict[curModule]) # add any modules that are missing from that list @@ -211,7 +211,7 @@ def getProviderModule(name): def getProviderClass(provider_id): providerMatch = [x for x in sickbeard.providerList + sickbeard.newznabProviderList + 
sickbeard.torrentRssProviderList if - x.getID() == provider_id] + x.get_id() == provider_id] if len(providerMatch) != 1: return None diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index c8549a7a3b813576d5c2533f201146037d098e82..d500d771cf24ecbda5ab576d80be7ad9c7a6bfad 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -23,16 +23,15 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class AlphaRatioProvider(generic.TorrentProvider): +class AlphaRatioProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "AlphaRatio") - + TorrentProvider.__init__(self, "AlphaRatio") self.username = None self.password = None @@ -54,13 +53,13 @@ class AlphaRatioProvider(generic.TorrentProvider): self.cache = AlphaRatioCache(self) - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password, 'remember_me': 'on', 'login': 'submit'} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -72,12 +71,12 @@ class AlphaRatioProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -90,12 +89,12 @@ class AlphaRatioProvider(generic.TorrentProvider): searchURL = self.urls['search'] % (search_string, self.categories) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - 
data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', attrs={'id': 'torrent_table'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] @@ -144,7 +143,7 @@ class AlphaRatioProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -159,6 +158,6 @@ class AlphaRatioCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = AlphaRatioProvider() diff --git a/sickbeard/providers/animenzb.py b/sickbeard/providers/animenzb.py index db95585cd5f91e6cfa03b366af461179c1d6a5d5..57d23009692293c9dd8d8eb504dad521f648180c 100644 --- a/sickbeard/providers/animenzb.py +++ b/sickbeard/providers/animenzb.py @@ -28,18 +28,18 @@ from sickbeard import show_name_helpers from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.NZBProvider import NZBProvider -class animenzb(generic.NZBProvider): +class animenzb(NZBProvider): def __init__(self): - generic.NZBProvider.__init__(self, "AnimeNZB") + NZBProvider.__init__(self, "AnimeNZB") - self.supportsBacklog = False + self.supports_backlog = False self.public = True - self.supportsAbsoluteNumbering = True + self.supports_absolute_numbering = True self.anime_only = True self.urls = {'base_url': 'http://animenzb.com//'} @@ -53,7 +53,7 @@ class animenzb(generic.NZBProvider): def _get_episode_search_strings(self, ep_obj, add_string=''): return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)] - def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_string, age=0, ep_obj=None): logger.log(u"Search 
string: %s " % search_string, logger.DEBUG) @@ -81,11 +81,11 @@ class animenzb(generic.NZBProvider): return results - def findPropers(self, date=None): + def find_propers(self, search_date=None): results = [] - for item in self._doSearch("v2|v3|v4|v5"): + for item in self.search("v2|v3|v4|v5"): (title, url) = self._get_title_and_url(item) @@ -96,7 +96,7 @@ class animenzb(generic.NZBProvider): else: continue - if not date or result_date > date: + if not search_date or result_date > search_date: search_result = classes.Proper(title, url, result_date, self.show) results.append(search_result) diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py index 9c911276d5d5d2a4fe1161ab6f34d6b78ec14e78..0dc5d67da92277a84b9c2b53a586ab8e59fbfbc6 100644 --- a/sickbeard/providers/binsearch.py +++ b/sickbeard/providers/binsearch.py @@ -14,24 +14,25 @@ # # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. 
+ import urllib import re - -from sickbeard.providers import generic - from sickbeard import logger from sickbeard import tvcache +from sickrage.providers.NZBProvider import NZBProvider -class BinSearchProvider(generic.NZBProvider): + +class BinSearchProvider(NZBProvider): def __init__(self): - generic.NZBProvider.__init__(self, "BinSearch") + NZBProvider.__init__(self, "BinSearch") self.public = True self.cache = BinSearchCache(self) self.urls = {'base_url': 'https://www.binsearch.info/'} self.url = self.urls['base_url'] - self.supportsBacklog = False + self.supports_backlog = False + class BinSearchCache(tvcache.TVCache): def __init__(self, provider_obj): diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py index 876f258a6bbd1d7032dfaf44dd3379843463017a..69b494e9ba0dcfc39071b67dc220b889e7e357d3 100644 --- a/sickbeard/providers/bitcannon.py +++ b/sickbeard/providers/bitcannon.py @@ -22,12 +22,13 @@ from urllib import urlencode from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class BitCannonProvider(generic.TorrentProvider): + +class BitCannonProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "BitCannon") + TorrentProvider.__init__(self, "BitCannon") self.public = True @@ -45,7 +46,7 @@ class BitCannonProvider(generic.TorrentProvider): 'apiKey': '' } - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): # search_strings comes in one of these formats: # {'Episode': ['Italian Works S05E10']} # {'Season': ['Italian Works S05']} @@ -54,7 +55,7 @@ class BitCannonProvider(generic.TorrentProvider): items = {'Season': [], 'Episode': [], 'RSS': []} # select the correct category (TODO: Add more categories?) 
- anime = (self.show and self.show.anime) or (epObj and epObj.show and epObj.show.anime) or False + anime = (self.show and self.show.anime) or (ep_obj and ep_obj.show and ep_obj.show.anime) or False self.search_params['category'] = ('tv', 'anime')[anime] # Set API Key (if applicable) @@ -77,7 +78,7 @@ class BitCannonProvider(generic.TorrentProvider): url = self.custom_url search_url = url + "api/search?" + urlencode(self.search_params) logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - parsed_json = self.getURL(search_url, json=True) + parsed_json = self.get_url(search_url, json=True) if not parsed_json: logger.log(u"No data returned from provider", logger.DEBUG) @@ -123,7 +124,7 @@ class BitCannonProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @staticmethod @@ -145,6 +146,6 @@ class BitCannonCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['tv', 'anime']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = BitCannonProvider() diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index 3428ec4115c04192a44f640bf190142340f2103d..7ff47b0171379439c0849d467f6162d3a36abffb 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -22,13 +22,13 @@ from bs4 import BeautifulSoup import sickbeard from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickrage.helper.common import try_int +from sickrage.providers.TorrentProvider import TorrentProvider -class BitSnoopProvider(generic.TorrentProvider): # pylint: disable=too-many-instance-attributes,too-many-arguments +class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes def __init__(self): - generic.TorrentProvider.__init__(self, "BitSnoop") + TorrentProvider.__init__(self, "BitSnoop") self.urls = { 'index': 
'http://bitsnoop.com', @@ -47,7 +47,7 @@ class BitSnoopProvider(generic.TorrentProvider): # pylint: disable=too-many-inst self.cache = BitSnoopCache(self) - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): # pylint: disable=too-many-branches,too-many-arguments,too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals results = [] items = {'Season': [], 'Episode': [], 'RSS': []} @@ -61,7 +61,7 @@ class BitSnoopProvider(generic.TorrentProvider): # pylint: disable=too-many-inst try: url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] - data = self.getURL(url) + data = self.get_url(url) if not data: logger.log(u"No data returned from provider", logger.DEBUG) continue @@ -70,7 +70,7 @@ class BitSnoopProvider(generic.TorrentProvider): # pylint: disable=too-many-inst logger.log(u'Expected xml but got something else, is your mirror failing?', logger.INFO) continue - data = BeautifulSoup(data, features=["html5lib", "permissive"]) + data = BeautifulSoup(data, 'html5lib') entries = entries = data.findAll('item') @@ -123,8 +123,7 @@ class BitSnoopProvider(generic.TorrentProvider): # pylint: disable=too-many-inst return results - - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -137,7 +136,7 @@ class BitSnoopCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['rss']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = BitSnoopProvider() diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py index a3e10b6e8e94211b171de77b944da663949470b3..f8585c739dbd3d1904525f60756e23fe771bc0ab 100644 --- a/sickbeard/providers/bitsoup.py +++ b/sickbeard/providers/bitsoup.py @@ -21,12 +21,13 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import 
generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class BitSoupProvider(generic.TorrentProvider): + +class BitSoupProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "BitSoup") + TorrentProvider.__init__(self, "BitSoup") self.urls = { 'base_url': 'https://www.bitsoup.me', @@ -38,7 +39,6 @@ class BitSoupProvider(generic.TorrentProvider): self.url = self.urls['base_url'] - self.username = None self.password = None self.ratio = None @@ -51,13 +51,13 @@ class BitSoupProvider(generic.TorrentProvider): "c42": 1, "c45": 1, "c49": 1, "c7": 1 } - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: logger.log(u"Invalid username or password. Check your settings", logger.WARNING) return True - def _doLogin(self): + def login(self): login_params = { 'username': self.username, @@ -65,7 +65,7 @@ class BitSoupProvider(generic.TorrentProvider): 'ssl': 'yes' } - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -76,12 +76,12 @@ class BitSoupProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -93,7 +93,7 @@ class BitSoupProvider(generic.TorrentProvider): self.search_params['search'] = search_string - data = self.getURL(self.urls['search'], params=self.search_params) + data = self.get_url(self.urls['search'], params=self.search_params) if not data: continue @@ -147,7 +147,7 @@ class BitSoupProvider(generic.TorrentProvider): return results - def 
seedRatio(self): + def seed_ratio(self): return self.ratio @@ -161,7 +161,7 @@ class BitSoupCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = BitSoupProvider() diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index c6d2902bd59ffe8e868214be9a6de7ce9f12acef..c3af540e1150240bac584fef9cecae73de518369 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -22,17 +22,16 @@ import requests import re from requests.auth import AuthBase -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser from sickbeard import logger from sickbeard import tvcache +from sickrage.providers.TorrentProvider import TorrentProvider -class BLUETIGERSProvider(generic.TorrentProvider): +class BLUETIGERSProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "BLUETIGERS") - + TorrentProvider.__init__(self, "BLUETIGERS") self.username = None self.password = None @@ -54,7 +53,7 @@ class BLUETIGERSProvider(generic.TorrentProvider): self.url = self.urls['base_url'] - def _doLogin(self): + def login(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True @@ -64,10 +63,10 @@ class BLUETIGERSProvider(generic.TorrentProvider): 'take_login' : '1' } - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: - check_login = self.getURL(self.urls['base_url'], timeout=30) + check_login = self.get_url(self.urls['base_url'], timeout=30) if re.search('account-logout.php', check_login): return True else: @@ -80,12 +79,12 @@ class BLUETIGERSProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): 
+ def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -97,12 +96,12 @@ class BLUETIGERSProvider(generic.TorrentProvider): self.search_params['search'] = search_string - data = self.getURL(self.urls['search'], params=self.search_params) + data = self.get_url(self.urls['search'], params=self.search_params) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: result_linkz = html.findAll('a', href=re.compile("torrents-details")) if not result_linkz: @@ -144,7 +143,7 @@ class BLUETIGERSProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -167,7 +166,7 @@ class BLUETIGERSCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = BLUETIGERSProvider() diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index c97d225e9986c154083d66cd08a1ba9cef0ec599..ceb8b6010677161051207455442111b1cff745e5 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -19,17 +19,16 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
- -from sickbeard.providers import generic from urllib import urlencode from sickbeard import logger from sickbeard import tvcache +from sickrage.providers.TorrentProvider import TorrentProvider -class BTDIGGProvider(generic.TorrentProvider): +class BTDIGGProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "BTDigg") + TorrentProvider.__init__(self, "BTDigg") self.public = True self.ratio = 0 @@ -46,7 +45,7 @@ class BTDIGGProvider(generic.TorrentProvider): self.cache = BTDiggCache(self) - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} @@ -65,7 +64,7 @@ class BTDIGGProvider(generic.TorrentProvider): searchURL = self.urls['api'] + '?' + urlencode(search_params) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - jdata = self.getURL(searchURL, json=True) + jdata = self.get_url(searchURL, json=True) if not jdata: logger.log(u"No data returned to be parsed!!!") return [] @@ -101,9 +100,10 @@ class BTDIGGProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio + class BTDiggCache(tvcache.TVCache): def __init__(self, provider_obj): @@ -116,6 +116,6 @@ class BTDiggCache(tvcache.TVCache): # Use this hacky way for RSS search since most results will use this codecs search_params = {'RSS': ['x264', 'x264.HDTV', '720.HDTV.x264']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = BTDIGGProvider() diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 47f825d497ad9b9a3df299fa4c7c2c12b8c6c95d..95d06605699b8372dc6b25f746a1b00bbb7c3bcd 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -28,18 +28,17 @@ from sickbeard import logger from sickbeard import classes from sickbeard import tvcache from sickbeard 
import scene_exceptions -from sickbeard.providers import generic from sickbeard.helpers import sanitizeSceneName from sickbeard.common import cpu_presets from sickrage.helper.exceptions import AuthException, ex +from sickrage.providers.TorrentProvider import TorrentProvider -class BTNProvider(generic.TorrentProvider): +class BTNProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "BTN") + TorrentProvider.__init__(self, "BTN") - - self.supportsAbsoluteNumbering = True + self.supports_absolute_numbering = True self.api_key = None self.ratio = None @@ -51,7 +50,7 @@ class BTNProvider(generic.TorrentProvider): self.url = self.urls['website'] - def _checkAuth(self): + def _check_auth(self): if not self.api_key: logger.log(u"Invalid api key. Check your settings", logger.WARNING) @@ -60,7 +59,7 @@ class BTNProvider(generic.TorrentProvider): def _checkAuthFromData(self, parsedJSON): if parsedJSON is None: - return self._checkAuth() + return self._check_auth() if 'api-error' in parsedJSON: logger.log(u"Incorrect authentication credentials: % s" % parsedJSON['api-error'], logger.DEBUG) @@ -69,9 +68,9 @@ class BTNProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): - self._checkAuth() + self._check_auth() results = [] params = {} @@ -258,15 +257,15 @@ class BTNProvider(generic.TorrentProvider): def _doGeneralSearch(self, search_string): # 'search' looks as broad is it can find. Can contain episode overview and title for example, # use with caution! 
- return self._doSearch({'search': search_string}) + return self.search({'search': search_string}) - def findPropers(self, search_date=None): + def find_propers(self, search_date=None): results = [] search_terms = ['%.proper.%', '%.repack.%'] for term in search_terms: - for item in self._doSearch({'release': term}, age=4 * 24 * 60 * 60): + for item in self.search({'release': term}, age=4 * 24 * 60 * 60): if item['Time']: try: result_date = datetime.fromtimestamp(float(item['Time'])) @@ -280,7 +279,7 @@ class BTNProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -307,7 +306,7 @@ class BTNCache(tvcache.TVCache): logger.DEBUG) seconds_since_last_update = 86400 - return {'entries': self.provider._doSearch(search_params=None, age=seconds_since_last_update)} + return {'entries': self.provider.search(search_params=None, age=seconds_since_last_update)} provider = BTNProvider() diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index a2456108c1b1484e899192c8b5e49da59ec7769d..c826b9411be36254824d069af424e835218b3cc2 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -21,15 +21,15 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class CpasbienProvider(generic.TorrentProvider): +class CpasbienProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Cpasbien") + TorrentProvider.__init__(self, "Cpasbien") self.public = True self.ratio = None @@ -39,7 +39,7 @@ class CpasbienProvider(generic.TorrentProvider): self.cache = CpasbienCache(self) - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} 
@@ -53,13 +53,13 @@ class CpasbienProvider(generic.TorrentProvider): searchURL = self.url + '/recherche/'+search_string.replace('.', '-') + '.html' logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: lin = erlin = 0 resultdiv = [] while erlin == 0: @@ -115,9 +115,10 @@ class CpasbienProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio + class CpasbienCache(tvcache.TVCache): def __init__(self, provider_obj): diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/danishbits.py similarity index 60% rename from sickbeard/providers/nextgen.py rename to sickbeard/providers/danishbits.py index 172af632dbd7f6b7c752a48ac8869f008304f65b..51d466014a22fe575a7673f6224382e282e7e9ca 100644 --- a/sickbeard/providers/nextgen.py +++ b/sickbeard/providers/danishbits.py @@ -19,34 +19,34 @@ import traceback import urllib import time +import re from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider from sickbeard.bs4_parser import BS4Parser -class NextGenProvider(generic.TorrentProvider): - def __init__(self): - - generic.TorrentProvider.__init__(self, "NextGen") +class DanishbitsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes + def __init__(self): + TorrentProvider.__init__(self, "Danishbits") self.username = None self.password = None self.ratio = None - self.cache = NextGenCache(self) + self.cache = DanishbitsCache(self) - self.urls = {'base_url': 'https://nxtgn.biz/', - 'search': 'https://nxtgn.biz/browse.php?search=%s&cat=0&incldead=0&modes=%s', - 'login_page': 'https://nxtgn.biz/login.php'} + self.urls = {'base_url': 'https://danishbits.org/', + 'search': 
'https://danishbits.org/torrents.php?action=newbrowse&search=%s%s', + 'login_page': 'https://danishbits.org/login.php'} self.url = self.urls['base_url'] - self.categories = '&c7=1&c24=1&c17=1&c22=1&c42=1&c46=1&c26=1&c28=1&c43=1&c4=1&c31=1&c45=1&c33=1' + self.categories = '&group=3' self.last_login_check = None @@ -56,24 +56,19 @@ class NextGenProvider(generic.TorrentProvider): self.minleech = 0 self.freeleech = True - def getLoginParams(self): - return { - 'username': self.username, - 'password': self.password, - } - - def loginSuccess(self, output): - if "<title>NextGen - Login</title>" in output: + @staticmethod + def loginSuccess(output): + if "<title>Login :: Danishbits.org</title>" in output: return False else: return True - def _doLogin(self): + def login(self): now = time.time() if self.login_opener and self.last_login_check < (now - 3600): try: - output = self.getURL(self.urls['test']) + output = self.get_url(self.urls['test']) if self.loginSuccess(output): self.last_login_check = now return True @@ -86,21 +81,21 @@ class NextGenProvider(generic.TorrentProvider): return True try: - login_params = self.getLoginParams() - data = self.getURL(self.urls['login_page']) + data = self.get_url(self.urls['login_page']) if not data: return False - with BS4Parser(data) as bs: - csrfraw = bs.find('form', attrs={'id': 'login'})['action'] - output = self.getURL(self.urls['base_url'] + csrfraw, post_data=login_params) - - if self.loginSuccess(output): - self.last_login_check = now - self.login_opener = self.session - return True - - error = 'unknown' + login_params = { + 'username': self.username, + 'password': self.password, + } + output = self.get_url(self.urls['login_page'], post_data=login_params) + if self.loginSuccess(output): + self.last_login_check = now + self.login_opener = self.session + return True + + error = 'unknown' except Exception: error = traceback.format_exc() self.login_opener = None @@ -109,40 +104,33 @@ class NextGenProvider(generic.TorrentProvider): 
logger.log(u"Failed to login: %s" % error, logger.ERROR) return False - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): logger.log(u"Search Mode: %s" % mode, logger.DEBUG) for search_string in search_params[mode]: + if mode == 'RSS': + continue if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + data = self.get_url(searchURL) if not data: continue try: with BS4Parser(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) as html: - resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'}) - - if not resultsTable: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) - continue - # Collecting entries - entries_std = html.find_all('div', attrs={'id': 'torrent-std'}) - entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'}) - - entries = entries_std + entries_sticky + entries = html.find_all('tr', attrs={'class': 'torrent'}) # Xirg STANDARD TORRENTS # Continue only if one Release is found @@ -152,15 +140,16 @@ class NextGenProvider(generic.TorrentProvider): for result in entries: - try: - title = result.find('div', attrs={'id': 'torrent-udgivelse2-users'}).a['title'] - download_url = self.urls['base_url'] + result.find('div', attrs={'id': 'torrent-download'}).a['href'] - seeders = int(result.find('div', attrs={'id' : 'torrent-seeders'}).text) - leechers = int(result.find('div', attrs={'id' : 'torrent-leechers'}).text) - size 
= self._convertSize(result.find('div', attrs={'id' : 'torrent-size'}).text) - freeleech = result.find('div', attrs={'id': 'browse-mode-F2L'}) is not None - except (AttributeError, TypeError, KeyError): - continue + # try: + title = result.find('div', attrs={'class': 'croptorrenttext'}).find('b').text + download_url = self.urls['base_url'] + result.find('span', attrs={'class': 'right'}).find('a')['href'] + seeders = int(result.find_all('td')[6].text) + leechers = int(result.find_all('td')[7].text) + size = self._convertSize(result.find_all('td')[2].text) + freeleech = result.find('div', attrs={'class': 'freeleech'}) is not None + # except (AttributeError, TypeError, KeyError): + # logger.log(u"attrErr: {0}, tErr: {1}, kErr: {2}".format(AttributeError, TypeError, KeyError), logger.DEBUG) + # continue if self.freeleech and not freeleech: continue @@ -180,7 +169,7 @@ class NextGenProvider(generic.TorrentProvider): items[mode].append(item) - except Exception, e: + except Exception: logger.log(u"Failed parsing provider. 
Traceback: %s" % traceback.format_exc(), logger.ERROR) # For each search mode sort all the items by seeders if available @@ -190,7 +179,12 @@ class NextGenProvider(generic.TorrentProvider): return results - def _convertSize(self, size): + @staticmethod + def _convertSize(size): + regex = re.compile(r'(.+?\w{2})\d+ file\w') + m = regex.match(size) + size = m.group(1) + size, modifier = size[:-2], size[-2:] size = float(size) if modifier in 'KB': @@ -207,17 +201,17 @@ class NextGenProvider(generic.TorrentProvider): return self.ratio -class NextGenCache(tvcache.TVCache): +class DanishbitsCache(tvcache.TVCache): def __init__(self, provider_obj): tvcache.TVCache.__init__(self, provider_obj) - # Only poll NextGen every 10 minutes max + # Only poll Danishbits every 10 minutes max self.minTime = 10 def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} -provider = NextGenProvider() +provider = DanishbitsProvider() diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index f63582e602226b411215c4946d1349f0f97d9118..bb1ab5b3c04017a9d0bf46167515764fe218d600 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -24,14 +24,14 @@ from six.moves import urllib from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class elitetorrentProvider(generic.TorrentProvider): +class elitetorrentProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "EliteTorrent") + TorrentProvider.__init__(self, "EliteTorrent") self.onlyspasearch = None self.minseed = None @@ -48,7 +48,7 @@ class elitetorrentProvider(generic.TorrentProvider): """ Search query: 
http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe - + cat = 4 => Shows modo = listado => display results mode orden = fecha => order @@ -64,17 +64,17 @@ class elitetorrentProvider(generic.TorrentProvider): 'buscar': '' } - - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - lang_info = '' if not epObj or not epObj.show else epObj.show.lang - + lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + for mode in search_strings.keys(): logger.log(u"Search Mode: %s" % mode, logger.DEBUG) - + # Only search if user conditions are true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': logger.log(u"Show info is not spanish, skipping provider search", logger.DEBUG) @@ -83,20 +83,20 @@ class elitetorrentProvider(generic.TorrentProvider): for search_string in search_strings[mode]: if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - + search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string) self.search_params['buscar'] = search_string.strip() if mode != 'RSS' else '' - + searchURL = self.urls['search'] + '?' 
+ urllib.parse.urlencode(self.search_params) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - - data = self.getURL(searchURL, timeout=30) - + + data = self.get_url(searchURL, timeout=30) + if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', class_='fichas-listado') if torrent_table is None: @@ -111,13 +111,13 @@ class elitetorrentProvider(generic.TorrentProvider): for row in torrent_rows[1:]: try: seeders_raw = row.find('td', class_='semillas').text - leechers_raw = row.find('td', class_='clientes').text + leechers_raw = row.find('td', class_='clientes').text download_url = self.urls['base_url'] + row.findAll('a')[0].get('href', '') title = self._processTitle(row.findAll('a')[1].text) seeders = seeders_raw if seeders_raw.isnumeric() else 0 leechers = leechers_raw if leechers_raw.isnumeric() else 0 - + # FIXME: Provider does not provide size size = 0 @@ -149,24 +149,24 @@ class elitetorrentProvider(generic.TorrentProvider): return results - + @staticmethod def _processTitle(title): # Quality, if no literal is defined it's HDTV if 'calidad' not in title: title += ' HDTV x264' - - title = title.replace('(calidad baja)', 'HDTV x264') + + title = title.replace('(calidad baja)', 'HDTV x264') title = title.replace('(Buena calidad)', '720p HDTV x264') title = title.replace('(Alta calidad)', '720p HDTV x264') title = title.replace('(calidad regular)', 'DVDrip x264') title = title.replace('(calidad media)', 'DVDrip x264') - + #Language, all results from this provider have spanish audio, we append it to title (avoid to download undesired torrents) title += ' SPANISH AUDIO' title += '-ELITETORRENT' - + return title.strip() @@ -179,7 +179,7 @@ class elitetorrentCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': 
self.provider.search(search_params)} provider = elitetorrentProvider() diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index f3b632e1ab060991d9a39ddef2e62d353d477a5a..b5a75f2dfdb29bc7648511a07c2220dc95776478 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -24,13 +24,13 @@ from xml.parsers.expat import ExpatError from sickbeard import logger from sickbeard import tvcache from sickbeard.common import USER_AGENT -from sickbeard.providers import generic from sickrage.helper.common import try_int +from sickrage.providers.TorrentProvider import TorrentProvider -class ExtraTorrentProvider(generic.TorrentProvider): +class ExtraTorrentProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "ExtraTorrent") + TorrentProvider.__init__(self, "ExtraTorrent") self.urls = { 'index': 'http://extratorrent.cc', @@ -48,7 +48,7 @@ class ExtraTorrentProvider(generic.TorrentProvider): self.headers.update({'User-Agent': USER_AGENT}) self.search_params = {'cid': 8} - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} @@ -62,7 +62,7 @@ class ExtraTorrentProvider(generic.TorrentProvider): try: self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string}) - data = self.getURL(self.urls['rss'], params=self.search_params) + data = self.get_url(self.urls['rss'], params=self.search_params) if not data: logger.log(u"No data returned from provider", logger.DEBUG) continue @@ -119,7 +119,7 @@ class ExtraTorrentProvider(generic.TorrentProvider): return results def _magnet_from_details(self, link): - details = self.getURL(link) + details = self.get_url(link) if not details: return '' @@ -129,7 +129,7 @@ class ExtraTorrentProvider(generic.TorrentProvider): return match.group(1) - def seedRatio(self): + 
def seed_ratio(self): return self.ratio @@ -142,7 +142,7 @@ class ExtraTorrentCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = ExtraTorrentProvider() diff --git a/sickbeard/providers/fnt.py b/sickbeard/providers/fnt.py index adae3518cb659b3dbb1c6eafaada3d72466d2920..95a78a1a8c8640f7e41fd63d8f8422724fcaf7c1 100644 --- a/sickbeard/providers/fnt.py +++ b/sickbeard/providers/fnt.py @@ -23,14 +23,13 @@ import requests from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class FNTProvider(generic.TorrentProvider): +class FNTProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "FNT") - + TorrentProvider.__init__(self, "FNT") self.username = None self.password = None @@ -53,7 +52,7 @@ class FNTProvider(generic.TorrentProvider): "visible": 1, "freeleech": 0, "nuke": 1, "3D": 0, "sort": "size", "order": "desc" } - def _doLogin(self): + def login(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True @@ -63,7 +62,7 @@ class FNTProvider(generic.TorrentProvider): 'submit' : 'Se loguer' } - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -76,13 +75,13 @@ class FNTProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} # check for auth - if not self._doLogin(): + if not self.login(): return results 
for mode in search_strings.keys(): @@ -94,12 +93,12 @@ class FNTProvider(generic.TorrentProvider): self.search_params['recherche'] = search_string - data = self.getURL(self.urls['search'], params=self.search_params) + data = self.get_url(self.urls['search'], params=self.search_params) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: result_table = html.find('table', {'id': 'tablealign3bis'}) if not result_table: @@ -154,7 +153,7 @@ class FNTProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -167,7 +166,7 @@ class FNTCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = FNTProvider() diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 2fb6858c85062f7b9e3f2bb13d06801febb6097b..81ea9ff6a5d7397475a9d228093def608d3726a6 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -23,17 +23,15 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser from sickrage.helper.common import try_int +from sickrage.providers.TorrentProvider import TorrentProvider -class FreshOnTVProvider(generic.TorrentProvider): +class FreshOnTVProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "FreshOnTV") - - + TorrentProvider.__init__(self, "FreshOnTV") self._uid = None self._hash = None @@ -56,14 +54,14 @@ class FreshOnTVProvider(generic.TorrentProvider): self.cookies = None - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: logger.log(u"Invalid username or password. 
Check your settings", logger.WARNING) return True - def _doLogin(self): + def login(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True @@ -74,7 +72,7 @@ class FreshOnTVProvider(generic.TorrentProvider): 'password': self.password, 'login': 'submit'} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -102,14 +100,14 @@ class FreshOnTVProvider(generic.TorrentProvider): return False - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} freeleech = '3' if self.freeleech else '0' - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -121,7 +119,7 @@ class FreshOnTVProvider(generic.TorrentProvider): searchURL = self.urls['search'] % (freeleech, search_string) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - init_html = self.getURL(searchURL) + init_html = self.get_url(searchURL) max_page_number = 0 if not init_html: @@ -129,7 +127,7 @@ class FreshOnTVProvider(generic.TorrentProvider): continue try: - with BS4Parser(init_html, features=["html5lib", "permissive"]) as init_soup: + with BS4Parser(init_html, 'html5lib') as init_soup: # Check to see if there is more than 1 page of results pager = init_soup.find('div', {'class': 'pager'}) @@ -166,7 +164,7 @@ class FreshOnTVProvider(generic.TorrentProvider): time.sleep(1) page_searchURL = searchURL + '&page=' + str(i) # '.log(u"Search string: " + page_searchURL, logger.DEBUG) - page_html = self.getURL(page_searchURL) + page_html = self.get_url(page_searchURL) if not page_html: continue @@ -177,7 +175,7 @@ class FreshOnTVProvider(generic.TorrentProvider): for data_response in 
data_response_list: - with BS4Parser(data_response, features=["html5lib", "permissive"]) as html: + with BS4Parser(data_response, 'html5lib') as html: torrent_rows = html.findAll("tr", {"class": re.compile('torrent_[0-9]*')}) @@ -234,7 +232,7 @@ class FreshOnTVProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -248,6 +246,6 @@ class FreshOnTVCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = FreshOnTVProvider() diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py deleted file mode 100644 index 2b515390185e9ff6713e609f3c24e3cd2c42f75c..0000000000000000000000000000000000000000 --- a/sickbeard/providers/generic.py +++ /dev/null @@ -1,668 +0,0 @@ -# coding=utf-8 -# Author: Nic Wolfe <nic@wolfeden.ca> -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of SickRage. -# -# SickRage is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickRage is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
- -import datetime -import os -import re -import itertools -from random import shuffle -from base64 import b16encode, b32decode - -import requests -from hachoir_parser import createParser - -import sickbeard -from sickbeard import helpers, classes, logger, db -from sickbeard.common import MULTI_EP_RESULT, SEASON_RESULT -from sickbeard import tvcache -from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickbeard.common import Quality -from sickbeard.common import user_agents -from sickrage.helper.common import sanitize_filename -from sickrage.helper.encoding import ek -from sickrage.helper.exceptions import ex -from sickrage.show.Show import Show -from sickbeard import show_name_helpers - - -class GenericProvider(object): - NZB = "nzb" - TORRENT = "torrent" - - def __init__(self, name): - - # these need to be set in the subclass - self.providerType = None - self.name = name - - self.urls = {} - self.url = '' - - self.public = False - - self.show = None - - self.supportsBacklog = True - self.supportsAbsoluteNumbering = False - self.anime_only = False - - self.search_mode = None - self.search_fallback = False - - self.enabled = False - self.enable_daily = False - self.enable_backlog = False - - self.cache = tvcache.TVCache(self) - - self.session = requests.Session() - - shuffle(user_agents) - self.headers = {'User-Agent': user_agents[0]} - - self.btCacheURLS = [ - 'http://torcache.net/torrent/{torrent_hash}.torrent', - 'http://thetorrent.org/torrent/{torrent_hash}.torrent', - 'http://btdig.com/torrent/{torrent_hash}.torrent', - # 'http://torrage.com/torrent/{torrent_hash}.torrent', - # 'http://itorrents.org/torrent/{torrent_hash}.torrent', - ] - - shuffle(self.btCacheURLS) - - self.proper_strings = ['PROPER|REPACK|REAL'] - - def getID(self): - return GenericProvider.makeID(self.name) - - @staticmethod - def makeID(name): - return re.sub(r"[^\w\d_]", "_", name.strip().lower()) - - def imageName(self): - return 
self.getID() + '.png' - - # pylint: disable=no-self-use,unused-variable - # Method could be a function, Unused variable - def _checkAuth(self): - return True - - def _doLogin(self): - return True - - def isActive(self): - return False - - def isEnabled(self): - return self.enabled - - def getResult(self, episodes): - """ - Returns a result of the correct type for this provider - """ - - if self.providerType == GenericProvider.NZB: - result = classes.NZBSearchResult(episodes) - elif self.providerType == GenericProvider.TORRENT: - result = classes.TorrentSearchResult(episodes) - else: - result = classes.SearchResult(episodes) - - result.provider = self - - return result - - def getURL(self, url, post_data=None, params=None, timeout=30, json=False, needBytes=False): - """ - By default this is just a simple urlopen call but this method should be overridden - for providers with special URL requirements (like cookies) - """ - - return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, - session=self.session, json=json, needBytes=needBytes) - - - def _makeURL(self, result): - urls = [] - filename = u'' - if result.url.startswith('magnet'): - try: - torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper() - - try: - torrent_name = re.findall('dn=([^&]+)', result.url)[0] - except Exception: - torrent_name = 'NO_DOWNLOAD_NAME' - - if len(torrent_hash) == 32: - torrent_hash = b16encode(b32decode(torrent_hash)).upper() - - if not torrent_hash: - logger.log(u"Unable to extract torrent hash from magnet: " + ex(result.url), logger.ERROR) - return urls, filename - - urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.btCacheURLS] - except Exception: - logger.log(u"Unable to extract torrent hash or name from magnet: " + ex(result.url), logger.ERROR) - return urls, filename - else: - urls = [result.url] - - if self.providerType == GenericProvider.TORRENT: - filename = ek(os.path.join, 
sickbeard.TORRENT_DIR, sanitize_filename(result.name) + '.' + self.providerType) - - elif self.providerType == GenericProvider.NZB: - filename = ek(os.path.join, sickbeard.NZB_DIR, sanitize_filename(result.name) + '.' + self.providerType) - - return urls, filename - - def downloadResult(self, result): - """ - Save the result to disk. - """ - - # check for auth - if not self._doLogin(): - return False - - urls, filename = self._makeURL(result) - - for url in urls: - if 'NO_DOWNLOAD_NAME' in url: - continue - - if url.startswith('http'): - self.headers.update({'Referer': '/'.join(url.split('/')[:3]) + '/'}) - - logger.log(u"Downloading a result from " + self.name + " at " + url) - - # Support for Jackett/TorzNab - if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB): - filename = filename.rsplit('.', 1)[0] + '.' + GenericProvider.TORRENT - - if helpers.download_file(url, filename, session=self.session, headers=self.headers): - if self._verify_download(filename): - logger.log(u"Saved result to " + filename, logger.INFO) - return True - else: - logger.log(u"Could not download %s" % url, logger.WARNING) - helpers.remove_file_failed(filename) - - if len(urls): - logger.log(u"Failed to download any results", logger.WARNING) - - return False - - def _verify_download(self, file_name=None): - """ - Checks the saved file to see if it was actually valid, if not then consider the download a failure. 
- """ - - # primitive verification of torrents, just make sure we didn't get a text file or something - if file_name.endswith(GenericProvider.TORRENT): - try: - parser = createParser(file_name) - if parser: - # pylint: disable=protected-access - # Access to a protected member of a client class - mime_type = parser._getMimeType() - try: - parser.stream._input.close() - except Exception: - pass - if mime_type == 'application/x-bittorrent': - return True - except Exception as e: - logger.log(u"Failed to validate torrent file: " + ex(e), logger.DEBUG) - - logger.log(u"Result is not a valid torrent file", logger.DEBUG) - return False - - return True - - def searchRSS(self, episodes): - return self.cache.findNeededEpisodes(episodes) - - def getQuality(self, item, anime=False): - """ - Figures out the quality of the given RSS item node - - item: An elementtree.ElementTree element representing the <item> tag of the RSS feed - - Returns a Quality value obtained from the node's data - """ - (title, url) = self._get_title_and_url(item) - quality = Quality.sceneQuality(title, anime) - return quality - - # pylint: disable=no-self-use,unused-argument - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): - return [] - - def _get_season_search_strings(self, episode): - return [] - - def _get_episode_search_strings(self, eb_obj, add_string=''): - return [] - - def _get_title_and_url(self, item): - """ - Retrieves the title and URL data from the item XML node - - item: An elementtree.ElementTree element representing the <item> tag of the RSS feed - - Returns: A tuple containing two strings representing title and URL respectively - """ - - title = item.get('title', '') - if title: - title = u'' + title.replace(' ', '.') - - url = item.get('link', '') - if url: - url = url.replace('&', '&').replace('%26tr%3D', '&tr=') - - return title, url - - def _get_size(self, item): - """Gets the size from the item""" - logger.log(u"Provider type doesn't have 
_get_size() implemented yet", logger.ERROR) - return -1 - - def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False): - - self._checkAuth() - self.show = show - - results = {} - itemList = [] - - searched_scene_season = None - for epObj in episodes: - # search cache for episode result - cacheResult = self.cache.searchCache(epObj, manualSearch, downCurQuality) - if cacheResult: - if epObj.episode not in results: - results[epObj.episode] = cacheResult - else: - results[epObj.episode].extend(cacheResult) - - # found result, search next episode - continue - - # skip if season already searched - if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == epObj.scene_season: - continue - - # mark season searched for season pack searches so we can skip later on - searched_scene_season = epObj.scene_season - - search_strings = [] - if len(episodes) > 1 and search_mode == 'sponly': - # get season search results - search_strings = self._get_season_search_strings(epObj) - elif search_mode == 'eponly': - # get single episode search results - search_strings = self._get_episode_search_strings(epObj) - - first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0] - if first: - logger.log(u'First search_string has rid', logger.DEBUG) - - for curString in search_strings: - itemList += self._doSearch(curString, search_mode, len(episodes), epObj=epObj) - if first: - first = False - if itemList: - logger.log(u'First search_string had rid, and returned results, skipping query by string', logger.DEBUG) - break - else: - logger.log(u'First search_string had rid, but returned no results, searching with string query', logger.DEBUG) - - # if we found what we needed already from cache then return results and exit - if len(results) == len(episodes): - return results - - # sort list by quality - if len(itemList): - items = {} - itemsUnknown = [] - for item in itemList: - quality = 
self.getQuality(item, anime=show.is_anime) - if quality == Quality.UNKNOWN: - itemsUnknown += [item] - else: - if quality not in items: - items[quality] = [item] - else: - items[quality].append(item) - - itemList = list(itertools.chain(*[v for (k, v) in sorted(items.iteritems(), reverse=True)])) - itemList += itemsUnknown if itemsUnknown else [] - - # filter results - cl = [] - for item in itemList: - (title, url) = self._get_title_and_url(item) - - # parse the file name - try: - myParser = NameParser(parse_method=('normal', 'anime')[show.is_anime]) - parse_result = myParser.parse(title) - except InvalidNameException: - logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG) - continue - except InvalidShowException: - logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG) - continue - - showObj = parse_result.show - quality = parse_result.quality - release_group = parse_result.release_group - version = parse_result.version - - addCacheEntry = False - if not (showObj.air_by_date or showObj.sports): - if search_mode == 'sponly': - if len(parse_result.episode_numbers): - logger.log( - u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it", - logger.DEBUG) - addCacheEntry = True - if len(parse_result.episode_numbers) and (parse_result.season_number not in set([ep.season for ep in episodes]) - or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]): - logger.log( - u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring", - logger.DEBUG) - addCacheEntry = True - else: - if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in - episodes if - ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: - logger.log( - u"The result " + title + " doesn't seem to be a valid season that we are trying to 
snatch, ignoring", - logger.DEBUG) - addCacheEntry = True - elif len(parse_result.episode_numbers) and not [ep for ep in episodes if - ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: - logger.log( - u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring", - logger.DEBUG) - addCacheEntry = True - - if not addCacheEntry: - # we just use the existing info for normal searches - actual_season = parse_result.season_number - actual_episodes = parse_result.episode_numbers - else: - sameDaySpecial = False - if not parse_result.is_air_by_date: - logger.log( - u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it", - logger.DEBUG) - addCacheEntry = True - else: - airdate = parse_result.air_date.toordinal() - myDB = db.DBConnection() - sql_results = myDB.select( - "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", - [showObj.indexerid, airdate]) - - if len(sql_results) == 2: - if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0: - actual_season = int(sql_results[1]["season"]) - actual_episodes = [int(sql_results[1]["episode"])] - sameDaySpecial = True - elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0: - actual_season = int(sql_results[0]["season"]) - actual_episodes = [int(sql_results[0]["episode"])] - sameDaySpecial = True - elif len(sql_results) != 1: - logger.log( - u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it", - logger.WARNING) - addCacheEntry = True - - if not addCacheEntry and not sameDaySpecial: - actual_season = int(sql_results[0]["season"]) - actual_episodes = [int(sql_results[0]["episode"])] - - # add parsed result to cache for usage later on - if addCacheEntry: - logger.log(u"Adding item from search to cache: " + title, logger.DEBUG) - # pylint: disable=protected-access - # Access to a 
protected member of a client class - ci = self.cache._addCacheEntry(title, url, parse_result=parse_result) - if ci is not None: - cl.append(ci) - continue - - # make sure we want the episode - wantEp = True - for epNo in actual_episodes: - if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch, downCurQuality): - wantEp = False - break - - if not wantEp: - logger.log( - u"Ignoring result " + title + " because we don't want an episode that is " + - Quality.qualityStrings[ - quality], logger.INFO) - - continue - - logger.log(u"Found result " + title + " at " + url, logger.DEBUG) - - # make a result object - epObj = [] - for curEp in actual_episodes: - epObj.append(showObj.getEpisode(actual_season, curEp)) - - result = self.getResult(epObj) - result.show = showObj - result.url = url - result.name = title - result.quality = quality - result.release_group = release_group - result.version = version - result.content = None - result.size = self._get_size(item) - - if len(epObj) == 1: - epNum = epObj[0].episode - logger.log(u"Single episode result.", logger.DEBUG) - elif len(epObj) > 1: - epNum = MULTI_EP_RESULT - logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str( - parse_result.episode_numbers), logger.DEBUG) - elif len(epObj) == 0: - epNum = SEASON_RESULT - logger.log(u"Separating full season result to check for later", logger.DEBUG) - - if epNum not in results: - results[epNum] = [result] - else: - results[epNum].append(result) - - # check if we have items to add to cache - if len(cl) > 0: - # pylint: disable=protected-access - # Access to a protected member of a client class - myDB = self.cache._getDB() - myDB.mass_action(cl) - - return results - - def findPropers(self, search_date=None): - - results = self.cache.listPropers(search_date) - - return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in - results] - - def seedRatio(self): - ''' - Provider should 
override this value if custom seed ratio enabled - It should return the value of the provider seed ratio - ''' - return '' - - -class NZBProvider(GenericProvider): - def __init__(self, name): - GenericProvider.__init__(self, name) - - self.providerType = GenericProvider.NZB - - def isActive(self): - return sickbeard.USE_NZBS and self.isEnabled() - - def _get_size(self, item): - try: - size = item.get('links')[1].get('length', -1) - except IndexError: - size = -1 - - if not size: - logger.log(u"Size was not found in your provider response", logger.DEBUG) - - return int(size) - - -class TorrentProvider(GenericProvider): - def __init__(self, name): - GenericProvider.__init__(self, name) - - self.providerType = GenericProvider.TORRENT - - def isActive(self): - return sickbeard.USE_TORRENTS and self.isEnabled() - - def _get_title_and_url(self, item): - from feedparser.util import FeedParserDict - if isinstance(item, (dict, FeedParserDict)): - title = item.get('title', '') - download_url = item.get('url', '') - if not download_url: - download_url = item.get('link', '') - - elif isinstance(item, (list, tuple)) and len(item) > 1: - title = item[0] - download_url = item[1] - - # Temp global block `DIAMOND` releases - if title.endswith('DIAMOND'): - logger.log(u'Skipping DIAMOND release for mass fake releases.') - title = download_url = u'FAKERELEASE' - - if title: - title = self._clean_title_from_provider(title) - if download_url: - download_url = download_url.replace('&', '&') - - return (title, download_url) - - - def _get_size(self, item): - - size = -1 - if isinstance(item, dict): - size = item.get('size', -1) - elif isinstance(item, (list, tuple)) and len(item) > 2: - size = item[2] - - # Make sure we didn't select seeds/leechers by accident - if not size or size < 1024*1024: - size = -1 - - return size - - def _get_season_search_strings(self, ep_obj): - - search_string = {'Season': []} - for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): - if 
ep_obj.show.air_by_date or ep_obj.show.sports: - ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0] - elif ep_obj.show.anime: - ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number - else: - ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) # 1) showName.SXX - - search_string['Season'].append(ep_string.encode('utf-8').strip()) - - return [search_string] - - def _get_episode_search_strings(self, ep_obj, add_string=''): - - search_string = {'Episode': []} - - if not ep_obj: - return [] - - for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)): - ep_string = show_name + ' ' - if ep_obj.show.air_by_date: - ep_string += str(ep_obj.airdate).replace('-', ' ') - elif ep_obj.show.sports: - ep_string += str(ep_obj.airdate).replace('-', ' ') + ('|', ' ')[len(self.proper_strings) > 1] + ep_obj.airdate.strftime('%b') - elif ep_obj.show.anime: - ep_string += "%02d" % int(ep_obj.scene_absolute_number) - else: - ep_string += sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season, - 'episodenumber': ep_obj.scene_episode} - if add_string: - ep_string = ep_string + ' %s' % add_string - - search_string['Episode'].append(ep_string.encode('utf-8').strip()) - - return [search_string] - - @staticmethod - def _clean_title_from_provider(title): - return (title or '').replace(' ', '.') - - @property - def _custom_trackers(self): - return ('', '&tr=' + '&tr='.join(set([x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()])))[self.public] if sickbeard.TRACKERS_LIST else '' - - def findPropers(self, search_date=datetime.datetime.today()): - - results = [] - - myDB = db.DBConnection() - sqlResults = myDB.select( - 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED + 
Quality.SNATCHED + Quality.SNATCHED_BEST]) + ')' - ) - - for sqlshow in sqlResults or []: - show = Show.find(sickbeard.showList, int(sqlshow["showid"])) - if show: - curEp = show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"])) - for term in self.proper_strings: - searchString = self._get_episode_search_strings(curEp, add_string=term) - - for item in self._doSearch(searchString[0]): - title, url = self._get_title_and_url(item) - results.append(classes.Proper(title, url, datetime.datetime.today(), show)) - - return results diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index c709b14a20e8345ff44ff2485cf9e2b9fd20cef9..c2fecc7148f0c0a525988adab96dc8fdb80167f2 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -21,17 +21,16 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser from sickrage.helper.exceptions import AuthException +from sickrage.providers.TorrentProvider import TorrentProvider -class GFTrackerProvider(generic.TorrentProvider): +class GFTrackerProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "GFTracker") - + TorrentProvider.__init__(self, "GFTracker") self.username = None self.password = None @@ -55,19 +54,19 @@ class GFTrackerProvider(generic.TorrentProvider): self.cache = GFTrackerCache(self) - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) # Save cookies from response self.cookies = 
self.headers.get('Set-Cookie') @@ -81,12 +80,12 @@ class GFTrackerProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -102,12 +101,12 @@ class GFTrackerProvider(generic.TorrentProvider): # Set cookies from response self.headers.update({'Cookie': self.cookies}) # Returns top 30 results by default, expandable in user profile - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find("div", id="torrentBrowse") torrent_rows = torrent_table.findChildren("tr") if torrent_table else [] @@ -165,7 +164,7 @@ class GFTrackerProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio def _convertSize(self, sizeString): @@ -185,6 +184,7 @@ class GFTrackerProvider(generic.TorrentProvider): size = -1 return int(size) + class GFTrackerCache(tvcache.TVCache): def __init__(self, provider_obj): @@ -195,6 +195,6 @@ class GFTrackerCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = GFTrackerProvider() diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index 33c37dbd1582fd92ed902512ab33563b243cbe1e..72f069d6bc967d39ba16bb4e4d70186ff2a89f71 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -16,11 +16,10 @@ import datetime import urllib -from sickbeard.providers import generic - from sickbeard import classes from sickbeard import logger, tvcache from 
sickrage.helper.exceptions import AuthException +from sickrage.providers.TorrentProvider import TorrentProvider try: import json @@ -28,12 +27,10 @@ except ImportError: import simplejson as json -class HDBitsProvider(generic.TorrentProvider): +class HDBitsProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "HDBits") - - + TorrentProvider.__init__(self, "HDBits") self.username = None self.passkey = None @@ -48,7 +45,7 @@ class HDBitsProvider(generic.TorrentProvider): self.url = self.urls['base_url'] - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.passkey: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") @@ -75,22 +72,22 @@ class HDBitsProvider(generic.TorrentProvider): title = item['name'] if title: - title = self._clean_title_from_provider(title) + title = self._clean_title(title) url = self.urls['download'] + urllib.urlencode({'id': item['id'], 'passkey': self.passkey}) - return (title, url) + return title, url - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): # FIXME results = [] logger.log(u"Search string: %s" % search_params, logger.DEBUG) - self._checkAuth() + self._check_auth() - parsedJSON = self.getURL(self.urls['search'], post_data=search_params, json=True) + parsedJSON = self.get_url(self.urls['search'], post_data=search_params, json=True) if not parsedJSON: return [] @@ -106,13 +103,13 @@ class HDBitsProvider(generic.TorrentProvider): # FIXME SORTING return results - def findPropers(self, search_date=None): + def find_propers(self, search_date=None): results = [] search_terms = [' proper ', ' repack '] for term in search_terms: - for item in self._doSearch(self._make_post_data_JSON(search_term=term)): + for item in self.search(self._make_post_data_JSON(search_term=term)): if item['utadded']: try: result_date = 
datetime.datetime.fromtimestamp(int(item['utadded'])) @@ -180,7 +177,7 @@ class HDBitsProvider(generic.TorrentProvider): return json.dumps(post_data) - def seedRatio(self): + def seed_ratio(self): return self.ratio diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index c91255e07c9258f3699036fa5013d349e62353e2..f50dcebaa496a29e0634a26249cb3ed52ab96421 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -25,12 +25,12 @@ from bs4 import BeautifulSoup from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class HDSpaceProvider(generic.TorrentProvider): - def __init__(self): - generic.TorrentProvider.__init__(self, "HDSpace") +class HDSpaceProvider(TorrentProvider): + def __init__(self): + TorrentProvider.__init__(self, "HDSpace") self.username = None self.password = None @@ -54,14 +54,14 @@ class HDSpaceProvider(generic.TorrentProvider): self.url = self.urls['base_url'] - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: logger.log(u"Invalid username or password. 
Check your settings", logger.WARNING) return True - def _doLogin(self): + def login(self): if 'pass' in requests.utils.dict_from_cookiejar(self.session.cookies): return True @@ -69,7 +69,7 @@ class HDSpaceProvider(generic.TorrentProvider): login_params = {'uid': self.username, 'pwd': self.password} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -80,12 +80,12 @@ class HDSpaceProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -101,7 +101,7 @@ class HDSpaceProvider(generic.TorrentProvider): if mode != 'RSS': logger.log(u"Search string: %s" % search_string, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data or 'please try later' in data: logger.log(u"No data returned from provider", logger.DEBUG) continue @@ -164,7 +164,7 @@ class HDSpaceProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio def _convertSize(self, size): @@ -180,6 +180,7 @@ class HDSpaceProvider(generic.TorrentProvider): size = size * 1024**4 return int(size) + class HDSpaceCache(tvcache.TVCache): def __init__(self, provider_obj): @@ -190,6 +191,6 @@ class HDSpaceCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = HDSpaceProvider() diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 
2428c1932048522d1664f34ff4323fe0f78ed4bc..55ef723774b01a96b48f838898dfb52b4bdd9e69 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -25,13 +25,13 @@ import traceback from sickbeard.bs4_parser import BS4Parser from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class HDTorrentsProvider(generic.TorrentProvider): - def __init__(self): - generic.TorrentProvider.__init__(self, "HDTorrents") +class HDTorrentsProvider(TorrentProvider): + def __init__(self): + TorrentProvider.__init__(self, "HDTorrents") self.username = None self.password = None @@ -52,14 +52,14 @@ class HDTorrentsProvider(generic.TorrentProvider): self.cache = HDTorrentsCache(self) - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: logger.log(u"Invalid username or password. Check your settings", logger.WARNING) return True - def _doLogin(self): + def login(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True @@ -68,7 +68,7 @@ class HDTorrentsProvider(generic.TorrentProvider): 'pwd': self.password, 'submit': 'Confirm'} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -79,12 +79,12 @@ class HDTorrentsProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -100,7 +100,7 @@ class HDTorrentsProvider(generic.TorrentProvider): if mode != 'RSS': logger.log(u"Search string: %s" % 
search_string, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data or 'please try later' in data: logger.log(u"No data returned from provider", logger.DEBUG) continue @@ -116,7 +116,7 @@ class HDTorrentsProvider(generic.TorrentProvider): data = urllib.unquote(data[index:].encode('utf-8')).decode('utf-8').replace('\t', '') - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: if not html: logger.log(u"No html data parsed from provider", logger.DEBUG) continue @@ -196,7 +196,7 @@ class HDTorrentsProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio def _convertSize(self, size): @@ -212,6 +212,7 @@ class HDTorrentsProvider(generic.TorrentProvider): size = size * 1024**4 return int(size) + class HDTorrentsCache(tvcache.TVCache): def __init__(self, provider_obj): @@ -222,6 +223,6 @@ class HDTorrentsCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = HDTorrentsProvider() diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index 48067378a5032eb4d54ad541d9c9572d859444d8..8bb7c8a1454545e19c1f280317864277c2d00a8d 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -21,15 +21,14 @@ import traceback from sickbeard import logger from sickbeard import tvcache from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -from sickbeard.providers import generic -class HoundDawgsProvider(generic.TorrentProvider): +class HoundDawgsProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "HoundDawgs") - + TorrentProvider.__init__(self, "HoundDawgs") self.username = None self.password = None @@ -61,15 +60,15 @@ class 
HoundDawgsProvider(generic.TorrentProvider): "searchtags": '' } - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password, 'keeplogged': 'on', 'login': 'Login'} - self.getURL(self.urls['base_url'], timeout=30) - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + self.get_url(self.urls['base_url'], timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -82,12 +81,12 @@ class HoundDawgsProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -99,7 +98,7 @@ class HoundDawgsProvider(generic.TorrentProvider): self.search_params['searchstr'] = search_string - data = self.getURL(self.urls['search'], params=self.search_params) + data = self.get_url(self.urls['search'], params=self.search_params) strTableStart = "<table class=\"torrent_table" startTableIndex = data.find(strTableStart) @@ -108,7 +107,7 @@ class HoundDawgsProvider(generic.TorrentProvider): continue try: - with BS4Parser(trimmedData, features=["html5lib", "permissive"]) as html: + with BS4Parser(trimmedData, 'html5lib') as html: result_table = html.find('table', {'id': 'torrent_table'}) if not result_table: @@ -176,7 +175,7 @@ class HoundDawgsProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -190,7 +189,7 @@ class HoundDawgsCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = 
HoundDawgsProvider() diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index dc412eb108d29daa376543e889d7c7011a1528b4..9043e86fabdc0342959e9018fa989da7a3ef4ed5 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -17,18 +17,17 @@ # along with SickRage. If not, see <http://www.gnu.org/licenses/>. import re -from sickbeard.providers import generic from sickbeard import logger from sickbeard import tvcache from sickbeard.bs4_parser import BS4Parser from sickrage.helper.exceptions import AuthException, ex +from sickrage.providers.TorrentProvider import TorrentProvider -class IPTorrentsProvider(generic.TorrentProvider): - def __init__(self): - - generic.TorrentProvider.__init__(self, "IPTorrents") +class IPTorrentsProvider(TorrentProvider): + def __init__(self): + TorrentProvider.__init__(self, "IPTorrents") self.username = None self.password = None @@ -47,21 +46,21 @@ class IPTorrentsProvider(generic.TorrentProvider): self.categories = '73=&60=' - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password, 'login': 'submit'} - self.getURL(self.urls['login'], timeout=30) - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + self.get_url(self.urls['login'], timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -75,14 +74,14 @@ class IPTorrentsProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 
'RSS': []} freeleech = '&free=on' if self.freeleech else '' - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -97,13 +96,13 @@ class IPTorrentsProvider(generic.TorrentProvider): searchURL += ';o=seeders' if mode != 'RSS' else '' logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: data = re.sub(r'(?im)<button.+?<[\/]button>', '', data, 0) - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: if not html: logger.log(u"No data returned from provider", logger.DEBUG) continue @@ -155,7 +154,7 @@ class IPTorrentsProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @staticmethod @@ -172,6 +171,7 @@ class IPTorrentsProvider(generic.TorrentProvider): size = size * 1024**4 return int(size) + class IPTorrentsCache(tvcache.TVCache): def __init__(self, provider_obj): @@ -182,7 +182,7 @@ class IPTorrentsCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = IPTorrentsProvider() diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 0c580a7ad7120454fd7fbac45c2817581382bd06..a52c600a57277c87efec82b6ab786855a1037284 100755 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -17,7 +17,6 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
- import posixpath # Must use posixpath import traceback from urllib import urlencode @@ -27,14 +26,14 @@ import sickbeard from sickbeard import logger from sickbeard import tvcache from sickbeard.common import USER_AGENT -from sickbeard.providers import generic from sickrage.helper.common import try_int +from sickrage.providers.TorrentProvider import TorrentProvider -class KATProvider(generic.TorrentProvider): +class KATProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "KickAssTorrents") + TorrentProvider.__init__(self, "KickAssTorrents") self.public = True @@ -63,12 +62,12 @@ class KATProvider(generic.TorrentProvider): self.cache = KATCache(self) - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} # select the correct category - anime = (self.show and self.show.anime) or (epObj and epObj.show and epObj.show.anime) or False + anime = (self.show and self.show.anime) or (ep_obj and ep_obj.show and ep_obj.show.anime) or False self.search_params['category'] = ('tv', 'anime')[anime] for mode in search_strings.keys(): @@ -88,7 +87,7 @@ class KATProvider(generic.TorrentProvider): searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: logger.log(u'URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) continue @@ -97,7 +96,7 @@ class KATProvider(generic.TorrentProvider): logger.log(u'Expected xml but got something else, is your mirror failing?', logger.INFO) continue - data = BeautifulSoup(data, features=["html5lib", "permissive"]) + data = BeautifulSoup(data, 'html5lib') entries = data.findAll('item') for item in entries: @@ -153,7 +152,7 @@ class 
KATProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -167,6 +166,6 @@ class KATCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['tv', 'anime']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = KATProvider() diff --git a/sickbeard/providers/libertalia.py b/sickbeard/providers/libertalia.py deleted file mode 100644 index a329f52ccca6db91ac06d22ec92e38aa63b4fd7e..0000000000000000000000000000000000000000 --- a/sickbeard/providers/libertalia.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: latin-1 -*- -# Authors: Raver2046 -# adaur -# based on tpi.py -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of SickRage. -# -# SickRage is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickRage is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
- -import re -import requests -import cookielib -import urllib - -from sickbeard import logger -from sickbeard import tvcache -from sickbeard.providers import generic -from sickbeard.bs4_parser import BS4Parser - -class LibertaliaProvider(generic.TorrentProvider): - - def __init__(self): - - generic.TorrentProvider.__init__(self, "Libertalia") - - - self.cj = cookielib.CookieJar() - - self.url = "https://libertalia.me" - self.urlsearch = "https://libertalia.me/torrents.php?name=%s%s" - - self.categories = "&cat%5B%5D=9&cat%5B%5D=10" - - self.username = None - self.password = None - self.ratio = None - self.minseed = None - self.minleech = None - - self.cache = LibertaliaCache(self) - - def _doLogin(self): - - if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = {'username': self.username, - 'password': self.password} - - response = self.getURL(self.url + '/login.php', post_data=login_params, timeout=30) - if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) - return False - - if re.search('upload.php', response): - return True - else: - logger.log(u"Invalid username or password. 
Check your settings", logger.WARNING) - return False - - return True - - - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): - - results = [] - items = {'Season': [], 'Episode': [], 'RSS': []} - - # check for auth - if not self._doLogin(): - return results - - for mode in search_params.keys(): - logger.log(u"Search Mode: %s" % mode, logger.DEBUG) - for search_string in search_params[mode]: - - if mode != 'RSS': - logger.log(u"Search string: %s " % search_string, logger.DEBUG) - - searchURL = self.urlsearch % (urllib.quote(search_string), self.categories) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) - if not data: - continue - - with BS4Parser(data, features=["html5lib", "permissive"]) as html: - resultsTable = html.find("table", {"class" : "torrent_table"}) - if resultsTable: - rows = resultsTable.findAll("tr", {"class" : re.compile("torrent_row.*")}) - for row in rows: - - # bypass first row because title only - columns = row.find('td', {"class" : "torrent_name"}) - # isvfclass = row.find('td', {"class" : "sprite-vf"}) - # isvostfrclass = row.find('td', {"class" : "sprite-vostfr"}) - link = columns.find("a", href=re.compile("torrents")) - if link: - title = link.text - # recherched = searchURL.replace(".", "(.*)").replace(" ", "(.*)").replace("'", "(.*)") - download_url = row.find("a", href=re.compile("torrent_pass"))['href'] - # FIXME - size = -1 - seeders = 1 - leechers = 0 - - if not all([title, download_url]): - continue - - # Filter unseeded torrent - # if seeders < self.minseed or leechers < self.minleech: - # if mode != 'RSS': - # logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG) - # continue - - item = title, download_url, size, seeders, leechers - if mode != 'RSS': - logger.log(u"Found result: %s " % title, logger.DEBUG) - - items[mode].append(item) - - # For each 
search mode sort all the items by seeders if available - items[mode].sort(key=lambda tup: tup[3], reverse=True) - - results += items[mode] - - return results - - def seedRatio(self): - return self.ratio - - -class LibertaliaCache(tvcache.TVCache): - def __init__(self, provider_obj): - - tvcache.TVCache.__init__(self, provider_obj) - - self.minTime = 10 - - def _getRSSData(self): - search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} - -provider = LibertaliaProvider() diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index 253a166f109cc0df4ba60dac6c779e57d684261d..13f885a5c60cf9e9060ec4b0b7c08f658cf96880 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -26,17 +26,16 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser from sickrage.helper.exceptions import AuthException +from sickrage.providers.TorrentProvider import TorrentProvider -class MoreThanTVProvider(generic.TorrentProvider): +class MoreThanTVProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "MoreThanTV") - + TorrentProvider.__init__(self, "MoreThanTV") self._uid = None self._hash = None @@ -61,14 +60,14 @@ class MoreThanTVProvider(generic.TorrentProvider): self.cache = MoreThanTVCache(self) - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True - def _doLogin(self): + def login(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True @@ -80,7 +79,7 @@ class MoreThanTVProvider(generic.TorrentProvider): 'login': 'Log in', 'keeplogged': '1'} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = 
self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -91,14 +90,14 @@ class MoreThanTVProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} # freeleech = '3' if self.freeleech else '0' - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -112,12 +111,12 @@ class MoreThanTVProvider(generic.TorrentProvider): logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) # returns top 15 results by default, expandable in user profile to 100 - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', attrs={'class': 'torrent_table'}) torrent_rows = torrent_table.findChildren('tr') if torrent_table else [] @@ -181,7 +180,7 @@ class MoreThanTVProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio def _convertSize(self, sizeString): @@ -201,6 +200,7 @@ class MoreThanTVProvider(generic.TorrentProvider): size = -1 return int(size) + class MoreThanTVCache(tvcache.TVCache): def __init__(self, provider_obj): @@ -211,6 +211,6 @@ class MoreThanTVCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = MoreThanTVProvider() diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index e4350ccfc8dc4906a4af58bd5a1e7dc55e2c1a4b..a4874739365f58e7ac48578479058eb60867ddb6 100644 --- a/sickbeard/providers/newpct.py +++ 
b/sickbeard/providers/newpct.py @@ -26,14 +26,14 @@ from six.moves import urllib from sickbeard import helpers from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class newpctProvider(generic.TorrentProvider): +class newpctProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Newpct") + TorrentProvider.__init__(self, "Newpct") self.onlyspasearch = None self.cache = newpctCache(self) @@ -68,13 +68,13 @@ class newpctProvider(generic.TorrentProvider): 'bus_de_': 'All' } - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} # Only search if user conditions are true - lang_info = '' if not epObj or not epObj.show else epObj.show.lang + lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang for mode in search_strings.keys(): logger.log(u"Search Mode: %s" % mode, logger.DEBUG) @@ -94,12 +94,12 @@ class newpctProvider(generic.TorrentProvider): searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL, timeout=30) + data = self.get_url(searchURL, timeout=30) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_tbody = html.find('tbody') if torrent_tbody is None: @@ -152,33 +152,33 @@ class newpctProvider(generic.TorrentProvider): return results - def getURL(self, url, post_data=None, params=None, timeout=30, json=False, needBytes=False): + def get_url(self, url, post_data=None, params=None, timeout=30, json=False, need_bytes=False): """ - needBytes=True when trying access to torrent info (For calling torrent client). 
Previously we must parse + need_bytes=True when trying access to torrent info (For calling torrent client). Previously we must parse the URL to get torrent file """ - if needBytes: + if need_bytes: data = helpers.getURL(url, post_data=None, params=None, headers=self.headers, timeout=timeout, - session=self.session, json=json, needBytes=False) + session=self.session, json=json, need_bytes=False) url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group() - + return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, - session=self.session, json=json, needBytes=needBytes) - - def downloadResult(self, result): + session=self.session, json=json, need_bytes=need_bytes) + + def download_result(self, result): """ Save the result to disk. """ # check for auth - if not self._doLogin(): + if not self.login(): return False - urls, filename = self._makeURL(result) + urls, filename = self._make_url(result) for url in urls: # Search results don't return torrent files directly, it returns show sheets so we must parse showSheet to access torrent. 
- data = self.getURL(url) + data = self.get_url(url) url_torrent = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group() if url_torrent.startswith('http'): @@ -253,7 +253,7 @@ class newpctCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = newpctProvider() diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index f9aa9d60afdd6f9da40723ae30a864a58850d6a9..b0934de88259610306a12356992892c2acf815bf 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -35,14 +35,14 @@ from sickbeard import logger from sickbeard import tvcache from sickbeard import db from sickbeard.common import Quality -from sickbeard.providers import generic from sickrage.helper.encoding import ek, ss from sickrage.show.Show import Show from sickrage.helper.common import try_int from sickbeard.common import USER_AGENT +from sickrage.providers.NZBProvider import NZBProvider -class NewznabProvider(generic.NZBProvider): +class NewznabProvider(NZBProvider): """ Generic provider for built in and custom providers who expose a newznab compatible api. @@ -51,7 +51,7 @@ class NewznabProvider(generic.NZBProvider): def __init__(self, name, url, key='0', catIDs='5030,5040', search_mode='eponly', search_fallback=False, enable_daily=True, enable_backlog=False): - generic.NZBProvider.__init__(self, name) + NZBProvider.__init__(self, name) self.headers.update({'User-Agent': USER_AGENT}) @@ -84,15 +84,15 @@ class NewznabProvider(generic.NZBProvider): int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str( int(self.enable_daily)) + '|' + str(int(self.enable_backlog)) - def imageName(self): + def image_name(self): """ Checks if we have an image for this provider already. 
Returns found image or the default newznab image """ if ek(os.path.isfile, ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', - self.getID() + '.png')): - return self.getID() + '.png' + self.get_id() + '.png')): + return self.get_id() + '.png' return 'newznab.png' def get_newznab_categories(self): @@ -104,7 +104,7 @@ class NewznabProvider(generic.NZBProvider): """ return_categories = [] - if not self._checkAuth(): + if not self._check_auth(): return False, return_categories, "Provider requires auth and your key is not set" params = {"t": "caps"} @@ -112,13 +112,13 @@ class NewznabProvider(generic.NZBProvider): params['apikey'] = self.key url = ek(os.path.join, self.url, 'api?') + urllib.urlencode(params) - data = self.getURL(url) + data = self.get_url(url) if not data: error_string = u"Error getting xml for [%s]" % url logger.log(error_string, logger.WARNING) return False, return_categories, error_string - data = BeautifulSoup(data, features=["html5lib", "permissive"]) + data = BeautifulSoup(data, 'html5lib') if not self._checkAuthFromData(data): data.decompose() error_string = u"Error parsing xml for [%s]" % (self.name) @@ -136,7 +136,7 @@ class NewznabProvider(generic.NZBProvider): def _get_season_search_strings(self, ep_obj): """ - Makes objects to pass to _doSearch for manual and backlog season pack searching + Makes objects to pass to search for manual and backlog season pack searching Returns a list containing dicts of search parameters """ to_return = [] @@ -165,7 +165,7 @@ class NewznabProvider(generic.NZBProvider): def _get_episode_search_strings(self, ep_obj, add_string=''): """ - Makes objects to pass to _doSearch for manual and backlog season pack searching + Makes objects to pass to search for manual and backlog season pack searching Returns a list containing dicts of search parameters """ to_return = [] @@ -194,7 +194,7 @@ class NewznabProvider(generic.NZBProvider): return to_return - def _checkAuth(self): + def 
_check_auth(self): """ Checks that user has set their api key if it is needed Returns: True/False @@ -208,17 +208,17 @@ class NewznabProvider(generic.NZBProvider): def _checkAuthFromData(self, data): """ Checks that the returned data is valid - Returns: _checkAuth if valid otherwise False if there is an error + Returns: _check_auth if valid otherwise False if there is an error """ if data.findAll('categories') + data.findAll('item'): - return self._checkAuth() + return self._check_auth() try: err_desc = data.error.attrs['description'] if not err_desc: raise except (AssertionError, AttributeError, ValueError): - return self._checkAuth() + return self._check_auth() # This is all we should really need, the code is irrelevant # Provider name is the thread name, and this should INFO, @@ -227,13 +227,13 @@ class NewznabProvider(generic.NZBProvider): return False - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): # pylint: disable=too-many-arguments,too-many-locals + def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-arguments,too-many-locals """ Searches indexer using the params in search_params, either for latest releases, or a string/id search Returns: list of results in dict form """ results = [] - if not self._checkAuth(): + if not self._check_auth(): return results params = { @@ -254,11 +254,11 @@ class NewznabProvider(generic.NZBProvider): search_url = ek(os.path.join, self.url, 'api?') + urllib.urlencode(params) logger.log(u"Search url: %s" % search_url, logger.DEBUG) - data = self.getURL(search_url) + data = self.get_url(search_url) if not data: return results - data = BeautifulSoup(data, features=["html5lib", "permissive"]) + data = BeautifulSoup(data, 'html5lib') try: torznab = 'xmlns:torznab' in data.rss.attrs.keys() @@ -306,7 +306,7 @@ class NewznabProvider(generic.NZBProvider): """ return try_int(item.get('size', -1), -1) - def findPropers(self, search_date=datetime.datetime.today()): + def 
find_propers(self, search_date=datetime.datetime.today()): """ Searches providers for PROPER or REPACK releases Returns a list of objects of type classes.Proper @@ -331,7 +331,7 @@ class NewznabProvider(generic.NZBProvider): curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"])) searchStrings = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK') for searchString in searchStrings: - for item in self._doSearch(searchString): + for item in self.search(searchString): title, url = self._get_title_and_url(item) if re.match(r'.*(REPACK|PROPER).*', title, re.I): results.append(classes.Proper(title, url, datetime.datetime.today(), self.show)) @@ -348,4 +348,4 @@ class NewznabCache(tvcache.TVCache): self.minTime = 30 def _getRSSData(self): - return {'entries': self.provider._doSearch({})} + return {'entries': self.provider.search({})} diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index 75943032c72c0aade3e564aaf48b7ce01ca7f6d3..c72184fde6381549e1437a8e956b3d5bfc400565 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -21,16 +21,16 @@ import re from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class NyaaProvider(generic.TorrentProvider): +class NyaaProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "NyaaTorrents") + TorrentProvider.__init__(self, "NyaaTorrents") self.public = True - self.supportsAbsoluteNumbering = True + self.supports_absolute_numbering = True self.anime_only = True self.ratio = None @@ -44,7 +44,7 @@ class NyaaProvider(generic.TorrentProvider): self.minleech = 0 self.confirmed = False - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): if self.show and not self.show.is_anime: return [] @@ 
-119,7 +119,7 @@ class NyaaProvider(generic.TorrentProvider): size = size * 1024**4 return int(size) - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -132,6 +132,6 @@ class NyaaCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = NyaaProvider() diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index edf5b00dc91a60e34ae252568491e1de557e33fc..67df4f4b2a6921f8989e7e82223d21fd887b387f 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -24,13 +24,13 @@ from sickbeard import tvcache from sickbeard import classes from sickbeard import logger from sickbeard import show_name_helpers -from sickbeard.providers import generic from sickrage.helper.common import try_int +from sickrage.providers.NZBProvider import NZBProvider -class OmgwtfnzbsProvider(generic.NZBProvider): +class OmgwtfnzbsProvider(NZBProvider): def __init__(self): - generic.NZBProvider.__init__(self, "omgwtfnzbs") + NZBProvider.__init__(self, "omgwtfnzbs") self.username = None self.api_key = None @@ -39,8 +39,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): self.urls = {'base_url': 'https://omgwtfnzbs.org/'} self.url = self.urls['base_url'] - - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.api_key: logger.log(u"Invalid api key. 
Check your settings", logger.WARNING) @@ -50,7 +49,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): def _checkAuthFromData(self, parsed_data, is_XML=True): if parsed_data is None: - return self._checkAuth() + return self._check_auth() if is_XML: # provider doesn't return xml on error @@ -85,9 +84,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider): def _get_size(self, item): return try_int(item['sizebytes'], -1) - def _doSearch(self, search, search_mode='eponly', epcount=0, retention=0, epObj=None): + def search(self, search, age=0, ep_obj=None): - self._checkAuth() + self._check_auth() params = {'user': self.username, 'api': self.api_key, @@ -96,14 +95,14 @@ class OmgwtfnzbsProvider(generic.NZBProvider): 'retention': sickbeard.USENET_RETENTION, 'search': search} - if retention or not params['retention']: - params['retention'] = retention + if age or not params['retention']: + params['retention'] = age searchURL = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params) logger.log(u"Search string: %s" % params, logger.DEBUG) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - parsedJSON = self.getURL(searchURL, json=True) + parsedJSON = self.get_url(searchURL, json=True) if not parsedJSON: return [] @@ -119,12 +118,12 @@ class OmgwtfnzbsProvider(generic.NZBProvider): return [] - def findPropers(self, search_date=None): + def find_propers(self, search_date=None): search_terms = ['.PROPER.', '.REPACK.'] results = [] for term in search_terms: - for item in self._doSearch(term, retention=4): + for item in self.search(term, age=4): if 'usenetage' in item: title, url = self._get_title_and_url(item) diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 4164e1826f1e5b9ebda4db91651798ef10e4fedb..87430198ee523e70a5c464365490683dd1b92782 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -22,16 +22,15 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from 
sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class PretomeProvider(generic.TorrentProvider): +class PretomeProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Pretome") - + TorrentProvider.__init__(self, "Pretome") self.username = None self.password = None @@ -54,20 +53,20 @@ class PretomeProvider(generic.TorrentProvider): self.cache = PretomeCache(self) - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password or not self.pin: logger.log(u"Invalid username or password or pin. Check your settings", logger.WARNING) return True - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password, 'login_pin': self.pin} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -78,12 +77,12 @@ class PretomeProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -96,12 +95,12 @@ class PretomeProvider(generic.TorrentProvider): searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: # Continue only if one Release is found empty = html.find('h2', text="No .torrents fit this filter 
criteria") if empty: @@ -167,7 +166,7 @@ class PretomeProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio def _convertSize(self, sizeString): @@ -195,7 +194,7 @@ class PretomeCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = PretomeProvider() diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 30e6a470050c6fa29f7dd372b229a5c44ef1919a..d334b96425eaebeca6bb61efe217108c7b4d2b23 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -25,19 +25,19 @@ import time from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.common import USER_AGENT from sickbeard.indexers.indexer_config import INDEXER_TVDB +from sickrage.providers.TorrentProvider import TorrentProvider class GetOutOfLoop(Exception): pass -class RarbgProvider(generic.TorrentProvider): +class RarbgProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Rarbg") + TorrentProvider.__init__(self, "Rarbg") self.public = True self.ratio = None @@ -78,12 +78,11 @@ class RarbgProvider(generic.TorrentProvider): self.cache = RarbgCache(self) - def _doLogin(self): + def login(self): if self.token and self.tokenExpireDate and datetime.datetime.now() < self.tokenExpireDate: return True - - response = self.getURL(self.urls['token'], timeout=30, json=True) + response = self.get_url(self.urls['token'], timeout=30, json=True) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -99,17 +98,17 @@ class RarbgProvider(generic.TorrentProvider): return False - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 
'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results - if epObj is not None: - ep_indexerid = epObj.show.indexerid - ep_indexer = epObj.show.indexer + if ep_obj is not None: + ep_indexerid = ep_obj.show.indexerid + ep_indexer = ep_obj.show.indexer else: ep_indexerid = None ep_indexer = None @@ -157,7 +156,7 @@ class RarbgProvider(generic.TorrentProvider): time_out = time_out + 1 time.sleep(1) - data = self.getURL(searchURL + self.urlOptions['token'].format(token=self.token)) + data = self.get_url(searchURL + self.urlOptions['token'].format(token=self.token)) self.next_request = datetime.datetime.now() + datetime.timedelta(seconds=10) @@ -186,7 +185,7 @@ class RarbgProvider(generic.TorrentProvider): retry = retry - 1 self.token = None self.tokenExpireDate = None - if not self._doLogin(): + if not self.login(): logger.log(u"Failed retrieving new token", logger.DEBUG) return results logger.log(u"Using new token", logger.DEBUG) @@ -241,7 +240,7 @@ class RarbgProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -255,7 +254,7 @@ class RarbgCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = RarbgProvider() diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index a0d52b674e9c62f5d56270d573178fcbbd99d6ab..dd728c4e7def044e2c807ae79fa6b66fc7f2851d 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -22,19 +22,19 @@ import requests from bencode import bdecode import sickbeard -from sickbeard.providers import generic from sickbeard import helpers from sickbeard import logger from sickbeard import tvcache from sickrage.helper.encoding import ek from sickrage.helper.exceptions import ex +from sickrage.providers.TorrentProvider import TorrentProvider -class 
TorrentRssProvider(generic.TorrentProvider): +class TorrentRssProvider(TorrentProvider): def __init__(self, name, url, cookies='', titleTAG='title', search_mode='eponly', search_fallback=False, enable_daily=False, enable_backlog=False): - generic.TorrentProvider.__init__(self, name) + TorrentProvider.__init__(self, name) self.cache = TorrentRssCache(self) self.urls = {'base_url': re.sub(r'\/$', '', url)} @@ -42,7 +42,7 @@ class TorrentRssProvider(generic.TorrentProvider): self.url = self.urls['base_url'] self.ratio = None - self.supportsBacklog = False + self.supports_backlog = False self.search_mode = search_mode self.search_fallback = search_fallback @@ -64,16 +64,16 @@ class TorrentRssProvider(generic.TorrentProvider): self.enable_backlog ) - def imageName(self): - if ek(os.path.isfile, ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.getID() + '.png')): - return self.getID() + '.png' + def image_name(self): + if ek(os.path.isfile, ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.get_id() + '.png')): + return self.get_id() + '.png' return 'torrentrss.png' def _get_title_and_url(self, item): title = item.get(self.titleTAG) if title: - title = self._clean_title_from_provider(title) + title = self._clean_title(title) attempt_list = [lambda: item.get('torrent_magneturi'), @@ -121,7 +121,7 @@ class TorrentRssProvider(generic.TorrentProvider): if self.cookies: requests.utils.add_dict_to_cookiejar(self.session.cookies, dict(x.rsplit('=', 1) for x in self.cookies.split(';'))) - torrent_file = self.getURL(url) + torrent_file = self.get_url(url) try: bdecode(torrent_file) except Exception, e: @@ -148,7 +148,7 @@ class TorrentRssProvider(generic.TorrentProvider): logger.log(u"Saved custom_torrent html dump %s " % dumpName, logger.INFO) return True - def seedRatio(self): + def seed_ratio(self): return self.ratio diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 
7fcfa6138696c60dbe19019ee94d182e0f292d5e..1dab41630393e6b8d0b97da431a7e1996c8785af 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -25,16 +25,15 @@ import sickbeard from sickbeard.common import cpu_presets from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class SCCProvider(generic.TorrentProvider): - def __init__(self): - - generic.TorrentProvider.__init__(self, "SceneAccess") +class SCCProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes + def __init__(self): + TorrentProvider.__init__(self, "SceneAccess") self.username = None self.password = None @@ -44,25 +43,31 @@ class SCCProvider(generic.TorrentProvider): self.cache = SCCCache(self) - self.urls = {'base_url': 'https://sceneaccess.eu', - 'login': 'https://sceneaccess.eu/login', - 'detail': 'https://www.sceneaccess.eu/details?id=%s', - 'search': 'https://sceneaccess.eu/all?search=%s&method=1&%s', - 'download': 'https://www.sceneaccess.eu/%s'} + self.urls = { + 'base_url': 'https://sceneaccess.eu', + 'login': 'https://sceneaccess.eu/login', + 'detail': 'https://www.sceneaccess.eu/details?id=%s', + 'search': 'https://sceneaccess.eu/all?search=%s&method=1&%s', + 'download': 'https://www.sceneaccess.eu/%s' + } self.url = self.urls['base_url'] - self.categories = { 'sponly': 'c26=26&c44=44&c45=45', # Archive, non-scene HD, non-scene SD; need to include non-scene because WEB-DL packs get added to those categories - 'eponly': 'c27=27&c17=17&c44=44&c45=45&c33=33&c34=34'} # TV HD, TV SD, non-scene HD, non-scene SD, foreign XviD, foreign x264 - - def _doLogin(self): + self.categories = { + 'Season': 'c26=26&c44=44&c45=45', # Archive, non-scene HD, non-scene SD; need to include non-scene because WEB-DL packs get added to those categories + 'Episode': 'c17=17&c27=27&c33=33&c34=34&c44=44&c45=45', # TV HD, TV SD, 
non-scene HD, non-scene SD, foreign XviD, foreign x264 + 'RSS': 'c17=17&c26=26&c27=27&c33=33&c34=34&c44=44&c45=45' # Season + Episode + } - login_params = {'username': self.username, - 'password': self.password, - 'submit': 'come on in'} + def login(self): + login_params = { + 'username': self.username, + 'password': self.password, + 'submit': 'come on in' + } - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -74,19 +79,19 @@ class SCCProvider(generic.TorrentProvider): return True - def _isSection(self, section, text): + @staticmethod + def _isSection(section, text): title = r'<title>.+? \| %s</title>' % section return re.search(title, text, re.IGNORECASE) - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals,too-many-branches + items = {'Season': [], 'Episode': [], 'RSS': []} results = [] - if not self._doLogin(): + if not self.login(): return results - items = {'Season': [], 'Episode': [], 'RSS': []} - for mode in search_strings.keys(): if mode != 'RSS': logger.log(u"Search Mode: %s" % mode, logger.DEBUG) @@ -94,11 +99,11 @@ class SCCProvider(generic.TorrentProvider): if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories[search_mode]) + searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories[mode]) try: - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + data = self.get_url(searchURL) time.sleep(cpu_presets[sickbeard.CPU_PRESET]) except Exception as e: logger.log(u"Unable to fetch data. 
Error: %s" % repr(e), logger.WARNING) @@ -106,7 +111,7 @@ class SCCProvider(generic.TorrentProvider): if not data: continue - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', attrs={'id': 'torrents-table'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] @@ -119,11 +124,11 @@ class SCCProvider(generic.TorrentProvider): try: link = result.find('td', attrs={'class': 'ttr_name'}).find('a') - url = result.find('td', attrs={'class': 'td_dl'}).find('a') + url = result.find('td', attrs={'class': 'td_dl'}).find('a') title = link.string if re.search(r'\.\.\.', title): - data = self.getURL(self.url + "/" + link['href']) + data = self.get_url(self.url + "/" + link['href']) if data: with BS4Parser(data) as details_html: title = re.search('(?<=").+(?<!")', details_html.title.string).group(0) @@ -156,10 +161,11 @@ class SCCProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio - def _convertSize(self, size): + @staticmethod + def _convertSize(size): size, base = size.split() size = float(size) if base in 'KB': @@ -183,6 +189,6 @@ class SCCCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = SCCProvider() diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 139e8873ea9a0d5d7eb7f2250868bd1a88c29344..c8d7fdd080aa9af734653517b464d77ca4b51368 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -22,17 +22,15 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class SceneTimeProvider(generic.TorrentProvider): +class 
SceneTimeProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "SceneTime") - - + TorrentProvider.__init__(self, "SceneTime") self.username = None self.password = None @@ -52,12 +50,12 @@ class SceneTimeProvider(generic.TorrentProvider): self.categories = "&c2=1&c43=13&c9=1&c63=1&c77=1&c79=1&c100=1&c101=1" - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -68,12 +66,12 @@ class SceneTimeProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -86,12 +84,12 @@ class SceneTimeProvider(generic.TorrentProvider): searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.select("#torrenttable table") torrent_rows = torrent_table[0].select("tr") if torrent_table else [] @@ -151,7 +149,7 @@ class SceneTimeProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -165,7 +163,7 @@ class SceneTimeCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': 
self.provider.search(search_params)} provider = SceneTimeProvider() diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index 95eb68edbe6ee24c57834076b77c623098fcf5a2..d0a7e308d36a3df6c0d10cd576c600e8b47c627e 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -18,17 +18,16 @@ from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickrage.helper.exceptions import AuthException +from sickrage.providers.TorrentProvider import TorrentProvider -class ShazbatProvider(generic.TorrentProvider): +class ShazbatProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Shazbat.tv") - - self.supportsBacklog = False + TorrentProvider.__init__(self, "Shazbat.tv") + self.supports_backlog = False self.passkey = None self.ratio = None @@ -40,7 +39,7 @@ class ShazbatProvider(generic.TorrentProvider): 'website': u'http://www.shazbat.tv/login',} self.url = self.urls['website'] - def _checkAuth(self): + def _check_auth(self): if not self.passkey: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") @@ -48,13 +47,13 @@ class ShazbatProvider(generic.TorrentProvider): def _checkAuthFromData(self, data): if not self.passkey: - self._checkAuth() + self._check_auth() elif not (data['entries'] and data['feed']): logger.log(u"Invalid username or password. 
Check your settings", logger.WARNING) return True - def seedRatio(self): + def seed_ratio(self): return self.ratio diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 2001324ffc0f42ecc522ac01e6cce58468af87be..622bf9bf0581c57714dd9cf54053b55751c9832c 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -20,15 +20,14 @@ import re from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class SpeedCDProvider(generic.TorrentProvider): +class SpeedCDProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Speedcd") - + TorrentProvider.__init__(self, "Speedcd") self.username = None self.password = None @@ -51,12 +50,12 @@ class SpeedCDProvider(generic.TorrentProvider): self.cache = SpeedCDCache(self) - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -67,12 +66,12 @@ class SpeedCDProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -87,7 +86,7 @@ class SpeedCDProvider(generic.TorrentProvider): post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string}, **self.categories[mode]) - parsedJSON = self.getURL(self.urls['search'], post_data=post_data, json=True) + parsedJSON = self.get_url(self.urls['search'], post_data=post_data, 
json=True) if not parsedJSON: continue @@ -130,7 +129,7 @@ class SpeedCDProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -144,6 +143,6 @@ class SpeedCDCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = SpeedCDProvider() diff --git a/sickbeard/providers/strike.py b/sickbeard/providers/strike.py index 478746cf5599a199869054f7967ff2d384f21f5d..a1d0f4fe30ec65fbf3ec3065918787111ac18e98 100644 --- a/sickbeard/providers/strike.py +++ b/sickbeard/providers/strike.py @@ -19,12 +19,13 @@ from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class STRIKEProvider(generic.TorrentProvider): + +class STRIKEProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Strike") + TorrentProvider.__init__(self, "Strike") self.public = True self.url = 'https://getstrike.net/' @@ -32,7 +33,7 @@ class STRIKEProvider(generic.TorrentProvider): self.cache = StrikeCache(self) self.minseed, self.minleech = 2 * [None] - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} @@ -46,7 +47,7 @@ class STRIKEProvider(generic.TorrentProvider): searchURL = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - jdata = self.getURL(searchURL, json=True) + jdata = self.get_url(searchURL, json=True) if not jdata: logger.log(u"No data returned from provider", logger.DEBUG) return [] @@ -82,8 +83,7 @@ class STRIKEProvider(generic.TorrentProvider): return results - - def seedRatio(self): + def seed_ratio(self): return 
self.ratio @@ -91,14 +91,14 @@ class StrikeCache(tvcache.TVCache): def __init__(self, provider_obj): tvcache.TVCache.__init__(self, provider_obj) - + # Cache results for 10 min self.minTime = 10 def _getRSSData(self): - - # Use this hacky way for RSS search since most results will use this codec + + # Use this hacky way for RSS search since most results will use this codec search_params = {'RSS': ['x264']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = STRIKEProvider() diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index f35f6c13929518ea539ebccf482faf8c771c4da6..afc40063275bde28fc6f6b9a1195491f92d58674 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -23,13 +23,12 @@ from requests.auth import AuthBase from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class T411Provider(generic.TorrentProvider): +class T411Provider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "T411") - + TorrentProvider.__init__(self, "T411") self.username = None self.password = None @@ -53,7 +52,7 @@ class T411Provider(generic.TorrentProvider): self.minleech = 0 self.confirmed = False - def _doLogin(self): + def login(self): if self.token is not None: if time.time() < (self.tokenLastUpdate + 30 * 60): @@ -62,7 +61,7 @@ class T411Provider(generic.TorrentProvider): login_params = {'username': self.username, 'password': self.password} - response = self.getURL(self.urls['login_page'], post_data=login_params, timeout=30, json=True) + response = self.get_url(self.urls['login_page'], post_data=login_params, timeout=30, json=True) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -77,12 +76,12 @@ class T411Provider(generic.TorrentProvider): logger.log(u"Token not found in 
authentication response", logger.WARNING) return False - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -95,7 +94,7 @@ class T411Provider(generic.TorrentProvider): searchURLS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] for searchURL in searchURLS: logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL, json=True) + data = self.get_url(searchURL, json=True) if not data: continue @@ -157,7 +156,7 @@ class T411Provider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -180,7 +179,7 @@ class T411Cache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = T411Provider() diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 9315ff4c0cd3c11d9e477f6a2ed2f6cd192c7dc4..ca75537abe61102eebcbb6740c952e7142fea86f 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -16,20 +16,19 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
- import re import posixpath # Must use posixpath from urllib import urlencode from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.common import USER_AGENT +from sickrage.providers.TorrentProvider import TorrentProvider -class ThePirateBayProvider(generic.TorrentProvider): +class ThePirateBayProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "ThePirateBay") + TorrentProvider.__init__(self, "ThePirateBay") self.public = True @@ -65,7 +64,7 @@ class ThePirateBayProvider(generic.TorrentProvider): self.re_title_url = r'/torrent/(?P<id>\d+)/(?P<title>.*?)".+?(?P<url>magnet.*?)".+?Size (?P<size>[\d\.]* [TGKMiB]{2,3}).+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>' - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} @@ -84,7 +83,7 @@ class ThePirateBayProvider(generic.TorrentProvider): searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: logger.log(u'URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) continue @@ -139,7 +138,7 @@ class ThePirateBayProvider(generic.TorrentProvider): size = size * 1024**4 return size - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -153,6 +152,6 @@ class ThePirateBayCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = ThePirateBayProvider() diff --git a/sickbeard/providers/titansoftv.py b/sickbeard/providers/titansoftv.py index 
7f77f7c88ed438ad44943e30a10c3af245422249..5efb5a34b05e72a802c44449ab68e7ebce2f521e 100644 --- a/sickbeard/providers/titansoftv.py +++ b/sickbeard/providers/titansoftv.py @@ -19,28 +19,28 @@ import urllib -from sickbeard.providers import generic from sickbeard import logger from sickbeard import tvcache from sickbeard.helpers import mapIndexersToShow from sickrage.helper.exceptions import AuthException +from sickrage.providers.TorrentProvider import TorrentProvider -class TitansOfTVProvider(generic.TorrentProvider): +class TitansOfTVProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, 'TitansOfTV') + TorrentProvider.__init__(self, 'TitansOfTV') - self.supportsAbsoluteNumbering = True + self.supports_absolute_numbering = True self.api_key = None self.ratio = None self.cache = TitansOfTVCache(self) self.url = 'http://titansof.tv/api/torrents' self.download_url = 'http://titansof.tv/api/torrents/%s/download?apikey=%s' - def seedRatio(self): + def seed_ratio(self): return self.ratio - def _checkAuth(self): + def _check_auth(self): if not self.api_key: raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') @@ -54,9 +54,9 @@ class TitansOfTVProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): # FIXME ADD MODE - self._checkAuth() + self._check_auth() results = [] params = {} self.headers.update({'X-Authorization': self.api_key}) @@ -68,7 +68,7 @@ class TitansOfTVProvider(generic.TorrentProvider): logger.log(u"Search string: %s " % search_params, logger.DEBUG) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - parsedJSON = self.getURL(searchURL, json=True) # do search + parsedJSON = self.get_url(searchURL, json=True) # do search if not parsedJSON: logger.log(u"No data returned from provider", logger.DEBUG) @@ -151,7 +151,7 @@ class 
TitansOfTVCache(tvcache.TVCache): def _getRSSData(self): search_params = {'limit': 100} - return self.provider._doSearch(search_params) + return self.provider.search(search_params) provider = TitansOfTVProvider() diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 0b7e2cc00876392dbb68b1c869a81510bca2dc72..e1f598b3c05531272cbe7bf0b5275cc4c3b9ae3a 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -24,10 +24,10 @@ from sickbeard import logger from sickbeard import tvcache from sickbeard import db -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickrage.helper.exceptions import AuthException +from sickrage.providers.TorrentProvider import TorrentProvider category_excluded = {'Sport': 22, 'Teatro': 23, @@ -56,10 +56,9 @@ category_excluded = {'Sport': 22, 'Mobile': 37} -class TNTVillageProvider(generic.TorrentProvider): +class TNTVillageProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "TNTVillage") - + TorrentProvider.__init__(self, "TNTVillage") self._uid = None self._hash = None @@ -112,21 +111,21 @@ class TNTVillageProvider(generic.TorrentProvider): self.cache = TNTVillageCache(self) - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True - def _doLogin(self): + def login(self): login_params = {'UserName': self.username, 'PassWord': self.password, 'CookieDate': 0, 'submit': 'Connettiti al Forum'} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -269,14 +268,14 
@@ class TNTVillageProvider(generic.TorrentProvider): if int(episodes[0]['count']) == len(parse_result.episode_numbers): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} self.categories = "cat=" + str(self.cat) - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -308,13 +307,13 @@ class TNTVillageProvider(generic.TorrentProvider): logger.log(u"Search string: %s " % search_string, logger.DEBUG) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: logger.log(u"No data returned from provider", logger.DEBUG) continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', attrs={'class': 'copyright'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] @@ -398,7 +397,7 @@ class TNTVillageProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -412,7 +411,7 @@ class TNTVillageCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': []} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = TNTVillageProvider() diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 17f6cd66505889cf3a88dfc5877e5e4225d773f1..f2cac0fcafd77ebf53a7e57cd8d7c728c57714c1 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -21,18 +21,18 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard import show_name_helpers from sickbeard.bs4_parser import BS4Parser +from 
sickrage.providers.TorrentProvider import TorrentProvider -class TokyoToshokanProvider(generic.TorrentProvider): +class TokyoToshokanProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "TokyoToshokan") + TorrentProvider.__init__(self, "TokyoToshokan") self.public = True - self.supportsAbsoluteNumbering = True + self.supports_absolute_numbering = True self.anime_only = True self.ratio = None @@ -41,7 +41,7 @@ class TokyoToshokanProvider(generic.TorrentProvider): self.urls = {'base_url': 'http://tokyotosho.info/'} self.url = self.urls['base_url'] - def seedRatio(self): + def seed_ratio(self): return self.ratio def _get_season_search_strings(self, ep_obj): @@ -50,7 +50,7 @@ class TokyoToshokanProvider(generic.TorrentProvider): def _get_episode_search_strings(self, ep_obj, add_string=''): return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)] - def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_string, age=0, ep_obj=None): # FIXME ADD MODE if self.show and not self.show.is_anime: return [] @@ -64,14 +64,14 @@ class TokyoToshokanProvider(generic.TorrentProvider): searchURL = self.url + 'search.php?' 
+ urllib.urlencode(params) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: return [] results = [] try: - with BS4Parser(data, features=["html5lib", "permissive"]) as soup: + with BS4Parser(data, 'html5lib') as soup: torrent_table = soup.find('table', attrs={'class': 'listing'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] if torrent_rows: diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 0f32e03d32870443fe998e12139ab368471f2c89..7601a7b615f33bc74beeb75c8eba48b28c2ed691 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -22,16 +22,15 @@ import traceback from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class TorrentBytesProvider(generic.TorrentProvider): +class TorrentBytesProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "TorrentBytes") - + TorrentProvider.__init__(self, "TorrentBytes") self.username = None self.password = None @@ -54,13 +53,13 @@ class TorrentBytesProvider(generic.TorrentProvider): self.cache = TorrentBytesCache(self) - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password, 'login': 'Log in!'} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -71,12 +70,12 @@ class TorrentBytesProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 
'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -89,12 +88,12 @@ class TorrentBytesProvider(generic.TorrentProvider): searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: # Continue only if one Release is found empty = html.find('Nothing found!') if empty: @@ -129,15 +128,15 @@ class TorrentBytesProvider(generic.TorrentProvider): title = link.contents[0] download_url = self.urls['download'] % (torrent_id, link.contents[0]) seeders = int(cells[8].find('span').contents[0]) - leechers = int(cells[9].find('span').contents[0]) - + leechers = int(cells[9].find('span').contents[0]) + # Need size for failed downloads handling if size is None: if re.match(r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+', cells[6].text): size = self._convertSize(cells[6].text) if not size: size = -1 - + except (AttributeError, TypeError): continue @@ -166,9 +165,9 @@ class TorrentBytesProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio - + def _convertSize(self, sizeString): size = sizeString[:-2] modifier = sizeString[-2:] @@ -194,7 +193,7 @@ class TorrentBytesCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = TorrentBytesProvider() diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index f73d650120e1dcaf61f9b5f28a6b6d9ba423941a..a75bc6ac59fec4cc913ba2e3c09976c92cdb5215 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -19,15 +19,14 @@ import 
re import requests from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic +from sickrage.providers.TorrentProvider import TorrentProvider -class TorrentDayProvider(generic.TorrentProvider): - def __init__(self): - - generic.TorrentProvider.__init__(self, "TorrentDay") +class TorrentDayProvider(TorrentProvider): + def __init__(self): + TorrentProvider.__init__(self, "TorrentDay") self._uid = None self._hash = None @@ -54,7 +53,7 @@ class TorrentDayProvider(generic.TorrentProvider): self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1}, 'RSS': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}} - def _doLogin(self): + def login(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True @@ -70,7 +69,7 @@ class TorrentDayProvider(generic.TorrentProvider): 'submit.y': 0 } - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -93,12 +92,12 @@ class TorrentDayProvider(generic.TorrentProvider): logger.log(u"Unable to obtain cookie", logger.WARNING) return False - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -116,7 +115,7 @@ class TorrentDayProvider(generic.TorrentProvider): if self.freeleech: post_data.update({'free': 'on'}) - parsedJSON = self.getURL(self.urls['search'], post_data=post_data, json=True) + parsedJSON = self.get_url(self.urls['search'], post_data=post_data, json=True) if not parsedJSON: logger.log(u"No data returned from provider", logger.DEBUG) continue @@ -158,7 +157,7 @@ class 
TorrentDayProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -172,6 +171,6 @@ class TorrentDayCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = TorrentDayProvider() diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index 27f5b76e96a9dc4cf1b7eb66d6f3395430503c68..bf069c022d4e011d173371ccddc1db4f355e0581 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -22,16 +22,15 @@ import urllib from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class TorrentLeechProvider(generic.TorrentProvider): +class TorrentLeechProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "TorrentLeech") - + TorrentProvider.__init__(self, "TorrentLeech") self.username = None self.password = None @@ -54,14 +53,14 @@ class TorrentLeechProvider(generic.TorrentProvider): self.cache = TorrentLeechCache(self) - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password, 'remember_me': 'on', 'login': 'submit'} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -72,12 +71,12 @@ class TorrentLeechProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): 
+ if not self.login(): return results for mode in search_params.keys(): @@ -90,13 +89,13 @@ class TorrentLeechProvider(generic.TorrentProvider): searchURL = self.urls['search'] % (urllib.quote_plus(search_string.encode('utf-8')), self.categories) logger.log(u"Search string: %s " % search_string, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) if not data: continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', attrs={'id': 'torrenttable'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else [] @@ -144,7 +143,7 @@ class TorrentLeechProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -158,7 +157,7 @@ class TorrentLeechCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = TorrentLeechProvider() diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index c2d72efa32afac96963dfc13b8f18312fc521b70..36e70cf7af91ce6b11ff8574b5f6f1475e4f8907 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -20,14 +20,14 @@ from urllib import quote_plus from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.common import USER_AGENT from sickrage.helper.common import try_int +from sickrage.providers.TorrentProvider import TorrentProvider -class TORRENTPROJECTProvider(generic.TorrentProvider): +class TORRENTPROJECTProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "TorrentProject") + TorrentProvider.__init__(self, "TorrentProject") self.public = True self.ratio = 0 @@ -38,7 +38,7 @@ class 
TORRENTPROJECTProvider(generic.TorrentProvider): self.minleech = None self.cache = TORRENTPROJECTCache(self) - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} @@ -53,7 +53,7 @@ class TORRENTPROJECTProvider(generic.TorrentProvider): searchURL = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8')) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - torrents = self.getURL(searchURL, json=True) + torrents = self.get_url(searchURL, json=True) if not (torrents and "total_found" in torrents and int(torrents["total_found"]) > 0): logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) continue @@ -78,7 +78,7 @@ class TORRENTPROJECTProvider(generic.TorrentProvider): assert mode != 'RSS' logger.log(u"Torrent has less than 10 seeds getting dyn trackers: " + title, logger.DEBUG) trackerUrl = self.urls['api'] + "" + t_hash + "/trackers_json" - jdata = self.getURL(trackerUrl, json=True) + jdata = self.get_url(trackerUrl, json=True) assert jdata != "maintenance" download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + "".join(["&tr=" + s for s in jdata]) except (Exception, AssertionError): @@ -101,7 +101,7 @@ class TORRENTPROJECTProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -115,6 +115,6 @@ class TORRENTPROJECTCache(tvcache.TVCache): def _getRSSData(self): search_params = {'RSS': ['0day']} - return {'entries': self.provider._doSearch(search_params)} + return {'entries': self.provider.search(search_params)} provider = TORRENTPROJECTProvider() diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index 6282d965c60ade8d7670ea2cfc92d000d436de80..38301b6306cfe188e1e0cf8c223549758a1f9da2 100644 --- a/sickbeard/providers/torrentz.py +++ 
b/sickbeard/providers/torrentz.py @@ -16,7 +16,6 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. - import re import time import traceback @@ -27,15 +26,15 @@ from xml.parsers.expat import ExpatError import sickbeard from sickbeard import logger from sickbeard import tvcache -from sickbeard.providers import generic from sickbeard.common import cpu_presets +from sickrage.providers.TorrentProvider import TorrentProvider -class TORRENTZProvider(generic.TorrentProvider): +class TORRENTZProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Torrentz") + TorrentProvider.__init__(self, "Torrentz") self.public = True self.confirmed = True self.ratio = None @@ -47,7 +46,7 @@ class TORRENTZProvider(generic.TorrentProvider): 'base': 'https://torrentz.eu/'} self.url = self.urls['base'] - def seedRatio(self): + def seed_ratio(self): return self.ratio @staticmethod @@ -55,7 +54,7 @@ class TORRENTZProvider(generic.TorrentProvider): match = re.findall(r'[0-9]+', description) return (int(match[0]) * 1024**2, int(match[1]), int(match[2])) - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} @@ -66,7 +65,7 @@ class TORRENTZProvider(generic.TorrentProvider): search_url += '?q=' + urllib.parse.quote_plus(search_string) logger.log(search_url) - data = self.getURL(search_url) + data = self.get_url(search_url) if not data: logger.log(u'Seems to be down right now!') continue @@ -123,6 +122,7 @@ class TORRENTZProvider(generic.TorrentProvider): return results + class TORRENTZCache(tvcache.TVCache): def __init__(self, provider_obj): @@ -133,6 +133,6 @@ class TORRENTZCache(tvcache.TVCache): self.minTime = 15 def _getRSSData(self): - return {'entries': self.provider._doSearch({'RSS': ['']})} + return {'entries': 
self.provider.search({'RSS': ['']})} provider = TORRENTZProvider() diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 7b329ccf86c7b241e650312b242e20efde94f31c..18b3e64f40af9bb6b6060e2ad31f6a0ca4eb4146 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -20,15 +20,15 @@ from urllib import urlencode from sickbeard import logger from sickbeard import tvcache from sickbeard.bs4_parser import BS4Parser -from sickbeard.providers import generic from sickrage.helper.exceptions import AuthException from sickrage.helper.common import try_int +from sickrage.providers.TorrentProvider import TorrentProvider -class TransmitTheNetProvider(generic.TorrentProvider): +class TransmitTheNetProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "TransmitTheNet") + TorrentProvider.__init__(self, "TransmitTheNet") self.urls = { 'base_url': 'https://transmithe.net/', @@ -47,14 +47,14 @@ class TransmitTheNetProvider(generic.TorrentProvider): self.cache = TransmitTheNetCache(self) - def _checkAuth(self): + def _check_auth(self): if not self.username or not self.password: raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") return True - def _doLogin(self): + def login(self): login_params = { 'username': self.username, @@ -63,7 +63,7 @@ class TransmitTheNetProvider(generic.TorrentProvider): 'login': 'Login' } - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -74,12 +74,12 @@ class TransmitTheNetProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = 
{'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -101,13 +101,13 @@ class TransmitTheNetProvider(generic.TorrentProvider): search_url = self.urls['search'] + "?" + urlencode(search_params) logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.getURL(self.urls['search'], params=search_params) + data = self.get_url(self.urls['search'], params=search_params) if not data: logger.log(u"No data returned from provider", logger.DEBUG) continue try: - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', {'id': 'torrent_table'}) if not torrent_table: logger.log(u"Data returned from %s does not contain any torrents" % self.name, logger.DEBUG) @@ -132,6 +132,9 @@ class TransmitTheNetProvider(generic.TorrentProvider): temp_anchor = torrent_row.find('a', {"data-src": True}) title = temp_anchor['data-src'].rsplit('.', 1)[0] + if not title: + title = torrent_row.find('a', onmouseout='return nd();').string + title = title.replace("[", "").replace("]", "").replace("/ ", "") size = try_int(temp_anchor['data-filesize']) temp_anchor = torrent_row.find('span', class_='time').parent.find_next_sibling() @@ -163,7 +166,7 @@ class TransmitTheNetProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -176,7 +179,7 @@ class TransmitTheNetCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = TransmitTheNetProvider() diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index fac4c38c23a3f2d83ac43cd7a35a055896afb8ba..7542bb2c611f02b0fb53216185e435a3e46fee7b 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -21,14 
+21,14 @@ from sickbeard import logger from sickbeard import tvcache from sickbeard import show_name_helpers from sickbeard.helpers import sanitizeSceneName -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser from sickrage.helper.exceptions import AuthException +from sickrage.providers.TorrentProvider import TorrentProvider -class TVChaosUKProvider(generic.TorrentProvider): +class TVChaosUKProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, 'TvChaosUK') + TorrentProvider.__init__(self, 'TvChaosUK') self.urls = {'base_url': 'https://tvchaosuk.com/', 'login': 'https://tvchaosuk.com/takelogin.php', @@ -37,12 +37,12 @@ class TVChaosUKProvider(generic.TorrentProvider): self.url = self.urls['base_url'] - self.username = None self.password = None self.ratio = None self.minseed = None self.minleech = None + self.freeleech = None self.cache = TVChaosUKCache(self) @@ -54,7 +54,7 @@ class TVChaosUKProvider(generic.TorrentProvider): 'include_dead_torrents': 'no', } - def _checkAuth(self): + def _check_auth(self): if self.username and self.password: return True @@ -104,10 +104,10 @@ class TVChaosUKProvider(generic.TorrentProvider): return [search_string] - def _doLogin(self): + def login(self): login_params = {'username': self.username, 'password': self.password} - response = self.getURL(self.urls['login'], post_data=login_params, timeout=30) + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -118,12 +118,12 @@ class TVChaosUKProvider(generic.TorrentProvider): return True - def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_strings, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} - if not self._doLogin(): + if not self.login(): return results for mode in search_strings.keys(): @@ -134,7 
+134,7 @@ class TVChaosUKProvider(generic.TorrentProvider): logger.log(u"Search string: %s " % search_string, logger.DEBUG) self.search_params['keywords'] = search_string.strip() - data = self.getURL(self.urls['search'], params=self.search_params) + data = self.get_url(self.urls['search'], params=self.search_params) # url_searched = self.urls['search'] + '?' + urlencode(self.search_params) if not data: @@ -142,10 +142,19 @@ class TVChaosUKProvider(generic.TorrentProvider): continue with BS4Parser(data) as html: - torrent_table = html.find(id='listtorrents').find_all('tr') - for torrent in torrent_table: + torrent_table = html.find(id='listtorrents') + if not torrent_table: + logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + continue + + torrent_rows = torrent_table.find_all('tr') + + for torrent in torrent_rows: try: - title = torrent.find(attrs={'class':'tooltip-content'}).text.strip() + freeleech = torrent.find('img', alt=re.compile('Free Torrent')) + if self.freeleech and not freeleech: + continue + title = torrent.find(attrs={'class':'tooltip-target'}).text.strip() download_url = torrent.find(title="Click to Download this Torrent!").parent['href'].strip() seeders = int(torrent.find(title='Seeders').text.strip()) leechers = int(torrent.find(title='Leechers').text.strip()) @@ -190,7 +199,7 @@ class TVChaosUKProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio @@ -204,7 +213,7 @@ class TVChaosUKCache(tvcache.TVCache): def _getRSSData(self): search_strings = {'RSS': ['']} - return {'entries': self.provider._doSearch(search_strings)} + return {'entries': self.provider.search(search_strings)} provider = TVChaosUKProvider() diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index 7a3d1ef902359dd7df7341d1505b617f05efc8f0..3f9db0f4018d335aa80745cf6bfd00aba3bb306c 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ 
-16,20 +16,20 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. -from sickbeard.providers import generic - from sickbeard import logger from sickbeard import tvcache +from sickrage.providers.NZBProvider import NZBProvider -class WombleProvider(generic.NZBProvider): +class WombleProvider(NZBProvider): def __init__(self): - generic.NZBProvider.__init__(self, "Womble's Index") + NZBProvider.__init__(self, "Womble's Index") self.public = True self.cache = WombleCache(self) self.urls = {'base_url': 'http://newshost.co.za/'} self.url = self.urls['base_url'] - self.supportsBacklog = False + self.supports_backlog = False + class WombleCache(tvcache.TVCache): def __init__(self, provider_obj): diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index 1206128a21a362d5209f51cae57830a74f01cf87..b026c80b8ec6e3300369f1c2831cb5314d0866e2 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -23,16 +23,15 @@ import urllib import requests from sickbeard import logger -from sickbeard.providers import generic from sickbeard.bs4_parser import BS4Parser +from sickrage.providers.TorrentProvider import TorrentProvider -class XthorProvider(generic.TorrentProvider): +class XthorProvider(TorrentProvider): def __init__(self): - generic.TorrentProvider.__init__(self, "Xthor") - + TorrentProvider.__init__(self, "Xthor") self.cj = cookielib.CookieJar() @@ -44,7 +43,7 @@ class XthorProvider(generic.TorrentProvider): self.password = None self.ratio = None - def _doLogin(self): + def login(self): if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()): return True @@ -53,7 +52,7 @@ class XthorProvider(generic.TorrentProvider): 'password': self.password, 'submitme': 'X'} - response = self.getURL(self.url + '/takelogin.php', post_data=login_params, timeout=30) + response = self.get_url(self.url + '/takelogin.php', post_data=login_params, timeout=30) 
if not response: logger.log(u"Unable to connect to provider", logger.WARNING) return False @@ -66,13 +65,13 @@ class XthorProvider(generic.TorrentProvider): return True - def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None): + def search(self, search_params, age=0, ep_obj=None): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} # check for auth - if not self._doLogin(): + if not self.login(): return results for mode in search_params.keys(): @@ -84,11 +83,12 @@ class XthorProvider(generic.TorrentProvider): searchURL = self.urlsearch % (urllib.quote(search_string), self.categories) logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.getURL(searchURL) + data = self.get_url(searchURL) + if not data: continue - with BS4Parser(data, features=["html5lib", "permissive"]) as html: + with BS4Parser(data, 'html5lib') as html: resultsTable = html.find("table", {"class" : "table2 table-bordered2"}) if not resultsTable: continue @@ -133,7 +133,7 @@ class XthorProvider(generic.TorrentProvider): return results - def seedRatio(self): + def seed_ratio(self): return self.ratio provider = XthorProvider() diff --git a/sickbeard/sab.py b/sickbeard/sab.py index 4c1cc0e7d8952ff68d86f03c4fc8b99c6191ce9a..de8687383d4299d46ef41c3e1e8c213dfb05c43f 100644 --- a/sickbeard/sab.py +++ b/sickbeard/sab.py @@ -74,7 +74,7 @@ def sendNZB(nzb): # if it's a normal result we just pass SAB the URL if nzb.resultType == "nzb": # for newzbin results send the ID to sab specifically - if nzb.provider.getID() == 'newzbin': + if nzb.provider.get_id() == 'newzbin': id = nzb.provider.getIDFromURL(nzb.url) if not id: logger.log(u"Unable to send NZB to sab, can't find ID in URL " + str(nzb.url), logger.ERROR) diff --git a/sickbeard/search.py b/sickbeard/search.py index 6fa7e2bec933e2a145e6aa4e2eebe6b63c580c01..f4b0deaf7e36b4dbbe264b92b70b625059d33896 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -17,7 +17,6 @@ # You should have received 
a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. - import os import re import threading @@ -37,10 +36,10 @@ from sickbeard import notifiers from sickbeard import nzbSplitter from sickbeard import ui from sickbeard import failed_history -from sickbeard.providers.generic import GenericProvider from sickbeard import common from sickrage.helper.encoding import ek from sickrage.helper.exceptions import AuthException, ex +from sickrage.providers.GenericProvider import GenericProvider def _downloadResult(result): @@ -58,7 +57,7 @@ def _downloadResult(result): # nzbs with an URL can just be downloaded from the provider if result.resultType == "nzb": - newResult = resProvider.downloadResult(result) + newResult = resProvider.download_result(result) # if it's an nzb data result elif result.resultType == "nzbdata": @@ -80,13 +79,14 @@ def _downloadResult(result): logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR) newResult = False elif result.resultType == "torrent": - newResult = resProvider.downloadResult(result) + newResult = resProvider.download_result(result) else: logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR) newResult = False return newResult + def snatchEpisode(result, endStatus=SNATCHED): """ Contains the internal logic necessary to actually "snatch" a result that @@ -132,7 +132,7 @@ def snatchEpisode(result, endStatus=SNATCHED): dlResult = _downloadResult(result) else: if not result.content and not result.url.startswith('magnet'): - result.content = result.provider.getURL(result.url, needBytes=True) + result.content = result.provider.get_url(result.url, need_bytes=True) if result.content or result.url.startswith('magnet'): client = clients.getClientIstance(sickbeard.TORRENT_METHOD)() @@ -377,7 +377,7 @@ def searchForNeededEpisodes(): if not curShow.paused: episodes.extend(wantedEpisodes(curShow, fromDate)) - providers = [x for x in 
sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_daily] + providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active() and x.enable_daily] for curProvider in providers: threads += [threading.Thread(target=curProvider.cache.updateCache, name=origThreadName + " :: [" + curProvider.name + "]")] @@ -393,7 +393,7 @@ def searchForNeededEpisodes(): threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]" curFoundResults = {} try: - curFoundResults = curProvider.searchRSS(episodes) + curFoundResults = curProvider.search_rss(episodes) except AuthException, e: logger.log(u"Authentication error: " + ex(e), logger.ERROR) continue @@ -454,7 +454,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False): origThreadName = threading.currentThread().name - providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_backlog] + providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active() and x.enable_backlog] for curProvider in providers: threads += [threading.Thread(target=curProvider.cache.updateCache, name=origThreadName + " :: [" + curProvider.name + "]")] @@ -492,7 +492,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False): logger.log(u"Performing season pack search for " + show.name) try: - searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch, downCurQuality) + searchResults = curProvider.find_search_results(show, episodes, search_mode, manualSearch, downCurQuality) except AuthException, e: logger.log(u"Authentication error: " + ex(e), logger.ERROR) break @@ -546,7 +546,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False): # get the quality of the season nzb seasonQual = bestSeasonResult.quality logger.log( - u"The quality of 
the season " + bestSeasonResult.provider.providerType + " is " + Quality.qualityStrings[ + u"The quality of the season " + bestSeasonResult.provider.provider_type + " is " + Quality.qualityStrings[ seasonQual], logger.DEBUG) myDB = db.DBConnection() @@ -569,7 +569,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False): # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred) if allWanted and bestSeasonResult.quality == highest_quality_overall: logger.log( - u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name) + u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.provider_type + " " + bestSeasonResult.name) epObjs = [] for curEpNum in allEps: for season in set([x.season for x in episodes]): @@ -585,7 +585,7 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False): else: - if bestSeasonResult.provider.providerType == GenericProvider.NZB: + if bestSeasonResult.provider.provider_type == GenericProvider.NZB: logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG) # if not, break it apart and add them as the lowest priority results diff --git a/sickbeard/showUpdater.py b/sickbeard/showUpdater.py index d6befb61a793941af3876fe5be47bd63d9a08bd4..b1bac12f516d822cbfd1d4ee483adc8a3fbf1c9d 100644 --- a/sickbeard/showUpdater.py +++ b/sickbeard/showUpdater.py @@ -16,6 +16,9 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
+import xml.etree.ElementTree as ET +import requests +import time import datetime import threading import sickbeard @@ -25,21 +28,36 @@ from sickbeard import ui from sickbeard import db from sickbeard import network_timezones from sickbeard import failed_history +from sickbeard import helpers from sickrage.helper.exceptions import CantRefreshShowException, CantUpdateShowException, ex - +from sickbeard.indexers.indexer_config import INDEXER_TVRAGE +from sickbeard.indexers.indexer_config import INDEXER_TVDB class ShowUpdater: def __init__(self): self.lock = threading.Lock() self.amActive = False - def run(self, force=False): + self.session = requests.Session() + + def run(self, force=False): # pylint: disable=unused-parameter self.amActive = True + bad_indexer = [INDEXER_TVRAGE] update_datetime = datetime.datetime.now() update_date = update_datetime.date() + # update_timestamp = calendar.timegm(update_datetime.timetuple()) + update_timestamp = time.mktime(update_datetime.timetuple()) + my_db = db.DBConnection('cache.db') + result = my_db.select("SELECT `time` FROM lastUpdate WHERE provider = 'theTVDB'") + if result: + last_update = int(result[0]['time']) + else: + last_update = update_timestamp - 86400 + my_db.action("INSERT INTO lastUpdate (provider,`time`) VALUES (?, ?)", ['theTVDB', last_update]) + # refresh network timezones network_timezones.update_network_dict() @@ -47,45 +65,63 @@ class ShowUpdater: if sickbeard.USE_FAILED_DOWNLOADS: failed_history.trimHistory() - logger.log(u"Doing full update on all shows") + update_delta = update_timestamp - last_update + + if update_delta >= 691200: # 8 days ( 7 days + 1 day of buffer time) + update_file = 'updates_month.xml' + elif update_delta >= 90000: # 25 hours ( 1 day + 1 hour of buffer time) + update_file = 'updates_week.xml' + else: + update_file = 'updates_day.xml' - # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update - stale_should_update = [] - stale_update_date = (update_date - 
datetime.timedelta(days=90)).toordinal() + # url = 'http://thetvdb.com/api/Updates.php?type=series&time=%s' % last_update + url = 'http://thetvdb.com/api/%s/updates/%s' % (sickbeard.indexerApi(INDEXER_TVDB).api_params['apikey'], update_file) + data = helpers.getURL(url, session=self.session) - # last_update_date <= 90 days, sorted ASC because dates are ordinal - myDB = db.DBConnection() - sql_result = myDB.select( - "SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? ORDER BY last_update_indexer ASC LIMIT 10;", - [stale_update_date]) + updated_shows = [] + try: + tree = ET.fromstring(data) + for show in tree.findall("Series"): + updated_shows.append(int(show.find('id').text)) - for cur_result in sql_result: - stale_should_update.append(int(cur_result['indexer_id'])) + except SyntaxError: + pass - # start update process - piList = [] - for curShow in sickbeard.showList: + logger.log(u"Doing full update on all shows") + + pi_list = [] + for cur_show in sickbeard.showList: + + if cur_show.indexer in bad_indexer: + logger.log(u"Indexer is no longer available for show [ %s ] " % cur_show.name, logger.WARNING) + else: + indexer_name = sickbeard.indexerApi(cur_show.indexer).name try: - # get next episode airdate - curShow.nextEpisode() - - # if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update, otherwise just refresh - if curShow.should_update(update_date=update_date) or curShow.indexerid in stale_should_update: - try: - piList.append(sickbeard.showQueueScheduler.action.updateShow(curShow, True)) # @UndefinedVariable - except CantUpdateShowException as e: - logger.log(u"Unable to update show: {0}".format(str(e)),logger.DEBUG) + if indexer_name == 'theTVDB': + if cur_show.indexerid in updated_shows: + pi_list.append(sickbeard.showQueueScheduler.action.updateShow(cur_show, True)) + # else: + # pi_list.append(sickbeard.showQueueScheduler.action.refreshShow(cur_show, True)) else: - logger.log( - u"Not 
updating episodes for show " + curShow.name + " because it's marked as ended and last/next episode is not within the grace period.", - logger.DEBUG) - piList.append(sickbeard.showQueueScheduler.action.refreshShow(curShow, True)) # @UndefinedVariable - + cur_show.nextEpisode() + + if cur_show.should_update(update_date=update_date): + try: + pi_list.append(sickbeard.showQueueScheduler.action.updateShow(cur_show, True)) + except CantUpdateShowException as e: + logger.log(u"Unable to update show: {0}".format(str(e)), logger.DEBUG) + else: + logger.log( + u"Not updating episodes for show " + cur_show.name + " because it's last/next episode is not within the grace period.", + logger.DEBUG) + # pi_list.append(sickbeard.showQueueScheduler.action.refreshShow(cur_show, True)) except (CantUpdateShowException, CantRefreshShowException), e: logger.log(u"Automatic update failed: " + ex(e), logger.ERROR) - ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator("Daily Update", piList)) + ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator("Daily Update", pi_list)) + + my_db.action("UPDATE lastUpdate SET `time` = ? 
WHERE provider=?", [update_timestamp, 'theTVDB']) logger.log(u"Completed full update on all shows") diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 6ace5591a924705406e53fc3e043c9489fef182b..f01d187a39cff0de59d4e894c439cf2e394c48e0 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -75,7 +75,7 @@ class CacheDBConnection(db.DBConnection): class TVCache(object): def __init__(self, provider): self.provider = provider - self.providerID = self.provider.getID() + self.providerID = self.provider.get_id() self.providerDB = None self.minTime = 10 @@ -370,7 +370,7 @@ class TVCache(object): logger.log(u"Found result " + title + " at " + url) - result = self.provider.getResult([epObj]) + result = self.provider.get_result([epObj]) result.show = showObj result.url = url result.name = title diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py index 3679a7f91115eb6e36360e8908df5460d7c7710a..414fb08911a229a412aa716588301d882f67014a 100644 --- a/sickbeard/versionChecker.py +++ b/sickbeard/versionChecker.py @@ -642,8 +642,10 @@ class GitUpdateManager(UpdateManager): # Notify update successful if sickbeard.NOTIFY_ON_UPDATE: - notifiers.notify_git_update(sickbeard.CUR_COMMIT_HASH if sickbeard.CUR_COMMIT_HASH else "") - + try: + notifiers.notify_git_update(sickbeard.CUR_COMMIT_HASH if sickbeard.CUR_COMMIT_HASH else "") + except Exception: + logger.log(u"Unable to send update notification. Continuing the update process", logger.DEBUG) return True else: @@ -885,8 +887,10 @@ class SourceUpdateManager(UpdateManager): return False # Notify update successful - notifiers.notify_git_update(sickbeard.NEWEST_VERSION_STRING) - + try: + notifiers.notify_git_update(sickbeard.NEWEST_VERSION_STRING) + except Exception: + logger.log(u"Unable to send update notification. 
Continuing the update process", logger.DEBUG) return True @staticmethod diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 6f1b4de05360f1d05bf07dd1ce7fb1eb70def17a..69e9ed15461e1b8dc2ded14b8d92ebb886e97db9 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -62,6 +62,7 @@ from sickrage.media.ShowBanner import ShowBanner from sickrage.media.ShowFanArt import ShowFanArt from sickrage.media.ShowNetworkLogo import ShowNetworkLogo from sickrage.media.ShowPoster import ShowPoster +from sickrage.providers.GenericProvider import GenericProvider from sickrage.show.ComingEpisodes import ComingEpisodes from sickrage.show.History import History as HistoryTool from sickrage.show.Show import Show @@ -297,6 +298,9 @@ class LoginHandler(BaseHandler): and (self.get_argument('password') == password or not password): api_key = sickbeard.API_KEY + if sickbeard.NOTIFY_ON_LOGIN and not helpers.is_ip_private(self.request.remote_ip): + notifiers.notify_login(self.request.remote_ip) + if api_key: remember_me = int(self.get_argument('remember_me', default=0) or 0) self.set_secure_cookie('sickrage_user', api_key, expires_days=30 if remember_me > 0 else None) @@ -1848,24 +1852,18 @@ class Home(WebRoot): ep_obj_rename_list = [] ep_obj_list = showObj.getAllEpisodes(has_location=True) - - for cur_ep_obj in ep_obj_list: - # Only want to rename if we have a location - if cur_ep_obj.location: - if cur_ep_obj.relatedEps: - # do we have one of multi-episodes in the rename list already - have_already = False - for cur_related_ep in cur_ep_obj.relatedEps + [cur_ep_obj]: - if cur_related_ep in ep_obj_rename_list: - have_already = True - break - if not have_already: - ep_obj_rename_list.append(cur_ep_obj) - else: - ep_obj_rename_list.append(cur_ep_obj) + ep_obj_list = [x for x in ep_obj_list if x.location] + ep_obj_rename_list = [] + for ep_obj in ep_obj_list: + has_already = False + for check in ep_obj.relatedEps + [ep_obj]: + if check in ep_obj_rename_list: + has_already = 
True + break + if not has_already: + ep_obj_rename_list.append(ep_obj) if ep_obj_rename_list: - # present season DESC episode DESC on screen ep_obj_rename_list.reverse() t = PageTemplate(rh=self, filename="testRename.mako") @@ -2317,12 +2315,8 @@ class HomeAddShows(Home): results.setdefault(indexer, []).extend(indexerResults) for i, shows in results.iteritems(): - final_results.extend([[sickbeard.indexerApi(i).name, i, sickbeard.indexerApi(i).config["show_url"], int(show['id']), - show['seriesname'], show['firstaired']] for show in shows]) - -# map(final_results.extend, -# ([[sickbeard.indexerApi(id).name, id, sickbeard.indexerApi(id).config["show_url"], int(show['id']), -# show['seriesname'], show['firstaired']] for show in shows] for id, shows in results.iteritems())) + final_results.extend({(sickbeard.indexerApi(i).name, i, sickbeard.indexerApi(i).config["show_url"], int(show['id']), + show['seriesname'], show['firstaired']) for show in shows}) lang_id = sickbeard.indexerApi().config['langabbv_to_id'][lang] return json.dumps({'results': final_results, 'langid': lang_id}) @@ -3755,7 +3749,7 @@ class ConfigGeneral(Config): sickbeard.save_config() - def saveGeneral(self, log_dir=None, log_nr=5, log_size=1048576, web_port=None, web_log=None, encryption_version=None, web_ipv6=None, + def saveGeneral(self, log_dir=None, log_nr=5, log_size=1048576, web_port=None, notify_on_login=None, web_log=None, encryption_version=None, web_ipv6=None, trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, skip_removed_files=None, indexerDefaultLang='en', ep_default_deleted_status=None, launch_browser=None, showupdate_hour=3, web_username=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset='NORMAL', @@ -3808,7 +3802,7 @@ class ConfigGeneral(Config): # sickbeard.LOG_DIR is set in config.change_LOG_DIR() sickbeard.COMING_EPS_MISSED_RANGE = try_int(coming_eps_missed_range, 7) sickbeard.DISPLAY_ALL_SEASONS = 
config.checkbox_to_value(display_all_seasons) - + sickbeard.NOTIFY_ON_LOGIN = config.checkbox_to_value(notify_on_login) sickbeard.WEB_PORT = try_int(web_port) sickbeard.WEB_IPV6 = config.checkbox_to_value(web_ipv6) # sickbeard.WEB_LOG is set in config.change_LOG_DIR() @@ -4257,14 +4251,14 @@ class ConfigProviders(Config): if not name: return json.dumps({'error': 'No Provider Name specified'}) - providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) + providerDict = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) tempProvider = newznab.NewznabProvider(name, '') - if tempProvider.getID() in providerDict: - return json.dumps({'error': 'Provider Name already exists as ' + providerDict[tempProvider.getID()].name}) + if tempProvider.get_id() in providerDict: + return json.dumps({'error': 'Provider Name already exists as ' + providerDict[tempProvider.get_id()].name}) else: - return json.dumps({'success': tempProvider.getID()}) + return json.dumps({'success': tempProvider.get_id()}) @staticmethod def saveNewznabProvider(name, url, key=''): @@ -4286,12 +4280,12 @@ class ConfigProviders(Config): else: providerDict[name].needs_auth = True - return providerDict[name].getID() + '|' + providerDict[name].configStr() + return providerDict[name].get_id() + '|' + providerDict[name].configStr() else: newProvider = newznab.NewznabProvider(name, url, key=key) sickbeard.newznabProviderList.append(newProvider) - return newProvider.getID() + '|' + newProvider.configStr() + return newProvider.get_id() + '|' + newProvider.configStr() @staticmethod def getNewznabCategories(name, url, key): @@ -4314,7 +4308,7 @@ class ConfigProviders(Config): return json.dumps({'success': False, 'error': error}) # Get list with Newznabproviders - # providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) + # providerDict = dict(zip([x.get_id() for x in 
sickbeard.newznabProviderList], sickbeard.newznabProviderList)) # Get newznabprovider obj with provided name tempProvider = newznab.NewznabProvider(name, url, key) @@ -4326,7 +4320,7 @@ class ConfigProviders(Config): @staticmethod def deleteNewznabProvider(nnid): - providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) + providerDict = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) if nnid not in providerDict or providerDict[nnid].default: return '0' @@ -4346,16 +4340,16 @@ class ConfigProviders(Config): return json.dumps({'error': 'Invalid name specified'}) providerDict = dict( - zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) + zip([x.get_id() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) tempProvider = rsstorrent.TorrentRssProvider(name, url, cookies, titleTAG) - if tempProvider.getID() in providerDict: - return json.dumps({'error': 'Exists as ' + providerDict[tempProvider.getID()].name}) + if tempProvider.get_id() in providerDict: + return json.dumps({'error': 'Exists as ' + providerDict[tempProvider.get_id()].name}) else: (succ, errMsg) = tempProvider.validateRSS() if succ: - return json.dumps({'success': tempProvider.getID()}) + return json.dumps({'success': tempProvider.get_id()}) else: return json.dumps({'error': errMsg}) @@ -4373,18 +4367,18 @@ class ConfigProviders(Config): providerDict[name].cookies = cookies providerDict[name].titleTAG = titleTAG - return providerDict[name].getID() + '|' + providerDict[name].configStr() + return providerDict[name].get_id() + '|' + providerDict[name].configStr() else: newProvider = rsstorrent.TorrentRssProvider(name, url, cookies, titleTAG) sickbeard.torrentRssProviderList.append(newProvider) - return newProvider.getID() + '|' + newProvider.configStr() + return newProvider.get_id() + '|' + newProvider.configStr() @staticmethod def 
deleteTorrentRssProvider(id): providerDict = dict( - zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) + zip([x.get_id() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) if id not in providerDict: return '0' @@ -4404,7 +4398,7 @@ class ConfigProviders(Config): provider_list = [] newznabProviderDict = dict( - zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) + zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) finishedNames = [] @@ -4420,7 +4414,7 @@ class ConfigProviders(Config): newProvider = newznab.NewznabProvider(cur_name, cur_url, key=cur_key) - cur_id = newProvider.getID() + cur_id = newProvider.get_id() # if it already exists then update it if cur_id in newznabProviderDict: @@ -4463,11 +4457,11 @@ class ConfigProviders(Config): # delete anything that is missing for curProvider in sickbeard.newznabProviderList: - if curProvider.getID() not in finishedNames: + if curProvider.get_id() not in finishedNames: sickbeard.newznabProviderList.remove(curProvider) torrentRssProviderDict = dict( - zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) + zip([x.get_id() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) finishedNames = [] if torrentrss_string: @@ -4481,7 +4475,7 @@ class ConfigProviders(Config): newProvider = rsstorrent.TorrentRssProvider(curName, curURL, curCookies, curTitleTAG) - curID = newProvider.getID() + curID = newProvider.get_id() # if it already exists then update it if curID in torrentRssProviderDict: @@ -4496,7 +4490,7 @@ class ConfigProviders(Config): # delete anything that is missing for curProvider in sickbeard.torrentRssProviderList: - if curProvider.getID() not in finishedNames: + if curProvider.get_id() not in finishedNames: sickbeard.torrentRssProviderList.remove(curProvider) disabled_list = [] @@ -4506,7 +4500,7 @@ class 
ConfigProviders(Config): curEnabled = try_int(curEnabled) curProvObj = [x for x in sickbeard.providers.sortedProviderList() if - x.getID() == curProvider and hasattr(x, 'enabled')] + x.get_id() == curProvider and hasattr(x, 'enabled')] if curProvObj: curProvObj[0].enabled = bool(curEnabled) @@ -4524,194 +4518,195 @@ class ConfigProviders(Config): # dynamically load provider settings for curTorrentProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if - curProvider.providerType == sickbeard.GenericProvider.TORRENT]: + curProvider.provider_type == GenericProvider.TORRENT]: if hasattr(curTorrentProvider, 'custom_url'): try: - curTorrentProvider.custom_url = str(kwargs[curTorrentProvider.getID() + '_custom_url']).strip() + curTorrentProvider.custom_url = str(kwargs[curTorrentProvider.get_id() + '_custom_url']).strip() except Exception: curTorrentProvider.custom_url = None if hasattr(curTorrentProvider, 'minseed'): try: - curTorrentProvider.minseed = int(str(kwargs[curTorrentProvider.getID() + '_minseed']).strip()) + curTorrentProvider.minseed = int(str(kwargs[curTorrentProvider.get_id() + '_minseed']).strip()) except Exception: curTorrentProvider.minseed = 0 if hasattr(curTorrentProvider, 'minleech'): try: - curTorrentProvider.minleech = int(str(kwargs[curTorrentProvider.getID() + '_minleech']).strip()) + curTorrentProvider.minleech = int(str(kwargs[curTorrentProvider.get_id() + '_minleech']).strip()) except Exception: curTorrentProvider.minleech = 0 if hasattr(curTorrentProvider, 'ratio'): try: - curTorrentProvider.ratio = str(kwargs[curTorrentProvider.getID() + '_ratio']).strip() + ratio = float(str(kwargs[curTorrentProvider.get_id() + '_ratio']).strip()) + curTorrentProvider.ratio = (ratio, -1)[ratio < 0] except Exception: curTorrentProvider.ratio = None if hasattr(curTorrentProvider, 'digest'): try: - curTorrentProvider.digest = str(kwargs[curTorrentProvider.getID() + '_digest']).strip() + curTorrentProvider.digest = 
str(kwargs[curTorrentProvider.get_id() + '_digest']).strip() except Exception: curTorrentProvider.digest = None if hasattr(curTorrentProvider, 'hash'): try: - curTorrentProvider.hash = str(kwargs[curTorrentProvider.getID() + '_hash']).strip() + curTorrentProvider.hash = str(kwargs[curTorrentProvider.get_id() + '_hash']).strip() except Exception: curTorrentProvider.hash = None if hasattr(curTorrentProvider, 'api_key'): try: - curTorrentProvider.api_key = str(kwargs[curTorrentProvider.getID() + '_api_key']).strip() + curTorrentProvider.api_key = str(kwargs[curTorrentProvider.get_id() + '_api_key']).strip() except Exception: curTorrentProvider.api_key = None if hasattr(curTorrentProvider, 'username'): try: - curTorrentProvider.username = str(kwargs[curTorrentProvider.getID() + '_username']).strip() + curTorrentProvider.username = str(kwargs[curTorrentProvider.get_id() + '_username']).strip() except Exception: curTorrentProvider.username = None if hasattr(curTorrentProvider, 'password'): try: - curTorrentProvider.password = str(kwargs[curTorrentProvider.getID() + '_password']).strip() + curTorrentProvider.password = str(kwargs[curTorrentProvider.get_id() + '_password']).strip() except Exception: curTorrentProvider.password = None if hasattr(curTorrentProvider, 'passkey'): try: - curTorrentProvider.passkey = str(kwargs[curTorrentProvider.getID() + '_passkey']).strip() + curTorrentProvider.passkey = str(kwargs[curTorrentProvider.get_id() + '_passkey']).strip() except Exception: curTorrentProvider.passkey = None if hasattr(curTorrentProvider, 'pin'): try: - curTorrentProvider.pin = str(kwargs[curTorrentProvider.getID() + '_pin']).strip() + curTorrentProvider.pin = str(kwargs[curTorrentProvider.get_id() + '_pin']).strip() except Exception: curTorrentProvider.pin = None if hasattr(curTorrentProvider, 'confirmed'): try: curTorrentProvider.confirmed = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_confirmed']) + kwargs[curTorrentProvider.get_id() + 
'_confirmed']) except Exception: curTorrentProvider.confirmed = 0 if hasattr(curTorrentProvider, 'ranked'): try: curTorrentProvider.ranked = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_ranked']) + kwargs[curTorrentProvider.get_id() + '_ranked']) except Exception: curTorrentProvider.ranked = 0 if hasattr(curTorrentProvider, 'engrelease'): try: curTorrentProvider.engrelease = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_engrelease']) + kwargs[curTorrentProvider.get_id() + '_engrelease']) except Exception: curTorrentProvider.engrelease = 0 if hasattr(curTorrentProvider, 'onlyspasearch'): try: curTorrentProvider.onlyspasearch = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_onlyspasearch']) + kwargs[curTorrentProvider.get_id() + '_onlyspasearch']) except Exception: curTorrentProvider.onlyspasearch = 0 if hasattr(curTorrentProvider, 'sorting'): try: - curTorrentProvider.sorting = str(kwargs[curTorrentProvider.getID() + '_sorting']).strip() + curTorrentProvider.sorting = str(kwargs[curTorrentProvider.get_id() + '_sorting']).strip() except Exception: curTorrentProvider.sorting = 'seeders' if hasattr(curTorrentProvider, 'freeleech'): try: curTorrentProvider.freeleech = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_freeleech']) + kwargs[curTorrentProvider.get_id() + '_freeleech']) except Exception: curTorrentProvider.freeleech = 0 if hasattr(curTorrentProvider, 'search_mode'): try: - curTorrentProvider.search_mode = str(kwargs[curTorrentProvider.getID() + '_search_mode']).strip() + curTorrentProvider.search_mode = str(kwargs[curTorrentProvider.get_id() + '_search_mode']).strip() except Exception: curTorrentProvider.search_mode = 'eponly' if hasattr(curTorrentProvider, 'search_fallback'): try: curTorrentProvider.search_fallback = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_search_fallback']) + kwargs[curTorrentProvider.get_id() + '_search_fallback']) except Exception: 
curTorrentProvider.search_fallback = 0 # these exceptions are catching unselected checkboxes if hasattr(curTorrentProvider, 'enable_daily'): try: curTorrentProvider.enable_daily = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_enable_daily']) + kwargs[curTorrentProvider.get_id() + '_enable_daily']) except Exception: curTorrentProvider.enable_daily = 0 # these exceptions are actually catching unselected checkboxes if hasattr(curTorrentProvider, 'enable_backlog'): try: curTorrentProvider.enable_backlog = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_enable_backlog']) + kwargs[curTorrentProvider.get_id() + '_enable_backlog']) except Exception: curTorrentProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes if hasattr(curTorrentProvider, 'cat'): try: - curTorrentProvider.cat = int(str(kwargs[curTorrentProvider.getID() + '_cat']).strip()) + curTorrentProvider.cat = int(str(kwargs[curTorrentProvider.get_id() + '_cat']).strip()) except Exception: curTorrentProvider.cat = 0 if hasattr(curTorrentProvider, 'subtitle'): try: curTorrentProvider.subtitle = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_subtitle']) + kwargs[curTorrentProvider.get_id() + '_subtitle']) except Exception: curTorrentProvider.subtitle = 0 for curNzbProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if - curProvider.providerType == sickbeard.GenericProvider.NZB]: + curProvider.provider_type == GenericProvider.NZB]: if hasattr(curNzbProvider, 'api_key'): try: - curNzbProvider.api_key = str(kwargs[curNzbProvider.getID() + '_api_key']).strip() + curNzbProvider.api_key = str(kwargs[curNzbProvider.get_id() + '_api_key']).strip() except Exception: curNzbProvider.api_key = None if hasattr(curNzbProvider, 'username'): try: - curNzbProvider.username = str(kwargs[curNzbProvider.getID() + '_username']).strip() + curNzbProvider.username = str(kwargs[curNzbProvider.get_id() + 
'_username']).strip() except Exception: curNzbProvider.username = None if hasattr(curNzbProvider, 'search_mode'): try: - curNzbProvider.search_mode = str(kwargs[curNzbProvider.getID() + '_search_mode']).strip() + curNzbProvider.search_mode = str(kwargs[curNzbProvider.get_id() + '_search_mode']).strip() except Exception: curNzbProvider.search_mode = 'eponly' if hasattr(curNzbProvider, 'search_fallback'): try: curNzbProvider.search_fallback = config.checkbox_to_value( - kwargs[curNzbProvider.getID() + '_search_fallback']) + kwargs[curNzbProvider.get_id() + '_search_fallback']) except Exception: curNzbProvider.search_fallback = 0 # these exceptions are actually catching unselected checkboxes if hasattr(curNzbProvider, 'enable_daily'): try: curNzbProvider.enable_daily = config.checkbox_to_value( - kwargs[curNzbProvider.getID() + '_enable_daily']) + kwargs[curNzbProvider.get_id() + '_enable_daily']) except Exception: curNzbProvider.enable_daily = 0 # these exceptions are actually catching unselected checkboxes if hasattr(curNzbProvider, 'enable_backlog'): try: curNzbProvider.enable_backlog = config.checkbox_to_value( - kwargs[curNzbProvider.getID() + '_enable_backlog']) + kwargs[curNzbProvider.get_id() + '_enable_backlog']) except Exception: curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py new file mode 100644 index 0000000000000000000000000000000000000000..1fed336152ef2c85a92f8f90582b2593ff92ac45 --- /dev/null +++ b/sickrage/providers/GenericProvider.py @@ -0,0 +1,504 @@ +# This file is part of SickRage. 
+# +# URL: https://sickrage.github.io +# Git: https://github.com/SickRage/SickRage.git +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. If not, see <http://www.gnu.org/licenses/>. + +import re +import sickbeard + +from base64 import b16encode, b32decode +from datetime import datetime +from itertools import chain +from os.path import join +from random import shuffle +from requests import Session +from sickbeard import logger +from sickbeard.classes import Proper, SearchResult +from sickbeard.common import MULTI_EP_RESULT, Quality, SEASON_RESULT, user_agents +from sickbeard.db import DBConnection +from sickbeard.helpers import download_file, getURL, remove_file_failed +from sickbeard.name_parser.parser import InvalidNameException, InvalidShowException, NameParser +from sickbeard.show_name_helpers import allPossibleShowNames +from sickbeard.tvcache import TVCache +from sickrage.helper.common import replace_extension, sanitize_filename +from sickrage.helper.encoding import ek +from sickrage.helper.exceptions import ex + + +class GenericProvider(object): # pylint: disable=too-many-instance-attributes + NZB = 'nzb' + TORRENT = 'torrent' + + def __init__(self, name): + shuffle(user_agents) + + self.name = name + + self.anime_only = False + self.bt_cache_urls = [ + 'http://torcache.net/torrent/{torrent_hash}.torrent', + 'http://thetorrent.org/torrent/{torrent_hash}.torrent', + 'http://btdig.com/torrent/{torrent_hash}.torrent', + # 
'http://torrage.com/torrent/{torrent_hash}.torrent', + # 'http://itorrents.org/torrent/{torrent_hash}.torrent', + ] + self.cache = TVCache(self) + self.enable_backlog = False + self.enable_daily = False + self.enabled = False + self.headers = { + 'User-Agent': user_agents[0] + } + self.proper_strings = ['PROPER|REPACK|REAL'] + self.provider_type = None + self.public = False + self.search_fallback = False + self.search_mode = None + self.session = Session() + self.show = None + self.supports_absolute_numbering = False + self.supports_backlog = True + self.url = '' + self.urls = {} + + shuffle(self.bt_cache_urls) + + def download_result(self, result): + if not self.login(): + return False + + urls, filename = self._make_url(result) + + for url in urls: + if 'NO_DOWNLOAD_NAME' in url: + continue + + if url.startswith('http'): + self.headers.update({ + 'Referer': '/'.join(url.split('/')[:3]) + '/' + }) + + logger.log(u'Downloading a result from %s at %s' % (self.name, url)) + + if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB): + filename = replace_extension(filename, GenericProvider.TORRENT) + + if download_file(url, filename, session=self.session, headers=self.headers): + if self._verify_download(filename): + logger.log(u'Saved result to %s' % filename, logger.INFO) + return True + + logger.log(u'Could not download %s' % url, logger.WARNING) + remove_file_failed(filename) + + if len(urls): + logger.log(u'Failed to download any results', logger.WARNING) + + return False + + def find_propers(self, search_date=None): + results = self.cache.listPropers(search_date) + + return [Proper(x['name'], x['url'], datetime.fromtimestamp(x['time']), self.show) for x in results] + + def find_search_results(self, show, episodes, search_mode, manual_search=False, download_current_quality=False): # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements + self._check_auth() + self.show = show + + results = {} + 
items_list = [] + searched_scene_season = None + + for episode in episodes: + cache_result = self.cache.searchCache(episode, manualSearch=manual_search, + downCurQuality=download_current_quality) + + if cache_result: + if episode.episode not in results: + results[episode.episode] = cache_result + else: + results[episode.episode].extend(cache_result) + + continue + + if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season: + continue + + search_strings = [] + searched_scene_season = episode.scene_season + + if len(episodes) > 1 and search_mode == 'sponly': + search_strings = self._get_season_search_strings(episode) + elif search_mode == 'eponly': + search_strings = self._get_episode_search_strings(episode) + + first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0] + if first: + logger.log(u'First search_string has rid', logger.DEBUG) + + for search_string in search_strings: + items_list += self.search(search_string, ep_obj=episode) + + if first: + first = False + + if items_list: + logger.log(u'First search_string had rid, and returned results, skipping query by string', + logger.DEBUG) + break + + logger.log(u'First search_string had rid, but returned no results, searching with string query', + logger.DEBUG) + + if len(results) == len(episodes): + return results + + if items_list: + items = {} + unknown_items = [] + + for item in items_list: + quality = self.get_quality(item, anime=show.is_anime) + + if quality == Quality.UNKNOWN: + unknown_items.append(item) + else: + if quality not in items: + items[quality] = [] + items[quality].append(item) + + items_list = list(chain(*[v for (_, v) in sorted(items.iteritems(), reverse=True)])) + items_list += unknown_items + + cl = [] + + for item in items_list: + (title, url) = self._get_title_and_url(item) + + try: + parser = NameParser(parse_method=('normal', 'anime')[show.is_anime]) + parse_result = parser.parse(title) + except 
InvalidNameException: + logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG) + continue + except InvalidShowException: + logger.log(u'Unable to parse the filename %s into a valid show' % title, logger.DEBUG) + continue + + show_object = parse_result.show + quality = parse_result.quality + release_group = parse_result.release_group + version = parse_result.version + add_cache_entry = False + + if not (show_object.air_by_date or show_object.sports): + if search_mode == 'sponly': + if len(parse_result.episode_numbers): + logger.log( + u'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title, + logger.DEBUG + ) + add_cache_entry = True + + if len(parse_result.episode_numbers) and \ + (parse_result.season_number not in set([ep.season for ep in episodes]) or + not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]): + logger.log( + u'The result %s doesn\'t seem to be a valid episode that we are trying to snatch, ignoring' % title, + logger.DEBUG) + add_cache_entry = True + else: + if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in + episodes if + ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: + logger.log( + u'The result %s doesn\'t seem to be a valid season that we are trying to snatch, ignoring' % title, + logger.DEBUG) + add_cache_entry = True + elif len(parse_result.episode_numbers) and not [ep for ep in episodes if + ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: + logger.log( + u'The result %s doesn\'t seem to be a valid episode that we are trying to snatch, ignoring' % title, + logger.DEBUG) + add_cache_entry = True + + if not add_cache_entry: + actual_season = parse_result.season_number + actual_episodes = parse_result.episode_numbers + else: + same_day_special = False + + if not parse_result.is_air_by_date: + logger.log( 
+ u'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title, + logger.DEBUG) + add_cache_entry = True + else: + air_date = parse_result.air_date.toordinal() + db = DBConnection() + sql_results = db.select( + 'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?', + [show_object.indexerid, air_date] + ) + + if len(sql_results) == 2: + if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0: + actual_season = int(sql_results[1]['season']) + actual_episodes = [int(sql_results[1]['episode'])] + same_day_special = True + elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0: + actual_season = int(sql_results[0]['season']) + actual_episodes = [int(sql_results[0]['episode'])] + same_day_special = True + elif len(sql_results) != 1: + logger.log( + u'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title, + logger.WARNING) + add_cache_entry = True + + if not add_cache_entry and not same_day_special: + actual_season = int(sql_results[0]['season']) + actual_episodes = [int(sql_results[0]['episode'])] + + if add_cache_entry: + logger.log(u'Adding item from search to cache: %s' % title, logger.DEBUG) + # pylint: disable=protected-access + # Access to a protected member of a client class + ci = self.cache._addCacheEntry(title, url, parse_result=parse_result) + + if ci is not None: + cl.append(ci) + + continue + + episode_wanted = True + + for episode_number in actual_episodes: + if not show_object.wantEpisode(actual_season, episode_number, quality, manual_search, + download_current_quality): + episode_wanted = False + break + + if not episode_wanted: + logger.log(u'Ignoring result %s because we don\'t want an episode that is %s' % ( + title, Quality.qualityStrings[quality]), logger.INFO) + continue + + logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG) + + episode_object = [] + for current_episode in 
actual_episodes: + episode_object.append(show_object.getEpisode(actual_season, current_episode)) + + result = self.get_result(episode_object) + result.show = show_object + result.url = url + result.name = title + result.quality = quality + result.release_group = release_group + result.version = version + result.content = None + result.size = self._get_size(item) + + if len(episode_object) == 1: + episode_number = episode_object[0].episode + logger.log(u'Single episode result.', logger.DEBUG) + elif len(episode_object) > 1: + episode_number = MULTI_EP_RESULT + logger.log(u'Separating multi-episode result to check for later - result contains episodes: %s' % str( + parse_result.episode_numbers), logger.DEBUG) + elif len(episode_object) == 0: + episode_number = SEASON_RESULT + logger.log(u'Separating full season result to check for later', logger.DEBUG) + + if episode_number not in results: + results[episode_number] = [result] + else: + results[episode_number].append(result) + + if len(cl) > 0: + # pylint: disable=protected-access + # Access to a protected member of a client class + db = self.cache._getDB() + db.mass_action(cl) + + return results + + def get_id(self): + return GenericProvider.make_id(self.name) + + def get_quality(self, item, anime=False): + (title, _) = self._get_title_and_url(item) + quality = Quality.sceneQuality(title, anime) + + return quality + + def get_result(self, episodes): + result = self._get_result(episodes) + result.provider = self + + return result + + def get_url(self, url, post_data=None, params=None, timeout=30, json=False, need_bytes=False): # pylint: disable=too-many-arguments, + return getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, + session=self.session, json=json, need_bytes=need_bytes) + + def image_name(self): + return self.get_id() + '.png' + + def is_active(self): # pylint: disable=no-self-use + return False + + def is_enabled(self): + return bool(self.enabled) + + @staticmethod + def 
make_id(name): + if not name: + return '' + + return re.sub(r'[^\w\d_]', '_', str(name).strip().lower()) + + def search_rss(self, episodes): + return self.cache.findNeededEpisodes(episodes) + + def seed_ratio(self): # pylint: disable=no-self-use + return '' + + def _check_auth(self): # pylint: disable=no-self-use + return True + + def login(self): # pylint: disable=no-self-use + return True + + def search(self, search_params, age=0, ep_obj=None): # pylint: disable=unused-argument,no-self-use + return [] + + def _get_result(self, episodes): # pylint: disable=no-self-use + return SearchResult(episodes) + + def _get_episode_search_strings(self, episode, add_string=''): + if not episode: + return [] + + search_string = { + 'Episode': [] + } + + for show_name in set(allPossibleShowNames(episode.show)): + episode_string = show_name + ' ' + + if episode.show.air_by_date: + episode_string += str(episode.airdate).replace('-', ' ') + elif episode.show.sports: + episode_string += str(episode.airdate).replace('-', ' ') + episode_string += ('|', ' ')[len(self.proper_strings) > 1] + episode_string += episode.airdate.strftime('%b') + elif episode.show.anime: + episode_string += '%02d' % int(episode.scene_absolute_number) + else: + episode_string += sickbeard.config.naming_ep_type[2] % { + 'seasonnumber': episode.scene_season, + 'episodenumber': episode.scene_episode, + } + + if add_string: + episode_string += ' ' + add_string + + search_string['Episode'].append(episode_string.encode('utf-8').strip()) + + return [search_string] + + def _get_season_search_strings(self, episode): + search_string = { + 'Season': [] + } + + for show_name in set(allPossibleShowNames(self.show)): + episode_string = show_name + ' ' + + if episode.show.air_by_date or episode.show.sports: + episode_string += str(episode.airdate).split('-')[0] + elif episode.show.anime: + episode_string += '%d' % int(episode.scene_absolute_number) + else: + episode_string += 'S%02d' % int(episode.scene_season) + + 
search_string['Season'].append(episode_string.encode('utf-8').strip()) + + return [search_string] + + def _get_size(self, item): # pylint: disable=unused-argument,no-self-use + return -1 + + def _get_storage_dir(self): # pylint: disable=no-self-use + return '' + + def _get_title_and_url(self, item): # pylint: disable=no-self-use + if not item: + return '', '' + + title = item.get('title', '') + url = item.get('link', '') + + if title: + title = u'' + title.replace(' ', '.') + else: + title = '' + + if url: + url = url.replace('&', '&').replace('%26tr%3D', '&tr=') + else: + url = '' + + return title, url + + def _make_url(self, result): + if not result: + return '', '' + + urls = [] + filename = u'' + + if result.url.startswith('magnet'): + try: + torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper() + + try: + torrent_name = re.findall('dn=([^&]+)', result.url)[0] + except Exception: + torrent_name = 'NO_DOWNLOAD_NAME' + + if len(torrent_hash) == 32: + torrent_hash = b16encode(b32decode(torrent_hash)).upper() + + if not torrent_hash: + logger.log(u'Unable to extract torrent hash from magnet: %s' % ex(result.url), logger.ERROR) + return urls, filename + + urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.bt_cache_urls] + except Exception: + logger.log(u'Unable to extract torrent hash or name from magnet: %s' % ex(result.url), logger.ERROR) + return urls, filename + else: + urls = [result.url] + + filename = ek(join, self._get_storage_dir(), sanitize_filename(result.name) + '.' 
+ self.provider_type) + + return urls, filename + + def _verify_download(self, file_name=None): # pylint: disable=unused-argument,no-self-use + return True diff --git a/sickrage/providers/NZBProvider.py b/sickrage/providers/NZBProvider.py new file mode 100644 index 0000000000000000000000000000000000000000..3461e03bbe3f310ae0a60b65ce05dfc5734ba3e8 --- /dev/null +++ b/sickrage/providers/NZBProvider.py @@ -0,0 +1,52 @@ +# This file is part of SickRage. +# +# URL: https://sickrage.github.io +# Git: https://github.com/SickRage/SickRage.git +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
+ +import sickbeard + +from sickbeard import logger +from sickbeard.classes import NZBSearchResult +from sickrage.helper.common import try_int + +from sickrage.providers.GenericProvider import GenericProvider + + +class NZBProvider(GenericProvider): + def __init__(self, name): + GenericProvider.__init__(self, name) + + self.provider_type = GenericProvider.NZB + + def is_active(self): + return bool(sickbeard.USE_NZBS) and self.is_enabled() + + def _get_result(self, episodes): + return NZBSearchResult(episodes) + + def _get_size(self, item): + try: + size = item.get('links')[1].get('length', -1) + except (AttributeError, IndexError, TypeError): + size = -1 + + if not size: + logger.log(u'The size was not found in the provider response', logger.DEBUG) + + return try_int(size, -1) + + def _get_storage_dir(self): + return sickbeard.NZB_DIR diff --git a/sickrage/providers/TorrentProvider.py b/sickrage/providers/TorrentProvider.py new file mode 100644 index 0000000000000000000000000000000000000000..12fd59fe932ea40aa1729a1f70264ad846f23f35 --- /dev/null +++ b/sickrage/providers/TorrentProvider.py @@ -0,0 +1,147 @@ +# This file is part of SickRage. +# +# URL: https://sickrage.github.io +# Git: https://github.com/SickRage/SickRage.git +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
+ +import sickbeard + +from datetime import datetime +from feedparser.util import FeedParserDict +from hachoir_parser import createParser +from sickbeard import logger +from sickbeard.classes import Proper, TorrentSearchResult +from sickbeard.common import Quality +from sickbeard.db import DBConnection +from sickrage.helper.common import try_int +from sickrage.helper.exceptions import ex +from sickrage.providers.GenericProvider import GenericProvider +from sickrage.show.Show import Show + + +class TorrentProvider(GenericProvider): + def __init__(self, name): + GenericProvider.__init__(self, name) + + self.provider_type = GenericProvider.TORRENT + + def find_propers(self, search_date=None): + results = [] + db = DBConnection() + placeholder = ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST]) + sql_results = db.select( + 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate' + ' FROM tv_episodes AS e' + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + + ' AND e.status IN (' + placeholder + ')' + ) + + for result in sql_results or []: + show = Show.find(sickbeard.showList, int(result['showid'])) + + if show: + episode = show.getEpisode(int(result['season']), int(result['episode'])) + + for term in self.proper_strings: + search_strings = self._get_episode_search_strings(episode, add_string=term) + + for item in self.search(search_strings[0]): + title, url = self._get_title_and_url(item) + + results.append(Proper(title, url, datetime.today(), show)) + + return results + + def is_active(self): + return bool(sickbeard.USE_TORRENTS) and self.is_enabled() + + @property + def _custom_trackers(self): + if sickbeard.TRACKERS_LIST: + if not self.public: + return '' + + return '&tr=' + '&tr='.join(set([x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()])) + + return '' + + def _get_result(self, episodes): + return 
TorrentSearchResult(episodes) + + def _get_size(self, item): + if isinstance(item, dict): + size = item.get('size', -1) + elif isinstance(item, (list, tuple)) and len(item) > 2: + size = item[2] + else: + size = -1 + + # Make sure we didn't select seeds/leechers by accident + if not size or size < 1024 * 1024: + size = -1 + + return try_int(size, -1) + + def _get_storage_dir(self): + return sickbeard.TORRENT_DIR + + def _get_title_and_url(self, item): + if isinstance(item, (dict, FeedParserDict)): + download_url = item.get('url', '') + title = item.get('title', '') + + if not download_url: + download_url = item.get('link', '') + elif isinstance(item, (list, tuple)) and len(item) > 1: + download_url = item[1] + title = item[0] + else: + download_url = '' + title = '' + + if title.endswith('DIAMOND'): + logger.log(u'Skipping DIAMOND release for mass fake releases.') + download_url = title = u'FAKERELEASE' + + if download_url: + download_url = download_url.replace('&', '&') + + if title: + title = title.replace(' ', '.') + + return title, download_url + + def _verify_download(self, file_name=None): + try: + parser = createParser(file_name) + + if parser: + # pylint: disable=protected-access + # Access to a protected member of a client class + mime_type = parser._getMimeType() + + try: + parser.stream._input.close() + except Exception: + pass + + if mime_type == 'application/x-bittorrent': + return True + except Exception as e: + logger.log(u'Failed to validate torrent file: %s' % ex(e), logger.DEBUG) + + logger.log(u'Result is not a valid torrent file', logger.DEBUG) + return False diff --git a/sickrage/providers/__init__.py b/sickrage/providers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a9a2c5b3bb437bff74e283b62c894075e8c15331 --- /dev/null +++ b/sickrage/providers/__init__.py @@ -0,0 +1 @@ +__all__ = [] diff --git a/tests/search_tests.py b/tests/search_tests.py index 
56601e158f017946195131442a69c79ae85a37a3..326a4f2f2388c010eeba5bbbf16b26c17c2d5003 100644 --- a/tests/search_tests.py +++ b/tests/search_tests.py @@ -32,9 +32,9 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../l sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) from sickbeard.tv import TVEpisode, TVShow -from sickbeard.providers.generic import GenericProvider import sickbeard import sickbeard.common as common +from sickrage.providers.GenericProvider import GenericProvider import tests.test_lib as test TESTS = { @@ -117,7 +117,7 @@ def test_generator(cur_data, cur_name, cur_provider): if not cur_provider.public: continue - items = cur_provider._doSearch(search_strings) # pylint: disable=protected-access + items = cur_provider.search(search_strings) # pylint: disable=protected-access if not items: print "No results from cur_provider?" continue @@ -132,7 +132,7 @@ def test_generator(cur_data, cur_name, cur_provider): print "url is empty" continue - quality = cur_provider.getQuality(items[0]) + quality = cur_provider.get_quality(items[0]) size = cur_provider._get_size(items[0]) # pylint: disable=protected-access if not show.quality & quality: @@ -152,7 +152,7 @@ if __name__ == '__main__': filename = name.replace(' ', '_') for provider in sickbeard.providers.sortedProviderList(): - if provider.providerType == GenericProvider.TORRENT: + if provider.provider_type == GenericProvider.TORRENT: if forceSearch: test_name = 'test_manual_%s_%s_%s' % (filename, data["tvdbid"], provider.name) else: diff --git a/tests/sickrage_tests/__init__.py b/tests/sickrage_tests/__init__.py index 65f1d24f7a0f64e3e96303df7d9349c13fe99e93..a504b8b1db037eb7bf86e88cc8aa2ef29110a251 100644 --- a/tests/sickrage_tests/__init__.py +++ b/tests/sickrage_tests/__init__.py @@ -25,6 +25,7 @@ from __future__ import print_function import helper import media +import providers import show import system import unittest @@ -35,6 +36,7 @@ if 
__name__ == '__main__': TEST_MODULES = [ helper, media, + providers, show, system, ] diff --git a/tests/sickrage_tests/media/generic_media_tests.py b/tests/sickrage_tests/media/generic_media_tests.py index 172d191fb45bec9b6deff7f4b9f9b3ee57bd3199..b7079bd67a40223ff19565d22dc11c548c50350b 100644 --- a/tests/sickrage_tests/media/generic_media_tests.py +++ b/tests/sickrage_tests/media/generic_media_tests.py @@ -133,9 +133,9 @@ class GenericMediaTests(unittest.TestCase): Test get_media_root """ - sickbeard.PROG_DIR = '/home/SickRage/' + sickbeard.PROG_DIR = os.path.join('some', 'path', 'to', 'SickRage') - self.assertEqual(GenericMedia.get_media_root(), '/home/SickRage/gui/slick') + self.assertEqual(GenericMedia.get_media_root(), os.path.join('some', 'path', 'to', 'SickRage', 'gui', 'slick')) if __name__ == '__main__': diff --git a/tests/sickrage_tests/media/show_network_logo_tests.py b/tests/sickrage_tests/media/show_network_logo_tests.py index 0cb3ee9b3e6e2f1cb33886aa3cbe93c1d18c2c92..3bce111d5b77f3c18c1e3b0595512f68bcfe712f 100644 --- a/tests/sickrage_tests/media/show_network_logo_tests.py +++ b/tests/sickrage_tests/media/show_network_logo_tests.py @@ -43,7 +43,7 @@ class ShowNetworkLogoTests(GenericMediaTests): Test get_default_media_name """ - self.assertEqual(ShowNetworkLogo(0, '').get_default_media_name(), 'network/nonetwork.png') + self.assertEqual(ShowNetworkLogo(0, '').get_default_media_name(), os.path.join('network', 'nonetwork.png')) if __name__ == '__main__': diff --git a/tests/sickrage_tests/providers/__init__.py b/tests/sickrage_tests/providers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e6567476f7fb14df1d2668c29030792d72f2e3e9 --- /dev/null +++ b/tests/sickrage_tests/providers/__init__.py @@ -0,0 +1,23 @@ +# coding=utf-8 +""" +Tests for SickRage providers +""" + +import unittest + +from generic_provider_tests import GenericProviderTests +from nzb_provider_tests import NZBProviderTests +from torrent_provider_tests 
import TorrentProviderTests + +if __name__ == '__main__': + print('=====> Running all test in "sickrage_tests.providers" <=====') + + TEST_CLASSES = [ + GenericProviderTests, + NZBProviderTests, + TorrentProviderTests, + ] + + for test_class in TEST_CLASSES: + SUITE = unittest.TestLoader().loadTestsFromTestCase(test_class) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/sickrage_tests/providers/generic_provider_tests.py b/tests/sickrage_tests/providers/generic_provider_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..28e9072e82e085610b737807e6c38dc331fe4024 --- /dev/null +++ b/tests/sickrage_tests/providers/generic_provider_tests.py @@ -0,0 +1,346 @@ +# coding=utf-8 +# This file is part of SickRage. +# +# URL: https://SickRage.GitHub.io +# Git: https://github.com/SickRage/SickRage.git +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
+ +""" +Test GenericProvider +""" + +from __future__ import print_function + +import os +import sys +import unittest + +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) + +from sickrage.providers.GenericProvider import GenericProvider + + +class GenericProviderTests(unittest.TestCase): + """ + Test GenericProvider + """ + + def test_get_id(self): + """ + Test get_id + """ + test_cases = { + None: '', + 123: '123', + 12.3: '12_3', + 0: '', + -123: '_123', + -12.3: '_12_3', + '': '', + ' ': '', + '123': '123', + ' 123 ': '123', + '12.3': '12_3', + ' 12.3 ': '12_3', + '0': '0', + ' 0 ': '0', + '-123': '_123', + ' -123 ': '_123', + '-12.3': '_12_3', + ' -12.3 ': '_12_3', + 'abc': 'abc', + ' abc ': 'abc', + 'ABC': 'abc', + ' ABC ': 'abc', + '.def': '_def', + 'g,hi': 'g_hi', + 'jk!l': 'jk_l', + 'mno?': 'mno_', + '_pqr$': '_pqr_', + } + + unicode_test_cases = { + u'': '', + u' ': '', + u'123': '123', + u' 123 ': '123', + u'12.3': '12_3', + u' 12.3 ': '12_3', + u'0': '0', + u' 0 ': '0', + u'-123': '_123', + u' -123 ': '_123', + u'-12.3': '_12_3', + u' -12.3 ': '_12_3', + u'abc': 'abc', + u' abc ': 'abc', + u'ABC': 'abc', + u' ABC ': 'abc', + u'.def': '_def', + u'g,hi': 'g_hi', + u'jk!l': 'jk_l', + u'mno?': 'mno_', + u'_pqr$': '_pqr_', + } + + for test in test_cases, unicode_test_cases: + for (name, result) in test.iteritems(): + self.assertEqual(GenericProvider(name).get_id(), result) + + def test_image_name(self): + """ + Test image_name + """ + test_cases = { + None: '.png', + 123: '123.png', + 12.3: '12_3.png', + 0: '.png', + -123: '_123.png', + -12.3: '_12_3.png', + '': '.png', + ' ': '.png', + '123': '123.png', + ' 123 ': '123.png', + '12.3': '12_3.png', + ' 12.3 ': '12_3.png', + '0': '0.png', + ' 0 ': '0.png', + '-123': '_123.png', + ' -123 ': '_123.png', + '-12.3': '_12_3.png', + ' -12.3 ': '_12_3.png', + 'abc': 'abc.png', + ' abc ': 
'abc.png', + 'ABC': 'abc.png', + ' ABC ': 'abc.png', + '.def': '_def.png', + 'g,hi': 'g_hi.png', + 'jk!l': 'jk_l.png', + 'mno?': 'mno_.png', + '_pqr$': '_pqr_.png', + } + + unicode_test_cases = { + u'': '.png', + u' ': '.png', + u'123': '123.png', + u' 123 ': '123.png', + u'12.3': '12_3.png', + u' 12.3 ': '12_3.png', + u'0': '0.png', + u' 0 ': '0.png', + u'-123': '_123.png', + u' -123 ': '_123.png', + u'-12.3': '_12_3.png', + u' -12.3 ': '_12_3.png', + u'abc': 'abc.png', + u' abc ': 'abc.png', + u'ABC': 'abc.png', + u' ABC ': 'abc.png', + u'.def': '_def.png', + u'g,hi': 'g_hi.png', + u'jk!l': 'jk_l.png', + u'mno?': 'mno_.png', + u'_pqr$': '_pqr_.png', + } + + for test in test_cases, unicode_test_cases: + for (name, result) in test.iteritems(): + self.assertEqual(GenericProvider(name).image_name(), result) + + def test_is_active(self): + """ + Test is_active + """ + self.assertFalse(GenericProvider('Test Provider').is_active()) + + def test_is_enabled(self): + """ + Test is_enabled + """ + self.assertFalse(GenericProvider('Test Provider').is_enabled()) + + def test_make_id(self): + """ + Test make_id + """ + test_cases = { + None: '', + 123: '123', + 12.3: '12_3', + 0: '', + -123: '_123', + -12.3: '_12_3', + '': '', + ' ': '', + '123': '123', + ' 123 ': '123', + '12.3': '12_3', + ' 12.3 ': '12_3', + '0': '0', + ' 0 ': '0', + '-123': '_123', + ' -123 ': '_123', + '-12.3': '_12_3', + ' -12.3 ': '_12_3', + 'abc': 'abc', + ' abc ': 'abc', + 'ABC': 'abc', + ' ABC ': 'abc', + '.def': '_def', + 'g,hi': 'g_hi', + 'jk!l': 'jk_l', + 'mno?': 'mno_', + '_pqr$': '_pqr_', + } + + unicode_test_cases = { + u'': '', + u' ': '', + u'123': '123', + u' 123 ': '123', + u'12.3': '12_3', + u' 12.3 ': '12_3', + u'0': '0', + u' 0 ': '0', + u'-123': '_123', + u' -123 ': '_123', + u'-12.3': '_12_3', + u' -12.3 ': '_12_3', + u'abc': 'abc', + u' abc ': 'abc', + u'ABC': 'abc', + u' ABC ': 'abc', + u'.def': '_def', + u'g,hi': 'g_hi', + u'jk!l': 'jk_l', + u'mno?': 'mno_', + u'_pqr$': '_pqr_', + } 
+ + for test in test_cases, unicode_test_cases: + for (name, result) in test.iteritems(): + self.assertEqual(GenericProvider.make_id(name), result) + + def test_seed_ratio(self): + """ + Test seed_ratio + """ + self.assertEqual(GenericProvider('Test Provider').seed_ratio(), '') + + def test__check_auth(self): + """ + Test _check_auth + """ + self.assertTrue(GenericProvider('Test Provider')._check_auth()) + + def test_login(self): + """ + Test login + """ + self.assertTrue(GenericProvider('Test Provider').login()) + + def test_search(self): + """ + Test search + """ + test_cases = { + None: [], + 123: [], + 12.3: [], + -123: [], + -12.3: [], + '': [], + '123': [], + '12.3': [], + '-123': [], + '-12.3': [], + } + + unicode_test_cases = { + u'': [], + u'123': [], + u'12.3': [], + u'-123': [], + u'-12.3': [], + } + + for test in test_cases, unicode_test_cases: + for (search_params, result) in test.iteritems(): + self.assertEqual(GenericProvider('Test Provider').search(search_params), result) + + def test__get_size(self): + """ + Test _get_size + """ + self.assertEqual(GenericProvider('Test Provider')._get_size(None), -1) + + def test__get_storage_dir(self): + """ + Test _get_storage_dir + """ + self.assertEqual(GenericProvider('Test Provider')._get_storage_dir(), '') + + def test__get_title_and_url(self): + """ + Test _get_title_and_url + """ + items_list = [ + None, {}, {'link': None, 'title': None}, {'link': '', 'title': ''}, + {'link': 'http://www.google.com/&foo=bar%26tr%3Dtest', 'title': 'Some Title'} + ] + results_list = [ + ('', ''), ('', ''), ('', ''), ('', ''), ('Some.Title', 'http://www.google.com/&foo=bar&tr=test') + ] + + unicode_items_list = [ + {'link': u'', 'title': u''}, + {'link': u'http://www.google.com/&foo=bar%26tr%3Dtest', 'title': u'Some Title'} + ] + unicode_results_list = [ + ('', ''), ('Some.Title', 'http://www.google.com/&foo=bar&tr=test') + ] + + self.assertEqual( + len(items_list), len(results_list), + 'Number of parameters (%d) and results 
(%d) does not match' % (len(items_list), len(results_list)) + ) + + self.assertEqual( + len(unicode_items_list), len(unicode_results_list), + 'Number of parameters (%d) and results (%d) does not match' % ( + len(unicode_items_list), len(unicode_results_list)) + ) + + for (index, item) in enumerate(items_list): + self.assertEqual(GenericProvider('Test Provider')._get_title_and_url(item), results_list[index]) + + for (index, item) in enumerate(unicode_items_list): + self.assertEqual(GenericProvider('Test Provider')._get_title_and_url(item), unicode_results_list[index]) + + def test__verify_download(self): + """ + Test _verify_download + """ + self.assertTrue(GenericProvider('Test Provider')._verify_download()) + + +if __name__ == '__main__': + print('=====> Testing %s' % __file__) + + SUITE = unittest.TestLoader().loadTestsFromTestCase(GenericProviderTests) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/sickrage_tests/providers/nzb_provider_tests.py b/tests/sickrage_tests/providers/nzb_provider_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..2983dc9cb40cc4cfd471e131a7473283a6a5e98d --- /dev/null +++ b/tests/sickrage_tests/providers/nzb_provider_tests.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# This file is part of SickRage. +# +# URL: https://SickRage.GitHub.io +# Git: https://github.com/SickRage/SickRage.git +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. 
If not, see <http://www.gnu.org/licenses/>. + +""" +Test NZBProvider +""" + +from __future__ import print_function + +import os +import sys +import unittest + +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) + +import sickbeard + +from generic_provider_tests import GenericProviderTests +from sickrage.providers.GenericProvider import GenericProvider +from sickrage.providers.NZBProvider import NZBProvider + + +class NZBProviderTests(GenericProviderTests): + """ + Test NZBProvider + """ + + def test___init__(self): + """ + Test __init__ + """ + self.assertEqual(NZBProvider('Test Provider').provider_type, GenericProvider.NZB) + + def test_is_active(self): + """ + Test is_active + """ + test_cases = { + (False, False): False, + (False, None): False, + (False, True): False, + (None, False): False, + (None, None): False, + (None, True): False, + (True, False): False, + (True, None): False, + (True, True): True, + } + + for ((use_nzb, enabled), result) in test_cases.iteritems(): + sickbeard.USE_NZBS = use_nzb + + provider = NZBProvider('Test Provider') + provider.enabled = enabled + + self.assertEqual(provider.is_active(), result) + + def test__get_size(self): + """ + Test _get_size + """ + items_list = [ + None, {}, {'links': None}, {'links': []}, {'links': [{}]}, + {'links': [{'length': 1}, {'length': None}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': ''}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': '0'}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': '123'}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': '12.3'}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': '-123'}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': '-12.3'}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': 0}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': 123}, {'length': 3}]}, + {'links': 
[{'length': 1}, {'length': 12.3}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': -123}, {'length': 3}]}, + {'links': [{'length': 1}, {'length': -12.3}, {'length': 3}]}, + ] + results_list = [ + -1, -1, -1, -1, -1, -1, -1, 0, 123, -1, -123, -1, 0, 123, 12, -123, -12 + ] + + unicode_items_list = [ + {u'links': None}, {u'links': []}, {u'links': [{}]}, + {u'links': [{u'length': 1}, {u'length': None}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': u''}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': u'0'}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': u'123'}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': u'12.3'}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': u'-123'}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': u'-12.3'}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': 0}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': 123}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': 12.3}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': -123}, {u'length': 3}]}, + {u'links': [{u'length': 1}, {u'length': -12.3}, {u'length': 3}]}, + ] + unicode_results_list = [ + -1, -1, -1, -1, -1, 0, 123, -1, -123, -1, 0, 123, 12, -123, -12 + ] + + self.assertEqual( + len(items_list), len(results_list), + 'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list)) + ) + + self.assertEqual( + len(unicode_items_list), len(unicode_results_list), + 'Number of parameters (%d) and results (%d) does not match' % ( + len(unicode_items_list), len(unicode_results_list)) + ) + + for (index, item) in enumerate(items_list): + self.assertEqual(NZBProvider('Test Provider')._get_size(item), results_list[index]) + + for (index, item) in enumerate(unicode_items_list): + self.assertEqual(NZBProvider('Test Provider')._get_size(item), unicode_results_list[index]) + + def test__get_storage_dir(self): + """ + Test _get_storage_dir + """ + 
test_cases = [ + None, 123, 12.3, '', os.path.join('some', 'path', 'to', 'folder') + ] + + for nzb_dir in test_cases: + sickbeard.NZB_DIR = nzb_dir + + self.assertEqual(NZBProvider('Test Provider')._get_storage_dir(), nzb_dir) + + +if __name__ == '__main__': + print('=====> Testing %s' % __file__) + + SUITE = unittest.TestLoader().loadTestsFromTestCase(NZBProviderTests) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/sickrage_tests/providers/torrent_provider_tests.py b/tests/sickrage_tests/providers/torrent_provider_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..ac1973b646be3b144527656a7992923be35c213e --- /dev/null +++ b/tests/sickrage_tests/providers/torrent_provider_tests.py @@ -0,0 +1,144 @@ +# coding=utf-8 +# This file is part of SickRage. +# +# URL: https://SickRage.GitHub.io +# Git: https://github.com/SickRage/SickRage.git +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. If not, see <http://www.gnu.org/licenses/>. 
+ +""" +Test TorrentProvider +""" + +from __future__ import print_function + +import os +import sys +import unittest + +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) + +import sickbeard + +from generic_provider_tests import GenericProviderTests +from sickrage.providers.GenericProvider import GenericProvider +from sickrage.providers.TorrentProvider import TorrentProvider + + +class TorrentProviderTests(GenericProviderTests): + """ + Test TorrentProvider + """ + + def test___init__(self): + """ + Test __init__ + """ + self.assertEqual(TorrentProvider('Test Provider').provider_type, GenericProvider.TORRENT) + + def test_is_active(self): + """ + Test is_active + """ + test_cases = { + (False, False): False, + (False, None): False, + (False, True): False, + (None, False): False, + (None, None): False, + (None, True): False, + (True, False): False, + (True, None): False, + (True, True): True, + } + + for ((use_torrents, enabled), result) in test_cases.iteritems(): + sickbeard.USE_TORRENTS = use_torrents + + provider = TorrentProvider('Test Provider') + provider.enabled = enabled + + self.assertEqual(provider.is_active(), result) + + def test__get_size(self): + """ + Test _get_size + """ + items_list = [ + None, {}, {'size': None}, {'size': ''}, {'size': '0'}, {'size': '123'}, {'size': '12.3'}, {'size': '-123'}, + {'size': '-12.3'}, {'size': '1100000'}, {'size': 0}, {'size': 123}, {'size': 12.3}, {'size': -123}, + {'size': -12.3}, {'size': 1100000}, [], [None], [1100000], [None, None, None], [None, None, ''], + [None, None, '0'], [None, None, '123'], [None, None, '12.3'], [None, None, '-123'], [None, None, '-12.3'], + [None, None, '1100000'], [None, None, 0], [None, None, 123], [None, None, 12.3], [None, None, -123], + [None, None, -12.3], [None, None, 1100000], (), (None, None, None), (None, None, ''), (None, None, '0'), + (None, None, 
'123'), (None, None, '12.3'), (None, None, '-123'), (None, None, '-12.3'), + (None, None, '1100000'), '', '0', '123', '12.3', '-123', '-12.3', '1100000', 0, 123, 12.3, -123, -12.3, + 1100000 + ] + results_list = [ + -1, -1, -1, -1, 0, 123, -1, -123, -1, 1100000, -1, -1, -1, -1, -1, 1100000, -1, -1, -1, -1, -1, 0, 123, -1, + -123, -1, 1100000, -1, -1, -1, -1, -1, 1100000, -1, -1, -1, 0, 123, -1, -123, -1, 1100000, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1 + ] + + unicode_items_list = [ + {u'size': None}, {u'size': u''}, {u'size': u'0'}, {u'size': u'123'}, {u'size': u'12.3'}, {u'size': u'-123'}, + {u'size': u'-12.3'}, {u'size': u'1100000'}, {u'size': 0}, {u'size': 123}, {u'size': 12.3}, {u'size': -123}, + {u'size': -12.3}, {u'size': 1100000}, [None, None, u''], [None, None, u'0'], [None, None, u'123'], + [None, None, u'12.3'], [None, None, u'-123'], [None, None, u'-12.3'], [None, None, u'1100000'], + (None, None, u''), (None, None, u'0'), (None, None, u'123'), (None, None, u'12.3'), (None, None, u'-123'), + (None, None, u'-12.3'), (None, None, u'1100000'), u'', u'0', u'123', u'12.3', u'-123', u'-12.3', u'1100000' + ] + unicode_results_list = [ + -1, -1, 0, 123, -1, -123, -1, 1100000, -1, -1, -1, -1, -1, 1100000, -1, 0, 123, -1, -123, -1, 1100000, -1, + 0, 123, -1, -123, -1, 1100000, -1, -1, -1, -1, -1, -1, -1 + ] + + self.assertEqual( + len(items_list), len(results_list), + 'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list)) + ) + + self.assertEqual( + len(unicode_items_list), len(unicode_results_list), + 'Number of parameters (%d) and results (%d) does not match' % ( + len(unicode_items_list), len(unicode_results_list)) + ) + + for (index, item) in enumerate(items_list): + self.assertEqual(TorrentProvider('Test Provider')._get_size(item), results_list[index]) + + for (index, item) in enumerate(unicode_items_list): + self.assertEqual(TorrentProvider('Test Provider')._get_size(item), 
unicode_results_list[index]) + + def test__get_storage_dir(self): + """ + Test _get_storage_dir + """ + test_cases = [ + None, 123, 12.3, '', os.path.join('some', 'path', 'to', 'folder') + ] + + for torrent_dir in test_cases: + sickbeard.TORRENT_DIR = torrent_dir + + self.assertEqual(TorrentProvider('Test Provider')._get_storage_dir(), torrent_dir) + + +if __name__ == '__main__': + print('=====> Testing %s' % __file__) + + SUITE = unittest.TestLoader().loadTestsFromTestCase(TorrentProviderTests) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/snatch_tests.py b/tests/snatch_tests.py index fee4517c031d207aa00c30b6d0346d9a87c46511..c629f5f66bbf614b432a13d273a1c44d4ae0dd0e 100644 --- a/tests/snatch_tests.py +++ b/tests/snatch_tests.py @@ -94,8 +94,8 @@ class SearchTest(test.SickbeardTestDBCase): """ for provider in sickbeard.providers.sortedProviderList(): - provider.getURL = self._fake_get_url - # provider.isActive = self._fake_is_active + provider.get_url = self._fake_get_url + # provider.is_active = self._fake_is_active super(SearchTest, self).__init__(something) diff --git a/tests/ssl_sni_tests.py b/tests/ssl_sni_tests.py index 9715c7c85cf1648b4cf55fc22adcabc022ffba03..1d69b96314025c85b559a9ce04782ce6df0130c3 100644 --- a/tests/ssl_sni_tests.py +++ b/tests/ssl_sni_tests.py @@ -40,7 +40,7 @@ class SniTests(unittest.TestCase): """ Test SNI """ - self_signed_cert_providers = ["Womble's Index", "Libertalia", "HoundDawgs"] + self_signed_cert_providers = ["Womble's Index", "Libertalia"] def test_sni_urls(self): """ diff --git a/tests/torrent_tests.py b/tests/torrent_tests.py index 62fd6a970f78e209c174b93f7759323e70fc2cb6..665be92ed4083a3243507f76984a4f4a31475804 100644 --- a/tests/torrent_tests.py +++ b/tests/torrent_tests.py @@ -69,7 +69,7 @@ class TorrentBasicTests(test.SickbeardTestDBCase): # pylint: disable=protected-access search_strings_list = bitcannon._get_episode_search_strings(self.shows[0].episodes[0]) # [{'Episode': ['Italian Works 
S05E10']}] for search_strings in search_strings_list: - bitcannon._doSearch(search_strings) # {'Episode': ['Italian Works S05E10']} # pylint: disable=protected-access + bitcannon.search(search_strings) # {'Episode': ['Italian Works S05E10']} # pylint: disable=protected-access return True @@ -85,7 +85,7 @@ class TorrentBasicTests(test.SickbeardTestDBCase): if not html: return - soup = BeautifulSoup(html, features=["html5lib", "permissive"]) + soup = BeautifulSoup(html, 'html5lib') torrent_table = soup.find('table', attrs={'class': 'data'}) torrent_rows = torrent_table.find_all('tr') if torrent_table else []