Private GIT

Skip to content
Snippets Groups Projects
Commit 05633fc4 authored by miigotu's avatar miigotu
Browse files

Merge pull request #2209 from SiCKRAGETV/rm-headURL

Remove headURL and see how it works out. bt-cache sites are only nece…
parents 099dcd94 02f14a65
No related branches found
No related tags found
No related merge requests found
......@@ -1333,53 +1333,6 @@ def _setUpSession(session, headers):
return session
def headURL(url, params=None, headers=None, timeout=30, session=None, json=False, proxyGlypeProxySSLwarning=None):
    """
    Check that a URL is reachable by issuing an HTTP HEAD request, without
    downloading the response body.

    :param url: the URL to probe
    :param params: optional query parameters, attached to the session
    :param headers: optional extra request headers (a fresh dict is used when None)
    :param timeout: per-request timeout in seconds
    :param session: an existing requests session to reuse; it is (re)configured
        by _setUpSession() either way
    :param json: unused here; kept for signature parity with getURL()
    :param proxyGlypeProxySSLwarning: when set, a second HEAD request is sent to
        this URL if the first response looks like a Glype proxy SSL warning page
    :return: True if the final response status is 200, False otherwise
        (including on any request exception)
    """
    # Fix: the original default was the mutable literal {} shared across
    # calls; normalize None to a fresh dict per call instead.
    if headers is None:
        headers = {}

    session = _setUpSession(session, headers)
    # NOTE(review): this mutates the (possibly shared) session's default
    # params for subsequent requests on the same session.
    session.params = params

    try:
        resp = session.head(url, timeout=timeout, allow_redirects=True, verify=session.verify)
        if not resp.ok:
            logger.log(u"Requested headURL " + url + " returned status code is " + str(
                resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
            return False

        if proxyGlypeProxySSLwarning is not None:
            # Glype proxies interpose a warning page for HTTPS targets; when we
            # detect it, hit the "continue anyway" URL to get the real answer.
            # NOTE(review): HEAD responses normally carry no body, so resp.text
            # may always be empty here — confirm this detection actually fires.
            if re.search('The site you are attempting to browse is on a secure connection', resp.text):
                resp = session.head(proxyGlypeProxySSLwarning, timeout=timeout, allow_redirects=True, verify=session.verify)
                if not resp.ok:
                    logger.log(u"GlypeProxySSLwarning: Requested headURL " + url + " returned status code is " + str(
                        resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
                    return False

        return resp.status_code == 200
    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error in headURL %s. Error: %s" % (url, ex(e)), logger.WARNING)
    except requests.exceptions.ConnectionError as e:
        logger.log(u"Connection error in headURL %s. Error: %s " % (url, ex(e)), logger.WARNING)
    except requests.exceptions.Timeout as e:
        logger.log(u"Connection timed out accessing headURL %s. Error: %s" % (url, ex(e)), logger.WARNING)
    except requests.exceptions.ContentDecodingError:
        logger.log(u"Content-Encoding was gzip, but content was not compressed. headURL: %s" % url, logger.DEBUG)
        logger.log(traceback.format_exc(), logger.DEBUG)
    except Exception as e:
        # Deliberate catch-all: one bad URL must not crash the caller; log
        # the traceback so the failure stays diagnosable.
        logger.log(u"Unknown exception in headURL %s. Error: %s" % (url, ex(e)), logger.WARNING)
        logger.log(traceback.format_exc(), logger.WARNING)

    return False
def getURL(url, post_data=None, params={}, headers={}, timeout=30, session=None, json=False, proxyGlypeProxySSLwarning=None):
"""
......
......
......@@ -261,22 +261,6 @@ class FrenchTorrentDBProvider(generic.TorrentProvider):
def seedRatio(self):
return self.ratio
def headURL(self, result):
    """
    Check if URL is valid and the file exists at URL.

    Original function overwritten because FrenchTorrentDB links only support
    one request: the generic HEAD probe would consume the link, so the first
    candidate URL is returned untouched instead of being verified.
    """
    # Authentication is still required before handing out a link.
    if not self._doLogin():
        return u''

    candidate_urls, filename = self._makeURL(result)

    # First candidate if any, empty string otherwise — no HEAD request issued.
    return next(iter(candidate_urls), u'')
def _get_season_search_strings(self, ep_obj):
......
......
......@@ -188,34 +188,6 @@ class GenericProvider:
return (urls, filename)
def headURL(self, result):
    """
    Check if URL is valid and the file exists at URL.

    Builds the candidate URLs for *result* and returns the first one that
    passes a HEAD check via helpers.headURL, or u'' if none do.
    """
    # Probing requires a logged-in session.
    if not self._doLogin():
        return False

    candidate_urls, filename = self._makeURL(result)

    if self.proxy.isEnabled():
        # Route through the Glype proxy: advertise it as referer and prepare
        # the "continue anyway" URL for its SSL warning page.
        self.headers.update({'Referer': self.proxy.getProxyURL()})
        self.proxyGlypeProxySSLwarning = self.proxy.getProxyURL() + 'includes/process.php?action=sslagree&submit=Continue anyway...'
    else:
        # Drop any stale proxy referer and disable the warning handling.
        self.headers.pop('Referer', None)
        self.proxyGlypeProxySSLwarning = None

    for candidate in candidate_urls:
        # Placeholder entries carry no downloadable file — skip them.
        if 'NO_DOWNLOAD_NAME' in candidate:
            continue
        if helpers.headURL(self.proxy._buildURL(candidate),
                           session=self.session,
                           headers=self.headers,
                           proxyGlypeProxySSLwarning=self.proxyGlypeProxySSLwarning):
            return candidate

    return u''
def downloadResult(self, result):
"""
Save the result to disk.
......
......
......@@ -237,14 +237,6 @@ def pickBestResult(results, show):
logger.log(cur_result.name + u" has previously failed, rejecting it")
continue
# Only request HEAD instead of downloading content here, and only after all other checks but before bestresult!
# Otherwise we are spamming providers even when searching with cache only. We can validate now, and download later
if len(cur_result.url) and cur_result.provider:
cur_result.url = cur_result.provider.headURL(cur_result)
if not len(cur_result.url):
logger.log('Skipping %s, URL check failed. Bad result from provider.' % cur_result.name,logger.INFO)
continue
if cur_result.quality in bestQualities and (not bestResult or bestResult.quality < cur_result.quality or bestResult not in bestQualities):
bestResult = cur_result
elif cur_result.quality in anyQualities and (not bestResult or bestResult not in bestQualities) and (not bestResult or bestResult.quality < cur_result.quality):
......
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please sign in to comment