Private GIT

Skip to content
Snippets Groups Projects
Commit 838c03b5 authored by Dustyn Gibson's avatar Dustyn Gibson
Browse files

Merge branch 'hotfix-2885'

parents 044081be 0736f583
Branches
Tags
No related merge requests found
......@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=W0703
import urllib
import time
......@@ -23,14 +24,14 @@ import os
import re
import sickbeard
import generic
from sickbeard.common import Quality
from sickbeard import classes
from sickbeard import helpers
from sickbeard import scene_exceptions
from sickbeard import logger
from sickbeard import tvcache
from sickbeard import db
from sickbeard.common import Quality
from sickbeard.providers import generic
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import AuthException
......@@ -60,6 +61,8 @@ class NewznabProvider(generic.NZBProvider):
else:
self.needs_auth = True
self.public = not self.needs_auth
if catIDs:
self.catIDs = catIDs
else:
......@@ -106,7 +109,7 @@ class NewznabProvider(generic.NZBProvider):
try:
data = self.cache.getRSSFeed("%s/api?%s" % (self.url, urllib.urlencode(params)))
except:
except Exception:
logger.log(u"Error getting html for [%s]" %
("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x, y) for x, y in params.iteritems()))), logger.DEBUG)
return (False, return_categories, "Error getting html for [%s]" %
......@@ -122,7 +125,7 @@ class NewznabProvider(generic.NZBProvider):
return_categories.append(category)
for subcat in category.subcats:
return_categories.append(subcat)
except:
except Exception:
logger.log(u"Error parsing result for [%s]" % (self.name),
logger.DEBUG)
return (False, return_categories, "Error parsing result for [%s]" % (self.name))
......@@ -210,10 +213,8 @@ class NewznabProvider(generic.NZBProvider):
def _checkAuthFromData(self, data):
try:
data['feed']
data['entries']
except:return self._checkAuth()
if 'feed' not in data or 'entries' not in data:
return self._checkAuth()
try:
bozo = int(data['bozo'])
......@@ -222,7 +223,7 @@ class NewznabProvider(generic.NZBProvider):
err_desc = data['feed']['error']['description']
if not err_code or err_desc:
raise
except:
except Exception:
return True
if err_code == 100:
......@@ -270,10 +271,10 @@ class NewznabProvider(generic.NZBProvider):
if 'lolo.sickbeard.com' in self.url and params['maxage'] < 33:
params['maxage'] = 33
while (total >= offset):
while total >= offset:
search_url = self.url + 'api?' + urllib.urlencode(params)
while((datetime.datetime.now() - self.last_search).seconds < 5):
while(datetime.datetime.now() - self.last_search).seconds < 5:
time.sleep(1)
logger.log(u"Search url: " + search_url, logger.DEBUG)
......@@ -347,7 +348,7 @@ class NewznabProvider(generic.NZBProvider):
for searchString in searchStrings:
for item in self._doSearch(searchString):
title, url = self._get_title_and_url(item)
if(re.match(r'.*(REPACK|PROPER).*', title, re.I)):
if re.match(r'.*(REPACK|PROPER).*', title, re.I):
results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
return results
......@@ -378,7 +379,7 @@ class NewznabCache(tvcache.TVCache):
rss_url = self.provider.url + 'api?' + urllib.urlencode(params)
while((datetime.datetime.now() - self.last_search).seconds < 5):
while (datetime.datetime.now() - self.last_search).seconds < 5:
time.sleep(1)
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
......@@ -389,6 +390,7 @@ class NewznabCache(tvcache.TVCache):
return data
def _checkAuth(self, data):
# pylint: disable=W0212
return self.provider._checkAuthFromData(data)
def _parseItem(self, item):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment