diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index 9aebd87370851d0aadb911b325f96377b48e8676..fe036380a44e8679aeab3d322844957e7219513e 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -19,16 +19,16 @@
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import traceback
+from urllib import urlencode
 
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard.bs4_parser import BS4Parser
-from sickrage.helper.common import convert_size
+from sickrage.helper.common import try_int, convert_size
 from sickrage.providers.torrent.TorrentProvider import TorrentProvider
 
 
-class AlphaRatioProvider(TorrentProvider):
+class AlphaRatioProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
 
     def __init__(self):
 
@@ -40,25 +40,23 @@ class AlphaRatioProvider(TorrentProvider):
         self.minseed = None
         self.minleech = None
 
-        self.urls = {'base_url': 'http://alpharatio.cc/',
-                     'login': 'http://alpharatio.cc/login.php',
-                     'detail': 'http://alpharatio.cc/torrents.php?torrentid=%s',
-                     'search': 'http://alpharatio.cc/torrents.php?searchstr=%s%s',
-                     'download': 'http://alpharatio.cc/%s'}
-
-        self.url = self.urls['base_url']
-
-        self.categories = "&filter_cat[1]=1&filter_cat[2]=1&filter_cat[3]=1&filter_cat[4]=1&filter_cat[5]=1"
+        self.url = 'http://alpharatio.cc/'
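+        # Search and login endpoints; download links in results are site-relative and joined with self.url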
+        self.urls = {
+            'login': self.url + 'login.php',
+            'search': self.url + 'torrents.php',
+        }
 
         self.proper_strings = ['PROPER', 'REPACK']
 
         self.cache = AlphaRatioCache(self)
 
     def login(self):
-        login_params = {'username': self.username,
-                        'password': self.password,
-                        'remember_me': 'on',
-                        'login': 'submit'}
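+        # Credentials and form fields posted to AlphaRatio's login endpoint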
+        login_params = {
+            'username': self.username,
+            'password': self.password,
+            'remember_me': 'on',
+            'login': 'submit'
+        }
 
         response = self.get_url(self.urls['login'], post_data=login_params, timeout=30)
         if not response:
@@ -72,7 +70,7 @@ class AlphaRatioProvider(TorrentProvider):
 
         return True
 
-    def search(self, search_strings, age=0, ep_obj=None):
+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -80,6 +78,15 @@ class AlphaRatioProvider(TorrentProvider):
         if not self.login():
             return results
 
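+        # Enable all five torrent categories; 'searchstr' is filled in per search string below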
+        search_params = {
+            'searchstr': '',
+            'filter_cat[1]': 1,
+            'filter_cat[2]': 1,
+            'filter_cat[3]': 1,
+            'filter_cat[4]': 1,
+            'filter_cat[5]': 1
+        }
+
         for mode in search_strings.keys():
             logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
             for search_string in search_strings[mode]:
@@ -87,56 +94,63 @@ class AlphaRatioProvider(TorrentProvider):
                 if mode != 'RSS':
                     logger.log(u"Search string: %s " % search_string, logger.DEBUG)
 
-                searchURL = self.urls['search'] % (search_string, self.categories)
-                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
+                search_params['searchstr'] = search_string
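+                # urlencode turns search_params into the query string (searchstr plus the category filters)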
+                search_url = self.urls['search'] + '?' + urlencode(search_params)
+                logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
 
-                data = self.get_url(searchURL)
+                data = self.get_url(search_url)
                 if not data:
                     continue
 
-                try:
-                    with BS4Parser(data, 'html5lib') as html:
-                        torrent_table = html.find('table', attrs={'id': 'torrent_table'})
-                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+                with BS4Parser(data, 'html5lib') as html:
+                    torrent_table = html.find('table', id='torrent_table')
+                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+
+                    # Continue only if at least one release is found
+                    if len(torrent_rows) < 2:
+                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
+                        continue
+
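+                    # Header cells may hold plain text or an icon whose 'title'
+                    # attribute carries the column label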
+                    def process_column_header(td):
+                        result = ''
+                        if td.a and td.a.img:
+                            result = td.a.img.get('title', td.a.get_text(strip=True))
+                        if not result:
+                            result = td.get_text(strip=True)
+                        return result
+
+                    # '', '', 'Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers'
+                    labels = [process_column_header(td) for td in torrent_rows[0].find_all('td')]
+
+                    # Skip column headers
+                    for result in torrent_rows[1:]:
+                        cells = result.find_all('td')
+                        if len(cells) < len(labels):
+                            continue
 
-                        # Continue only if one Release is found
-                        if len(torrent_rows) < 2:
-                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
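+                        # Look each field up by its column label so a reordered table layout keeps parsing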
+                        try:
+                            title = cells[labels.index('Name /Year')].find('a', dir='ltr').get_text(strip=True)
+                            download_url = self.url + cells[labels.index('Name /Year')].find('a', title='Download')['href']
+                            seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
+                            leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
+                            size = convert_size(cells[labels.index('Size')].get_text(strip=True)) or -1
+                        except StandardError:
                             continue
 
-                        for result in torrent_rows[1:]:
-                            cells = result.find_all('td')
-                            link = result.find('a', attrs={'dir': 'ltr'})
-                            url = result.find('a', attrs={'title': 'Download'})
-
-                            try:
-                                num_cells = len(cells)
-                                title = link.contents[0] if link.contents[0] else None
-                                download_url = self.urls['download'] % (url['href']) if url['href'] else None
-                                seeders = cells[num_cells - 2].contents[0] if cells[len(cells) - 2].contents[0] else 1
-                                leechers = cells[num_cells - 1].contents[0] if cells[len(cells) - 1].contents[0] else 0
-                                torrent_size = cells[len(cells) - 4].contents[0]
-                                size = convert_size(torrent_size) or -1
-                            except (AttributeError, TypeError, KeyError, ValueError):
-                                continue
-
-                            if not all([title, download_url]):
-                                continue
-
-                            # Filter unseeded torrent
-                            if seeders < self.minseed or leechers < self.minleech:
-                                if mode != 'RSS':
-                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
-                                continue
-
-                            item = title, download_url, size, seeders, leechers
+                        if not all([title, download_url]):
+                            continue
+
+                        # Filter unseeded torrent
+                        if seeders < self.minseed or leechers < self.minleech:
                             if mode != 'RSS':
-                                logger.log(u"Found result: %s " % title, logger.DEBUG)
+                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
+                            continue
 
-                            items[mode].append(item)
+                        item = title, download_url, size, seeders, leechers
+                        if mode != 'RSS':
+                            logger.log(u"Found result: %s " % title, logger.DEBUG)
 
-                except Exception:
-                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING)
+                        items[mode].append(item)
 
             # For each search mode sort all the items by seeders if available
             items[mode].sort(key=lambda tup: tup[3], reverse=True)