diff --git a/gui/slick/images/providers/cpasbien.png b/gui/slick/images/providers/cpasbien.png
new file mode 100644
index 0000000000000000000000000000000000000000..6b8b1d29194ad1158570cb97a23dc5786953030c
Binary files /dev/null and b/gui/slick/images/providers/cpasbien.png differ
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index b78c5240194eed5835f148304964765b0d38cb52..2a8b868f394bcb340607f8724c8a9cda6b245070 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -37,7 +37,7 @@ from github import Github
 from sickbeard import providers, metadata, config, webserveInit
 from sickbeard.providers.generic import GenericProvider
 from providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
-    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, fnt, torrentbytes, animezb, \
+    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, torrentbytes, animezb, \
     frenchtorrentdb, freshontv, libertalia, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, scenetime, btdigg
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
     naming_ep_type
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index 6f98f01127323724b5843eb750d67f27f791a955..162c53cb64253fc4e93aecad2e3e4f21ef080d06 100644
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -47,6 +47,7 @@ __all__ = ['womble',
            'tntvillage',
            'binsearch',
            'bluetigers',
+           'cpasbien',
            'fnt',
            'scenetime',
            'btdigg',
diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py
new file mode 100644
index 0000000000000000000000000000000000000000..272dfd440b27d2bf833102726e1e50054d8df5fc
--- /dev/null
+++ b/sickbeard/providers/cpasbien.py
@@ -0,0 +1,217 @@
+# -*- coding: latin-1 -*-
+# Author: Guillaume Serre <guillaume.serre@gmail.com>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import traceback
+import re
+import datetime
+import sickbeard
+import generic
+
+from lib import requests
+from lib.requests import exceptions
+
+from sickbeard.common import USER_AGENT, Quality, cpu_presets
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import show_name_helpers
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard import db
+from sickbeard import helpers
+from sickbeard import classes
+from sickbeard.helpers import sanitizeSceneName, arithmeticEval
+from sickbeard.exceptions import ex
+
+import cookielib
+
+
+class CpasbienProvider(generic.TorrentProvider):
+
+    def __init__(self):
+        
+        generic.TorrentProvider.__init__(self, "Cpasbien")
+
+        self.supportsBacklog = True
+        self.ratio = None
+        
+        self.url = "http://www.cpasbien.pw"
+        
+        
+    def isEnabled(self):
+        
+        return self.enabled
+    
+    def imageName(self):
+        return 'cpasbien.png'
+    
+    def getQuality(self, item, anime=False):
+        quality = Quality.sceneQuality(item[0], anime)
+        return quality
+
+    def _get_season_search_strings(self, ep_obj):
+
+        search_string = {'Season': []}
+        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+            if ep_obj.show.air_by_date or ep_obj.show.sports:
+                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
+            elif ep_obj.show.anime:
+                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+            else:
+                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX
+
+            search_string['Season'].append(ep_string)
+
+        return [search_string]
+
+    def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+        search_string = {'Episode': []}
+
+        if not ep_obj:
+            return []
+
+        if self.show.air_by_date:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            str(ep_obj.airdate).replace('-', '|')
+                search_string['Episode'].append(ep_string)
+        elif self.show.sports:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            str(ep_obj.airdate).replace('-', '|') + '|' + \
+                            ep_obj.airdate.strftime('%b')
+                search_string['Episode'].append(ep_string)
+        elif self.show.anime:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + '.' + \
+                            "%i" % int(ep_obj.scene_absolute_number)
+                search_string['Episode'].append(ep_string)
+        else:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) + '.' + \
+                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
+
+                search_string['Episode'].append(re.sub('\s+', '.', ep_string))
+
+        return [search_string]
+        
+    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
+        
+        results = []
+        items = {'Season': [], 'Episode': [], 'RSS': []}
+        
+        for mode in search_params.keys():
+
+            for search_string in search_params[mode]:
+        
+                searchURL = self.url + '/recherche/'+search_string.replace('.','-')+'.html'
+                data = self.getURL(searchURL) or ''
+                try:
+                    with BS4Parser(data, features=["html5lib", "permissive"]) as html:
+                        
+                        lin=0
+                        erlin=0
+                        resultdiv=[]
+                        while erlin==0:
+                            try:
+                                classlin='ligne'+str(lin)
+                                resultlin=html.findAll(attrs = {'class' : [classlin]})
+                                if resultlin:
+                                    for ele in resultlin:
+                                        resultdiv.append(ele)
+                                    lin+=1
+                                else:
+                                    erlin=1
+                            except:
+                                erlin=1
+                        
+                        for row in resultdiv:
+                            try:
+                                link = row.find("a", title=True)
+                                torrent_name = str(link.text).lower().strip()  
+                                pageURL = link['href']
+
+                                #downloadTorrentLink = torrentSoup.find("a", title.startswith('Cliquer'))
+                                tmp = pageURL.split('/')[-1].replace('.html','.torrent')
+
+                                downloadTorrentLink = (self.url + '/telechargement/%s' % tmp)
+
+                                if downloadTorrentLink:
+                
+                                    torrent_download_url = downloadTorrentLink
+                            except (AttributeError, TypeError):
+                                continue
+                            
+                            if not torrent_name or not torrent_download_url:
+                                continue
+
+                            item = torrent_name, torrent_download_url
+                            logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",logger.DEBUG)
+                            items[mode].append(item)
+
+                except Exception, e:
+                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),logger.ERROR)
+            results += items[mode]
+        return results
+    
+    def _get_title_and_url(self, item):
+
+        title, url = item
+
+        if title:
+            title = u'' + title
+            title = title.replace(' ', '.')
+
+        if url:
+            url = str(url).replace('&amp;', '&')
+
+        return title, url
+    
+    def findPropers(self, search_date=datetime.datetime.today()):
+
+        results = []
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select(
+            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+            ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+            ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+        )
+
+        if not sqlResults:
+            return []
+
+        for sqlshow in sqlResults:
+            self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+            if self.show:
+                curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+                searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+                for item in self._doSearch(searchString[0]):
+                    title, url = self._get_title_and_url(item)
+                    results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
+
+        return results
+    
+    def seedRatio(self):
+        return self.ratio
+
+provider = CpasbienProvider()
diff --git a/sickbeard/providers/libertalia.py b/sickbeard/providers/libertalia.py
index 2b2a35e7850535ecf5fa0c3953d85f49e2a8feee..daff4eed4e3a13831e710e2d35aefa4c39b01b52 100644
--- a/sickbeard/providers/libertalia.py
+++ b/sickbeard/providers/libertalia.py
@@ -1,6 +1,6 @@
 # -*- coding: latin-1 -*-
 # Authors: Raver2046 
-#          adaur 
+#          adaur
 # based on tpi.py
 # URL: http://code.google.com/p/sickbeard/
 #
@@ -26,13 +26,11 @@ import time
 from requests.auth import AuthBase
 import sickbeard
 import generic
-import urllib
 
-import urllib2
 import requests
 import json
 import cookielib
-import random
+import urllib
 
 from requests import exceptions
 from sickbeard.bs4_parser import BS4Parser
@@ -56,17 +54,12 @@ class LibertaliaProvider(generic.TorrentProvider):
         self.supportsBacklog = True
         
         self.cj = cookielib.CookieJar()
-        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
         
         self.url = "https://libertalia.me"
         self.urlsearch = "https://libertalia.me/torrents.php?name=%s%s"
         
         self.categories = "&cat%5B%5D=9"
         
-        self.login_done = False
-        self.failed_login_logged = False
-        self.successful_login_logged = False
-        
         self.enabled = False
         self.username = None
         self.password = None
@@ -133,34 +126,35 @@ class LibertaliaProvider(generic.TorrentProvider):
         return quality
     
     def _doLogin(self):
-
-        listeUserAgents = [ 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_5; fr-fr) AppleWebKit/525.18 (KHTML, like Gecko) Version/3.1.2 Safari/525.20.1',
-                                                'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.186 Safari/535.1',
-                                                'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/0.2.149.27 Safari/525.13',
-                                                'Mozilla/5.0 (X11; U; Linux x86_64; en-us) AppleWebKit/528.5+ (KHTML, like Gecko, Safari/528.5+) midori',
-                                                'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.107 Safari/535.1',
-                                                'Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en-us) AppleWebKit/312.1 (KHTML, like Gecko) Safari/312',
-                                                'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.12 Safari/535.11',
-                                                'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.8 (KHTML, like Gecko) Chrome/17.0.940.0 Safari/535.8' ]
-
-        self.opener.addheaders = [('User-agent', random.choice(listeUserAgents))] 
-                                       
-        data = urllib.urlencode({'username': self.username, 'password' :  self.password, 'submit' : 'login'})
-        
-     
-        r = self.opener.open(self.url + '/login.php',data)
+ 
+        if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
+            return True
+            
+        header = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.8 (KHTML, like Gecko) Chrome/17.0.940.0 Safari/535.8'}
         
-        for index, cookie in enumerate(self.cj):
-            if (cookie.name == "session"):
-                self.login_done = True
-                                
-        if not self.login_done and not self.failed_login_logged:
-            logger.log(u"Unable to login to Libertalia. Please check username and password.", logger.WARNING) 
+        login_params = {'username': self.username,
+                            'password': self.password
+        }
+
+        if not self.session:
+            self.session = requests.Session()
+
+        logger.log('Performing authentication to Libertalia', logger.DEBUG)
+        try:
+            response = self.session.post(self.url + '/login.php', data=login_params, timeout=30, headers=header)
+        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+            logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+            return False
+
+        if re.search('upload.php', response.text):
+            logger.log(u'Login to ' + self.name + ' was successful.', logger.DEBUG)
+            return True                
+        else:
+            logger.log(u'Login to ' + self.name + ' was unsuccessful.', logger.DEBUG)                
             return False
-        
-        if self.login_done and not self.successful_login_logged:
-            logger.log(u"Login to Libertalia successful", logger.DEBUG) 
-            return True      
+
+        return True
+            
 
     def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
     
@@ -184,11 +178,14 @@ class LibertaliaProvider(generic.TorrentProvider):
          
                 logger.log(u"Search string: " + searchURL, logger.DEBUG)
                 
-                r = self.opener.open( searchURL )
-                with BS4Parser(r, features=["html5lib", "permissive"]) as html:
+                data = self.getURL(searchURL)
+                if not data:
+                    continue
+                
+                with BS4Parser(data, features=["html5lib", "permissive"]) as html:
                     resultsTable = html.find("table", { "class" : "torrent_table"  })
                     if resultsTable:
-                        logger.log(u"Libertalia found resulttable ! " , logger.DEBUG)  
+                        logger.log(u"Libertalia found result table ! " , logger.DEBUG)  
                         rows = resultsTable.findAll("tr" ,  {"class" : "torrent_row  new  "}  )  # torrent_row new
                         
                         for row in rows:
@@ -202,13 +199,11 @@ class LibertaliaProvider(generic.TorrentProvider):
                             if link:               
                                 title = link.text
                                 recherched=searchURL.replace(".","(.*)").replace(" ","(.*)").replace("'","(.*)")
-                                logger.log(u"Libertalia TITLE : " + title, logger.DEBUG)  
-                                logger.log(u"Libertalia CHECK MATCH : " + recherched, logger.DEBUG)                                        
-                                #downloadURL =  self.url + "/" + row.find("a",href=re.compile("torrent_pass"))['href']
-                                if re.match(recherched,title , re.IGNORECASE):              
-                                    downloadURL =  row.find("a",href=re.compile("torrent_pass"))['href']                
-                                    item = title, downloadURL
-                                    items[mode].append(item)
+                                logger.log(u"Libertalia title : " + title, logger.DEBUG)                                 
+                                downloadURL =  row.find("a",href=re.compile("torrent_pass"))['href']   
+                                logger.log(u"Libertalia download URL : " + downloadURL, logger.DEBUG)                                   
+                                item = title, downloadURL
+                                items[mode].append(item)
             results += items[mode]
          
         return results