From fbd17361775d44ca0180e70b94a9436005f10041 Mon Sep 17 00:00:00 2001
From: Sarakha63 <sarakha_ludovic@yahoo.fr>
Date: Fri, 24 May 2013 02:42:19 +0200
Subject: [PATCH] Normalize line endings; rename PyDev project from Sick-Beard to SickBeard

---
 .project                                      |   34 +-
 .pydevproject                                 |   30 +-
 cherrypy/__init__.py                          | 1146 ++---
 cherrypy/_cpdispatch.py                       | 1136 ++---
 cherrypy/_cplogging.py                        |  500 +--
 cherrypy/cherryd                              |  204 +-
 cherrypy/lib/covercp.py                       |  728 ++--
 cherrypy/lib/httpauth.py                      |  722 ++--
 cherrypy/process/win32.py                     |  348 +-
 data/interfaces/default/config_general.tmpl   |  434 +-
 .../default/config_postProcessing.tmpl        | 1154 ++---
 data/interfaces/default/home_newShow.tmpl     |  170 +-
 data/interfaces/default/inc_top.tmpl          |  500 +--
 .../default/manage_manageSearches.tmpl        |   80 +-
 data/js/ajaxNotifications.js                  |   52 +-
 data/js/configNotifications.js                |  410 +-
 data/js/configProviders.js                    |  420 +-
 lib/tvdb_api/__init__.py                      |    2 +-
 .../requests/packages/charade/cp949prober.py  |   88 +-
 .../packages/charade/langbulgarianmodel.py    |  458 +-
 .../requests/packages/charade/sjisprober.py   |  182 +-
 .../packages/charade/universaldetector.py     |  344 +-
 sickbeard/databases/cache_db.py               |  100 +-
 sickbeard/encodingKludge.py                   |  138 +-
 sickbeard/generic_queue.py                    |  268 +-
 sickbeard/gh_api.py                           |  118 +-
 sickbeard/image_cache.py                      |  450 +-
 sickbeard/logger.py                           |  368 +-
 sickbeard/naming.py                           |  356 +-
 sickbeard/notifiers/nma.py                    |  110 +-
 sickbeard/notifiers/nmj.py                    |  366 +-
 sickbeard/providers/__init__.py               |  258 +-
 sickbeard/providers/binnewz/nzbdownloader.py  |  190 +-
 sickbeard/providers/cpasbien.py               |  306 +-
 sickbeard/providers/newzbin.py                |  768 ++--
 sickbeard/providers/nzbmatrix.py              |  362 +-
 sickbeard/providers/nzbsrus.py                |  244 +-
 sickbeard/providers/piratebay/__init__.py     |  806 ++--
 sickbeard/scene_exceptions.py                 |  234 +-
 sickbeard/tv.py                               | 3824 ++++++++---------
 sickbeard/versionChecker.py                   | 1045 +++--
 41 files changed, 9720 insertions(+), 9733 deletions(-)
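
Note: most hunks below remove and re-add identical text, and the per-file insertion/deletion counts are symmetric, which suggests the bulk of this patch is a line-ending (CRLF/LF) normalization rather than a functional change. A minimal sketch of how one might confirm that for a pair of file revisions (the function and variable names are illustrative, not part of this patch):

    # Hedged sketch: treat two byte strings as equivalent if they differ only
    # in line endings (CRLF/CR vs LF). old_bytes/new_bytes are assumed to hold
    # the raw contents of the two revisions being compared.
    def differs_beyond_line_endings(old_bytes, new_bytes):
        def normalize(data):
            return data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
        return normalize(old_bytes) != normalize(new_bytes)

Alternatively, regenerating the stat with "git diff -w --stat" against the parent commit should leave only the hunks that change more than whitespace.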

diff --git a/.project b/.project
index b3bd37784..c0407428d 100644
--- a/.project
+++ b/.project
@@ -1,17 +1,17 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>Sick-Beard</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.python.pydev.PyDevBuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.python.pydev.pythonNature</nature>
-	</natures>
-</projectDescription>
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>SickBeard</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.python.pydev.PyDevBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.python.pydev.pythonNature</nature>
+	</natures>
+</projectDescription>
diff --git a/.pydevproject b/.pydevproject
index b41c812dd..cb27afca5 100644
--- a/.pydevproject
+++ b/.pydevproject
@@ -1,15 +1,15 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?eclipse-pydev version="1.0"?><pydev_project>
-<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
-<path>/Sick-Beard</path>
-<path>/Sick-Beard/lib</path>
-<path>/Sick-Beard/sickbeard</path>
-</pydev_pathproperty>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
-<pydev_pathproperty name="org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH">
-<path>C:\Python27\imports\Cheetah\bin</path>
-<path>C:\Python27\imports\Cheetah\lib\python\Cheetah</path>
-<path>C:\Python27\imports\Cheetah-2.4.4\build\lib.win32-2.7</path>
-</pydev_pathproperty>
-</pydev_project>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?><pydev_project>
+<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
+<path>/SickBeard</path>
+<path>/SickBeard/lib</path>
+<path>/SickBeard/sickbeard</path>
+</pydev_pathproperty>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
+<pydev_pathproperty name="org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH">
+<path>C:\Python27\imports\Cheetah\bin</path>
+<path>C:\Python27\imports\Cheetah\lib\python\Cheetah</path>
+<path>C:\Python27\imports\Cheetah-2.4.4\build\lib.win32-2.7</path>
+</pydev_pathproperty>
+</pydev_project>
diff --git a/cherrypy/__init__.py b/cherrypy/__init__.py
index 82e272993..c9fc1f1d8 100644
--- a/cherrypy/__init__.py
+++ b/cherrypy/__init__.py
@@ -1,573 +1,573 @@
-"""CherryPy is a pythonic, object-oriented HTTP framework.
-
-
-CherryPy consists of not one, but four separate API layers.
-
-The APPLICATION LAYER is the simplest. CherryPy applications are written as
-a tree of classes and methods, where each branch in the tree corresponds to
-a branch in the URL path. Each method is a 'page handler', which receives
-GET and POST params as keyword arguments, and returns or yields the (HTML)
-body of the response. The special method name 'index' is used for paths
-that end in a slash, and the special method name 'default' is used to
-handle multiple paths via a single handler. This layer also includes:
-
- * the 'exposed' attribute (and cherrypy.expose)
- * cherrypy.quickstart()
- * _cp_config attributes
- * cherrypy.tools (including cherrypy.session)
- * cherrypy.url()
-
-The ENVIRONMENT LAYER is used by developers at all levels. It provides
-information about the current request and response, plus the application
-and server environment, via a (default) set of top-level objects:
-
- * cherrypy.request
- * cherrypy.response
- * cherrypy.engine
- * cherrypy.server
- * cherrypy.tree
- * cherrypy.config
- * cherrypy.thread_data
- * cherrypy.log
- * cherrypy.HTTPError, NotFound, and HTTPRedirect
- * cherrypy.lib
-
-The EXTENSION LAYER allows advanced users to construct and share their own
-plugins. It consists of:
-
- * Hook API
- * Tool API
- * Toolbox API
- * Dispatch API
- * Config Namespace API
-
-Finally, there is the CORE LAYER, which uses the core API's to construct
-the default components which are available at higher layers. You can think
-of the default components as the 'reference implementation' for CherryPy.
-Megaframeworks (and advanced users) may replace the default components
-with customized or extended components. The core API's are:
-
- * Application API
- * Engine API
- * Request API
- * Server API
- * WSGI API
-
-These API's are described in the CherryPy specification:
-http://www.cherrypy.org/wiki/CherryPySpec
-"""
-
-__version__ = "3.2.0rc1"
-
-from urlparse import urljoin as _urljoin
-from urllib import urlencode as _urlencode
-
-
-class _AttributeDocstrings(type):
-    """Metaclass for declaring docstrings for class attributes."""
-    # The full docstring for this type is down in the __init__ method so
-    # that it doesn't show up in help() for every consumer class.
-    
-    def __init__(cls, name, bases, dct):
-        '''Metaclass for declaring docstrings for class attributes.
-        
-        Base Python doesn't provide any syntax for setting docstrings on
-        'data attributes' (non-callables). This metaclass allows class
-        definitions to follow the declaration of a data attribute with
-        a docstring for that attribute; the attribute docstring will be
-        popped from the class dict and folded into the class docstring.
-        
-        The naming convention for attribute docstrings is:
-            <attrname> + "__doc".
-        For example:
-        
-            class Thing(object):
-                """A thing and its properties."""
-                
-                __metaclass__ = cherrypy._AttributeDocstrings
-                
-                height = 50
-                height__doc = """The height of the Thing in inches."""
-        
-        In which case, help(Thing) starts like this:
-        
-            >>> help(mod.Thing)
-            Help on class Thing in module pkg.mod:
-            
-            class Thing(__builtin__.object)
-             |  A thing and its properties.
-             |  
-             |  height [= 50]:
-             |      The height of the Thing in inches.
-             | 
-        
-        The benefits of this approach over hand-edited class docstrings:
-            1. Places the docstring nearer to the attribute declaration.
-            2. Makes attribute docs more uniform ("name (default): doc").
-            3. Reduces mismatches of attribute _names_ between
-               the declaration and the documentation.
-            4. Reduces mismatches of attribute default _values_ between
-               the declaration and the documentation.
-        
-        The benefits of a metaclass approach over other approaches:
-            1. Simpler ("less magic") than interface-based solutions.
-            2. __metaclass__ can be specified at the module global level
-               for classic classes.
-        
-        For various formatting reasons, you should write multiline docs
-        with a leading newline and not a trailing one:
-            
-            response__doc = """
-            The response object for the current thread. In the main thread,
-            and any threads which are not HTTP requests, this is None."""
-        
-        The type of the attribute is intentionally not included, because
-        that's not How Python Works. Quack.
-        '''
-        
-        newdoc = [cls.__doc__ or ""]
-        
-        dctkeys = dct.keys()
-        dctkeys.sort()
-        for name in dctkeys:
-            if name.endswith("__doc"):
-                # Remove the magic doc attribute.
-                if hasattr(cls, name):
-                    delattr(cls, name)
-                
-                # Make a uniformly-indented docstring from it.
-                val = '\n'.join(['    ' + line.strip()
-                                 for line in dct[name].split('\n')])
-                
-                # Get the default value.
-                attrname = name[:-5]
-                try:
-                    attrval = getattr(cls, attrname)
-                except AttributeError:
-                    attrval = "missing"
-                
-                # Add the complete attribute docstring to our list.
-                newdoc.append("%s [= %r]:\n%s" % (attrname, attrval, val))
-        
-        # Add our list of new docstrings to the class docstring.
-        cls.__doc__ = "\n\n".join(newdoc)
-
-
-from cherrypy._cperror import HTTPError, HTTPRedirect, InternalRedirect
-from cherrypy._cperror import NotFound, CherryPyException, TimeoutError
-
-from cherrypy import _cpdispatch as dispatch
-
-from cherrypy import _cptools
-tools = _cptools.default_toolbox
-Tool = _cptools.Tool
-
-from cherrypy import _cprequest
-from cherrypy.lib import httputil as _httputil
-
-from cherrypy import _cptree
-tree = _cptree.Tree()
-from cherrypy._cptree import Application
-from cherrypy import _cpwsgi as wsgi
-
-from cherrypy import process
-try:
-    from cherrypy.process import win32
-    engine = win32.Win32Bus()
-    engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
-    del win32
-except ImportError:
-    engine = process.bus
-
-
-# Timeout monitor
-class _TimeoutMonitor(process.plugins.Monitor):
-    
-    def __init__(self, bus):
-        self.servings = []
-        process.plugins.Monitor.__init__(self, bus, self.run)
-    
-    def acquire(self):
-        self.servings.append((serving.request, serving.response))
-    
-    def release(self):
-        try:
-            self.servings.remove((serving.request, serving.response))
-        except ValueError:
-            pass
-    
-    def run(self):
-        """Check timeout on all responses. (Internal)"""
-        for req, resp in self.servings:
-            resp.check_timeout()
-engine.timeout_monitor = _TimeoutMonitor(engine)
-engine.timeout_monitor.subscribe()
-
-engine.autoreload = process.plugins.Autoreloader(engine)
-engine.autoreload.subscribe()
-
-engine.thread_manager = process.plugins.ThreadManager(engine)
-engine.thread_manager.subscribe()
-
-engine.signal_handler = process.plugins.SignalHandler(engine)
-
-
-from cherrypy import _cpserver
-server = _cpserver.Server()
-server.subscribe()
-
-
-def quickstart(root=None, script_name="", config=None):
-    """Mount the given root, start the builtin server (and engine), then block.
-    
-    root: an instance of a "controller class" (a collection of page handler
-        methods) which represents the root of the application.
-    script_name: a string containing the "mount point" of the application.
-        This should start with a slash, and be the path portion of the URL
-        at which to mount the given root. For example, if root.index() will
-        handle requests to "http://www.example.com:8080/dept/app1/", then
-        the script_name argument would be "/dept/app1".
-        
-        It MUST NOT end in a slash. If the script_name refers to the root
-        of the URI, it MUST be an empty string (not "/").
-    config: a file or dict containing application config. If this contains
-        a [global] section, those entries will be used in the global
-        (site-wide) config.
-    """
-    if config:
-        _global_conf_alias.update(config)
-    
-    tree.mount(root, script_name, config)
-    
-    if hasattr(engine, "signal_handler"):
-        engine.signal_handler.subscribe()
-    if hasattr(engine, "console_control_handler"):
-        engine.console_control_handler.subscribe()
-    
-    engine.start()
-    engine.block()
-
-
-try:
-    from threading import local as _local
-except ImportError:
-    from cherrypy._cpthreadinglocal import local as _local
-
-class _Serving(_local):
-    """An interface for registering request and response objects.
-    
-    Rather than have a separate "thread local" object for the request and
-    the response, this class works as a single threadlocal container for
-    both objects (and any others which developers wish to define). In this
-    way, we can easily dump those objects when we stop/start a new HTTP
-    conversation, yet still refer to them as module-level globals in a
-    thread-safe way.
-    """
-    
-    __metaclass__ = _AttributeDocstrings
-    
-    request = _cprequest.Request(_httputil.Host("127.0.0.1", 80),
-                                 _httputil.Host("127.0.0.1", 1111))
-    request__doc = """
-    The request object for the current thread. In the main thread,
-    and any threads which are not receiving HTTP requests, this is None."""
-    
-    response = _cprequest.Response()
-    response__doc = """
-    The response object for the current thread. In the main thread,
-    and any threads which are not receiving HTTP requests, this is None."""
-    
-    def load(self, request, response):
-        self.request = request
-        self.response = response
-    
-    def clear(self):
-        """Remove all attributes of self."""
-        self.__dict__.clear()
-
-serving = _Serving()
-
-
-class _ThreadLocalProxy(object):
-    
-    __slots__ = ['__attrname__', '__dict__']
-    
-    def __init__(self, attrname):
-        self.__attrname__ = attrname
-    
-    def __getattr__(self, name):
-        child = getattr(serving, self.__attrname__)
-        return getattr(child, name)
-    
-    def __setattr__(self, name, value):
-        if name in ("__attrname__",):
-            object.__setattr__(self, name, value)
-        else:
-            child = getattr(serving, self.__attrname__)
-            setattr(child, name, value)
-    
-    def __delattr__(self, name):
-        child = getattr(serving, self.__attrname__)
-        delattr(child, name)
-    
-    def _get_dict(self):
-        child = getattr(serving, self.__attrname__)
-        d = child.__class__.__dict__.copy()
-        d.update(child.__dict__)
-        return d
-    __dict__ = property(_get_dict)
-    
-    def __getitem__(self, key):
-        child = getattr(serving, self.__attrname__)
-        return child[key]
-    
-    def __setitem__(self, key, value):
-        child = getattr(serving, self.__attrname__)
-        child[key] = value
-    
-    def __delitem__(self, key):
-        child = getattr(serving, self.__attrname__)
-        del child[key]
-    
-    def __contains__(self, key):
-        child = getattr(serving, self.__attrname__)
-        return key in child
-    
-    def __len__(self):
-        child = getattr(serving, self.__attrname__)
-        return len(child)
-    
-    def __nonzero__(self):
-        child = getattr(serving, self.__attrname__)
-        return bool(child)
-
-
-# Create request and response object (the same objects will be used
-#   throughout the entire life of the webserver, but will redirect
-#   to the "serving" object)
-request = _ThreadLocalProxy('request')
-response = _ThreadLocalProxy('response')
-
-# Create thread_data object as a thread-specific all-purpose storage
-class _ThreadData(_local):
-    """A container for thread-specific data."""
-thread_data = _ThreadData()
-
-
-# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
-# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
-# The only other way would be to change what is returned from type(request)
-# and that's not possible in pure Python (you'd have to fake ob_type).
-def _cherrypy_pydoc_resolve(thing, forceload=0):
-    """Given an object or a path to an object, get the object and its name."""
-    if isinstance(thing, _ThreadLocalProxy):
-        thing = getattr(serving, thing.__attrname__)
-    return _pydoc._builtin_resolve(thing, forceload)
-
-try:
-    import pydoc as _pydoc
-    _pydoc._builtin_resolve = _pydoc.resolve
-    _pydoc.resolve = _cherrypy_pydoc_resolve
-except ImportError:
-    pass
-
-
-from cherrypy import _cplogging
-
-class _GlobalLogManager(_cplogging.LogManager):
-    
-    def __call__(self, *args, **kwargs):
-        # Do NOT use try/except here. See http://www.cherrypy.org/ticket/945
-        if hasattr(request, 'app') and hasattr(request.app, 'log'):
-            log = request.app.log
-        else:
-            log = self
-        return log.error(*args, **kwargs)
-    
-    def access(self):
-        try:
-            return request.app.log.access()
-        except AttributeError:
-            return _cplogging.LogManager.access(self)
-
-
-log = _GlobalLogManager()
-# Set a default screen handler on the global log.
-log.screen = True
-log.error_file = ''
-# Using an access file makes CP about 10% slower. Leave off by default.
-log.access_file = ''
-
-def _buslog(msg, level):
-    log.error(msg, 'ENGINE', severity=level)
-engine.subscribe('log', _buslog)
-
-#                       Helper functions for CP apps                       #
-
-
-def expose(func=None, alias=None):
-    """Expose the function, optionally providing an alias or set of aliases."""
-    def expose_(func):
-        func.exposed = True
-        if alias is not None:
-            if isinstance(alias, basestring):
-                parents[alias.replace(".", "_")] = func
-            else:
-                for a in alias:
-                    parents[a.replace(".", "_")] = func
-        return func
-    
-    import sys, types
-    if isinstance(func, (types.FunctionType, types.MethodType)):
-        if alias is None:
-            # @expose
-            func.exposed = True
-            return func
-        else:
-            # func = expose(func, alias)
-            parents = sys._getframe(1).f_locals
-            return expose_(func)
-    elif func is None:
-        if alias is None:
-            # @expose()
-            parents = sys._getframe(1).f_locals
-            return expose_
-        else:
-            # @expose(alias="alias") or
-            # @expose(alias=["alias1", "alias2"])
-            parents = sys._getframe(1).f_locals
-            return expose_
-    else:
-        # @expose("alias") or
-        # @expose(["alias1", "alias2"])
-        parents = sys._getframe(1).f_locals
-        alias = func
-        return expose_
-
-
-def url(path="", qs="", script_name=None, base=None, relative=None):
-    """Create an absolute URL for the given path.
-    
-    If 'path' starts with a slash ('/'), this will return
-        (base + script_name + path + qs).
-    If it does not start with a slash, this returns
-        (base + script_name [+ request.path_info] + path + qs).
-    
-    If script_name is None, cherrypy.request will be used
-    to find a script_name, if available.
-    
-    If base is None, cherrypy.request.base will be used (if available).
-    Note that you can use cherrypy.tools.proxy to change this.
-    
-    Finally, note that this function can be used to obtain an absolute URL
-    for the current request path (minus the querystring) by passing no args.
-    If you call url(qs=cherrypy.request.query_string), you should get the
-    original browser URL (assuming no internal redirections).
-    
-    If relative is None or not provided, request.app.relative_urls will
-    be used (if available, else False). If False, the output will be an
-    absolute URL (including the scheme, host, vhost, and script_name).
-    If True, the output will instead be a URL that is relative to the
-    current request path, perhaps including '..' atoms. If relative is
-    the string 'server', the output will instead be a URL that is
-    relative to the server root; i.e., it will start with a slash.
-    """
-    if isinstance(qs, (tuple, list, dict)):
-        qs = _urlencode(qs)
-    if qs:
-        qs = '?' + qs
-    
-    if request.app:
-        if not path.startswith("/"):
-            # Append/remove trailing slash from path_info as needed
-            # (this is to support mistyped URL's without redirecting;
-            # if you want to redirect, use tools.trailing_slash).
-            pi = request.path_info
-            if request.is_index is True:
-                if not pi.endswith('/'):
-                    pi = pi + '/'
-            elif request.is_index is False:
-                if pi.endswith('/') and pi != '/':
-                    pi = pi[:-1]
-            
-            if path == "":
-                path = pi
-            else:
-                path = _urljoin(pi, path)
-        
-        if script_name is None:
-            script_name = request.script_name
-        if base is None:
-            base = request.base
-        
-        newurl = base + script_name + path + qs
-    else:
-        # No request.app (we're being called outside a request).
-        # We'll have to guess the base from server.* attributes.
-        # This will produce very different results from the above
-        # if you're using vhosts or tools.proxy.
-        if base is None:
-            base = server.base()
-        
-        path = (script_name or "") + path
-        newurl = base + path + qs
-    
-    if './' in newurl:
-        # Normalize the URL by removing ./ and ../
-        atoms = []
-        for atom in newurl.split('/'):
-            if atom == '.':
-                pass
-            elif atom == '..':
-                atoms.pop()
-            else:
-                atoms.append(atom)
-        newurl = '/'.join(atoms)
-    
-    # At this point, we should have a fully-qualified absolute URL.
-    
-    if relative is None:
-        relative = getattr(request.app, "relative_urls", False)
-    
-    # See http://www.ietf.org/rfc/rfc2396.txt
-    if relative == 'server':
-        # "A relative reference beginning with a single slash character is
-        # termed an absolute-path reference, as defined by <abs_path>..."
-        # This is also sometimes called "server-relative".
-        newurl = '/' + '/'.join(newurl.split('/', 3)[3:])
-    elif relative:
-        # "A relative reference that does not begin with a scheme name
-        # or a slash character is termed a relative-path reference."
-        old = url().split('/')[:-1]
-        new = newurl.split('/')
-        while old and new:
-            a, b = old[0], new[0]
-            if a != b:
-                break
-            old.pop(0)
-            new.pop(0)
-        new = (['..'] * len(old)) + new
-        newurl = '/'.join(new)
-    
-    return newurl
-
-
-# import _cpconfig last so it can reference other top-level objects
-from cherrypy import _cpconfig
-# Use _global_conf_alias so quickstart can use 'config' as an arg
-# without shadowing cherrypy.config.
-config = _global_conf_alias = _cpconfig.Config()
-config.defaults = {
-    'tools.log_tracebacks.on': True,
-    'tools.log_headers.on': True,
-    'tools.trailing_slash.on': True,
-    'tools.encode.on': True
-    }
-config.namespaces["log"] = lambda k, v: setattr(log, k, v)
-config.namespaces["checker"] = lambda k, v: setattr(checker, k, v)
-# Must reset to get our defaults applied.
-config.reset()
-
-from cherrypy import _cpchecker
-checker = _cpchecker.Checker()
-engine.subscribe('start', checker)
+"""CherryPy is a pythonic, object-oriented HTTP framework.
+
+
+CherryPy consists of not one, but four separate API layers.
+
+The APPLICATION LAYER is the simplest. CherryPy applications are written as
+a tree of classes and methods, where each branch in the tree corresponds to
+a branch in the URL path. Each method is a 'page handler', which receives
+GET and POST params as keyword arguments, and returns or yields the (HTML)
+body of the response. The special method name 'index' is used for paths
+that end in a slash, and the special method name 'default' is used to
+handle multiple paths via a single handler. This layer also includes:
+
+ * the 'exposed' attribute (and cherrypy.expose)
+ * cherrypy.quickstart()
+ * _cp_config attributes
+ * cherrypy.tools (including cherrypy.session)
+ * cherrypy.url()
+
+The ENVIRONMENT LAYER is used by developers at all levels. It provides
+information about the current request and response, plus the application
+and server environment, via a (default) set of top-level objects:
+
+ * cherrypy.request
+ * cherrypy.response
+ * cherrypy.engine
+ * cherrypy.server
+ * cherrypy.tree
+ * cherrypy.config
+ * cherrypy.thread_data
+ * cherrypy.log
+ * cherrypy.HTTPError, NotFound, and HTTPRedirect
+ * cherrypy.lib
+
+The EXTENSION LAYER allows advanced users to construct and share their own
+plugins. It consists of:
+
+ * Hook API
+ * Tool API
+ * Toolbox API
+ * Dispatch API
+ * Config Namespace API
+
+Finally, there is the CORE LAYER, which uses the core API's to construct
+the default components which are available at higher layers. You can think
+of the default components as the 'reference implementation' for CherryPy.
+Megaframeworks (and advanced users) may replace the default components
+with customized or extended components. The core API's are:
+
+ * Application API
+ * Engine API
+ * Request API
+ * Server API
+ * WSGI API
+
+These API's are described in the CherryPy specification:
+http://www.cherrypy.org/wiki/CherryPySpec
+"""
+
+__version__ = "3.2.0rc1"
+
+from urlparse import urljoin as _urljoin
+from urllib import urlencode as _urlencode
+
+
+class _AttributeDocstrings(type):
+    """Metaclass for declaring docstrings for class attributes."""
+    # The full docstring for this type is down in the __init__ method so
+    # that it doesn't show up in help() for every consumer class.
+    
+    def __init__(cls, name, bases, dct):
+        '''Metaclass for declaring docstrings for class attributes.
+        
+        Base Python doesn't provide any syntax for setting docstrings on
+        'data attributes' (non-callables). This metaclass allows class
+        definitions to follow the declaration of a data attribute with
+        a docstring for that attribute; the attribute docstring will be
+        popped from the class dict and folded into the class docstring.
+        
+        The naming convention for attribute docstrings is:
+            <attrname> + "__doc".
+        For example:
+        
+            class Thing(object):
+                """A thing and its properties."""
+                
+                __metaclass__ = cherrypy._AttributeDocstrings
+                
+                height = 50
+                height__doc = """The height of the Thing in inches."""
+        
+        In which case, help(Thing) starts like this:
+        
+            >>> help(mod.Thing)
+            Help on class Thing in module pkg.mod:
+            
+            class Thing(__builtin__.object)
+             |  A thing and its properties.
+             |  
+             |  height [= 50]:
+             |      The height of the Thing in inches.
+             | 
+        
+        The benefits of this approach over hand-edited class docstrings:
+            1. Places the docstring nearer to the attribute declaration.
+            2. Makes attribute docs more uniform ("name (default): doc").
+            3. Reduces mismatches of attribute _names_ between
+               the declaration and the documentation.
+            4. Reduces mismatches of attribute default _values_ between
+               the declaration and the documentation.
+        
+        The benefits of a metaclass approach over other approaches:
+            1. Simpler ("less magic") than interface-based solutions.
+            2. __metaclass__ can be specified at the module global level
+               for classic classes.
+        
+        For various formatting reasons, you should write multiline docs
+        with a leading newline and not a trailing one:
+            
+            response__doc = """
+            The response object for the current thread. In the main thread,
+            and any threads which are not HTTP requests, this is None."""
+        
+        The type of the attribute is intentionally not included, because
+        that's not How Python Works. Quack.
+        '''
+        
+        newdoc = [cls.__doc__ or ""]
+        
+        dctkeys = dct.keys()
+        dctkeys.sort()
+        for name in dctkeys:
+            if name.endswith("__doc"):
+                # Remove the magic doc attribute.
+                if hasattr(cls, name):
+                    delattr(cls, name)
+                
+                # Make a uniformly-indented docstring from it.
+                val = '\n'.join(['    ' + line.strip()
+                                 for line in dct[name].split('\n')])
+                
+                # Get the default value.
+                attrname = name[:-5]
+                try:
+                    attrval = getattr(cls, attrname)
+                except AttributeError:
+                    attrval = "missing"
+                
+                # Add the complete attribute docstring to our list.
+                newdoc.append("%s [= %r]:\n%s" % (attrname, attrval, val))
+        
+        # Add our list of new docstrings to the class docstring.
+        cls.__doc__ = "\n\n".join(newdoc)
+
+
+from cherrypy._cperror import HTTPError, HTTPRedirect, InternalRedirect
+from cherrypy._cperror import NotFound, CherryPyException, TimeoutError
+
+from cherrypy import _cpdispatch as dispatch
+
+from cherrypy import _cptools
+tools = _cptools.default_toolbox
+Tool = _cptools.Tool
+
+from cherrypy import _cprequest
+from cherrypy.lib import httputil as _httputil
+
+from cherrypy import _cptree
+tree = _cptree.Tree()
+from cherrypy._cptree import Application
+from cherrypy import _cpwsgi as wsgi
+
+from cherrypy import process
+try:
+    from cherrypy.process import win32
+    engine = win32.Win32Bus()
+    engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
+    del win32
+except ImportError:
+    engine = process.bus
+
+
+# Timeout monitor
+class _TimeoutMonitor(process.plugins.Monitor):
+    
+    def __init__(self, bus):
+        self.servings = []
+        process.plugins.Monitor.__init__(self, bus, self.run)
+    
+    def acquire(self):
+        self.servings.append((serving.request, serving.response))
+    
+    def release(self):
+        try:
+            self.servings.remove((serving.request, serving.response))
+        except ValueError:
+            pass
+    
+    def run(self):
+        """Check timeout on all responses. (Internal)"""
+        for req, resp in self.servings:
+            resp.check_timeout()
+engine.timeout_monitor = _TimeoutMonitor(engine)
+engine.timeout_monitor.subscribe()
+
+engine.autoreload = process.plugins.Autoreloader(engine)
+engine.autoreload.subscribe()
+
+engine.thread_manager = process.plugins.ThreadManager(engine)
+engine.thread_manager.subscribe()
+
+engine.signal_handler = process.plugins.SignalHandler(engine)
+
+
+from cherrypy import _cpserver
+server = _cpserver.Server()
+server.subscribe()
+
+
+def quickstart(root=None, script_name="", config=None):
+    """Mount the given root, start the builtin server (and engine), then block.
+    
+    root: an instance of a "controller class" (a collection of page handler
+        methods) which represents the root of the application.
+    script_name: a string containing the "mount point" of the application.
+        This should start with a slash, and be the path portion of the URL
+        at which to mount the given root. For example, if root.index() will
+        handle requests to "http://www.example.com:8080/dept/app1/", then
+        the script_name argument would be "/dept/app1".
+        
+        It MUST NOT end in a slash. If the script_name refers to the root
+        of the URI, it MUST be an empty string (not "/").
+    config: a file or dict containing application config. If this contains
+        a [global] section, those entries will be used in the global
+        (site-wide) config.
+    """
+    if config:
+        _global_conf_alias.update(config)
+    
+    tree.mount(root, script_name, config)
+    
+    if hasattr(engine, "signal_handler"):
+        engine.signal_handler.subscribe()
+    if hasattr(engine, "console_control_handler"):
+        engine.console_control_handler.subscribe()
+    
+    engine.start()
+    engine.block()
+
+
+try:
+    from threading import local as _local
+except ImportError:
+    from cherrypy._cpthreadinglocal import local as _local
+
+class _Serving(_local):
+    """An interface for registering request and response objects.
+    
+    Rather than have a separate "thread local" object for the request and
+    the response, this class works as a single threadlocal container for
+    both objects (and any others which developers wish to define). In this
+    way, we can easily dump those objects when we stop/start a new HTTP
+    conversation, yet still refer to them as module-level globals in a
+    thread-safe way.
+    """
+    
+    __metaclass__ = _AttributeDocstrings
+    
+    request = _cprequest.Request(_httputil.Host("127.0.0.1", 80),
+                                 _httputil.Host("127.0.0.1", 1111))
+    request__doc = """
+    The request object for the current thread. In the main thread,
+    and any threads which are not receiving HTTP requests, this is None."""
+    
+    response = _cprequest.Response()
+    response__doc = """
+    The response object for the current thread. In the main thread,
+    and any threads which are not receiving HTTP requests, this is None."""
+    
+    def load(self, request, response):
+        self.request = request
+        self.response = response
+    
+    def clear(self):
+        """Remove all attributes of self."""
+        self.__dict__.clear()
+
+serving = _Serving()
+
+
+class _ThreadLocalProxy(object):
+    
+    __slots__ = ['__attrname__', '__dict__']
+    
+    def __init__(self, attrname):
+        self.__attrname__ = attrname
+    
+    def __getattr__(self, name):
+        child = getattr(serving, self.__attrname__)
+        return getattr(child, name)
+    
+    def __setattr__(self, name, value):
+        if name in ("__attrname__",):
+            object.__setattr__(self, name, value)
+        else:
+            child = getattr(serving, self.__attrname__)
+            setattr(child, name, value)
+    
+    def __delattr__(self, name):
+        child = getattr(serving, self.__attrname__)
+        delattr(child, name)
+    
+    def _get_dict(self):
+        child = getattr(serving, self.__attrname__)
+        d = child.__class__.__dict__.copy()
+        d.update(child.__dict__)
+        return d
+    __dict__ = property(_get_dict)
+    
+    def __getitem__(self, key):
+        child = getattr(serving, self.__attrname__)
+        return child[key]
+    
+    def __setitem__(self, key, value):
+        child = getattr(serving, self.__attrname__)
+        child[key] = value
+    
+    def __delitem__(self, key):
+        child = getattr(serving, self.__attrname__)
+        del child[key]
+    
+    def __contains__(self, key):
+        child = getattr(serving, self.__attrname__)
+        return key in child
+    
+    def __len__(self):
+        child = getattr(serving, self.__attrname__)
+        return len(child)
+    
+    def __nonzero__(self):
+        child = getattr(serving, self.__attrname__)
+        return bool(child)
+
+
+# Create request and response object (the same objects will be used
+#   throughout the entire life of the webserver, but will redirect
+#   to the "serving" object)
+request = _ThreadLocalProxy('request')
+response = _ThreadLocalProxy('response')
+
+# Create thread_data object as a thread-specific all-purpose storage
+class _ThreadData(_local):
+    """A container for thread-specific data."""
+thread_data = _ThreadData()
+
+
+# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
+# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
+# The only other way would be to change what is returned from type(request)
+# and that's not possible in pure Python (you'd have to fake ob_type).
+def _cherrypy_pydoc_resolve(thing, forceload=0):
+    """Given an object or a path to an object, get the object and its name."""
+    if isinstance(thing, _ThreadLocalProxy):
+        thing = getattr(serving, thing.__attrname__)
+    return _pydoc._builtin_resolve(thing, forceload)
+
+try:
+    import pydoc as _pydoc
+    _pydoc._builtin_resolve = _pydoc.resolve
+    _pydoc.resolve = _cherrypy_pydoc_resolve
+except ImportError:
+    pass
+
+
+from cherrypy import _cplogging
+
+class _GlobalLogManager(_cplogging.LogManager):
+    
+    def __call__(self, *args, **kwargs):
+        # Do NOT use try/except here. See http://www.cherrypy.org/ticket/945
+        if hasattr(request, 'app') and hasattr(request.app, 'log'):
+            log = request.app.log
+        else:
+            log = self
+        return log.error(*args, **kwargs)
+    
+    def access(self):
+        try:
+            return request.app.log.access()
+        except AttributeError:
+            return _cplogging.LogManager.access(self)
+
+
+log = _GlobalLogManager()
+# Set a default screen handler on the global log.
+log.screen = True
+log.error_file = ''
+# Using an access file makes CP about 10% slower. Leave off by default.
+log.access_file = ''
+
+def _buslog(msg, level):
+    log.error(msg, 'ENGINE', severity=level)
+engine.subscribe('log', _buslog)
+
+#                       Helper functions for CP apps                       #
+
+
+def expose(func=None, alias=None):
+    """Expose the function, optionally providing an alias or set of aliases."""
+    def expose_(func):
+        func.exposed = True
+        if alias is not None:
+            if isinstance(alias, basestring):
+                parents[alias.replace(".", "_")] = func
+            else:
+                for a in alias:
+                    parents[a.replace(".", "_")] = func
+        return func
+    
+    import sys, types
+    if isinstance(func, (types.FunctionType, types.MethodType)):
+        if alias is None:
+            # @expose
+            func.exposed = True
+            return func
+        else:
+            # func = expose(func, alias)
+            parents = sys._getframe(1).f_locals
+            return expose_(func)
+    elif func is None:
+        if alias is None:
+            # @expose()
+            parents = sys._getframe(1).f_locals
+            return expose_
+        else:
+            # @expose(alias="alias") or
+            # @expose(alias=["alias1", "alias2"])
+            parents = sys._getframe(1).f_locals
+            return expose_
+    else:
+        # @expose("alias") or
+        # @expose(["alias1", "alias2"])
+        parents = sys._getframe(1).f_locals
+        alias = func
+        return expose_
+
+
+def url(path="", qs="", script_name=None, base=None, relative=None):
+    """Create an absolute URL for the given path.
+    
+    If 'path' starts with a slash ('/'), this will return
+        (base + script_name + path + qs).
+    If it does not start with a slash, this returns
+        (base + script_name [+ request.path_info] + path + qs).
+    
+    If script_name is None, cherrypy.request will be used
+    to find a script_name, if available.
+    
+    If base is None, cherrypy.request.base will be used (if available).
+    Note that you can use cherrypy.tools.proxy to change this.
+    
+    Finally, note that this function can be used to obtain an absolute URL
+    for the current request path (minus the querystring) by passing no args.
+    If you call url(qs=cherrypy.request.query_string), you should get the
+    original browser URL (assuming no internal redirections).
+    
+    If relative is None or not provided, request.app.relative_urls will
+    be used (if available, else False). If False, the output will be an
+    absolute URL (including the scheme, host, vhost, and script_name).
+    If True, the output will instead be a URL that is relative to the
+    current request path, perhaps including '..' atoms. If relative is
+    the string 'server', the output will instead be a URL that is
+    relative to the server root; i.e., it will start with a slash.
+    """
+    if isinstance(qs, (tuple, list, dict)):
+        qs = _urlencode(qs)
+    if qs:
+        qs = '?' + qs
+    
+    if request.app:
+        if not path.startswith("/"):
+            # Append/remove trailing slash from path_info as needed
+            # (this is to support mistyped URL's without redirecting;
+            # if you want to redirect, use tools.trailing_slash).
+            pi = request.path_info
+            if request.is_index is True:
+                if not pi.endswith('/'):
+                    pi = pi + '/'
+            elif request.is_index is False:
+                if pi.endswith('/') and pi != '/':
+                    pi = pi[:-1]
+            
+            if path == "":
+                path = pi
+            else:
+                path = _urljoin(pi, path)
+        
+        if script_name is None:
+            script_name = request.script_name
+        if base is None:
+            base = request.base
+        
+        newurl = base + script_name + path + qs
+    else:
+        # No request.app (we're being called outside a request).
+        # We'll have to guess the base from server.* attributes.
+        # This will produce very different results from the above
+        # if you're using vhosts or tools.proxy.
+        if base is None:
+            base = server.base()
+        
+        path = (script_name or "") + path
+        newurl = base + path + qs
+    
+    if './' in newurl:
+        # Normalize the URL by removing ./ and ../
+        atoms = []
+        for atom in newurl.split('/'):
+            if atom == '.':
+                pass
+            elif atom == '..':
+                atoms.pop()
+            else:
+                atoms.append(atom)
+        newurl = '/'.join(atoms)
+    
+    # At this point, we should have a fully-qualified absolute URL.
+    
+    if relative is None:
+        relative = getattr(request.app, "relative_urls", False)
+    
+    # See http://www.ietf.org/rfc/rfc2396.txt
+    if relative == 'server':
+        # "A relative reference beginning with a single slash character is
+        # termed an absolute-path reference, as defined by <abs_path>..."
+        # This is also sometimes called "server-relative".
+        newurl = '/' + '/'.join(newurl.split('/', 3)[3:])
+    elif relative:
+        # "A relative reference that does not begin with a scheme name
+        # or a slash character is termed a relative-path reference."
+        old = url().split('/')[:-1]
+        new = newurl.split('/')
+        while old and new:
+            a, b = old[0], new[0]
+            if a != b:
+                break
+            old.pop(0)
+            new.pop(0)
+        new = (['..'] * len(old)) + new
+        newurl = '/'.join(new)
+    
+    return newurl
+
+
+# import _cpconfig last so it can reference other top-level objects
+from cherrypy import _cpconfig
+# Use _global_conf_alias so quickstart can use 'config' as an arg
+# without shadowing cherrypy.config.
+config = _global_conf_alias = _cpconfig.Config()
+config.defaults = {
+    'tools.log_tracebacks.on': True,
+    'tools.log_headers.on': True,
+    'tools.trailing_slash.on': True,
+    'tools.encode.on': True
+    }
+config.namespaces["log"] = lambda k, v: setattr(log, k, v)
+config.namespaces["checker"] = lambda k, v: setattr(checker, k, v)
+# Must reset to get our defaults applied.
+config.reset()
+
+from cherrypy import _cpchecker
+checker = _cpchecker.Checker()
+engine.subscribe('start', checker)
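
The module docstring above describes CherryPy's application layer: page handlers marked with the 'exposed' attribute (or cherrypy.expose), the special 'index' method for paths ending in a slash, and quickstart() to mount a root object and block on the engine. A minimal, self-contained sketch of that usage (the Root class and greeting text are illustrative, not part of this patch):

    import cherrypy

    class Root(object):
        @cherrypy.expose
        def index(self):
            # 'index' handles requests whose path ends in a slash.
            return "Hello from the application layer"

    if __name__ == "__main__":
        # Mount Root at the site root (script_name "") and block on the engine.
        cherrypy.quickstart(Root(), "")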
diff --git a/cherrypy/_cpdispatch.py b/cherrypy/_cpdispatch.py
index 6020b7a42..f1d10b8a4 100644
--- a/cherrypy/_cpdispatch.py
+++ b/cherrypy/_cpdispatch.py
@@ -1,568 +1,568 @@
-"""CherryPy dispatchers.
-
-A 'dispatcher' is the object which looks up the 'page handler' callable
-and collects config for the current request based on the path_info, other
-request attributes, and the application architecture. The core calls the
-dispatcher as early as possible, passing it a 'path_info' argument.
-
-The default dispatcher discovers the page handler by matching path_info
-to a hierarchical arrangement of objects, starting at request.app.root.
-"""
-
-import cherrypy
-
-
-class PageHandler(object):
-    """Callable which sets response.body."""
-    
-    def __init__(self, callable, *args, **kwargs):
-        self.callable = callable
-        self.args = args
-        self.kwargs = kwargs
-    
-    def __call__(self):
-        try:
-            return self.callable(*self.args, **self.kwargs)
-        except TypeError, x:
-            try:
-                test_callable_spec(self.callable, self.args, self.kwargs)
-            except cherrypy.HTTPError, error:
-                raise error
-            except:
-                raise x
-            raise
-
-
-def test_callable_spec(callable, callable_args, callable_kwargs):
-    """
-    Inspect callable and test to see if the given args are suitable for it.
-
-    When an error occurs during the handler's invoking stage there are 2
-    erroneous cases:
-    1.  Too many parameters passed to a function which doesn't define
-        one of *args or **kwargs.
-    2.  Too little parameters are passed to the function.
-
-    There are 3 sources of parameters to a cherrypy handler.
-    1.  query string parameters are passed as keyword parameters to the handler.
-    2.  body parameters are also passed as keyword parameters.
-    3.  when partial matching occurs, the final path atoms are passed as
-        positional args.
-    Both the query string and path atoms are part of the URI.  If they are
-    incorrect, then a 404 Not Found should be raised. Conversely the body
-    parameters are part of the request; if they are invalid a 400 Bad Request.
-    """
-    show_mismatched_params = getattr(
-        cherrypy.serving.request, 'show_mismatched_params', False)
-    try:
-        (args, varargs, varkw, defaults) = inspect.getargspec(callable)
-    except TypeError:
-        if isinstance(callable, object) and hasattr(callable, '__call__'):
-            (args, varargs, varkw, defaults) = inspect.getargspec(callable.__call__)
-        else:
-            # If it wasn't one of our own types, re-raise 
-            # the original error
-            raise
-
-    if args and args[0] == 'self':
-        args = args[1:]
-
-    arg_usage = dict([(arg, 0,) for arg in args])
-    vararg_usage = 0
-    varkw_usage = 0
-    extra_kwargs = set()
-
-    for i, value in enumerate(callable_args):
-        try:
-            arg_usage[args[i]] += 1
-        except IndexError:
-            vararg_usage += 1
-
-    for key in callable_kwargs.keys():
-        try:
-            arg_usage[key] += 1
-        except KeyError:
-            varkw_usage += 1
-            extra_kwargs.add(key)
-
-    # figure out which args have defaults.
-    args_with_defaults = args[-len(defaults or []):]
-    for i, val in enumerate(defaults or []):
-        # Defaults take effect only when the arg hasn't been used yet.
-        if arg_usage[args_with_defaults[i]] == 0:
-            arg_usage[args_with_defaults[i]] += 1
-
-    missing_args = []
-    multiple_args = []
-    for key, usage in arg_usage.items():
-        if usage == 0:
-            missing_args.append(key)
-        elif usage > 1:
-            multiple_args.append(key)
-
-    if missing_args:
-        # In the case where the method allows body arguments
-        # there are 3 potential errors:
-        # 1. not enough query string parameters -> 404
-        # 2. not enough body parameters -> 400
-        # 3. not enough path parts (partial matches) -> 404
-        #
-        # We can't actually tell which case it is, 
-        # so I'm raising a 404 because that covers 2/3 of the
-        # possibilities
-        # 
-        # In the case where the method does not allow body
-        # arguments it's definitely a 404.
-        message = None
-        if show_mismatched_params:
-            message = "Missing parameters: %s" % ",".join(missing_args)
-        raise cherrypy.HTTPError(404, message=message)
-
-    # the extra positional arguments come from the path - 404 Not Found
-    if not varargs and vararg_usage > 0:
-        raise cherrypy.HTTPError(404)
-
-    body_params = cherrypy.serving.request.body.params or {}
-    body_params = set(body_params.keys())
-    qs_params = set(callable_kwargs.keys()) - body_params
-
-    if multiple_args:
-        if qs_params.intersection(set(multiple_args)):
-            # If any of the multiple parameters came from the query string then
-            # it's a 404 Not Found
-            error = 404
-        else:
-            # Otherwise it's a 400 Bad Request
-            error = 400
-
-        message = None
-        if show_mismatched_params:
-            message = "Multiple values for parameters: "\
-                    "%s" % ",".join(multiple_args)
-        raise cherrypy.HTTPError(error, message=message)
-
-    if not varkw and varkw_usage > 0:
-
-        # If there were extra query string parameters, it's a 404 Not Found
-        extra_qs_params = set(qs_params).intersection(extra_kwargs)
-        if extra_qs_params:
-            message = None
-            if show_mismatched_params:
-                message = "Unexpected query string "\
-                        "parameters: %s" % ", ".join(extra_qs_params)
-            raise cherrypy.HTTPError(404, message=message)
-
-        # If there were any extra body parameters, it's a 400 Not Found
-        extra_body_params = set(body_params).intersection(extra_kwargs)
-        if extra_body_params:
-            message = None
-            if show_mismatched_params:
-                message = "Unexpected body parameters: "\
-                        "%s" % ", ".join(extra_body_params)
-            raise cherrypy.HTTPError(400, message=message)
-
-
-try:
-    import inspect
-except ImportError:
-    test_callable_spec = lambda callable, args, kwargs: None
-
-
-
-class LateParamPageHandler(PageHandler):
-    """When passing cherrypy.request.params to the page handler, we do not
-    want to capture that dict too early; we want to give tools like the
-    decoding tool a chance to modify the params dict in-between the lookup
-    of the handler and the actual calling of the handler. This subclass
-    takes that into account, and allows request.params to be 'bound late'
-    (it's more complicated than that, but that's the effect).
-    """
-    
-    def _get_kwargs(self):
-        kwargs = cherrypy.serving.request.params.copy()
-        if self._kwargs:
-            kwargs.update(self._kwargs)
-        return kwargs
-    
-    def _set_kwargs(self, kwargs):
-        self._kwargs = kwargs
-    
-    kwargs = property(_get_kwargs, _set_kwargs,
-                      doc='page handler kwargs (with '
-                      'cherrypy.request.params copied in)')
-
-
-class Dispatcher(object):
-    """CherryPy Dispatcher which walks a tree of objects to find a handler.
-    
-    The tree is rooted at cherrypy.request.app.root, and each hierarchical
-    component in the path_info argument is matched to a corresponding nested
-    attribute of the root object. Matching handlers must have an 'exposed'
-    attribute which evaluates to True. The special method name "index"
-    matches a URI which ends in a slash ("/"). The special method name
-    "default" may match a portion of the path_info (but only when no longer
-    substring of the path_info matches some other object).
-    
-    This is the default, built-in dispatcher for CherryPy.
-    """
-    __metaclass__ = cherrypy._AttributeDocstrings
-
-    dispatch_method_name = '_cp_dispatch'
-    dispatch_method_name__doc = """
-    The name of the dispatch method that nodes may optionally implement
-    to provide their own dynamic dispatch algorithm.
-    """
-    
-    def __init__(self, dispatch_method_name=None):
-        if dispatch_method_name:
-            self.dispatch_method_name = dispatch_method_name
-
-    def __call__(self, path_info):
-        """Set handler and config for the current request."""
-        request = cherrypy.serving.request
-        func, vpath = self.find_handler(path_info)
-        
-        if func:
-            # Decode any leftover %2F in the virtual_path atoms.
-            vpath = [x.replace("%2F", "/") for x in vpath]
-            request.handler = LateParamPageHandler(func, *vpath)
-        else:
-            request.handler = cherrypy.NotFound()
-    
-    def find_handler(self, path):
-        """Return the appropriate page handler, plus any virtual path.
-        
-        This will return two objects. The first will be a callable,
-        which can be used to generate page output. Any parameters from
-        the query string or request body will be sent to that callable
-        as keyword arguments.
-        
-        The callable is found by traversing the application's tree,
-        starting from cherrypy.request.app.root, and matching path
-        components to successive objects in the tree. For example, the
-        URL "/path/to/handler" might return root.path.to.handler.
-        
-        The second object returned will be a list of names which are
-        'virtual path' components: parts of the URL which are dynamic,
-        and were not used when looking up the handler.
-        These virtual path components are passed to the handler as
-        positional arguments.
-        """
-        request = cherrypy.serving.request
-        app = request.app
-        root = app.root
-        dispatch_name = self.dispatch_method_name
-        
-        # Get config for the root object/path.
-        curpath = ""
-        nodeconf = {}
-        if hasattr(root, "_cp_config"):
-            nodeconf.update(root._cp_config)
-        if "/" in app.config:
-            nodeconf.update(app.config["/"])
-        object_trail = [['root', root, nodeconf, curpath]]
-        
-        node = root
-        names = [x for x in path.strip('/').split('/') if x] + ['index']
-        iternames = names[:]
-        while iternames:
-            name = iternames[0]
-            # map to legal Python identifiers (replace '.' with '_')
-            objname = name.replace('.', '_')
-            
-            nodeconf = {}
-            subnode = getattr(node, objname, None)
-            if subnode is None:
-                dispatch = getattr(node, dispatch_name, None)
-                if dispatch and callable(dispatch) and not \
-                        getattr(dispatch, 'exposed', False):
-                    subnode = dispatch(vpath=iternames)
-            name = iternames.pop(0)
-            node = subnode
-
-            if node is not None:
-                # Get _cp_config attached to this node.
-                if hasattr(node, "_cp_config"):
-                    nodeconf.update(node._cp_config)
-            
-            # Mix in values from app.config for this path.
-            curpath = "/".join((curpath, name))
-            if curpath in app.config:
-                nodeconf.update(app.config[curpath])
-            
-            object_trail.append([name, node, nodeconf, curpath])
-        
-        def set_conf():
-            """Collapse all object_trail config into cherrypy.request.config."""
-            base = cherrypy.config.copy()
-            # Note that we merge the config from each node
-            # even if that node was None.
-            for name, obj, conf, curpath in object_trail:
-                base.update(conf)
-                if 'tools.staticdir.dir' in conf:
-                    base['tools.staticdir.section'] = curpath
-            return base
-        
-        # Try successive objects (reverse order)
-        num_candidates = len(object_trail) - 1
-        for i in range(num_candidates, -1, -1):
-            
-            name, candidate, nodeconf, curpath = object_trail[i]
-            if candidate is None:
-                continue
-            
-            # Try a "default" method on the current leaf.
-            if hasattr(candidate, "default"):
-                defhandler = candidate.default
-                if getattr(defhandler, 'exposed', False):
-                    # Insert any extra _cp_config from the default handler.
-                    conf = getattr(defhandler, "_cp_config", {})
-                    object_trail.insert(i + 1, ["default", defhandler, conf, curpath])
-                    request.config = set_conf()
-                    # See http://www.cherrypy.org/ticket/613
-                    request.is_index = path.endswith("/")
-                    return defhandler, names[i:-1]
-            
-            # Uncomment the next line to restrict positional params to "default".
-            # if i < num_candidates - 2: continue
-            
-            # Try the current leaf.
-            if getattr(candidate, 'exposed', False):
-                request.config = set_conf()
-                if i == num_candidates:
-                    # We found the extra ".index". Mark request so tools
-                    # can redirect if path_info has no trailing slash.
-                    request.is_index = True
-                else:
-                    # We're not at an 'index' handler. Mark request so tools
-                    # can redirect if path_info has NO trailing slash.
-                    # Note that this also includes handlers which take
-                    # positional parameters (virtual paths).
-                    request.is_index = False
-                return candidate, names[i:-1]
-        
-        # We didn't find anything
-        request.config = set_conf()
-        return None, []
-
-
-class MethodDispatcher(Dispatcher):
-    """Additional dispatch based on cherrypy.request.method.upper().
-    
-    Methods named GET, POST, etc will be called on an exposed class.
-    The method names must be all caps; the appropriate Allow header
-    will be output showing all capitalized method names as allowable
-    HTTP verbs.
-    
-    Note that the containing class must be exposed, not the methods.
-    """
-    
-    def __call__(self, path_info):
-        """Set handler and config for the current request."""
-        request = cherrypy.serving.request
-        resource, vpath = self.find_handler(path_info)
-        
-        if resource:
-            # Set Allow header
-            avail = [m for m in dir(resource) if m.isupper()]
-            if "GET" in avail and "HEAD" not in avail:
-                avail.append("HEAD")
-            avail.sort()
-            cherrypy.serving.response.headers['Allow'] = ", ".join(avail)
-            
-            # Find the subhandler
-            meth = request.method.upper()
-            func = getattr(resource, meth, None)
-            if func is None and meth == "HEAD":
-                func = getattr(resource, "GET", None)
-            if func:
-                # Grab any _cp_config on the subhandler.
-                if hasattr(func, "_cp_config"):
-                    request.config.update(func._cp_config)
-                
-                # Decode any leftover %2F in the virtual_path atoms.
-                vpath = [x.replace("%2F", "/") for x in vpath]
-                request.handler = LateParamPageHandler(func, *vpath)
-            else:
-                request.handler = cherrypy.HTTPError(405)
-        else:
-            request.handler = cherrypy.NotFound()
-
-
-class RoutesDispatcher(object):
-    """A Routes based dispatcher for CherryPy."""
-    
-    def __init__(self, full_result=False):
-        """
-        Routes dispatcher
-
-        Set full_result to True if you wish the controller
-        and the action to be passed on to the page handler
-        parameters. By default they won't be.
-        """
-        import routes
-        self.full_result = full_result
-        self.controllers = {}
-        self.mapper = routes.Mapper()
-        self.mapper.controller_scan = self.controllers.keys
-        
-    def connect(self, name, route, controller, **kwargs):
-        self.controllers[name] = controller
-        self.mapper.connect(name, route, controller=name, **kwargs)
-    
-    def redirect(self, url):
-        raise cherrypy.HTTPRedirect(url)
-    
-    def __call__(self, path_info):
-        """Set handler and config for the current request."""
-        func = self.find_handler(path_info)
-        if func:
-            cherrypy.serving.request.handler = LateParamPageHandler(func)
-        else:
-            cherrypy.serving.request.handler = cherrypy.NotFound()
-    
-    def find_handler(self, path_info):
-        """Find the right page handler, and set request.config."""
-        import routes
-        
-        request = cherrypy.serving.request
-        
-        config = routes.request_config()
-        config.mapper = self.mapper
-        if hasattr(request, 'wsgi_environ'):
-            config.environ = request.wsgi_environ
-        config.host = request.headers.get('Host', None)
-        config.protocol = request.scheme
-        config.redirect = self.redirect
-        
-        result = self.mapper.match(path_info)
-        
-        config.mapper_dict = result
-        params = {}
-        if result:
-            params = result.copy()
-        if not self.full_result:
-            params.pop('controller', None)
-            params.pop('action', None)
-        request.params.update(params)
-        
-        # Get config for the root object/path.
-        request.config = base = cherrypy.config.copy()
-        curpath = ""
-        
-        def merge(nodeconf):
-            if 'tools.staticdir.dir' in nodeconf:
-                nodeconf['tools.staticdir.section'] = curpath or "/"
-            base.update(nodeconf)
-        
-        app = request.app
-        root = app.root
-        if hasattr(root, "_cp_config"):
-            merge(root._cp_config)
-        if "/" in app.config:
-            merge(app.config["/"])
-        
-        # Mix in values from app.config.
-        atoms = [x for x in path_info.split("/") if x]
-        if atoms:
-            last = atoms.pop()
-        else:
-            last = None
-        for atom in atoms:
-            curpath = "/".join((curpath, atom))
-            if curpath in app.config:
-                merge(app.config[curpath])
-        
-        handler = None
-        if result:
-            controller = result.get('controller', None)
-            controller = self.controllers.get(controller)
-            if controller:
-                # Get config from the controller.
-                if hasattr(controller, "_cp_config"):
-                    merge(controller._cp_config)
-            
-            action = result.get('action', None)
-            if action is not None:
-                handler = getattr(controller, action, None)
-                # Get config from the handler 
-                if hasattr(handler, "_cp_config"): 
-                    merge(handler._cp_config)
-                    
-        # Do the last path atom here so it can
-        # override the controller's _cp_config.
-        if last:
-            curpath = "/".join((curpath, last))
-            if curpath in app.config:
-                merge(app.config[curpath])
-        
-        return handler
-
-
-def XMLRPCDispatcher(next_dispatcher=Dispatcher()):
-    from cherrypy.lib import xmlrpc
-    def xmlrpc_dispatch(path_info):
-        path_info = xmlrpc.patched_path(path_info)
-        return next_dispatcher(path_info)
-    return xmlrpc_dispatch
-
-
-def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True, **domains):
-    """Select a different handler based on the Host header.
-    
-    This can be useful when running multiple sites within one CP server.
-    It allows several domains to point to different parts of a single
-    website structure. For example:
-    
-        http://www.domain.example  ->  root
-        http://www.domain2.example  ->  root/domain2/
-        http://www.domain2.example:443  ->  root/secure
-    
-    can be accomplished via the following config:
-    
-        [/]
-        request.dispatch = cherrypy.dispatch.VirtualHost(
-            **{'www.domain2.example': '/domain2',
-               'www.domain2.example:443': '/secure',
-              })
-    
-    next_dispatcher: the next dispatcher object in the dispatch chain.
-        The VirtualHost dispatcher adds a prefix to the URL and calls
-        another dispatcher. Defaults to cherrypy.dispatch.Dispatcher().
-    
-    use_x_forwarded_host: if True (the default), any "X-Forwarded-Host"
-        request header will be used instead of the "Host" header. This
-        is commonly added by HTTP servers (such as Apache) when proxying.
-    
-    **domains: a dict of {host header value: virtual prefix} pairs.
-        The incoming "Host" request header is looked up in this dict,
-        and, if a match is found, the corresponding "virtual prefix"
-        value will be prepended to the URL path before calling the
-        next dispatcher. Note that you often need separate entries
-        for "example.com" and "www.example.com". In addition, "Host"
-        headers may contain the port number.
-    """
-    from cherrypy.lib import httputil
-    def vhost_dispatch(path_info):
-        request = cherrypy.serving.request
-        header = request.headers.get
-        
-        domain = header('Host', '')
-        if use_x_forwarded_host:
-            domain = header("X-Forwarded-Host", domain)
-        
-        prefix = domains.get(domain, "")
-        if prefix:
-            path_info = httputil.urljoin(prefix, path_info)
-        
-        result = next_dispatcher(path_info)
-        
-        # Touch up staticdir config. See http://www.cherrypy.org/ticket/614.
-        section = request.config.get('tools.staticdir.section')
-        if section:
-            section = section[len(prefix):]
-            request.config['tools.staticdir.section'] = section
-        
-        return result
-    return vhost_dispatch
-
+"""CherryPy dispatchers.
+
+A 'dispatcher' is the object which looks up the 'page handler' callable
+and collects config for the current request based on the path_info, other
+request attributes, and the application architecture. The core calls the
+dispatcher as early as possible, passing it a 'path_info' argument.
+
+The default dispatcher discovers the page handler by matching path_info
+to a hierarchical arrangement of objects, starting at request.app.root.
+"""
+
+import cherrypy
+
+
+class PageHandler(object):
+    """Callable which sets response.body."""
+    
+    def __init__(self, callable, *args, **kwargs):
+        self.callable = callable
+        self.args = args
+        self.kwargs = kwargs
+    
+    def __call__(self):
+        try:
+            return self.callable(*self.args, **self.kwargs)
+        except TypeError, x:
+            try:
+                test_callable_spec(self.callable, self.args, self.kwargs)
+            except cherrypy.HTTPError, error:
+                raise error
+            except:
+                raise x
+            raise
+
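As a minimal sketch (hypothetical handler and values, run outside a real request so the TypeError fallback above is never exercised), PageHandler simply applies the stored positional and keyword arguments to the wrapped callable:

    from cherrypy._cpdispatch import PageHandler

    def page(category, page_id, sort="asc"):
        # Hypothetical handler: positional args come from the virtual path,
        # keyword args from the query string or request body.
        return "%s/%s?sort=%s" % (category, page_id, sort)

    handler = PageHandler(page, "books", "42", sort="desc")
    assert handler() == "books/42?sort=desc"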
+
+def test_callable_spec(callable, callable_args, callable_kwargs):
+    """
+    Inspect callable and test to see if the given args are suitable for it.
+
+    When an error occurs while the handler is being invoked, there are two
+    error cases:
+    1.  Too many parameters are passed to a function which doesn't define
+        one of *args or **kwargs.
+    2.  Too few parameters are passed to the function.
+
+    There are 3 sources of parameters to a cherrypy handler.
+    1.  query string parameters are passed as keyword parameters to the handler.
+    2.  body parameters are also passed as keyword parameters.
+    3.  when partial matching occurs, the final path atoms are passed as
+        positional args.
+    Both the query string and path atoms are part of the URI.  If they are
+    incorrect, then a 404 Not Found should be raised. Conversely, the body
+    parameters are part of the request body; if they are invalid, a 400 Bad
+    Request should be raised.
+    """
+    show_mismatched_params = getattr(
+        cherrypy.serving.request, 'show_mismatched_params', False)
+    try:
+        (args, varargs, varkw, defaults) = inspect.getargspec(callable)
+    except TypeError:
+        if isinstance(callable, object) and hasattr(callable, '__call__'):
+            (args, varargs, varkw, defaults) = inspect.getargspec(callable.__call__)
+        else:
+            # If it wasn't one of our own types, re-raise 
+            # the original error
+            raise
+
+    if args and args[0] == 'self':
+        args = args[1:]
+
+    arg_usage = dict([(arg, 0,) for arg in args])
+    vararg_usage = 0
+    varkw_usage = 0
+    extra_kwargs = set()
+
+    for i, value in enumerate(callable_args):
+        try:
+            arg_usage[args[i]] += 1
+        except IndexError:
+            vararg_usage += 1
+
+    for key in callable_kwargs.keys():
+        try:
+            arg_usage[key] += 1
+        except KeyError:
+            varkw_usage += 1
+            extra_kwargs.add(key)
+
+    # figure out which args have defaults.
+    args_with_defaults = args[-len(defaults or []):]
+    for i, val in enumerate(defaults or []):
+        # Defaults take effect only when the arg hasn't been used yet.
+        if arg_usage[args_with_defaults[i]] == 0:
+            arg_usage[args_with_defaults[i]] += 1
+
+    missing_args = []
+    multiple_args = []
+    for key, usage in arg_usage.items():
+        if usage == 0:
+            missing_args.append(key)
+        elif usage > 1:
+            multiple_args.append(key)
+
+    if missing_args:
+        # In the case where the method allows body arguments
+        # there are 3 potential errors:
+        # 1. not enough query string parameters -> 404
+        # 2. not enough body parameters -> 400
+        # 3. not enough path parts (partial matches) -> 404
+        #
+        # We can't actually tell which case it is, 
+        # so I'm raising a 404 because that covers 2/3 of the
+        # possibilities
+        # 
+        # In the case where the method does not allow body
+        # arguments it's definitely a 404.
+        message = None
+        if show_mismatched_params:
+            message = "Missing parameters: %s" % ",".join(missing_args)
+        raise cherrypy.HTTPError(404, message=message)
+
+    # the extra positional arguments come from the path - 404 Not Found
+    if not varargs and vararg_usage > 0:
+        raise cherrypy.HTTPError(404)
+
+    body_params = cherrypy.serving.request.body.params or {}
+    body_params = set(body_params.keys())
+    qs_params = set(callable_kwargs.keys()) - body_params
+
+    if multiple_args:
+        if qs_params.intersection(set(multiple_args)):
+            # If any of the multiple parameters came from the query string then
+            # it's a 404 Not Found
+            error = 404
+        else:
+            # Otherwise it's a 400 Bad Request
+            error = 400
+
+        message = None
+        if show_mismatched_params:
+            message = "Multiple values for parameters: "\
+                    "%s" % ",".join(multiple_args)
+        raise cherrypy.HTTPError(error, message=message)
+
+    if not varkw and varkw_usage > 0:
+
+        # If there were extra query string parameters, it's a 404 Not Found
+        extra_qs_params = set(qs_params).intersection(extra_kwargs)
+        if extra_qs_params:
+            message = None
+            if show_mismatched_params:
+                message = "Unexpected query string "\
+                        "parameters: %s" % ", ".join(extra_qs_params)
+            raise cherrypy.HTTPError(404, message=message)
+
+        # If there were any extra body parameters, it's a 400 Bad Request
+        extra_body_params = set(body_params).intersection(extra_kwargs)
+        if extra_body_params:
+            message = None
+            if show_mismatched_params:
+                message = "Unexpected body parameters: "\
+                        "%s" % ", ".join(extra_body_params)
+            raise cherrypy.HTTPError(400, message=message)
+
+
+try:
+    import inspect
+except ImportError:
+    test_callable_spec = lambda callable, args, kwargs: None
+
+
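The 404-versus-400 rule described in the docstring above can be illustrated with a small standalone sketch (a hypothetical helper, not part of CherryPy): a mismatched parameter that came from the URI (query string or trailing path atoms) yields 404 Not Found, while one confined to the request body yields 400 Bad Request.

    def classify_mismatch(bad_params, body_params):
        # Hypothetical helper, not CherryPy code: any offending parameter
        # outside the body set came from the URI -> 404; otherwise the
        # problem is confined to the body -> 400.
        if set(bad_params) - set(body_params):
            return 404
        return 400

    assert classify_mismatch(["page"], []) == 404            # query string / path
    assert classify_mismatch(["token"], ["token"]) == 400    # body only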
+
+class LateParamPageHandler(PageHandler):
+    """When passing cherrypy.request.params to the page handler, we do not
+    want to capture that dict too early; we want to give tools like the
+    decoding tool a chance to modify the params dict in-between the lookup
+    of the handler and the actual calling of the handler. This subclass
+    takes that into account, and allows request.params to be 'bound late'
+    (it's more complicated than that, but that's the effect).
+    """
+    
+    def _get_kwargs(self):
+        kwargs = cherrypy.serving.request.params.copy()
+        if self._kwargs:
+            kwargs.update(self._kwargs)
+        return kwargs
+    
+    def _set_kwargs(self, kwargs):
+        self._kwargs = kwargs
+    
+    kwargs = property(_get_kwargs, _set_kwargs,
+                      doc='page handler kwargs (with '
+                      'cherrypy.request.params copied in)')
+
+
+class Dispatcher(object):
+    """CherryPy Dispatcher which walks a tree of objects to find a handler.
+    
+    The tree is rooted at cherrypy.request.app.root, and each hierarchical
+    component in the path_info argument is matched to a corresponding nested
+    attribute of the root object. Matching handlers must have an 'exposed'
+    attribute which evaluates to True. The special method name "index"
+    matches a URI which ends in a slash ("/"). The special method name
+    "default" may match a trailing portion of the path_info, but only if no
+    longer substring of the path_info matches some other object.
+    
+    This is the default, built-in dispatcher for CherryPy.
+    """
+    __metaclass__ = cherrypy._AttributeDocstrings
+
+    dispatch_method_name = '_cp_dispatch'
+    dispatch_method_name__doc = """
+    The name of the dispatch method that nodes may optionally implement
+    to provide their own dynamic dispatch algorithm.
+    """
+    
+    def __init__(self, dispatch_method_name=None):
+        if dispatch_method_name:
+            self.dispatch_method_name = dispatch_method_name
+
+    def __call__(self, path_info):
+        """Set handler and config for the current request."""
+        request = cherrypy.serving.request
+        func, vpath = self.find_handler(path_info)
+        
+        if func:
+            # Decode any leftover %2F in the virtual_path atoms.
+            vpath = [x.replace("%2F", "/") for x in vpath]
+            request.handler = LateParamPageHandler(func, *vpath)
+        else:
+            request.handler = cherrypy.NotFound()
+    
+    def find_handler(self, path):
+        """Return the appropriate page handler, plus any virtual path.
+        
+        This will return two objects. The first will be a callable,
+        which can be used to generate page output. Any parameters from
+        the query string or request body will be sent to that callable
+        as keyword arguments.
+        
+        The callable is found by traversing the application's tree,
+        starting from cherrypy.request.app.root, and matching path
+        components to successive objects in the tree. For example, the
+        URL "/path/to/handler" might return root.path.to.handler.
+        
+        The second object returned will be a list of names which are
+        'virtual path' components: parts of the URL which are dynamic,
+        and were not used when looking up the handler.
+        These virtual path components are passed to the handler as
+        positional arguments.
+        """
+        request = cherrypy.serving.request
+        app = request.app
+        root = app.root
+        dispatch_name = self.dispatch_method_name
+        
+        # Get config for the root object/path.
+        curpath = ""
+        nodeconf = {}
+        if hasattr(root, "_cp_config"):
+            nodeconf.update(root._cp_config)
+        if "/" in app.config:
+            nodeconf.update(app.config["/"])
+        object_trail = [['root', root, nodeconf, curpath]]
+        
+        node = root
+        names = [x for x in path.strip('/').split('/') if x] + ['index']
+        iternames = names[:]
+        while iternames:
+            name = iternames[0]
+            # map to legal Python identifiers (replace '.' with '_')
+            objname = name.replace('.', '_')
+            
+            nodeconf = {}
+            subnode = getattr(node, objname, None)
+            if subnode is None:
+                dispatch = getattr(node, dispatch_name, None)
+                if dispatch and callable(dispatch) and not \
+                        getattr(dispatch, 'exposed', False):
+                    subnode = dispatch(vpath=iternames)
+            name = iternames.pop(0)
+            node = subnode
+
+            if node is not None:
+                # Get _cp_config attached to this node.
+                if hasattr(node, "_cp_config"):
+                    nodeconf.update(node._cp_config)
+            
+            # Mix in values from app.config for this path.
+            curpath = "/".join((curpath, name))
+            if curpath in app.config:
+                nodeconf.update(app.config[curpath])
+            
+            object_trail.append([name, node, nodeconf, curpath])
+        
+        def set_conf():
+            """Collapse all object_trail config into cherrypy.request.config."""
+            base = cherrypy.config.copy()
+            # Note that we merge the config from each node
+            # even if that node was None.
+            for name, obj, conf, curpath in object_trail:
+                base.update(conf)
+                if 'tools.staticdir.dir' in conf:
+                    base['tools.staticdir.section'] = curpath
+            return base
+        
+        # Try successive objects (reverse order)
+        num_candidates = len(object_trail) - 1
+        for i in range(num_candidates, -1, -1):
+            
+            name, candidate, nodeconf, curpath = object_trail[i]
+            if candidate is None:
+                continue
+            
+            # Try a "default" method on the current leaf.
+            if hasattr(candidate, "default"):
+                defhandler = candidate.default
+                if getattr(defhandler, 'exposed', False):
+                    # Insert any extra _cp_config from the default handler.
+                    conf = getattr(defhandler, "_cp_config", {})
+                    object_trail.insert(i + 1, ["default", defhandler, conf, curpath])
+                    request.config = set_conf()
+                    # See http://www.cherrypy.org/ticket/613
+                    request.is_index = path.endswith("/")
+                    return defhandler, names[i:-1]
+            
+            # Uncomment the next line to restrict positional params to "default".
+            # if i < num_candidates - 2: continue
+            
+            # Try the current leaf.
+            if getattr(candidate, 'exposed', False):
+                request.config = set_conf()
+                if i == num_candidates:
+                    # We found the extra ".index". Mark request so tools
+                    # can redirect if path_info has no trailing slash.
+                    request.is_index = True
+                else:
+                    # We're not at an 'index' handler. Mark request so tools
+                    # can redirect if path_info has NO trailing slash.
+                    # Note that this also includes handlers which take
+                    # positional parameters (virtual paths).
+                    request.is_index = False
+                return candidate, names[i:-1]
+        
+        # We didn't find anything
+        request.config = set_conf()
+        return None, []
+
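A minimal sketch of an object tree this Dispatcher can walk (hypothetical application code): "/" resolves to Root.index, "/blog/" to Root.blog.index, and an unmatched tail such as "/blog/2013/05" falls through to Blog.default with the leftover atoms passed positionally.

    import cherrypy

    class Blog(object):
        def index(self):
            return "blog front page"
        index.exposed = True

        def default(self, *vpath):
            # Unmatched trailing atoms, e.g. ('2013', '05'), arrive here.
            return "archive for %s" % "/".join(vpath)
        default.exposed = True

    class Root(object):
        blog = Blog()
        def index(self):
            return "home"
        index.exposed = True

    if __name__ == '__main__':
        cherrypy.quickstart(Root(), '/')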
+
+class MethodDispatcher(Dispatcher):
+    """Additional dispatch based on cherrypy.request.method.upper().
+    
+    Methods named GET, POST, etc. will be called on an exposed class.
+    The method names must be all caps; the appropriate Allow header
+    will be output showing all capitalized method names as allowable
+    HTTP verbs.
+    
+    Note that the containing class must be exposed, not the methods.
+    """
+    
+    def __call__(self, path_info):
+        """Set handler and config for the current request."""
+        request = cherrypy.serving.request
+        resource, vpath = self.find_handler(path_info)
+        
+        if resource:
+            # Set Allow header
+            avail = [m for m in dir(resource) if m.isupper()]
+            if "GET" in avail and "HEAD" not in avail:
+                avail.append("HEAD")
+            avail.sort()
+            cherrypy.serving.response.headers['Allow'] = ", ".join(avail)
+            
+            # Find the subhandler
+            meth = request.method.upper()
+            func = getattr(resource, meth, None)
+            if func is None and meth == "HEAD":
+                func = getattr(resource, "GET", None)
+            if func:
+                # Grab any _cp_config on the subhandler.
+                if hasattr(func, "_cp_config"):
+                    request.config.update(func._cp_config)
+                
+                # Decode any leftover %2F in the virtual_path atoms.
+                vpath = [x.replace("%2F", "/") for x in vpath]
+                request.handler = LateParamPageHandler(func, *vpath)
+            else:
+                request.handler = cherrypy.HTTPError(405)
+        else:
+            request.handler = cherrypy.NotFound()
+
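A minimal sketch of a resource served through the MethodDispatcher (hypothetical class and mount point): the class itself carries the exposed attribute, and the upper-case method names become the allowed HTTP verbs.

    import cherrypy

    class UserResource(object):
        exposed = True

        def GET(self, user_id):
            return "user %s" % user_id

        def PUT(self, user_id, name=None):
            return "renamed %s to %s" % (user_id, name)

    conf = {'/': {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}}
    if __name__ == '__main__':
        cherrypy.quickstart(UserResource(), '/users', conf)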
+
+class RoutesDispatcher(object):
+    """A Routes based dispatcher for CherryPy."""
+    
+    def __init__(self, full_result=False):
+        """
+        Routes dispatcher
+
+        Set full_result to True if you want the controller
+        and the action to be passed on to the page handler
+        as parameters. By default they are not.
+        """
+        import routes
+        self.full_result = full_result
+        self.controllers = {}
+        self.mapper = routes.Mapper()
+        self.mapper.controller_scan = self.controllers.keys
+        
+    def connect(self, name, route, controller, **kwargs):
+        self.controllers[name] = controller
+        self.mapper.connect(name, route, controller=name, **kwargs)
+    
+    def redirect(self, url):
+        raise cherrypy.HTTPRedirect(url)
+    
+    def __call__(self, path_info):
+        """Set handler and config for the current request."""
+        func = self.find_handler(path_info)
+        if func:
+            cherrypy.serving.request.handler = LateParamPageHandler(func)
+        else:
+            cherrypy.serving.request.handler = cherrypy.NotFound()
+    
+    def find_handler(self, path_info):
+        """Find the right page handler, and set request.config."""
+        import routes
+        
+        request = cherrypy.serving.request
+        
+        config = routes.request_config()
+        config.mapper = self.mapper
+        if hasattr(request, 'wsgi_environ'):
+            config.environ = request.wsgi_environ
+        config.host = request.headers.get('Host', None)
+        config.protocol = request.scheme
+        config.redirect = self.redirect
+        
+        result = self.mapper.match(path_info)
+        
+        config.mapper_dict = result
+        params = {}
+        if result:
+            params = result.copy()
+        if not self.full_result:
+            params.pop('controller', None)
+            params.pop('action', None)
+        request.params.update(params)
+        
+        # Get config for the root object/path.
+        request.config = base = cherrypy.config.copy()
+        curpath = ""
+        
+        def merge(nodeconf):
+            if 'tools.staticdir.dir' in nodeconf:
+                nodeconf['tools.staticdir.section'] = curpath or "/"
+            base.update(nodeconf)
+        
+        app = request.app
+        root = app.root
+        if hasattr(root, "_cp_config"):
+            merge(root._cp_config)
+        if "/" in app.config:
+            merge(app.config["/"])
+        
+        # Mix in values from app.config.
+        atoms = [x for x in path_info.split("/") if x]
+        if atoms:
+            last = atoms.pop()
+        else:
+            last = None
+        for atom in atoms:
+            curpath = "/".join((curpath, atom))
+            if curpath in app.config:
+                merge(app.config[curpath])
+        
+        handler = None
+        if result:
+            controller = result.get('controller', None)
+            controller = self.controllers.get(controller)
+            if controller:
+                # Get config from the controller.
+                if hasattr(controller, "_cp_config"):
+                    merge(controller._cp_config)
+            
+            action = result.get('action', None)
+            if action is not None:
+                handler = getattr(controller, action, None)
+                # Get config from the handler 
+                if hasattr(handler, "_cp_config"): 
+                    merge(handler._cp_config)
+                    
+        # Do the last path atom here so it can
+        # override the controller's _cp_config.
+        if last:
+            curpath = "/".join((curpath, last))
+            if curpath in app.config:
+                merge(app.config[curpath])
+        
+        return handler
+
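A minimal sketch of wiring a controller through the RoutesDispatcher (hypothetical controller, route and mount; requires the third-party routes package):

    import cherrypy

    class BlogController(object):
        def show(self, entry_id):
            return "entry %s" % entry_id

    dispatcher = cherrypy.dispatch.RoutesDispatcher()
    dispatcher.connect('blog', '/blog/:entry_id',
                       controller=BlogController(), action='show')

    conf = {'/': {'request.dispatch': dispatcher}}
    if __name__ == '__main__':
        cherrypy.tree.mount(root=None, config=conf)
        cherrypy.engine.start()
        cherrypy.engine.block()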
+
+def XMLRPCDispatcher(next_dispatcher=Dispatcher()):
+    from cherrypy.lib import xmlrpc
+    def xmlrpc_dispatch(path_info):
+        path_info = xmlrpc.patched_path(path_info)
+        return next_dispatcher(path_info)
+    return xmlrpc_dispatch
+
+
+def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True, **domains):
+    """Select a different handler based on the Host header.
+    
+    This can be useful when running multiple sites within one CP server.
+    It allows several domains to point to different parts of a single
+    website structure. For example:
+    
+        http://www.domain.example  ->  root
+        http://www.domain2.example  ->  root/domain2/
+        http://www.domain2.example:443  ->  root/secure
+    
+    can be accomplished via the following config:
+    
+        [/]
+        request.dispatch = cherrypy.dispatch.VirtualHost(
+            **{'www.domain2.example': '/domain2',
+               'www.domain2.example:443': '/secure',
+              })
+    
+    next_dispatcher: the next dispatcher object in the dispatch chain.
+        The VirtualHost dispatcher adds a prefix to the URL and calls
+        another dispatcher. Defaults to cherrypy.dispatch.Dispatcher().
+    
+    use_x_forwarded_host: if True (the default), any "X-Forwarded-Host"
+        request header will be used instead of the "Host" header. This
+        is commonly added by HTTP servers (such as Apache) when proxying.
+    
+    **domains: a dict of {host header value: virtual prefix} pairs.
+        The incoming "Host" request header is looked up in this dict,
+        and, if a match is found, the corresponding "virtual prefix"
+        value will be prepended to the URL path before calling the
+        next dispatcher. Note that you often need separate entries
+        for "example.com" and "www.example.com". In addition, "Host"
+        headers may contain the port number.
+    """
+    from cherrypy.lib import httputil
+    def vhost_dispatch(path_info):
+        request = cherrypy.serving.request
+        header = request.headers.get
+        
+        domain = header('Host', '')
+        if use_x_forwarded_host:
+            domain = header("X-Forwarded-Host", domain)
+        
+        prefix = domains.get(domain, "")
+        if prefix:
+            path_info = httputil.urljoin(prefix, path_info)
+        
+        result = next_dispatcher(path_info)
+        
+        # Touch up staticdir config. See http://www.cherrypy.org/ticket/614.
+        section = request.config.get('tools.staticdir.section')
+        if section:
+            section = section[len(prefix):]
+            request.config['tools.staticdir.section'] = section
+        
+        return result
+    return vhost_dispatch
+
diff --git a/cherrypy/_cplogging.py b/cherrypy/_cplogging.py
index 4c9639111..3935d24fc 100644
--- a/cherrypy/_cplogging.py
+++ b/cherrypy/_cplogging.py
@@ -1,250 +1,250 @@
-"""CherryPy logging."""
-
-import datetime
-import logging
-# Silence the no-handlers "warning" (stderr write!) in stdlib logging
-logging.Logger.manager.emittedNoHandlerWarning = 1
-logfmt = logging.Formatter("%(message)s")
-import os
-import sys
-
-import cherrypy
-from cherrypy import _cperror
-
-
-class LogManager(object):
-    
-    appid = None
-    error_log = None
-    access_log = None
-    access_log_format = \
-        '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
-    
-    def __init__(self, appid=None, logger_root="cherrypy"):
-        self.logger_root = logger_root
-        self.appid = appid
-        if appid is None:
-            self.error_log = logging.getLogger("%s.error" % logger_root)
-            self.access_log = logging.getLogger("%s.access" % logger_root)
-        else:
-            self.error_log = logging.getLogger("%s.error.%s" % (logger_root, appid))
-            self.access_log = logging.getLogger("%s.access.%s" % (logger_root, appid))
-        self.error_log.setLevel(logging.INFO)
-        self.access_log.setLevel(logging.INFO)
-        cherrypy.engine.subscribe('graceful', self.reopen_files)
-    
-    def reopen_files(self):
-        """Close and reopen all file handlers."""
-        for log in (self.error_log, self.access_log):
-            for h in log.handlers:
-                if isinstance(h, logging.FileHandler):
-                    h.acquire()
-                    h.stream.close()
-                    h.stream = open(h.baseFilename, h.mode)
-                    h.release()
-    
-    def error(self, msg='', context='', severity=logging.INFO, traceback=False):
-        """Write to the error log.
-        
-        This is not just for errors! Applications may call this at any time
-        to log application-specific information.
-        """
-        if traceback:
-            msg += _cperror.format_exc()
-        self.error_log.log(severity, ' '.join((self.time(), context, msg)))
-    
-    def __call__(self, *args, **kwargs):
-        """Write to the error log.
-        
-        This is not just for errors! Applications may call this at any time
-        to log application-specific information.
-        """
-        return self.error(*args, **kwargs)
-    
-    def access(self):
-        """Write to the access log (in Apache/NCSA Combined Log format).
-        
-        Like Apache started doing in 2.0.46, non-printable and other special
-        characters in %r (and we expand that to all parts) are escaped using
-        \\xhh sequences, where hh stands for the hexadecimal representation
-        of the raw byte. Exceptions from this rule are " and \\, which are
-        escaped by prepending a backslash, and all whitespace characters,
-        which are written in their C-style notation (\\n, \\t, etc).
-        """
-        request = cherrypy.serving.request
-        remote = request.remote
-        response = cherrypy.serving.response
-        outheaders = response.headers
-        inheaders = request.headers
-        if response.output_status is None:
-            status = "-"
-        else:
-            status = response.output_status.split(" ", 1)[0]
-        
-        atoms = {'h': remote.name or remote.ip,
-                 'l': '-',
-                 'u': getattr(request, "login", None) or "-",
-                 't': self.time(),
-                 'r': request.request_line,
-                 's': status,
-                 'b': dict.get(outheaders, 'Content-Length', '') or "-",
-                 'f': dict.get(inheaders, 'Referer', ''),
-                 'a': dict.get(inheaders, 'User-Agent', ''),
-                 }
-        for k, v in atoms.items():
-            if isinstance(v, unicode):
-                v = v.encode('utf8')
-            elif not isinstance(v, str):
-                v = str(v)
-            # Fortunately, repr(str) escapes unprintable chars, \n, \t, etc
-            # and backslash for us. All we have to do is strip the quotes.
-            v = repr(v)[1:-1]
-            # Escape double-quote.
-            atoms[k] = v.replace('"', '\\"')
-        
-        try:
-            self.access_log.log(logging.INFO, self.access_log_format % atoms)
-        except:
-            self(traceback=True)
-    
-    def time(self):
-        """Return now() in Apache Common Log Format (no timezone)."""
-        now = datetime.datetime.now()
-        monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
-                      'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
-        month = monthnames[now.month - 1].capitalize()
-        return ('[%02d/%s/%04d:%02d:%02d:%02d]' % 
-                (now.day, month, now.year, now.hour, now.minute, now.second))
-    
-    def _get_builtin_handler(self, log, key):
-        for h in log.handlers:
-            if getattr(h, "_cpbuiltin", None) == key:
-                return h
-    
-    
-    # ------------------------- Screen handlers ------------------------- #
-    
-    def _set_screen_handler(self, log, enable, stream=None):
-        h = self._get_builtin_handler(log, "screen")
-        if enable:
-            if not h:
-                if stream is None:
-                    stream = sys.stderr
-                h = logging.StreamHandler(stream)
-                h.setFormatter(logfmt)
-                h._cpbuiltin = "screen"
-                log.addHandler(h)
-        elif h:
-            log.handlers.remove(h)
-    
-    def _get_screen(self):
-        h = self._get_builtin_handler
-        has_h = h(self.error_log, "screen") or h(self.access_log, "screen")
-        return bool(has_h)
-    
-    def _set_screen(self, newvalue):
-        self._set_screen_handler(self.error_log, newvalue, stream=sys.stderr)
-        self._set_screen_handler(self.access_log, newvalue, stream=sys.stdout)
-    screen = property(_get_screen, _set_screen,
-                      doc="If True, error and access will print to stderr.")
-    
-    
-    # -------------------------- File handlers -------------------------- #
-    
-    def _add_builtin_file_handler(self, log, fname):
-        h = logging.FileHandler(fname)
-        h.setFormatter(logfmt)
-        h._cpbuiltin = "file"
-        log.addHandler(h)
-    
-    def _set_file_handler(self, log, filename):
-        h = self._get_builtin_handler(log, "file")
-        if filename:
-            if h:
-                if h.baseFilename != os.path.abspath(filename):
-                    h.close()
-                    log.handlers.remove(h)
-                    self._add_builtin_file_handler(log, filename)
-            else:
-                self._add_builtin_file_handler(log, filename)
-        else:
-            if h:
-                h.close()
-                log.handlers.remove(h)
-    
-    def _get_error_file(self):
-        h = self._get_builtin_handler(self.error_log, "file")
-        if h:
-            return h.baseFilename
-        return ''
-    def _set_error_file(self, newvalue):
-        self._set_file_handler(self.error_log, newvalue)
-    error_file = property(_get_error_file, _set_error_file,
-                          doc="The filename for self.error_log.")
-    
-    def _get_access_file(self):
-        h = self._get_builtin_handler(self.access_log, "file")
-        if h:
-            return h.baseFilename
-        return ''
-    def _set_access_file(self, newvalue):
-        self._set_file_handler(self.access_log, newvalue)
-    access_file = property(_get_access_file, _set_access_file,
-                           doc="The filename for self.access_log.")
-    
-    
-    # ------------------------- WSGI handlers ------------------------- #
-    
-    def _set_wsgi_handler(self, log, enable):
-        h = self._get_builtin_handler(log, "wsgi")
-        if enable:
-            if not h:
-                h = WSGIErrorHandler()
-                h.setFormatter(logfmt)
-                h._cpbuiltin = "wsgi"
-                log.addHandler(h)
-        elif h:
-            log.handlers.remove(h)
-    
-    def _get_wsgi(self):
-        return bool(self._get_builtin_handler(self.error_log, "wsgi"))
-    
-    def _set_wsgi(self, newvalue):
-        self._set_wsgi_handler(self.error_log, newvalue)
-    wsgi = property(_get_wsgi, _set_wsgi,
-                      doc="If True, error messages will be sent to wsgi.errors.")
-
-
-class WSGIErrorHandler(logging.Handler):
-    "A handler class which writes logging records to environ['wsgi.errors']."
-    
-    def flush(self):
-        """Flushes the stream."""
-        try:
-            stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
-        except (AttributeError, KeyError):
-            pass
-        else:
-            stream.flush()
-    
-    def emit(self, record):
-        """Emit a record."""
-        try:
-            stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
-        except (AttributeError, KeyError):
-            pass
-        else:
-            try:
-                msg = self.format(record)
-                fs = "%s\n"
-                import types
-                if not hasattr(types, "UnicodeType"): #if no unicode support...
-                    stream.write(fs % msg)
-                else:
-                    try:
-                        stream.write(fs % msg)
-                    except UnicodeError:
-                        stream.write(fs % msg.encode("UTF-8"))
-                self.flush()
-            except:
-                self.handleError(record)
+"""CherryPy logging."""
+
+import datetime
+import logging
+# Silence the no-handlers "warning" (stderr write!) in stdlib logging
+logging.Logger.manager.emittedNoHandlerWarning = 1
+logfmt = logging.Formatter("%(message)s")
+import os
+import sys
+
+import cherrypy
+from cherrypy import _cperror
+
+
+class LogManager(object):
+    
+    appid = None
+    error_log = None
+    access_log = None
+    access_log_format = \
+        '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
+    
+    def __init__(self, appid=None, logger_root="cherrypy"):
+        self.logger_root = logger_root
+        self.appid = appid
+        if appid is None:
+            self.error_log = logging.getLogger("%s.error" % logger_root)
+            self.access_log = logging.getLogger("%s.access" % logger_root)
+        else:
+            self.error_log = logging.getLogger("%s.error.%s" % (logger_root, appid))
+            self.access_log = logging.getLogger("%s.access.%s" % (logger_root, appid))
+        self.error_log.setLevel(logging.INFO)
+        self.access_log.setLevel(logging.INFO)
+        cherrypy.engine.subscribe('graceful', self.reopen_files)
+    
+    def reopen_files(self):
+        """Close and reopen all file handlers."""
+        for log in (self.error_log, self.access_log):
+            for h in log.handlers:
+                if isinstance(h, logging.FileHandler):
+                    h.acquire()
+                    h.stream.close()
+                    h.stream = open(h.baseFilename, h.mode)
+                    h.release()
+    
+    def error(self, msg='', context='', severity=logging.INFO, traceback=False):
+        """Write to the error log.
+        
+        This is not just for errors! Applications may call this at any time
+        to log application-specific information.
+        """
+        if traceback:
+            msg += _cperror.format_exc()
+        self.error_log.log(severity, ' '.join((self.time(), context, msg)))
+    
+    def __call__(self, *args, **kwargs):
+        """Write to the error log.
+        
+        This is not just for errors! Applications may call this at any time
+        to log application-specific information.
+        """
+        return self.error(*args, **kwargs)
+    
+    def access(self):
+        """Write to the access log (in Apache/NCSA Combined Log format).
+        
+        Like Apache started doing in 2.0.46, non-printable and other special
+        characters in %r (and we expand that to all parts) are escaped using
+        \\xhh sequences, where hh stands for the hexadecimal representation
+        of the raw byte. Exceptions from this rule are " and \\, which are
+        escaped by prepending a backslash, and all whitespace characters,
+        which are written in their C-style notation (\\n, \\t, etc).
+        """
+        request = cherrypy.serving.request
+        remote = request.remote
+        response = cherrypy.serving.response
+        outheaders = response.headers
+        inheaders = request.headers
+        if response.output_status is None:
+            status = "-"
+        else:
+            status = response.output_status.split(" ", 1)[0]
+        
+        atoms = {'h': remote.name or remote.ip,
+                 'l': '-',
+                 'u': getattr(request, "login", None) or "-",
+                 't': self.time(),
+                 'r': request.request_line,
+                 's': status,
+                 'b': dict.get(outheaders, 'Content-Length', '') or "-",
+                 'f': dict.get(inheaders, 'Referer', ''),
+                 'a': dict.get(inheaders, 'User-Agent', ''),
+                 }
+        for k, v in atoms.items():
+            if isinstance(v, unicode):
+                v = v.encode('utf8')
+            elif not isinstance(v, str):
+                v = str(v)
+            # Fortunately, repr(str) escapes unprintable chars, \n, \t, etc
+            # and backslash for us. All we have to do is strip the quotes.
+            v = repr(v)[1:-1]
+            # Escape double-quote.
+            atoms[k] = v.replace('"', '\\"')
+        
+        try:
+            self.access_log.log(logging.INFO, self.access_log_format % atoms)
+        except:
+            self(traceback=True)
+    
+    def time(self):
+        """Return now() in Apache Common Log Format (no timezone)."""
+        now = datetime.datetime.now()
+        monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
+                      'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
+        month = monthnames[now.month - 1].capitalize()
+        return ('[%02d/%s/%04d:%02d:%02d:%02d]' % 
+                (now.day, month, now.year, now.hour, now.minute, now.second))
+    
+    def _get_builtin_handler(self, log, key):
+        for h in log.handlers:
+            if getattr(h, "_cpbuiltin", None) == key:
+                return h
+    
+    
+    # ------------------------- Screen handlers ------------------------- #
+    
+    def _set_screen_handler(self, log, enable, stream=None):
+        h = self._get_builtin_handler(log, "screen")
+        if enable:
+            if not h:
+                if stream is None:
+                    stream = sys.stderr
+                h = logging.StreamHandler(stream)
+                h.setFormatter(logfmt)
+                h._cpbuiltin = "screen"
+                log.addHandler(h)
+        elif h:
+            log.handlers.remove(h)
+    
+    def _get_screen(self):
+        h = self._get_builtin_handler
+        has_h = h(self.error_log, "screen") or h(self.access_log, "screen")
+        return bool(has_h)
+    
+    def _set_screen(self, newvalue):
+        self._set_screen_handler(self.error_log, newvalue, stream=sys.stderr)
+        self._set_screen_handler(self.access_log, newvalue, stream=sys.stdout)
+    screen = property(_get_screen, _set_screen,
+                      doc="If True, error and access will print to stderr.")
+    
+    
+    # -------------------------- File handlers -------------------------- #
+    
+    def _add_builtin_file_handler(self, log, fname):
+        h = logging.FileHandler(fname)
+        h.setFormatter(logfmt)
+        h._cpbuiltin = "file"
+        log.addHandler(h)
+    
+    def _set_file_handler(self, log, filename):
+        h = self._get_builtin_handler(log, "file")
+        if filename:
+            if h:
+                if h.baseFilename != os.path.abspath(filename):
+                    h.close()
+                    log.handlers.remove(h)
+                    self._add_builtin_file_handler(log, filename)
+            else:
+                self._add_builtin_file_handler(log, filename)
+        else:
+            if h:
+                h.close()
+                log.handlers.remove(h)
+    
+    def _get_error_file(self):
+        h = self._get_builtin_handler(self.error_log, "file")
+        if h:
+            return h.baseFilename
+        return ''
+    def _set_error_file(self, newvalue):
+        self._set_file_handler(self.error_log, newvalue)
+    error_file = property(_get_error_file, _set_error_file,
+                          doc="The filename for self.error_log.")
+    
+    def _get_access_file(self):
+        h = self._get_builtin_handler(self.access_log, "file")
+        if h:
+            return h.baseFilename
+        return ''
+    def _set_access_file(self, newvalue):
+        self._set_file_handler(self.access_log, newvalue)
+    access_file = property(_get_access_file, _set_access_file,
+                           doc="The filename for self.access_log.")
+    
+    
+    # ------------------------- WSGI handlers ------------------------- #
+    
+    def _set_wsgi_handler(self, log, enable):
+        h = self._get_builtin_handler(log, "wsgi")
+        if enable:
+            if not h:
+                h = WSGIErrorHandler()
+                h.setFormatter(logfmt)
+                h._cpbuiltin = "wsgi"
+                log.addHandler(h)
+        elif h:
+            log.handlers.remove(h)
+    
+    def _get_wsgi(self):
+        return bool(self._get_builtin_handler(self.error_log, "wsgi"))
+    
+    def _set_wsgi(self, newvalue):
+        self._set_wsgi_handler(self.error_log, newvalue)
+    wsgi = property(_get_wsgi, _set_wsgi,
+                      doc="If True, error messages will be sent to wsgi.errors.")
+
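A short sketch of driving the LogManager through config (hypothetical file names): the log.* keys map onto the screen, error_file and access_file properties of the global log object, and cherrypy.log(...) writes to the error log.

    import cherrypy

    cherrypy.config.update({
        'log.screen': False,                    # drop the builtin screen handlers
        'log.error_file': 'myapp_error.log',    # hypothetical filename
        'log.access_file': 'myapp_access.log',  # hypothetical filename
    })

    cherrypy.log("cache warmed up", context='APP')  # written to the error log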
+
+class WSGIErrorHandler(logging.Handler):
+    "A handler class which writes logging records to environ['wsgi.errors']."
+    
+    def flush(self):
+        """Flushes the stream."""
+        try:
+            stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
+        except (AttributeError, KeyError):
+            pass
+        else:
+            stream.flush()
+    
+    def emit(self, record):
+        """Emit a record."""
+        try:
+            stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
+        except (AttributeError, KeyError):
+            pass
+        else:
+            try:
+                msg = self.format(record)
+                fs = "%s\n"
+                import types
+                if not hasattr(types, "UnicodeType"): #if no unicode support...
+                    stream.write(fs % msg)
+                else:
+                    try:
+                        stream.write(fs % msg)
+                    except UnicodeError:
+                        stream.write(fs % msg.encode("UTF-8"))
+                self.flush()
+            except:
+                self.handleError(record)
diff --git a/cherrypy/cherryd b/cherrypy/cherryd
index d4633b3b0..6dcdcca3a 100644
--- a/cherrypy/cherryd
+++ b/cherrypy/cherryd
@@ -1,102 +1,102 @@
-#! /usr/bin/env python
-"""The CherryPy daemon."""
-
-import sys
-
-import cherrypy
-from cherrypy.process import plugins, servers
-
-
-def start(configfiles=None, daemonize=False, environment=None,
-          fastcgi=False, scgi=False, pidfile=None, imports=None):
-    """Subscribe all engine plugins and start the engine."""
-    sys.path = [''] + sys.path
-    for i in imports or []:
-        exec("import %s" % i)
-    
-    for c in configfiles or []:
-        cherrypy.config.update(c)
-        # If there's only one app mounted, merge config into it.
-        if len(cherrypy.tree.apps) == 1:
-            for app in cherrypy.tree.apps.values():
-                app.merge(c)
-    
-    engine = cherrypy.engine
-    
-    if environment is not None:
-        cherrypy.config.update({'environment': environment})
-    
-    # Only daemonize if asked to.
-    if daemonize:
-        # Don't print anything to stdout/sterr.
-        cherrypy.config.update({'log.screen': False})
-        plugins.Daemonizer(engine).subscribe()
-    
-    if pidfile:
-        plugins.PIDFile(engine, pidfile).subscribe()
-    
-    if hasattr(engine, "signal_handler"):
-        engine.signal_handler.subscribe()
-    if hasattr(engine, "console_control_handler"):
-        engine.console_control_handler.subscribe()
-    
-    if fastcgi and scgi:
-        # fastcgi and scgi aren't allowed together.
-        cherrypy.log.error("fastcgi and scgi aren't allowed together.", 'ENGINE')
-        sys.exit(1)
-    elif fastcgi or scgi:
-        # Turn off autoreload when using fastcgi or scgi.
-        cherrypy.config.update({'engine.autoreload_on': False})
-        # Turn off the default HTTP server (which is subscribed by default).
-        cherrypy.server.unsubscribe()
-        
-        addr = cherrypy.server.bind_addr
-        if fastcgi:
-            f = servers.FlupFCGIServer(application=cherrypy.tree,
-                                       bindAddress=addr)
-        else:
-            f = servers.FlupSCGIServer(application=cherrypy.tree,
-                                       bindAddress=addr)
-        s = servers.ServerAdapter(engine, httpserver=f, bind_addr=addr)
-        s.subscribe()
-    
-    # Always start the engine; this will start all other services
-    try:
-        engine.start()
-    except:
-        # Assume the error has been logged already via bus.log.
-        sys.exit(1)
-    else:
-        engine.block()
-
-
-if __name__ == '__main__':
-    from optparse import OptionParser
-    
-    p = OptionParser()
-    p.add_option('-c', '--config', action="append", dest='config',
-                 help="specify config file(s)")
-    p.add_option('-d', action="store_true", dest='daemonize',
-                 help="run the server as a daemon")
-    p.add_option('-e', '--environment', dest='environment', default=None,
-                 help="apply the given config environment")
-    p.add_option('-f', action="store_true", dest='fastcgi',
-                 help="start a fastcgi server instead of the default HTTP server")
-    p.add_option('-s', action="store_true", dest='scgi',
-                 help="start a scgi server instead of the default HTTP server")
-    p.add_option('-i', '--import', action="append", dest='imports',
-                 help="specify modules to import")
-    p.add_option('-p', '--pidfile', dest='pidfile', default=None,
-                 help="store the process id in the given file")
-    p.add_option('-P', '--Path', action="append", dest='Path',
-                 help="add the given paths to sys.path")
-    options, args = p.parse_args()
-    
-    if options.Path:
-        for p in options.Path:
-            sys.path.insert(0, p)
-    
-    start(options.config, options.daemonize,
-          options.environment, options.fastcgi, options.scgi, options.pidfile,
-          options.imports)
-
+#! /usr/bin/env python
+"""The CherryPy daemon."""
+
+import sys
+
+import cherrypy
+from cherrypy.process import plugins, servers
+
+
+def start(configfiles=None, daemonize=False, environment=None,
+          fastcgi=False, scgi=False, pidfile=None, imports=None):
+    """Subscribe all engine plugins and start the engine."""
+    sys.path = [''] + sys.path
+    for i in imports or []:
+        exec("import %s" % i)
+    
+    for c in configfiles or []:
+        cherrypy.config.update(c)
+        # If there's only one app mounted, merge config into it.
+        if len(cherrypy.tree.apps) == 1:
+            for app in cherrypy.tree.apps.values():
+                app.merge(c)
+    
+    engine = cherrypy.engine
+    
+    if environment is not None:
+        cherrypy.config.update({'environment': environment})
+    
+    # Only daemonize if asked to.
+    if daemonize:
+        # Don't print anything to stdout/stderr.
+        cherrypy.config.update({'log.screen': False})
+        plugins.Daemonizer(engine).subscribe()
+    
+    if pidfile:
+        plugins.PIDFile(engine, pidfile).subscribe()
+    
+    if hasattr(engine, "signal_handler"):
+        engine.signal_handler.subscribe()
+    if hasattr(engine, "console_control_handler"):
+        engine.console_control_handler.subscribe()
+    
+    if fastcgi and scgi:
+        # fastcgi and scgi aren't allowed together.
+        cherrypy.log.error("fastcgi and scgi aren't allowed together.", 'ENGINE')
+        sys.exit(1)
+    elif fastcgi or scgi:
+        # Turn off autoreload when using fastcgi or scgi.
+        cherrypy.config.update({'engine.autoreload_on': False})
+        # Turn off the default HTTP server (which is subscribed by default).
+        cherrypy.server.unsubscribe()
+        
+        addr = cherrypy.server.bind_addr
+        if fastcgi:
+            f = servers.FlupFCGIServer(application=cherrypy.tree,
+                                       bindAddress=addr)
+        else:
+            f = servers.FlupSCGIServer(application=cherrypy.tree,
+                                       bindAddress=addr)
+        s = servers.ServerAdapter(engine, httpserver=f, bind_addr=addr)
+        s.subscribe()
+    
+    # Always start the engine; this will start all other services
+    try:
+        engine.start()
+    except:
+        # Assume the error has been logged already via bus.log.
+        sys.exit(1)
+    else:
+        engine.block()
+
+
+if __name__ == '__main__':
+    from optparse import OptionParser
+    
+    p = OptionParser()
+    p.add_option('-c', '--config', action="append", dest='config',
+                 help="specify config file(s)")
+    p.add_option('-d', action="store_true", dest='daemonize',
+                 help="run the server as a daemon")
+    p.add_option('-e', '--environment', dest='environment', default=None,
+                 help="apply the given config environment")
+    p.add_option('-f', action="store_true", dest='fastcgi',
+                 help="start a fastcgi server instead of the default HTTP server")
+    p.add_option('-s', action="store_true", dest='scgi',
+                 help="start a scgi server instead of the default HTTP server")
+    p.add_option('-i', '--import', action="append", dest='imports',
+                 help="specify modules to import")
+    p.add_option('-p', '--pidfile', dest='pidfile', default=None,
+                 help="store the process id in the given file")
+    p.add_option('-P', '--Path', action="append", dest='Path',
+                 help="add the given paths to sys.path")
+    options, args = p.parse_args()
+    
+    if options.Path:
+        for p in options.Path:
+            sys.path.insert(0, p)
+    
+    start(options.config, options.daemonize,
+          options.environment, options.fastcgi, options.scgi, options.pidfile,
+          options.imports)
+
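As a quick illustration of the options this script wires up (not part of the patch itself), a launch might look like the sketch below; the config file names and the pidfile path are placeholders, and subprocess is used only so the snippet stays in Python.

    # A minimal sketch, assuming "global.conf" and "app.conf" exist (hypothetical names).
    import subprocess

    subprocess.check_call([
        "python", "cherrypy/cherryd",
        "-c", "global.conf",           # first config file (-c/--config can be repeated)
        "-c", "app.conf",              # merged into the single mounted app, as start() does
        "-d",                          # daemonize, which also turns off log.screen
        "-p", "/var/run/cherryd.pid",  # write the process id via the PIDFile plugin
    ])
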
diff --git a/cherrypy/lib/covercp.py b/cherrypy/lib/covercp.py
index 4d9be29fd..1d7bf1f46 100644
--- a/cherrypy/lib/covercp.py
+++ b/cherrypy/lib/covercp.py
@@ -1,364 +1,364 @@
-"""Code-coverage tools for CherryPy.
-
-To use this module, or the coverage tools in the test suite,
-you need to download 'coverage.py', either Gareth Rees' original
-implementation:
-http://www.garethrees.org/2001/12/04/python-coverage/
-
-or Ned Batchelder's enhanced version:
-http://www.nedbatchelder.com/code/modules/coverage.html
-
-To turn on coverage tracing, use the following code:
-
-    cherrypy.engine.subscribe('start', covercp.start)
-
-DO NOT subscribe anything on the 'start_thread' channel, as previously
-recommended. Calling start once in the main thread should be sufficient
-to start coverage on all threads. Calling start again in each thread
-effectively clears any coverage data gathered up to that point.
-
-Run your code, then use the covercp.serve() function to browse the
-results in a web browser. If you run this module from the command line,
-it will call serve() for you.
-"""
-
-import re
-import sys
-import cgi
-from urllib import quote_plus
-import os, os.path
-localFile = os.path.join(os.path.dirname(__file__), "coverage.cache")
-
-try:
-    from coverage import the_coverage as coverage
-    def start():
-        coverage.start()
-except ImportError:
-    # Setting coverage to None will raise errors
-    # that need to be trapped downstream.
-    coverage = None
-    
-    import warnings
-    warnings.warn("No code coverage will be performed; coverage.py could not be imported.")
-    
-    def start():
-        pass
-start.priority = 20
-
-TEMPLATE_MENU = """<html>
-<head>
-    <title>CherryPy Coverage Menu</title>
-    <style>
-        body {font: 9pt Arial, serif;}
-        #tree {
-            font-size: 8pt;
-            font-family: Andale Mono, monospace;
-            white-space: pre;
-            }
-        #tree a:active, a:focus {
-            background-color: black;
-            padding: 1px;
-            color: white;
-            border: 0px solid #9999FF;
-            -moz-outline-style: none;
-            }
-        .fail { color: red;}
-        .pass { color: #888;}
-        #pct { text-align: right;}
-        h3 {
-            font-size: small;
-            font-weight: bold;
-            font-style: italic;
-            margin-top: 5px; 
-            }
-        input { border: 1px solid #ccc; padding: 2px; }
-        .directory {
-            color: #933;
-            font-style: italic;
-            font-weight: bold;
-            font-size: 10pt;
-            }
-        .file {
-            color: #400;
-            }
-        a { text-decoration: none; }
-        #crumbs {
-            color: white;
-            font-size: 8pt;
-            font-family: Andale Mono, monospace;
-            width: 100%;
-            background-color: black;
-            }
-        #crumbs a {
-            color: #f88;
-            }
-        #options {
-            line-height: 2.3em;
-            border: 1px solid black;
-            background-color: #eee;
-            padding: 4px;
-            }
-        #exclude {
-            width: 100%;
-            margin-bottom: 3px;
-            border: 1px solid #999;
-            }
-        #submit {
-            background-color: black;
-            color: white;
-            border: 0;
-            margin-bottom: -9px;
-            }
-    </style>
-</head>
-<body>
-<h2>CherryPy Coverage</h2>"""
-
-TEMPLATE_FORM = """
-<div id="options">
-<form action='menu' method=GET>
-    <input type='hidden' name='base' value='%(base)s' />
-    Show percentages <input type='checkbox' %(showpct)s name='showpct' value='checked' /><br />
-    Hide files over <input type='text' id='pct' name='pct' value='%(pct)s' size='3' />%%<br />
-    Exclude files matching<br />
-    <input type='text' id='exclude' name='exclude' value='%(exclude)s' size='20' />
-    <br />
-
-    <input type='submit' value='Change view' id="submit"/>
-</form>
-</div>""" 
-
-TEMPLATE_FRAMESET = """<html>
-<head><title>CherryPy coverage data</title></head>
-<frameset cols='250, 1*'>
-    <frame src='menu?base=%s' />
-    <frame name='main' src='' />
-</frameset>
-</html>
-"""
-
-TEMPLATE_COVERAGE = """<html>
-<head>
-    <title>Coverage for %(name)s</title>
-    <style>
-        h2 { margin-bottom: .25em; }
-        p { margin: .25em; }
-        .covered { color: #000; background-color: #fff; }
-        .notcovered { color: #fee; background-color: #500; }
-        .excluded { color: #00f; background-color: #fff; }
-         table .covered, table .notcovered, table .excluded
-             { font-family: Andale Mono, monospace;
-               font-size: 10pt; white-space: pre; }
-
-         .lineno { background-color: #eee;}
-         .notcovered .lineno { background-color: #000;}
-         table { border-collapse: collapse;
-    </style>
-</head>
-<body>
-<h2>%(name)s</h2>
-<p>%(fullpath)s</p>
-<p>Coverage: %(pc)s%%</p>"""
-
-TEMPLATE_LOC_COVERED = """<tr class="covered">
-    <td class="lineno">%s&nbsp;</td>
-    <td>%s</td>
-</tr>\n"""
-TEMPLATE_LOC_NOT_COVERED = """<tr class="notcovered">
-    <td class="lineno">%s&nbsp;</td>
-    <td>%s</td>
-</tr>\n"""
-TEMPLATE_LOC_EXCLUDED = """<tr class="excluded">
-    <td class="lineno">%s&nbsp;</td>
-    <td>%s</td>
-</tr>\n"""
-
-TEMPLATE_ITEM = "%s%s<a class='file' href='report?name=%s' target='main'>%s</a>\n"
-
-def _percent(statements, missing):
-    s = len(statements)
-    e = s - len(missing)
-    if s > 0:
-        return int(round(100.0 * e / s))
-    return 0
-
-def _show_branch(root, base, path, pct=0, showpct=False, exclude=""):
-    
-    # Show the directory name and any of our children
-    dirs = [k for k, v in root.items() if v]
-    dirs.sort()
-    for name in dirs:
-        newpath = os.path.join(path, name)
-        
-        if newpath.lower().startswith(base):
-            relpath = newpath[len(base):]
-            yield "| " * relpath.count(os.sep)
-            yield "<a class='directory' href='menu?base=%s&exclude=%s'>%s</a>\n" % \
-                   (newpath, quote_plus(exclude), name)
-        
-        for chunk in _show_branch(root[name], base, newpath, pct, showpct, exclude):
-            yield chunk
-    
-    # Now list the files
-    if path.lower().startswith(base):
-        relpath = path[len(base):]
-        files = [k for k, v in root.items() if not v]
-        files.sort()
-        for name in files:
-            newpath = os.path.join(path, name)
-            
-            pc_str = ""
-            if showpct:
-                try:
-                    _, statements, _, missing, _ = coverage.analysis2(newpath)
-                except:
-                    # Yes, we really want to pass on all errors.
-                    pass
-                else:
-                    pc = _percent(statements, missing)
-                    pc_str = ("%3d%% " % pc).replace(' ', '&nbsp;')
-                    if pc < float(pct) or pc == -1:
-                        pc_str = "<span class='fail'>%s</span>" % pc_str
-                    else:
-                        pc_str = "<span class='pass'>%s</span>" % pc_str
-            
-            yield TEMPLATE_ITEM % ("| " * (relpath.count(os.sep) + 1),
-                                   pc_str, newpath, name)
-
-def _skip_file(path, exclude):
-    if exclude:
-        return bool(re.search(exclude, path))
-
-def _graft(path, tree):
-    d = tree
-    
-    p = path
-    atoms = []
-    while True:
-        p, tail = os.path.split(p)
-        if not tail:
-            break
-        atoms.append(tail)
-    atoms.append(p)
-    if p != "/":
-        atoms.append("/")
-    
-    atoms.reverse()
-    for node in atoms:
-        if node:
-            d = d.setdefault(node, {})
-
-def get_tree(base, exclude):
-    """Return covered module names as a nested dict."""
-    tree = {}
-    coverage.get_ready()
-    runs = list(coverage.cexecuted.keys())
-    if runs:
-        for path in runs:
-            if not _skip_file(path, exclude) and not os.path.isdir(path):
-                _graft(path, tree)
-    return tree
-
-class CoverStats(object):
-    
-    def __init__(self, root=None):
-        if root is None:
-            # Guess initial depth. Files outside this path will not be
-            # reachable from the web interface.
-            import cherrypy
-            root = os.path.dirname(cherrypy.__file__)
-        self.root = root
-    
-    def index(self):
-        return TEMPLATE_FRAMESET % self.root.lower()
-    index.exposed = True
-    
-    def menu(self, base="/", pct="50", showpct="",
-             exclude=r'python\d\.\d|test|tut\d|tutorial'):
-        
-        # The coverage module uses all-lower-case names.
-        base = base.lower().rstrip(os.sep)
-        
-        yield TEMPLATE_MENU
-        yield TEMPLATE_FORM % locals()
-        
-        # Start by showing links for parent paths
-        yield "<div id='crumbs'>"
-        path = ""
-        atoms = base.split(os.sep)
-        atoms.pop()
-        for atom in atoms:
-            path += atom + os.sep
-            yield ("<a href='menu?base=%s&exclude=%s'>%s</a> %s"
-                   % (path, quote_plus(exclude), atom, os.sep))
-        yield "</div>"
-        
-        yield "<div id='tree'>"
-        
-        # Then display the tree
-        tree = get_tree(base, exclude)
-        if not tree:
-            yield "<p>No modules covered.</p>"
-        else:
-            for chunk in _show_branch(tree, base, "/", pct,
-                                      showpct == 'checked', exclude):
-                yield chunk
-        
-        yield "</div>"
-        yield "</body></html>"
-    menu.exposed = True
-    
-    def annotated_file(self, filename, statements, excluded, missing):
-        source = open(filename, 'r')
-        buffer = []
-        for lineno, line in enumerate(source.readlines()):
-            lineno += 1
-            line = line.strip("\n\r")
-            empty_the_buffer = True
-            if lineno in excluded:
-                template = TEMPLATE_LOC_EXCLUDED
-            elif lineno in missing:
-                template = TEMPLATE_LOC_NOT_COVERED
-            elif lineno in statements:
-                template = TEMPLATE_LOC_COVERED
-            else:
-                empty_the_buffer = False
-                buffer.append((lineno, line))
-            if empty_the_buffer:
-                for lno, pastline in buffer:
-                    yield template % (lno, cgi.escape(pastline))
-                buffer = []
-                yield template % (lineno, cgi.escape(line))
-    
-    def report(self, name):
-        coverage.get_ready()
-        filename, statements, excluded, missing, _ = coverage.analysis2(name)
-        pc = _percent(statements, missing)
-        yield TEMPLATE_COVERAGE % dict(name=os.path.basename(name),
-                                       fullpath=name,
-                                       pc=pc)
-        yield '<table>\n'
-        for line in self.annotated_file(filename, statements, excluded,
-                                        missing):
-            yield line
-        yield '</table>'
-        yield '</body>'
-        yield '</html>'
-    report.exposed = True
-
-
-def serve(path=localFile, port=8080, root=None):
-    if coverage is None:
-        raise ImportError("The coverage module could not be imported.")
-    coverage.cache_default = path
-    
-    import cherrypy
-    cherrypy.config.update({'server.socket_port': int(port),
-                            'server.thread_pool': 10,
-                            'environment': "production",
-                            })
-    cherrypy.quickstart(CoverStats(root))
-
-if __name__ == "__main__":
-    serve(*tuple(sys.argv[1:]))
-
+"""Code-coverage tools for CherryPy.
+
+To use this module, or the coverage tools in the test suite,
+you need to download 'coverage.py', either Gareth Rees' original
+implementation:
+http://www.garethrees.org/2001/12/04/python-coverage/
+
+or Ned Batchelder's enhanced version:
+http://www.nedbatchelder.com/code/modules/coverage.html
+
+To turn on coverage tracing, use the following code:
+
+    cherrypy.engine.subscribe('start', covercp.start)
+
+DO NOT subscribe anything on the 'start_thread' channel, as previously
+recommended. Calling start once in the main thread should be sufficient
+to start coverage on all threads. Calling start again in each thread
+effectively clears any coverage data gathered up to that point.
+
+Run your code, then use the covercp.serve() function to browse the
+results in a web browser. If you run this module from the command line,
+it will call serve() for you.
+"""
+
+import re
+import sys
+import cgi
+from urllib import quote_plus
+import os, os.path
+localFile = os.path.join(os.path.dirname(__file__), "coverage.cache")
+
+try:
+    from coverage import the_coverage as coverage
+    def start():
+        coverage.start()
+except ImportError:
+    # Setting coverage to None will raise errors
+    # that need to be trapped downstream.
+    coverage = None
+    
+    import warnings
+    warnings.warn("No code coverage will be performed; coverage.py could not be imported.")
+    
+    def start():
+        pass
+start.priority = 20
+
+TEMPLATE_MENU = """<html>
+<head>
+    <title>CherryPy Coverage Menu</title>
+    <style>
+        body {font: 9pt Arial, serif;}
+        #tree {
+            font-size: 8pt;
+            font-family: Andale Mono, monospace;
+            white-space: pre;
+            }
+        #tree a:active, a:focus {
+            background-color: black;
+            padding: 1px;
+            color: white;
+            border: 0px solid #9999FF;
+            -moz-outline-style: none;
+            }
+        .fail { color: red;}
+        .pass { color: #888;}
+        #pct { text-align: right;}
+        h3 {
+            font-size: small;
+            font-weight: bold;
+            font-style: italic;
+            margin-top: 5px; 
+            }
+        input { border: 1px solid #ccc; padding: 2px; }
+        .directory {
+            color: #933;
+            font-style: italic;
+            font-weight: bold;
+            font-size: 10pt;
+            }
+        .file {
+            color: #400;
+            }
+        a { text-decoration: none; }
+        #crumbs {
+            color: white;
+            font-size: 8pt;
+            font-family: Andale Mono, monospace;
+            width: 100%;
+            background-color: black;
+            }
+        #crumbs a {
+            color: #f88;
+            }
+        #options {
+            line-height: 2.3em;
+            border: 1px solid black;
+            background-color: #eee;
+            padding: 4px;
+            }
+        #exclude {
+            width: 100%;
+            margin-bottom: 3px;
+            border: 1px solid #999;
+            }
+        #submit {
+            background-color: black;
+            color: white;
+            border: 0;
+            margin-bottom: -9px;
+            }
+    </style>
+</head>
+<body>
+<h2>CherryPy Coverage</h2>"""
+
+TEMPLATE_FORM = """
+<div id="options">
+<form action='menu' method=GET>
+    <input type='hidden' name='base' value='%(base)s' />
+    Show percentages <input type='checkbox' %(showpct)s name='showpct' value='checked' /><br />
+    Hide files over <input type='text' id='pct' name='pct' value='%(pct)s' size='3' />%%<br />
+    Exclude files matching<br />
+    <input type='text' id='exclude' name='exclude' value='%(exclude)s' size='20' />
+    <br />
+
+    <input type='submit' value='Change view' id="submit"/>
+</form>
+</div>""" 
+
+TEMPLATE_FRAMESET = """<html>
+<head><title>CherryPy coverage data</title></head>
+<frameset cols='250, 1*'>
+    <frame src='menu?base=%s' />
+    <frame name='main' src='' />
+</frameset>
+</html>
+"""
+
+TEMPLATE_COVERAGE = """<html>
+<head>
+    <title>Coverage for %(name)s</title>
+    <style>
+        h2 { margin-bottom: .25em; }
+        p { margin: .25em; }
+        .covered { color: #000; background-color: #fff; }
+        .notcovered { color: #fee; background-color: #500; }
+        .excluded { color: #00f; background-color: #fff; }
+         table .covered, table .notcovered, table .excluded
+             { font-family: Andale Mono, monospace;
+               font-size: 10pt; white-space: pre; }
+
+         .lineno { background-color: #eee;}
+         .notcovered .lineno { background-color: #000;}
+         table { border-collapse: collapse; }
+    </style>
+</head>
+<body>
+<h2>%(name)s</h2>
+<p>%(fullpath)s</p>
+<p>Coverage: %(pc)s%%</p>"""
+
+TEMPLATE_LOC_COVERED = """<tr class="covered">
+    <td class="lineno">%s&nbsp;</td>
+    <td>%s</td>
+</tr>\n"""
+TEMPLATE_LOC_NOT_COVERED = """<tr class="notcovered">
+    <td class="lineno">%s&nbsp;</td>
+    <td>%s</td>
+</tr>\n"""
+TEMPLATE_LOC_EXCLUDED = """<tr class="excluded">
+    <td class="lineno">%s&nbsp;</td>
+    <td>%s</td>
+</tr>\n"""
+
+TEMPLATE_ITEM = "%s%s<a class='file' href='report?name=%s' target='main'>%s</a>\n"
+
+def _percent(statements, missing):
+    s = len(statements)
+    e = s - len(missing)
+    if s > 0:
+        return int(round(100.0 * e / s))
+    return 0
+
+def _show_branch(root, base, path, pct=0, showpct=False, exclude=""):
+    
+    # Show the directory name and any of our children
+    dirs = [k for k, v in root.items() if v]
+    dirs.sort()
+    for name in dirs:
+        newpath = os.path.join(path, name)
+        
+        if newpath.lower().startswith(base):
+            relpath = newpath[len(base):]
+            yield "| " * relpath.count(os.sep)
+            yield "<a class='directory' href='menu?base=%s&exclude=%s'>%s</a>\n" % \
+                   (newpath, quote_plus(exclude), name)
+        
+        for chunk in _show_branch(root[name], base, newpath, pct, showpct, exclude):
+            yield chunk
+    
+    # Now list the files
+    if path.lower().startswith(base):
+        relpath = path[len(base):]
+        files = [k for k, v in root.items() if not v]
+        files.sort()
+        for name in files:
+            newpath = os.path.join(path, name)
+            
+            pc_str = ""
+            if showpct:
+                try:
+                    _, statements, _, missing, _ = coverage.analysis2(newpath)
+                except:
+                    # Yes, we really want to pass on all errors.
+                    pass
+                else:
+                    pc = _percent(statements, missing)
+                    pc_str = ("%3d%% " % pc).replace(' ', '&nbsp;')
+                    if pc < float(pct) or pc == -1:
+                        pc_str = "<span class='fail'>%s</span>" % pc_str
+                    else:
+                        pc_str = "<span class='pass'>%s</span>" % pc_str
+            
+            yield TEMPLATE_ITEM % ("| " * (relpath.count(os.sep) + 1),
+                                   pc_str, newpath, name)
+
+def _skip_file(path, exclude):
+    if exclude:
+        return bool(re.search(exclude, path))
+
+def _graft(path, tree):
+    d = tree
+    
+    p = path
+    atoms = []
+    while True:
+        p, tail = os.path.split(p)
+        if not tail:
+            break
+        atoms.append(tail)
+    atoms.append(p)
+    if p != "/":
+        atoms.append("/")
+    
+    atoms.reverse()
+    for node in atoms:
+        if node:
+            d = d.setdefault(node, {})
+
+def get_tree(base, exclude):
+    """Return covered module names as a nested dict."""
+    tree = {}
+    coverage.get_ready()
+    runs = list(coverage.cexecuted.keys())
+    if runs:
+        for path in runs:
+            if not _skip_file(path, exclude) and not os.path.isdir(path):
+                _graft(path, tree)
+    return tree
+
+class CoverStats(object):
+    
+    def __init__(self, root=None):
+        if root is None:
+            # Guess initial depth. Files outside this path will not be
+            # reachable from the web interface.
+            import cherrypy
+            root = os.path.dirname(cherrypy.__file__)
+        self.root = root
+    
+    def index(self):
+        return TEMPLATE_FRAMESET % self.root.lower()
+    index.exposed = True
+    
+    def menu(self, base="/", pct="50", showpct="",
+             exclude=r'python\d\.\d|test|tut\d|tutorial'):
+        
+        # The coverage module uses all-lower-case names.
+        base = base.lower().rstrip(os.sep)
+        
+        yield TEMPLATE_MENU
+        yield TEMPLATE_FORM % locals()
+        
+        # Start by showing links for parent paths
+        yield "<div id='crumbs'>"
+        path = ""
+        atoms = base.split(os.sep)
+        atoms.pop()
+        for atom in atoms:
+            path += atom + os.sep
+            yield ("<a href='menu?base=%s&exclude=%s'>%s</a> %s"
+                   % (path, quote_plus(exclude), atom, os.sep))
+        yield "</div>"
+        
+        yield "<div id='tree'>"
+        
+        # Then display the tree
+        tree = get_tree(base, exclude)
+        if not tree:
+            yield "<p>No modules covered.</p>"
+        else:
+            for chunk in _show_branch(tree, base, "/", pct,
+                                      showpct == 'checked', exclude):
+                yield chunk
+        
+        yield "</div>"
+        yield "</body></html>"
+    menu.exposed = True
+    
+    def annotated_file(self, filename, statements, excluded, missing):
+        source = open(filename, 'r')
+        buffer = []
+        for lineno, line in enumerate(source.readlines()):
+            lineno += 1
+            line = line.strip("\n\r")
+            empty_the_buffer = True
+            if lineno in excluded:
+                template = TEMPLATE_LOC_EXCLUDED
+            elif lineno in missing:
+                template = TEMPLATE_LOC_NOT_COVERED
+            elif lineno in statements:
+                template = TEMPLATE_LOC_COVERED
+            else:
+                empty_the_buffer = False
+                buffer.append((lineno, line))
+            if empty_the_buffer:
+                for lno, pastline in buffer:
+                    yield template % (lno, cgi.escape(pastline))
+                buffer = []
+                yield template % (lineno, cgi.escape(line))
+    
+    def report(self, name):
+        coverage.get_ready()
+        filename, statements, excluded, missing, _ = coverage.analysis2(name)
+        pc = _percent(statements, missing)
+        yield TEMPLATE_COVERAGE % dict(name=os.path.basename(name),
+                                       fullpath=name,
+                                       pc=pc)
+        yield '<table>\n'
+        for line in self.annotated_file(filename, statements, excluded,
+                                        missing):
+            yield line
+        yield '</table>'
+        yield '</body>'
+        yield '</html>'
+    report.exposed = True
+
+
+def serve(path=localFile, port=8080, root=None):
+    if coverage is None:
+        raise ImportError("The coverage module could not be imported.")
+    coverage.cache_default = path
+    
+    import cherrypy
+    cherrypy.config.update({'server.socket_port': int(port),
+                            'server.thread_pool': 10,
+                            'environment': "production",
+                            })
+    cherrypy.quickstart(CoverStats(root))
+
+if __name__ == "__main__":
+    serve(*tuple(sys.argv[1:]))
+
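For orientation (not part of the patch), the workflow the module docstring describes looks roughly like this; the mounted application is omitted and the port simply repeats serve()'s default:

    # A minimal sketch, assuming coverage.py is installed so the import above succeeds.
    import cherrypy
    from cherrypy.lib import covercp

    # Enable tracing once, in the main thread, before the engine starts.
    cherrypy.engine.subscribe('start', covercp.start)
    # ... mount and run the application here, exercising the code paths of interest ...

    # Later (typically in a separate run), browse the collected data in a web browser.
    covercp.serve(port=8080)
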
diff --git a/cherrypy/lib/httpauth.py b/cherrypy/lib/httpauth.py
index c8616a4d4..39e632c2b 100644
--- a/cherrypy/lib/httpauth.py
+++ b/cherrypy/lib/httpauth.py
@@ -1,361 +1,361 @@
-"""
-httpauth modules defines functions to implement HTTP Digest Authentication (RFC 2617).
-This has full compliance with 'Digest' and 'Basic' authentication methods. In
-'Digest' it supports both MD5 and MD5-sess algorithms.
-
-Usage:
-
-    First use 'doAuth' to request the client authentication for a
-    certain resource. You should send an httplib.UNAUTHORIZED response to the
-    client so he knows he has to authenticate itself.
-    
-    Then use 'parseAuthorization' to retrieve the 'auth_map' used in
-    'checkResponse'.
-
-    To use 'checkResponse' you must have already verified the password associated
-    with the 'username' key in 'auth_map' dict. Then you use the 'checkResponse'
-    function to verify if the password matches the one sent by the client.
-
-SUPPORTED_ALGORITHM - list of supported 'Digest' algorithms
-SUPPORTED_QOP - list of supported 'Digest' 'qop'.
-"""
-__version__ = 1, 0, 1
-__author__ = "Tiago Cogumbreiro <cogumbreiro@users.sf.net>"
-__credits__ = """
-    Peter van Kampen for its recipe which implement most of Digest authentication:
-    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302378
-"""
-
-__license__ = """
-Copyright (c) 2005, Tiago Cogumbreiro <cogumbreiro@users.sf.net>
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, 
-are permitted provided that the following conditions are met:
-
-    * Redistributions of source code must retain the above copyright notice, 
-      this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above copyright notice, 
-      this list of conditions and the following disclaimer in the documentation 
-      and/or other materials provided with the distribution.
-    * Neither the name of Sylvain Hellegouarch nor the names of his contributors 
-      may be used to endorse or promote products derived from this software 
-      without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE 
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""
-
-__all__ = ("digestAuth", "basicAuth", "doAuth", "checkResponse",
-           "parseAuthorization", "SUPPORTED_ALGORITHM", "md5SessionKey",
-           "calculateNonce", "SUPPORTED_QOP")
-
-################################################################################
-try:
-    # Python 2.5+
-    from hashlib import md5
-except ImportError:
-    from md5 import new as md5
-import time
-import base64
-from urllib2 import parse_http_list, parse_keqv_list
-
-MD5 = "MD5"
-MD5_SESS = "MD5-sess"
-AUTH = "auth"
-AUTH_INT = "auth-int"
-
-SUPPORTED_ALGORITHM = (MD5, MD5_SESS)
-SUPPORTED_QOP = (AUTH, AUTH_INT)
-
-################################################################################
-# doAuth
-#
-DIGEST_AUTH_ENCODERS = {
-    MD5: lambda val: md5(val).hexdigest(),
-    MD5_SESS: lambda val: md5(val).hexdigest(),
-#    SHA: lambda val: sha.new (val).hexdigest (),
-}
-
-def calculateNonce (realm, algorithm=MD5):
-    """This is an auxaliary function that calculates 'nonce' value. It is used
-    to handle sessions."""
-
-    global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS
-    assert algorithm in SUPPORTED_ALGORITHM
-
-    try:
-        encoder = DIGEST_AUTH_ENCODERS[algorithm]
-    except KeyError:
-        raise NotImplementedError ("The chosen algorithm (%s) does not have "\
-                                   "an implementation yet" % algorithm)
-
-    return encoder ("%d:%s" % (time.time(), realm))
-
-def digestAuth (realm, algorithm=MD5, nonce=None, qop=AUTH):
-    """Challenges the client for a Digest authentication."""
-    global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS, SUPPORTED_QOP
-    assert algorithm in SUPPORTED_ALGORITHM
-    assert qop in SUPPORTED_QOP
-
-    if nonce is None:
-        nonce = calculateNonce (realm, algorithm)
-
-    return 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
-        realm, nonce, algorithm, qop
-    )
-
-def basicAuth (realm):
-    """Challengenes the client for a Basic authentication."""
-    assert '"' not in realm, "Realms cannot contain the \" (quote) character."
-
-    return 'Basic realm="%s"' % realm
-
-def doAuth (realm):
-    """'doAuth' function returns the challenge string b giving priority over
-    Digest and fallback to Basic authentication when the browser doesn't
-    support the first one.
-    
-    This should be set in the HTTP header under the key 'WWW-Authenticate'."""
-
-    return digestAuth (realm) + " " + basicAuth (realm)
-
-
-################################################################################
-# Parse authorization parameters
-#
-def _parseDigestAuthorization (auth_params):
-    # Convert the auth params to a dict
-    items = parse_http_list(auth_params)
-    params = parse_keqv_list(items)
-
-    # Now validate the params
-
-    # Check for required parameters
-    required = ["username", "realm", "nonce", "uri", "response"]
-    for k in required:
-        if k not in params:
-            return None
-
-    # If qop is sent then cnonce and nc MUST be present
-    if "qop" in params and not ("cnonce" in params \
-                                      and "nc" in params):
-        return None
-
-    # If qop is not sent, neither cnonce nor nc can be present
-    if ("cnonce" in params or "nc" in params) and \
-       "qop" not in params:
-        return None
-
-    return params
-
-
-def _parseBasicAuthorization (auth_params):
-    username, password = base64.decodestring (auth_params).split (":", 1)
-    return {"username": username, "password": password}
-
-AUTH_SCHEMES = {
-    "basic": _parseBasicAuthorization,
-    "digest": _parseDigestAuthorization,
-}
-
-def parseAuthorization (credentials):
-    """parseAuthorization will convert the value of the 'Authorization' key in
-    the HTTP header to a map itself. If the parsing fails 'None' is returned.
-    """
-
-    global AUTH_SCHEMES
-
-    auth_scheme, auth_params = credentials.split(" ", 1)
-    auth_scheme = auth_scheme.lower ()
-
-    parser = AUTH_SCHEMES[auth_scheme]
-    params = parser (auth_params)
-
-    if params is None:
-        return
-
-    assert "auth_scheme" not in params
-    params["auth_scheme"] = auth_scheme
-    return params
-
-
-################################################################################
-# Check provided response for a valid password
-#
-def md5SessionKey (params, password):
-    """
-    If the "algorithm" directive's value is "MD5-sess", then A1 
-    [the session key] is calculated only once - on the first request by the
-    client following receipt of a WWW-Authenticate challenge from the server.
-
-    This creates a 'session key' for the authentication of subsequent
-    requests and responses which is different for each "authentication
-    session", thus limiting the amount of material hashed with any one
-    key.
-
-    Because the server need only use the hash of the user
-    credentials in order to create the A1 value, this construction could
-    be used in conjunction with a third party authentication service so
-    that the web server would not need the actual password value.  The
-    specification of such a protocol is beyond the scope of this
-    specification.
-"""
-
-    keys = ("username", "realm", "nonce", "cnonce")
-    params_copy = {}
-    for key in keys:
-        params_copy[key] = params[key]
-
-    params_copy["algorithm"] = MD5_SESS
-    return _A1 (params_copy, password)
-
-def _A1(params, password):
-    algorithm = params.get ("algorithm", MD5)
-    H = DIGEST_AUTH_ENCODERS[algorithm]
-
-    if algorithm == MD5:
-        # If the "algorithm" directive's value is "MD5" or is
-        # unspecified, then A1 is:
-        # A1 = unq(username-value) ":" unq(realm-value) ":" passwd
-        return "%s:%s:%s" % (params["username"], params["realm"], password)
-
-    elif algorithm == MD5_SESS:
-
-        # This is A1 if qop is set
-        # A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
-        #         ":" unq(nonce-value) ":" unq(cnonce-value)
-        h_a1 = H ("%s:%s:%s" % (params["username"], params["realm"], password))
-        return "%s:%s:%s" % (h_a1, params["nonce"], params["cnonce"])
-
-
-def _A2(params, method, kwargs):
-    # If the "qop" directive's value is "auth" or is unspecified, then A2 is:
-    # A2 = Method ":" digest-uri-value
-
-    qop = params.get ("qop", "auth")
-    if qop == "auth":
-        return method + ":" + params["uri"]
-    elif qop == "auth-int":
-        # If the "qop" value is "auth-int", then A2 is:
-        # A2 = Method ":" digest-uri-value ":" H(entity-body)
-        entity_body = kwargs.get ("entity_body", "")
-        H = kwargs["H"]
-
-        return "%s:%s:%s" % (
-            method,
-            params["uri"],
-            H(entity_body)
-        )
-
-    else:
-        raise NotImplementedError ("The 'qop' method is unknown: %s" % qop)
-
-def _computeDigestResponse(auth_map, password, method="GET", A1=None, **kwargs):
-    """
-    Generates a response respecting the algorithm defined in RFC 2617
-    """
-    params = auth_map
-
-    algorithm = params.get ("algorithm", MD5)
-
-    H = DIGEST_AUTH_ENCODERS[algorithm]
-    KD = lambda secret, data: H(secret + ":" + data)
-
-    qop = params.get ("qop", None)
-
-    H_A2 = H(_A2(params, method, kwargs))
-
-    if algorithm == MD5_SESS and A1 is not None:
-        H_A1 = H(A1)
-    else:
-        H_A1 = H(_A1(params, password))
-
-    if qop in ("auth", "auth-int"):
-        # If the "qop" value is "auth" or "auth-int":
-        # request-digest  = <"> < KD ( H(A1),     unq(nonce-value)
-        #                              ":" nc-value
-        #                              ":" unq(cnonce-value)
-        #                              ":" unq(qop-value)
-        #                              ":" H(A2)
-        #                      ) <">
-        request = "%s:%s:%s:%s:%s" % (
-            params["nonce"],
-            params["nc"],
-            params["cnonce"],
-            params["qop"],
-            H_A2,
-        )
-    elif qop is None:
-        # If the "qop" directive is not present (this construction is
-        # for compatibility with RFC 2069):
-        # request-digest  =
-        #         <"> < KD ( H(A1), unq(nonce-value) ":" H(A2) ) > <">
-        request = "%s:%s" % (params["nonce"], H_A2)
-
-    return KD(H_A1, request)
-
-def _checkDigestResponse(auth_map, password, method="GET", A1=None, **kwargs):
-    """This function is used to verify the response given by the client when
-    he tries to authenticate.
-    Optional arguments:
-     entity_body - when 'qop' is set to 'auth-int' you MUST provide the
-                   raw data you are going to send to the client (usually the
-                   HTML page.
-     request_uri - the uri from the request line compared with the 'uri'
-                   directive of the authorization map. They must represent
-                   the same resource (unused at this time).
-    """
-
-    if auth_map['realm'] != kwargs.get('realm', None):
-        return False
-
-    response = _computeDigestResponse(auth_map, password, method, A1, **kwargs)
-
-    return response == auth_map["response"]
-
-def _checkBasicResponse (auth_map, password, method='GET', encrypt=None, **kwargs):
-    # Note that the Basic response doesn't provide the realm value so we cannot
-    # test it
-    try:
-        return encrypt(auth_map["password"], auth_map["username"]) == password
-    except TypeError:
-        return encrypt(auth_map["password"]) == password
-
-AUTH_RESPONSES = {
-    "basic": _checkBasicResponse,
-    "digest": _checkDigestResponse,
-}
-
-def checkResponse (auth_map, password, method="GET", encrypt=None, **kwargs):
-    """'checkResponse' compares the auth_map with the password and optionally
-    other arguments that each implementation might need.
-    
-    If the response is of type 'Basic' then the function has the following
-    signature:
-    
-    checkBasicResponse (auth_map, password) -> bool
-    
-    If the response is of type 'Digest' then the function has the following
-    signature:
-    
-    checkDigestResponse (auth_map, password, method = 'GET', A1 = None) -> bool
-    
-    The 'A1' argument is only used in MD5_SESS algorithm based responses.
-    Check md5SessionKey() for more info.
-    """
-    global AUTH_RESPONSES
-    checker = AUTH_RESPONSES[auth_map["auth_scheme"]]
-    return checker (auth_map, password, method=method, encrypt=encrypt, **kwargs)
- 
-
-
-
+"""
+The httpauth module defines functions to implement HTTP Digest Authentication (RFC 2617).
+This has full compliance with 'Digest' and 'Basic' authentication methods. In
+'Digest' it supports both MD5 and MD5-sess algorithms.
+
+Usage:
+
+    First use 'doAuth' to request client authentication for a
+    certain resource. You should send an httplib.UNAUTHORIZED response to the
+    client so it knows it has to authenticate itself.
+    
+    Then use 'parseAuthorization' to retrieve the 'auth_map' used in
+    'checkResponse'.
+
+    To use 'checkResponse' you must have already retrieved the password associated
+    with the 'username' key in the 'auth_map' dict. Then you use the 'checkResponse'
+    function to verify that the password matches the one sent by the client.
+
+SUPPORTED_ALGORITHM - list of supported 'Digest' algorithms
+SUPPORTED_QOP - list of supported 'Digest' 'qop' values.
+"""
+__version__ = 1, 0, 1
+__author__ = "Tiago Cogumbreiro <cogumbreiro@users.sf.net>"
+__credits__ = """
+    Peter van Kampen for his recipe, which implements most of Digest authentication:
+    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302378
+"""
+
+__license__ = """
+Copyright (c) 2005, Tiago Cogumbreiro <cogumbreiro@users.sf.net>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, 
+are permitted provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright notice, 
+      this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright notice, 
+      this list of conditions and the following disclaimer in the documentation 
+      and/or other materials provided with the distribution.
+    * Neither the name of Sylvain Hellegouarch nor the names of his contributors 
+      may be used to endorse or promote products derived from this software 
+      without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE 
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+__all__ = ("digestAuth", "basicAuth", "doAuth", "checkResponse",
+           "parseAuthorization", "SUPPORTED_ALGORITHM", "md5SessionKey",
+           "calculateNonce", "SUPPORTED_QOP")
+
+################################################################################
+try:
+    # Python 2.5+
+    from hashlib import md5
+except ImportError:
+    from md5 import new as md5
+import time
+import base64
+from urllib2 import parse_http_list, parse_keqv_list
+
+MD5 = "MD5"
+MD5_SESS = "MD5-sess"
+AUTH = "auth"
+AUTH_INT = "auth-int"
+
+SUPPORTED_ALGORITHM = (MD5, MD5_SESS)
+SUPPORTED_QOP = (AUTH, AUTH_INT)
+
+################################################################################
+# doAuth
+#
+DIGEST_AUTH_ENCODERS = {
+    MD5: lambda val: md5(val).hexdigest(),
+    MD5_SESS: lambda val: md5(val).hexdigest(),
+#    SHA: lambda val: sha.new (val).hexdigest (),
+}
+
+def calculateNonce (realm, algorithm=MD5):
+    """This is an auxaliary function that calculates 'nonce' value. It is used
+    to handle sessions."""
+
+    global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS
+    assert algorithm in SUPPORTED_ALGORITHM
+
+    try:
+        encoder = DIGEST_AUTH_ENCODERS[algorithm]
+    except KeyError:
+        raise NotImplementedError ("The chosen algorithm (%s) does not have "\
+                                   "an implementation yet" % algorithm)
+
+    return encoder ("%d:%s" % (time.time(), realm))
+
+def digestAuth (realm, algorithm=MD5, nonce=None, qop=AUTH):
+    """Challenges the client for a Digest authentication."""
+    global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS, SUPPORTED_QOP
+    assert algorithm in SUPPORTED_ALGORITHM
+    assert qop in SUPPORTED_QOP
+
+    if nonce is None:
+        nonce = calculateNonce (realm, algorithm)
+
+    return 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
+        realm, nonce, algorithm, qop
+    )
+
+def basicAuth (realm):
+    """Challengenes the client for a Basic authentication."""
+    assert '"' not in realm, "Realms cannot contain the \" (quote) character."
+
+    return 'Basic realm="%s"' % realm
+
+def doAuth (realm):
+    """'doAuth' function returns the challenge string b giving priority over
+    Digest and fallback to Basic authentication when the browser doesn't
+    support the first one.
+    
+    This should be set in the HTTP header under the key 'WWW-Authenticate'."""
+
+    return digestAuth (realm) + " " + basicAuth (realm)
+
+
+################################################################################
+# Parse authorization parameters
+#
+def _parseDigestAuthorization (auth_params):
+    # Convert the auth params to a dict
+    items = parse_http_list(auth_params)
+    params = parse_keqv_list(items)
+
+    # Now validate the params
+
+    # Check for required parameters
+    required = ["username", "realm", "nonce", "uri", "response"]
+    for k in required:
+        if k not in params:
+            return None
+
+    # If qop is sent then cnonce and nc MUST be present
+    if "qop" in params and not ("cnonce" in params \
+                                      and "nc" in params):
+        return None
+
+    # If qop is not sent, neither cnonce nor nc can be present
+    if ("cnonce" in params or "nc" in params) and \
+       "qop" not in params:
+        return None
+
+    return params
+
+
+def _parseBasicAuthorization (auth_params):
+    username, password = base64.decodestring (auth_params).split (":", 1)
+    return {"username": username, "password": password}
+
+AUTH_SCHEMES = {
+    "basic": _parseBasicAuthorization,
+    "digest": _parseDigestAuthorization,
+}
+
+def parseAuthorization (credentials):
+    """parseAuthorization will convert the value of the 'Authorization' key in
+    the HTTP header to a map. If the parsing fails, 'None' is returned.
+    """
+
+    global AUTH_SCHEMES
+
+    auth_scheme, auth_params = credentials.split(" ", 1)
+    auth_scheme = auth_scheme.lower ()
+
+    parser = AUTH_SCHEMES[auth_scheme]
+    params = parser (auth_params)
+
+    if params is None:
+        return
+
+    assert "auth_scheme" not in params
+    params["auth_scheme"] = auth_scheme
+    return params
+
+
+################################################################################
+# Check provided response for a valid password
+#
+def md5SessionKey (params, password):
+    """
+    If the "algorithm" directive's value is "MD5-sess", then A1 
+    [the session key] is calculated only once - on the first request by the
+    client following receipt of a WWW-Authenticate challenge from the server.
+
+    This creates a 'session key' for the authentication of subsequent
+    requests and responses which is different for each "authentication
+    session", thus limiting the amount of material hashed with any one
+    key.
+
+    Because the server need only use the hash of the user
+    credentials in order to create the A1 value, this construction could
+    be used in conjunction with a third party authentication service so
+    that the web server would not need the actual password value.  The
+    specification of such a protocol is beyond the scope of this
+    specification.
+"""
+
+    keys = ("username", "realm", "nonce", "cnonce")
+    params_copy = {}
+    for key in keys:
+        params_copy[key] = params[key]
+
+    params_copy["algorithm"] = MD5_SESS
+    return _A1 (params_copy, password)
+
+def _A1(params, password):
+    algorithm = params.get ("algorithm", MD5)
+    H = DIGEST_AUTH_ENCODERS[algorithm]
+
+    if algorithm == MD5:
+        # If the "algorithm" directive's value is "MD5" or is
+        # unspecified, then A1 is:
+        # A1 = unq(username-value) ":" unq(realm-value) ":" passwd
+        return "%s:%s:%s" % (params["username"], params["realm"], password)
+
+    elif algorithm == MD5_SESS:
+
+        # This is A1 if qop is set
+        # A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
+        #         ":" unq(nonce-value) ":" unq(cnonce-value)
+        h_a1 = H ("%s:%s:%s" % (params["username"], params["realm"], password))
+        return "%s:%s:%s" % (h_a1, params["nonce"], params["cnonce"])
+
+
+def _A2(params, method, kwargs):
+    # If the "qop" directive's value is "auth" or is unspecified, then A2 is:
+    # A2 = Method ":" digest-uri-value
+
+    qop = params.get ("qop", "auth")
+    if qop == "auth":
+        return method + ":" + params["uri"]
+    elif qop == "auth-int":
+        # If the "qop" value is "auth-int", then A2 is:
+        # A2 = Method ":" digest-uri-value ":" H(entity-body)
+        entity_body = kwargs.get ("entity_body", "")
+        H = kwargs["H"]
+
+        return "%s:%s:%s" % (
+            method,
+            params["uri"],
+            H(entity_body)
+        )
+
+    else:
+        raise NotImplementedError ("The 'qop' method is unknown: %s" % qop)
+
+def _computeDigestResponse(auth_map, password, method="GET", A1=None, **kwargs):
+    """
+    Generates a response respecting the algorithm defined in RFC 2617
+    """
+    params = auth_map
+
+    algorithm = params.get ("algorithm", MD5)
+
+    H = DIGEST_AUTH_ENCODERS[algorithm]
+    KD = lambda secret, data: H(secret + ":" + data)
+
+    qop = params.get ("qop", None)
+
+    H_A2 = H(_A2(params, method, kwargs))
+
+    if algorithm == MD5_SESS and A1 is not None:
+        H_A1 = H(A1)
+    else:
+        H_A1 = H(_A1(params, password))
+
+    if qop in ("auth", "auth-int"):
+        # If the "qop" value is "auth" or "auth-int":
+        # request-digest  = <"> < KD ( H(A1),     unq(nonce-value)
+        #                              ":" nc-value
+        #                              ":" unq(cnonce-value)
+        #                              ":" unq(qop-value)
+        #                              ":" H(A2)
+        #                      ) <">
+        request = "%s:%s:%s:%s:%s" % (
+            params["nonce"],
+            params["nc"],
+            params["cnonce"],
+            params["qop"],
+            H_A2,
+        )
+    elif qop is None:
+        # If the "qop" directive is not present (this construction is
+        # for compatibility with RFC 2069):
+        # request-digest  =
+        #         <"> < KD ( H(A1), unq(nonce-value) ":" H(A2) ) > <">
+        request = "%s:%s" % (params["nonce"], H_A2)
+
+    return KD(H_A1, request)
+
+def _checkDigestResponse(auth_map, password, method="GET", A1=None, **kwargs):
+    """This function is used to verify the response given by the client when
+    he tries to authenticate.
+    Optional arguments:
+     entity_body - when 'qop' is set to 'auth-int' you MUST provide the
+                   raw data you are going to send to the client (usually the
+                   HTML page).
+     request_uri - the uri from the request line compared with the 'uri'
+                   directive of the authorization map. They must represent
+                   the same resource (unused at this time).
+    """
+
+    if auth_map['realm'] != kwargs.get('realm', None):
+        return False
+
+    response = _computeDigestResponse(auth_map, password, method, A1, **kwargs)
+
+    return response == auth_map["response"]
+
+def _checkBasicResponse (auth_map, password, method='GET', encrypt=None, **kwargs):
+    # Note that the Basic response doesn't provide the realm value so we cannot
+    # test it
+    try:
+        return encrypt(auth_map["password"], auth_map["username"]) == password
+    except TypeError:
+        return encrypt(auth_map["password"]) == password
+
+AUTH_RESPONSES = {
+    "basic": _checkBasicResponse,
+    "digest": _checkDigestResponse,
+}
+
+def checkResponse (auth_map, password, method="GET", encrypt=None, **kwargs):
+    """'checkResponse' compares the auth_map with the password and optionally
+    other arguments that each implementation might need.
+    
+    If the response is of type 'Basic' then the function has the following
+    signature:
+    
+    checkBasicResponse (auth_map, password) -> bool
+    
+    If the response is of type 'Digest' then the function has the following
+    signature:
+    
+    checkDigestResponse (auth_map, password, method = 'GET', A1 = None) -> bool
+    
+    The 'A1' argument is only used in MD5_SESS algorithm based responses.
+    Check md5SessionKey() for more info.
+    """
+    global AUTH_RESPONSES
+    checker = AUTH_RESPONSES[auth_map["auth_scheme"]]
+    return checker (auth_map, password, method=method, encrypt=encrypt, **kwargs)
+ 
+
+
+
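To tie the pieces above together (not part of the patch), the challenge/parse/check cycle from the module docstring can be sketched as below; the realm, the Authorization header value, and the stored password are placeholders, and the encrypt callable simply compares plain text:

    # A minimal sketch of the flow: doAuth -> parseAuthorization -> checkResponse.
    from cherrypy.lib import httpauth

    realm = "Protected Area"           # placeholder realm
    stored_password = "secret"         # password looked up for the authenticating user

    # 1. Challenge the client: send this as the WWW-Authenticate header of a 401.
    challenge = httpauth.doAuth(realm)

    # 2. Parse the Authorization header of the retried request
    #    (here a canned Basic credential for "user:secret").
    auth_map = httpauth.parseAuthorization("Basic dXNlcjpzZWNyZXQ=")

    # 3. Verify the client's response against the stored password; for Basic the
    #    encrypt callable receives (password, username) and here returns it unchanged.
    ok = httpauth.checkResponse(auth_map, stored_password,
                                method="GET", realm=realm,
                                encrypt=lambda password, username: password)
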
diff --git a/cherrypy/process/win32.py b/cherrypy/process/win32.py
index ad082def1..49a83d402 100644
--- a/cherrypy/process/win32.py
+++ b/cherrypy/process/win32.py
@@ -1,174 +1,174 @@
-"""Windows service. Requires pywin32."""
-
-import os
-import win32api
-import win32con
-import win32event
-import win32service
-import win32serviceutil
-
-from cherrypy.process import wspbus, plugins
-
-
-class ConsoleCtrlHandler(plugins.SimplePlugin):
-    """A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""
-    
-    def __init__(self, bus):
-        self.is_set = False
-        plugins.SimplePlugin.__init__(self, bus)
-    
-    def start(self):
-        if self.is_set:
-            self.bus.log('Handler for console events already set.', level=40)
-            return
-        
-        result = win32api.SetConsoleCtrlHandler(self.handle, 1)
-        if result == 0:
-            self.bus.log('Could not SetConsoleCtrlHandler (error %r)' % 
-                         win32api.GetLastError(), level=40)
-        else:
-            self.bus.log('Set handler for console events.', level=40)
-            self.is_set = True
-    
-    def stop(self):
-        if not self.is_set:
-            self.bus.log('Handler for console events already off.', level=40)
-            return
-        
-        try:
-            result = win32api.SetConsoleCtrlHandler(self.handle, 0)
-        except ValueError:
-            # "ValueError: The object has not been registered"
-            result = 1
-        
-        if result == 0:
-            self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' % 
-                         win32api.GetLastError(), level=40)
-        else:
-            self.bus.log('Removed handler for console events.', level=40)
-            self.is_set = False
-    
-    def handle(self, event):
-        """Handle console control events (like Ctrl-C)."""
-        if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT,
-                     win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT,
-                     win32con.CTRL_CLOSE_EVENT):
-            self.bus.log('Console event %s: shutting down bus' % event)
-            
-            # Remove self immediately so repeated Ctrl-C doesn't re-call it.
-            try:
-                self.stop()
-            except ValueError:
-                pass
-            
-            self.bus.exit()
-            # 'First to return True stops the calls'
-            return 1
-        return 0
-
-
-class Win32Bus(wspbus.Bus):
-    """A Web Site Process Bus implementation for Win32.
-    
-    Instead of time.sleep, this bus blocks using native win32event objects.
-    """
-    
-    def __init__(self):
-        self.events = {}
-        wspbus.Bus.__init__(self)
-    
-    def _get_state_event(self, state):
-        """Return a win32event for the given state (creating it if needed)."""
-        try:
-            return self.events[state]
-        except KeyError:
-            event = win32event.CreateEvent(None, 0, 0,
-                                           "WSPBus %s Event (pid=%r)" % 
-                                           (state.name, os.getpid()))
-            self.events[state] = event
-            return event
-    
-    def _get_state(self):
-        return self._state
-    def _set_state(self, value):
-        self._state = value
-        event = self._get_state_event(value)
-        win32event.PulseEvent(event)
-    state = property(_get_state, _set_state)
-    
-    def wait(self, state, interval=0.1, channel=None):
-        """Wait for the given state(s), KeyboardInterrupt or SystemExit.
-        
-        Since this class uses native win32event objects, the interval
-        argument is ignored.
-        """
-        if isinstance(state, (tuple, list)):
-            # Don't wait for an event that beat us to the punch ;)
-            if self.state not in state:
-                events = tuple([self._get_state_event(s) for s in state])
-                win32event.WaitForMultipleObjects(events, 0, win32event.INFINITE)
-        else:
-            # Don't wait for an event that beat us to the punch ;)
-            if self.state != state:
-                event = self._get_state_event(state)
-                win32event.WaitForSingleObject(event, win32event.INFINITE)
-
-
-class _ControlCodes(dict):
-    """Control codes used to "signal" a service via ControlService.
-    
-    User-defined control codes are in the range 128-255. We generally use
-    the standard Python value for the Linux signal and add 128. Example:
-    
-        >>> signal.SIGUSR1
-        10
-        control_codes['graceful'] = 128 + 10
-    """
-    
-    def key_for(self, obj):
-        """For the given value, return its corresponding key."""
-        for key, val in self.items():
-            if val is obj:
-                return key
-        raise ValueError("The given object could not be found: %r" % obj)
-
-control_codes = _ControlCodes({'graceful': 138})
-
-
-def signal_child(service, command):
-    if command == 'stop':
-        win32serviceutil.StopService(service)
-    elif command == 'restart':
-        win32serviceutil.RestartService(service)
-    else:
-        win32serviceutil.ControlService(service, control_codes[command])
-
-
-class PyWebService(win32serviceutil.ServiceFramework):
-    """Python Web Service."""
-    
-    _svc_name_ = "Python Web Service"
-    _svc_display_name_ = "Python Web Service"
-    _svc_deps_ = None        # sequence of service names on which this depends
-    _exe_name_ = "pywebsvc"
-    _exe_args_ = None        # Default to no arguments
-    
-    # Only exists on Windows 2000 or later, ignored on windows NT
-    _svc_description_ = "Python Web Service"
-    
-    def SvcDoRun(self):
-        from cherrypy import process
-        process.bus.start()
-        process.bus.block()
-    
-    def SvcStop(self):
-        from cherrypy import process
-        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
-        process.bus.exit()
-    
-    def SvcOther(self, control):
-        process.bus.publish(control_codes.key_for(control))
-
-
-if __name__ == '__main__':
-    win32serviceutil.HandleCommandLine(PyWebService)
+"""Windows service. Requires pywin32."""
+
+import os
+import win32api
+import win32con
+import win32event
+import win32service
+import win32serviceutil
+
+from cherrypy.process import wspbus, plugins
+
+
+class ConsoleCtrlHandler(plugins.SimplePlugin):
+    """A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""
+    
+    def __init__(self, bus):
+        self.is_set = False
+        plugins.SimplePlugin.__init__(self, bus)
+    
+    def start(self):
+        if self.is_set:
+            self.bus.log('Handler for console events already set.', level=40)
+            return
+        
+        result = win32api.SetConsoleCtrlHandler(self.handle, 1)
+        if result == 0:
+            self.bus.log('Could not SetConsoleCtrlHandler (error %r)' % 
+                         win32api.GetLastError(), level=40)
+        else:
+            self.bus.log('Set handler for console events.', level=40)
+            self.is_set = True
+    
+    def stop(self):
+        if not self.is_set:
+            self.bus.log('Handler for console events already off.', level=40)
+            return
+        
+        try:
+            result = win32api.SetConsoleCtrlHandler(self.handle, 0)
+        except ValueError:
+            # "ValueError: The object has not been registered"
+            result = 1
+        
+        if result == 0:
+            self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' % 
+                         win32api.GetLastError(), level=40)
+        else:
+            self.bus.log('Removed handler for console events.', level=40)
+            self.is_set = False
+    
+    def handle(self, event):
+        """Handle console control events (like Ctrl-C)."""
+        if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT,
+                     win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT,
+                     win32con.CTRL_CLOSE_EVENT):
+            self.bus.log('Console event %s: shutting down bus' % event)
+            
+            # Remove self immediately so repeated Ctrl-C doesn't re-call it.
+            try:
+                self.stop()
+            except ValueError:
+                pass
+            
+            self.bus.exit()
+            # 'First to return True stops the calls'
+            return 1
+        return 0
+
+
+class Win32Bus(wspbus.Bus):
+    """A Web Site Process Bus implementation for Win32.
+    
+    Instead of time.sleep, this bus blocks using native win32event objects.
+    """
+    
+    def __init__(self):
+        self.events = {}
+        wspbus.Bus.__init__(self)
+    
+    def _get_state_event(self, state):
+        """Return a win32event for the given state (creating it if needed)."""
+        try:
+            return self.events[state]
+        except KeyError:
+            event = win32event.CreateEvent(None, 0, 0,
+                                           "WSPBus %s Event (pid=%r)" % 
+                                           (state.name, os.getpid()))
+            self.events[state] = event
+            return event
+    
+    def _get_state(self):
+        return self._state
+    def _set_state(self, value):
+        self._state = value
+        event = self._get_state_event(value)
+        win32event.PulseEvent(event)
+    state = property(_get_state, _set_state)
+    
+    def wait(self, state, interval=0.1, channel=None):
+        """Wait for the given state(s), KeyboardInterrupt or SystemExit.
+        
+        Since this class uses native win32event objects, the interval
+        argument is ignored.
+        """
+        if isinstance(state, (tuple, list)):
+            # Don't wait for an event that beat us to the punch ;)
+            if self.state not in state:
+                events = tuple([self._get_state_event(s) for s in state])
+                win32event.WaitForMultipleObjects(events, 0, win32event.INFINITE)
+        else:
+            # Don't wait for an event that beat us to the punch ;)
+            if self.state != state:
+                event = self._get_state_event(state)
+                win32event.WaitForSingleObject(event, win32event.INFINITE)
+
+
+class _ControlCodes(dict):
+    """Control codes used to "signal" a service via ControlService.
+    
+    User-defined control codes are in the range 128-255. We generally use
+    the standard Python value for the Linux signal and add 128. Example:
+    
+        >>> signal.SIGUSR1
+        10
+        control_codes['graceful'] = 128 + 10
+    """
+    
+    def key_for(self, obj):
+        """For the given value, return its corresponding key."""
+        for key, val in self.items():
+            if val is obj:
+                return key
+        raise ValueError("The given object could not be found: %r" % obj)
+
+control_codes = _ControlCodes({'graceful': 138})
+
+
+def signal_child(service, command):
+    if command == 'stop':
+        win32serviceutil.StopService(service)
+    elif command == 'restart':
+        win32serviceutil.RestartService(service)
+    else:
+        win32serviceutil.ControlService(service, control_codes[command])
+
+
+class PyWebService(win32serviceutil.ServiceFramework):
+    """Python Web Service."""
+    
+    _svc_name_ = "Python Web Service"
+    _svc_display_name_ = "Python Web Service"
+    _svc_deps_ = None        # sequence of service names on which this depends
+    _exe_name_ = "pywebsvc"
+    _exe_args_ = None        # Default to no arguments
+    
+    # Only exists on Windows 2000 or later, ignored on Windows NT
+    _svc_description_ = "Python Web Service"
+    
+    def SvcDoRun(self):
+        from cherrypy import process
+        process.bus.start()
+        process.bus.block()
+    
+    def SvcStop(self):
+        from cherrypy import process
+        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
+        process.bus.exit()
+    
+    def SvcOther(self, control):
+        from cherrypy import process  # imported locally, as in SvcDoRun/SvcStop
+        process.bus.publish(control_codes.key_for(control))
+
+
+if __name__ == '__main__':
+    win32serviceutil.HandleCommandLine(PyWebService)
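For orientation, here is a minimal sketch (not shipped in this patch) of how the user-defined control code round-trips. The service name "Python Web Service" and the 'graceful' listener are only illustrative, and the running service publishes on cherrypy's global process bus rather than on a locally created one.

    # Any callable subscribed to the 'graceful' channel runs when that channel
    # is published; Win32Bus blocks on win32event objects instead of sleeping.
    bus = Win32Bus()
    bus.subscribe('graceful', lambda: bus.log('graceful restart requested'))

    # An admin script asks the Service Control Manager to deliver code 138;
    # pywin32 routes it to SvcOther(), which maps it back to a channel name:
    #   signal_child("Python Web Service", 'graceful')   # ControlService(..., 138)
    #   SvcOther(138) -> bus.publish(control_codes.key_for(138)) -> 'graceful'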
diff --git a/data/interfaces/default/config_general.tmpl b/data/interfaces/default/config_general.tmpl
index a6b28d703..5ab25dbab 100644
--- a/data/interfaces/default/config_general.tmpl
+++ b/data/interfaces/default/config_general.tmpl
@@ -1,217 +1,217 @@
-#import os.path
-#import sickbeard
-#from sickbeard.common import *
-#from sickbeard import config
-#from sickbeard import metadata
-#from sickbeard.metadata.generic import GenericMetadata
-#set global $title  = "Config - General"
-#set global $header = "General Configuration"
-
-#set global $sbPath="../.."
-
-#set global $topmenu="config"#
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
-
-<script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script>
-
-<div id="config">
-<div id="config-content">
-<h5>All non-absolute folder locations are relative to <span class="path">$sickbeard.DATA_DIR</span></h5>
-
-<form id="configForm" action="saveGeneral" method="post">
-
-            <div id="config-components">
-
-                <div id="core-component-group1" class="component-group clearfix">
-
-                    <div class="component-group-desc">
-                        <h3>Misc</h3>
-                        <p><b>Some options may require a manual restart to take effect.</b></p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-                        <div class="field-pair">
-                            <input type="checkbox" name="launch_browser" id="launch_browser" #if $sickbeard.LAUNCH_BROWSER then "checked=\"checked\"" else ""#/>
-                            <label class="clearfix" for="launch_browser">
-                                <span class="component-title">Launch Browser</span>
-                                <span class="component-desc">Should Sick Beard open its home page when started?</span>
-                            </label>
-                        </div>
-                        
-                        <div class="field-pair">
-                            <input type="checkbox" name="version_notify" id="version_notify" #if $sickbeard.VERSION_NOTIFY then "checked=\"checked\"" else ""#/>
-                            <label class="clearfix" for="version_notify">
-                                <span class="component-title">Check for Update</span>
-                                <span class="component-desc">Show notification about updates for Sick Beard if available.</span>
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">Checking for updates runs on startup and every 12 hours.</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <input type="checkbox" name="display_posters" id="display_posters" #if $sickbeard.DISPLAY_POSTERS then "checked=\"checked\"" else ""#/>
-                            <label class="clearfix" for="display_posters">
-                                <span class="component-title">Display Posters</span>
-                                <span class="component-desc">Display Posters in main view.</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <label class="nocheck clearfix" for="log_dir">
-                                <span class="component-title">Logging Directory</span>
-                                <input type="text" name="log_dir" id="log_dir" value="$sickbeard.LOG_DIR" size="35" />
-                            </label>
-                        </div>
-
-                        <input type="submit" class="btn config_submitter" value="Save Changes" />
-                    </fieldset>
-                </div><!-- /component-group1 //-->
-
-
-                <div id="core-component-group2" class="component-group clearfix">
-
-                    <div class="component-group-desc">
-                        <h3>Web Interface</h3>
-                        <p>It is recommended that you enable a username and password to secure Sick Beard from being tampered with remotely.</p>
-                        <p><b>These options require a manual restart to take effect.</b></p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-                        
-                        <div class="field-pair">
-                            <input type="checkbox" name="web_ipv6" id="web_ipv6" #if $sickbeard.WEB_IPV6 then "checked=\"checked\"" else ""#/>
-                            <label class="clearfix" for="web_ipv6">
-                                <span class="component-title">Listen on IPv6</span>
-                                <span class="component-desc">Allow Sick Beard to bind to any available IPv6 address?</span>
-                            </label>
-                        </div>
-                        
-                        <div class="field-pair">
-                            <input type="checkbox" name="web_log" id="web_log" #if $sickbeard.WEB_LOG then "checked=\"checked\"" else ""#/>
-                            <label class="clearfix" for="web_log">
-                                <span class="component-title">HTTP Logs</span>
-                                <span class="component-desc">Have Sick Beard's web server (cherrypy) generate logs?</span>
-                            </label>
-                        </div>
-                        
-                        <div class="field-pair">
-                            <label class="nocheck clearfix">
-                                <span class="component-title">HTTP Port</span>
-                                <input type="text" name="web_port" value="$sickbeard.WEB_PORT" size="10" />
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">Web port that Sick Beard should listen on (eg. 8081)</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <label class="nocheck clearfix">
-                                <span class="component-title">HTTP Username</span>
-                                <input type="text" name="web_username" value="$sickbeard.WEB_USERNAME" size="35" />
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">Username for authentication (blank for none)</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <label class="nocheck clearfix">
-                                <span class="component-title">HTTP Password</span>
-                                <input type="password" name="web_password" value="$sickbeard.WEB_PASSWORD" size="35" />
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">Password for authentication (blank for none)</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <label class="clearfix">
-                                <input type="checkbox" name="enable_https" class="enabler" id="enable_https" #if $sickbeard.ENABLE_HTTPS then "checked=\"checked\"" else ""#/>
-                                <span class="component-title">Enable HTTPS</span>
-                                <span class="component-desc">Enable accessing the interface from a HTTPS address.</span>
-                            </label>
-                        </div>
-                        
-                        <div id="content_enable_https">
-                            <div class="field-pair">
-                                <label class="nocheck clearfix">
-                                    <span class="component-title">HTTPS Certificate</span>
-                                    <input type="text" name="https_cert" value="$sickbeard.HTTPS_CERT" size="35" />
-                                </label>
-                                <label class="nocheck clearfix">
-                                    <span class="component-title">&nbsp;</span>
-                                    <span class="component-desc">File name or path to HTTPS Certificate.</span>
-                                </label>
-                            </div>
-    
-                            <div class="field-pair">
-                                <label class="nocheck clearfix">
-                                    <span class="component-title">HTTPS Key</span>
-                                    <input type="text" name="https_key" value="$sickbeard.HTTPS_KEY" size="35" />
-                                </label>
-                                <label class="nocheck clearfix">
-                                    <span class="component-title">&nbsp;</span>
-                                    <span class="component-desc">File name or path to HTTPS Key.</span>
-                                </label>
-                            </div>
-                        </div>
-
-                        <input type="submit" class="btn config_submitter" value="Save Changes" />
-                    </fieldset>
-                </div><!-- /component-group2 //-->
-
-                <div id="core-component-group4" class="component-group clearfix">
-
-                    <div class="component-group-desc">
-                        <h3>API</h3>
-                        <p>Allow 3rd party programs to interact with Sick-Beard.</p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-                        <div class="field-pair">
-                            <input type="checkbox" name="use_api" class="enabler" id="use_api" #if $sickbeard.USE_API then "checked=\"checked\"" else ""#/>
-                            <label class="clearfix" for="use_api">
-                                <span class="component-title">Enable API</span>
-                                <span class="component-desc">Allow the use of the Sick-Beard API.</span>
-                            </label>
-                        </div>
-
-                        <div id="content_use_api">
-                        <div class="field-pair">
-                            <label class="nocheck clearfix" for="api_key">
-                                <span class="component-title">API Key</span>
-                                <input type="text" name="api_key" id="api_key" value="$sickbeard.API_KEY" size="35" readonly="readonly" />
-                                <input type="button" class="btn" id="generate_new_apikey" value="Generate">
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">Used to give 3rd party programs limited access to Sick-Beard.</span>
-                            </label>
-                        </div>
-                        </div>
-
-                        <input type="submit" class="btn config_submitter" value="Save Changes" />
-                    </fieldset>
-                </div><!-- /component-group4 //-->
-
-                <div class="component-group-save">
-                    <input type="submit" class="btn config_submitter" value="Save Changes" />
-                </div><br />
-
-            </div><!-- /config-components -->
-
-</form>
-</div></div>
-
-<script type="text/javascript" charset="utf-8">
-<!--
-    jQuery('#log_dir').fileBrowser({ title: 'Select Log Directory' });
-//-->
-</script>
-
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
+#import os.path
+#import sickbeard
+#from sickbeard.common import *
+#from sickbeard import config
+#from sickbeard import metadata
+#from sickbeard.metadata.generic import GenericMetadata
+#set global $title  = "Config - General"
+#set global $header = "General Configuration"
+
+#set global $sbPath="../.."
+
+#set global $topmenu="config"#
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
+
+<script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script>
+
+<div id="config">
+<div id="config-content">
+<h5>All non-absolute folder locations are relative to <span class="path">$sickbeard.DATA_DIR</span></h5>
+
+<form id="configForm" action="saveGeneral" method="post">
+
+            <div id="config-components">
+
+                <div id="core-component-group1" class="component-group clearfix">
+
+                    <div class="component-group-desc">
+                        <h3>Misc</h3>
+                        <p><b>Some options may require a manual restart to take effect.</b></p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+                        <div class="field-pair">
+                            <input type="checkbox" name="launch_browser" id="launch_browser" #if $sickbeard.LAUNCH_BROWSER then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="launch_browser">
+                                <span class="component-title">Launch Browser</span>
+                                <span class="component-desc">Should Sick Beard open its home page when started?</span>
+                            </label>
+                        </div>
+                        
+                        <div class="field-pair">
+                            <input type="checkbox" name="version_notify" id="version_notify" #if $sickbeard.VERSION_NOTIFY then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="version_notify">
+                                <span class="component-title">Check for Update</span>
+                                <span class="component-desc">Show a notification when a Sick Beard update is available.</span>
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">Checking for updates runs on startup and every 12 hours.</span>
+                            </label>
+                        </div>
+
+                        <div class="field-pair">
+                            <input type="checkbox" name="display_posters" id="display_posters" #if $sickbeard.DISPLAY_POSTERS then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="display_posters">
+                                <span class="component-title">Display Posters</span>
+                                <span class="component-desc">Display Posters in main view.</span>
+                            </label>
+                        </div>
+
+                        <div class="field-pair">
+                            <label class="nocheck clearfix" for="log_dir">
+                                <span class="component-title">Logging Directory</span>
+                                <input type="text" name="log_dir" id="log_dir" value="$sickbeard.LOG_DIR" size="35" />
+                            </label>
+                        </div>
+
+                        <input type="submit" class="btn config_submitter" value="Save Changes" />
+                    </fieldset>
+                </div><!-- /component-group1 //-->
+
+
+                <div id="core-component-group2" class="component-group clearfix">
+
+                    <div class="component-group-desc">
+                        <h3>Web Interface</h3>
+                        <p>It is recommended that you enable a username and password to secure Sick Beard from being tampered with remotely.</p>
+                        <p><b>These options require a manual restart to take effect.</b></p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+                        
+                        <div class="field-pair">
+                            <input type="checkbox" name="web_ipv6" id="web_ipv6" #if $sickbeard.WEB_IPV6 then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="web_ipv6">
+                                <span class="component-title">Listen on IPv6</span>
+                                <span class="component-desc">Allow Sick Beard to bind to any available IPv6 address?</span>
+                            </label>
+                        </div>
+                        
+                        <div class="field-pair">
+                            <input type="checkbox" name="web_log" id="web_log" #if $sickbeard.WEB_LOG then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="web_log">
+                                <span class="component-title">HTTP Logs</span>
+                                <span class="component-desc">Have Sick Beard's web server (cherrypy) generate logs?</span>
+                            </label>
+                        </div>
+                        
+                        <div class="field-pair">
+                            <label class="nocheck clearfix">
+                                <span class="component-title">HTTP Port</span>
+                                <input type="text" name="web_port" value="$sickbeard.WEB_PORT" size="10" />
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">Web port that Sick Beard should listen on (e.g. 8081)</span>
+                            </label>
+                        </div>
+
+                        <div class="field-pair">
+                            <label class="nocheck clearfix">
+                                <span class="component-title">HTTP Username</span>
+                                <input type="text" name="web_username" value="$sickbeard.WEB_USERNAME" size="35" />
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">Username for authentication (blank for none)</span>
+                            </label>
+                        </div>
+
+                        <div class="field-pair">
+                            <label class="nocheck clearfix">
+                                <span class="component-title">HTTP Password</span>
+                                <input type="password" name="web_password" value="$sickbeard.WEB_PASSWORD" size="35" />
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">Password for authentication (blank for none)</span>
+                            </label>
+                        </div>
+
+                        <div class="field-pair">
+                            <label class="clearfix">
+                                <input type="checkbox" name="enable_https" class="enabler" id="enable_https" #if $sickbeard.ENABLE_HTTPS then "checked=\"checked\"" else ""#/>
+                                <span class="component-title">Enable HTTPS</span>
+                                <span class="component-desc">Enable accessing the interface from an HTTPS address.</span>
+                            </label>
+                        </div>
+                        
+                        <div id="content_enable_https">
+                            <div class="field-pair">
+                                <label class="nocheck clearfix">
+                                    <span class="component-title">HTTPS Certificate</span>
+                                    <input type="text" name="https_cert" value="$sickbeard.HTTPS_CERT" size="35" />
+                                </label>
+                                <label class="nocheck clearfix">
+                                    <span class="component-title">&nbsp;</span>
+                                    <span class="component-desc">File name or path to HTTPS Certificate.</span>
+                                </label>
+                            </div>
+    
+                            <div class="field-pair">
+                                <label class="nocheck clearfix">
+                                    <span class="component-title">HTTPS Key</span>
+                                    <input type="text" name="https_key" value="$sickbeard.HTTPS_KEY" size="35" />
+                                </label>
+                                <label class="nocheck clearfix">
+                                    <span class="component-title">&nbsp;</span>
+                                    <span class="component-desc">File name or path to HTTPS Key.</span>
+                                </label>
+                            </div>
+                        </div>
+
+                        <input type="submit" class="btn config_submitter" value="Save Changes" />
+                    </fieldset>
+                </div><!-- /component-group2 //-->
+
+                <div id="core-component-group4" class="component-group clearfix">
+
+                    <div class="component-group-desc">
+                        <h3>API</h3>
+                        <p>Allow 3rd party programs to interact with Sick-Beard.</p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+                        <div class="field-pair">
+                            <input type="checkbox" name="use_api" class="enabler" id="use_api" #if $sickbeard.USE_API then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="use_api">
+                                <span class="component-title">Enable API</span>
+                                <span class="component-desc">Allow the use of the Sick-Beard API.</span>
+                            </label>
+                        </div>
+
+                        <div id="content_use_api">
+                        <div class="field-pair">
+                            <label class="nocheck clearfix" for="api_key">
+                                <span class="component-title">API Key</span>
+                                <input type="text" name="api_key" id="api_key" value="$sickbeard.API_KEY" size="35" readonly="readonly" />
+                                <input type="button" class="btn" id="generate_new_apikey" value="Generate">
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">Used to give 3rd party programs limited access to Sick-Beard.</span>
+                            </label>
+                        </div>
+                        </div>
+
+                        <input type="submit" class="btn config_submitter" value="Save Changes" />
+                    </fieldset>
+                </div><!-- /component-group4 //-->
+
+                <div class="component-group-save">
+                    <input type="submit" class="btn config_submitter" value="Save Changes" />
+                </div><br />
+
+            </div><!-- /config-components -->
+
+</form>
+</div></div>
+
+<script type="text/javascript" charset="utf-8">
+<!--
+    jQuery('#log_dir').fileBrowser({ title: 'Select Log Directory' });
+//-->
+</script>
+
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
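Server-side, the form posts to a saveGeneral handler. The reduced sketch below is an assumption for illustration only (the real SickBeard handler accepts many more fields and persists them through its config module); it shows how the checkbox and text inputs above arrive, since unchecked boxes are simply omitted from the POST body.

    import cherrypy

    class ConfigGeneral(object):
        @cherrypy.expose
        def saveGeneral(self, launch_browser=None, version_notify=None,
                        display_posters=None, log_dir=None, web_port=None,
                        web_username=None, web_password=None, enable_https=None,
                        https_cert=None, https_key=None, use_api=None,
                        api_key=None, **kwargs):
            # Checkboxes are sent as "on" only when ticked.
            launch_browser = (launch_browser == "on")
            enable_https = (enable_https == "on")
            # ... validate, store the values, then return to the config page ...
            raise cherrypy.HTTPRedirect("config/general/")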
diff --git a/data/interfaces/default/config_postProcessing.tmpl b/data/interfaces/default/config_postProcessing.tmpl
index ef60eef6a..787c8b495 100644
--- a/data/interfaces/default/config_postProcessing.tmpl
+++ b/data/interfaces/default/config_postProcessing.tmpl
@@ -1,577 +1,577 @@
-#import os.path
-#import sickbeard
-#from sickbeard.common import *
-#from sickbeard import config
-#from sickbeard import metadata
-#from sickbeard.metadata.generic import GenericMetadata
-#from sickbeard import naming
-
-#set global $title  = "Config - Post Processing"
-#set global $header = "Post Processing"
-
-#set global $sbPath="../.."
-
-#set global $topmenu="config"#
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
-
-<script type="text/javascript" src="$sbRoot/js/configPostProcessing.js?$sbPID"></script>
-<script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script>
-
-<div id="config">
-<div id="config-content">
-<h5>All non-absolute folder locations are relative to <span class="path">$sickbeard.DATA_DIR</span></h5>
-
-<form id="configForms" action="savePostProcessing" method="post">
-
-            <div id="config-components">
-
-                <div class="component-group clearfix">
-
-                    <div class="component-group-desc">
-                        <h3>NZB Post-Processing</h3>
-                        <p>Settings that dictate how Sick Beard should process completed NZB downloads.</p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-                        <div class="field-pair">
-                            <label class="nocheck clearfix" for="tv_download_dir">
-                                <span class="component-title">TV Download Dir</span>
-                                <input type="text" name="tv_download_dir" id="tv_download_dir" value="$sickbeard.TV_DOWNLOAD_DIR" size="35" />
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">The folder where your download client puts TV downloads.</span>
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc"><b>NOTE:</b> Use only if not using SABnzbd+ post processing.</span>
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">Or if SABnzbd+ and Sick Beard are on different PCs.</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <input type="checkbox" name="process_automatically" id="process_automatically" #if $sickbeard.PROCESS_AUTOMATICALLY == True then "checked=\"checked\"" else ""# />
-                            <label class="clearfix" for="process_automatically">
-                                <span class="component-title">Scan and Process</span>
-                                <span class="component-desc">Scan and post-process any files in your <i>TV Download Dir</i>?</span>
-                            </label>
-                            <label class="nocheck clearfix" for="process_automatically">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc"><b>NOTE:</b> Do not use if you use sabToSickbeard w/ SABnzbd+!</span>
-                                <span class="component-desc"><b>You must restart Sickbeard once everything is configured to start the thread</b></span>
-                            </label>
-                        </div>
-
-                        <div class="clearfix"></div>
-                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
-
-                    </fieldset>
-                </div><!-- /component-pp //-->
-
-                <div class="component-group clearfix">
-
-                    <div class="component-group-desc">
-                        <h3>Torrent Post-Processing</h3>
-                        <p>Settings that dictate how Sick Beard should process completed torrent downloads.</p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-                        <div class="field-pair">
-                            <label class="nocheck clearfix" for="torrent_download_dir">
-                                <span class="component-title">Torrent Download Dir</span>
-                                <input type="text" name="torrent_download_dir" id="torrent_download_dir" value="$sickbeard.TORRENT_DOWNLOAD_DIR" size="35" />
-                            </label>
-                            <label class="nocheck clearfix">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc">The folder where your torrent client puts finished downloads.</span>
-                            </label>
-                        </div>
-                         <div class="field-pair">
-                            <input type="checkbox" name="process_automatically_torrent" id="process_automatically_torrent" #if $sickbeard.PROCESS_AUTOMATICALLY_TORRENT == True then "checked=\"checked\"" else ""# />
-                            <label class="clearfix" for="process_automatically">
-                                <span class="component-title">Scan and Process</span>
-                                <span class="component-desc">Scan and post-process any files in your <i>Torrent Download Dir</i>?</span>
-                            </label>
-                            <label class="nocheck clearfix" for="process_automatically">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc"><b>NOTE:</b> Do not use if your Torrent Download Dir is the same as TV Download Dir</span>
-                                <span class="component-desc"><b>You must restart Sickbeard once everything is configured to start the thread</b></span>
-                            </label>
-                        </div>
-
-                        <div class="clearfix"></div>
-                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
-
-                    </fieldset>
-                </div><!-- /component-pp //-->
-
-                <div class="component-group clearfix">
-
-                    <div class="component-group-desc">
-                        <h3>Common Post-Processing options</h3>
-                        <p>Settings that dictate how Sick Beard should process completed downloads.</p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-
-                        <div class="field-pair">
-                            <input type="checkbox" name="keep_processed_dir" id="keep_processed_dir" #if $sickbeard.KEEP_PROCESSED_DIR == True then "checked=\"checked\"" else ""# />
-                            <label class="clearfix" for="keep_processed_dir">
-                                <span class="component-title">Keep Original Files</span>
-                                <span class="component-desc">Keep original files after they've been processed?</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <input type="checkbox" name="move_associated_files" id="move_associated_files" #if $sickbeard.MOVE_ASSOCIATED_FILES == True then "checked=\"checked\"" else ""# />
-                            <label class="clearfix" for="move_associated_files">
-                                <span class="component-title">Move Associated Files</span>
-                                <span class="component-desc">Move srr/srt/sfv/etc files with the episode when processed?</span>
-                            </label>
-                            <label class="nocheck clearfix" for="move_associated_files">
-                                <span class="component-title">&nbsp;</span>
-                                <span class="component-desc"><b>NOTE:</b> <i>.nfo</i> will be renamed to <i>.nfo-orig</i> when moved.</span>
-                            </label>
-                        </div>
-
-                        <div class="field-pair">
-                            <input type="checkbox" name="rename_episodes" id="rename_episodes" #if $sickbeard.RENAME_EPISODES == True then "checked=\"checked\"" else ""# />
-                            <label class="clearfix" for="rename_episodes">
-                                <span class="component-title">Rename Episodes</span>
-                                <span class="component-desc">Rename episode using the naming settings below?</span>
-                            </label>
-                        </div>
-
-                        <div class="clearfix"></div>
-                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
-
-                    </fieldset>
-                </div><!-- /component-pp //-->
-                
-                <div class="component-group clearfix">
-                    <div class="component-group-desc">
-                        <h3>Naming</h3>
-                        <p>How Sick Beard will name and sort your episodes.</p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-
-                        <div class="field-pair">
-                            <label class="nocheck clearfix" for="name_presets">
-                                <span class="component-title">Name Pattern:</span>
-                                <span class="component-desc">
-                                    <select id="name_presets">
-                                        #set is_custom = True
-                                        #for $cur_preset in $naming.name_presets:
-                                            #set $tmp = $naming.test_name($cur_preset)
-                                            #if $cur_preset == $sickbeard.NAMING_PATTERN:
-                                                #set is_custom = False
-                                            #end if
-                                            <option id="$cur_preset" #if $cur_preset == $sickbeard.NAMING_PATTERN then "selected=\"selected\"" else ""#>$os.path.join($tmp['dir'], $tmp['name'])</option>
-                                        #end for
-                                        <option id="$sickbeard.NAMING_PATTERN" #if $is_custom then "selected=\"selected\"" else ""#>Custom...</option>
-                                    </select>
-                                </span>
-                            </label>
-                        </div>
-
-                        <div id="naming_custom">
-                            <div class="field-pair clearfix" style="padding-top: 0;">
-                                <label class="nocheck clearfix">
-                                    <span class="component-title">
-                                        &nbsp;
-                                    </span>
-                                    <span class="component-desc">
-                                        <input type="text" size="45" name="naming_pattern" id="naming_pattern" class="custom-pattern" value="$sickbeard.NAMING_PATTERN" />
-                                        <img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_key" title="Toggle Naming Legend" style="padding: 6px 0 0 3px;" />
-                                    </span>
-                                </label>
-                            </div>
-
-                            <div id="naming_key" class="nocheck clearfix" style="display: none;">
-                                  <table class="Key">
-                                    <thead>
-                                        <tr>
-                                          <th class="align-right">Meaning</th>
-                                          <th>Pattern</th>
-                                          <th width="60%">Result</th>
-                                        </tr>
-                                    </thead>
-                                    <tfoot>
-                                        <tr>
-                                          <th colspan="3">Use lower case if you want lower case names (eg. %sn, %e.n, %q_n etc)</th>
-                                        </tr>
-                                    </tfoot>
-                                    <tbody>
-                                        <tr>
-                                          <td class="align-right"><b>Show Name:</b></td>
-                                          <td>%SN</td>
-                                          <td>Show Name</td>
-                                        </tr>
-                                        <tr class="even">
-                                          <td>&nbsp;</td>
-                                          <td>%S.N</td>
-                                          <td>Show.Name</td>
-                                        </tr>
-                                        <tr>
-                                          <td>&nbsp;</td>
-                                          <td>%S_N</td>
-                                          <td>Show_Name</td>
-                                        </tr>
-                                        <tr class="even">
-                                          <td class="align-right"><b>Season Number:</b></td>
-                                          <td>%S</td>
-                                          <td>2</td>
-                                        </tr>
-                                        <tr>
-                                          <td>&nbsp;</td>
-                                          <td>%0S</td>
-                                          <td>02</td>
-                                        </tr>
-                                        <tr class="even">
-                                          <td class="align-right"><b>Episode Number:</b></td>
-                                          <td>%E</td>
-                                          <td>3</td>
-                                        </tr>
-                                        <tr>
-                                          <td>&nbsp;</td>
-                                          <td>%0E</td>
-                                          <td>03</td>
-                                        </tr>
-                                        <tr class="even">
-                                          <td class="align-right"><b>Episode Name:</b></td>
-                                          <td>%EN</td>
-                                          <td>Episode Name</td>
-                                        </tr>
-                                        <tr>
-                                          <td>&nbsp;</td>
-                                          <td>%E.N</td>
-                                          <td>Episode.Name</td>
-                                        </tr>
-                                        <tr class="even">
-                                          <td>&nbsp;</td>
-                                          <td>%E_N</td>
-                                          <td>Episode_Name</td>
-                                        </tr>
-                                        <tr>
-                                          <td class="align-right"><b>Quality:</b></td>
-                                          <td>%QN</td>
-                                          <td>720p BluRay</td>
-                                        </tr>
-                                        <tr class="even">
-                                          <td>&nbsp;</td>
-                                          <td>%Q.N</td>
-                                          <td>720p.BluRay</td>
-                                        </tr>
-                                        <tr>
-                                          <td>&nbsp;</td>
-                                          <td>%Q_N</td>
-                                          <td>720p_BluRay</td>
-                                        </tr>
-                                        <tr class="even">
-                                          <td class="align-right"><i class="icon-info-sign" title="Multi-EP style is ignored"></i> <b>Release Name:</b></td>
-                                          <td>%RN</td>
-                                          <td>Show.Name.S02E03.HDTV.XviD-RLSGROUP</td>
-                                        </tr>
-                                        <tr>
-                                          <td class="align-right"><i class="icon-info-sign" title="'SiCKBEARD' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
-                                          <td>%RG</td>
-                                          <td>RLSGROUP</td>
-                                        </tr>
-                                    </tbody>
-                                  </table>
-                                  <br/>
-                            </div>
-                        </div>
-
-                        <div class="field-pair">
-                            <label class="nocheck clearfix" for="naming_multi_ep">
-                                <span class="component-title">Multi-Episode Style:</span>
-                                <span class="component-desc">
-                                    <select id="naming_multi_ep" name="naming_multi_ep">
-                                    #for $cur_multi_ep in sorted($multiEpStrings.items(), key=lambda x: x[1]):
-                                        <option value="$cur_multi_ep[0]" #if $cur_multi_ep[0] == $sickbeard.NAMING_MULTI_EP then "selected=\"selected\" class=\"selected\"" else ""#>$cur_multi_ep[1]</option>
-                                    #end for
-                                    </select>
-                                </span>
-                            </label>
-                        </div>
-
-                        <div id="naming_example_div">
-                            <h3>Sample:</h3>
-                            <div class="example">
-                                <span class="jumbo" id="naming_example">&nbsp;</span>
-                            </div>
-                            <br/>
-                        </div>
-
-                        <div id="naming_example_multi_div">
-                            <h3>Multi-EP sample:</h3>
-                            <div class="example">
-                                <span class="jumbo" id="naming_example_multi">&nbsp;</span>
-                            </div>
-                            <br/><br />
-                        </div>
-
-                        <div class="field-pair clearfix">
-                            <input type="checkbox" class="enabler" id="naming_custom_abd" name="naming_custom_abd" #if $sickbeard.NAMING_CUSTOM_ABD then "checked=\"checked\"" else ""#/>
-                            <label class="clearfix" for="naming_custom_abd">
-                                <span class="component-title">Custom Air-By-Date</span>
-                                <span class="component-desc">Name Air-By-Date shows differently than regular shows?</span>
-                            </label>
-                        </div>
-
-                        <div id="content_naming_custom_abd">
-                            <div class="field-pair">
-                                <label class="nocheck clearfix" for="name_abd_presets">
-                                    <span class="component-title">Name Pattern:</span>
-                                    <span class="component-desc">
-                                        <select id="name_abd_presets">
-                                            #set is_abd_custom = True
-                                            #for $cur_preset in $naming.name_abd_presets:
-                                                #set $tmp = $naming.test_name($cur_preset)
-                                                #if $cur_preset == $sickbeard.NAMING_ABD_PATTERN:
-                                                    #set is_abd_custom = False
-                                                #end if
-                                                <option id="$cur_preset" #if $cur_preset == $sickbeard.NAMING_ABD_PATTERN then "selected=\"selected\"" else ""#>$os.path.join($tmp['dir'], $tmp['name'])</option>
-                                            #end for
-                                            <option id="$sickbeard.NAMING_ABD_PATTERN" #if $is_abd_custom then "selected=\"selected\"" else ""#>Custom...</option>
-                                        </select>
-                                    </span>
-                                </label>
-                            </div>
-
-                            <div id="naming_abd_custom">
-                                <div class="field-pair clearfix" style="padding-top: 0;">
-                                    <label class="nocheck clearfix">
-                                        <span class="component-title">
-                                            &nbsp;
-                                        </span>
-                                        <span class="component-desc">
-                                            <input type="text" size="45" name="naming_abd_pattern" id="naming_abd_pattern" class="custom-pattern" value="$sickbeard.NAMING_ABD_PATTERN" />
-                                            <img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_abd_key" title="Toggle ABD Naming Legend" style="padding: 6px 0 0 3px;" />
-                                        </span>
-                                    </label>
-                                </div>
-
-                                <div id="naming_abd_key" class="nocheck clearfix" style="display: none;">
-                                      <table class="Key">
-                                        <thead>
-                                            <tr>
-                                              <th class="align-right">Meaning</th>
-                                              <th>Pattern</th>
-                                              <th width="60%">Result</th>
-                                            </tr>
-                                        </thead>
-                                        <tfoot>
-                                            <tr>
-                                              <th colspan="3">Use lower case if you want lower case names (eg. %sn, %e.n, %q_n etc)</th>
-                                            </tr>
-                                        </tfoot>
-                                        <tbody>
-                                            <tr>
-                                              <td class="align-right"><b>Show Name:</b></td>
-                                              <td>%SN</td>
-                                              <td>Show Name</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td>&nbsp;</td>
-                                              <td>%S.N</td>
-                                              <td>Show.Name</td>
-                                            </tr>
-                                            <tr>
-                                              <td>&nbsp;</td>
-                                              <td>%S_N</td>
-                                              <td>Show_Name</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td class="align-right"><b>Air-By-Date:</b></td>
-                                              <td>%AD</td>
-                                              <td>2010 03 09</td>
-                                            </tr>
-                                            <tr>
-                                              <td>&nbsp;</td>
-                                              <td>%A.D</td>
-                                              <td>2010.03.09</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td>&nbsp;</td>
-                                              <td>%A_D</td>
-                                              <td>2010_03_09</td>
-                                            </tr>
-                                            <tr>
-                                              <td>&nbsp;</td>
-                                              <td>%A-D</td>
-                                              <td>2010-03-09</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td class="align-right"><b>Episode Name:</b></td>
-                                              <td>%EN</td>
-                                              <td>Episode Name</td>
-                                            </tr>
-                                            <tr>
-                                              <td>&nbsp;</td>
-                                              <td>%E.N</td>
-                                              <td>Episode.Name</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td>&nbsp;</td>
-                                              <td>%E_N</td>
-                                              <td>Episode_Name</td>
-                                            </tr>
-                                            <tr>
-                                              <td class="align-right"><b>Quality:</b></td>
-                                              <td>%QN</td>
-                                              <td>720p BluRay</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td>&nbsp;</td>
-                                              <td>%Q.N</td>
-                                              <td>720p.BluRay</td>
-                                            </tr>
-                                            <tr>
-                                              <td>&nbsp;</td>
-                                              <td>%Q_N</td>
-                                              <td>720p_BluRay</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td class="align-right"><b>Year:</b></td>
-                                              <td>%Y</td>
-                                              <td>2010</td>
-                                            </tr>
-                                            <tr>
-                                              <td class="align-right"><b>Month:</b></td>
-                                              <td>%M</td>
-                                              <td>3</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td class="align-right">&nbsp;</td>
-                                              <td>%0M</td>
-                                              <td>03</td>
-                                            </tr>
-                                            <tr>
-                                              <td class="align-right"><b>Day:</b></td>
-                                              <td>%D</td>
-                                              <td>9</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td class="align-right">&nbsp;</td>
-                                              <td>%0D</td>
-                                              <td>09</td>
-                                            </tr>
-                                            <tr>
-                                              <td class="align-right"><i class="icon-info-sign" title="Multi-EP style is ignored"></i> <b>Release Name:</b></td>
-                                              <td>%RN</td>
-                                              <td>Show.Name.2010.03.09.HDTV.XviD-RLSGROUP</td>
-                                            </tr>
-                                            <tr class="even">
-                                              <td class="align-right"><i class="icon-info-sign" title="'SiCKBEARD' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
-                                              <td>%RG</td>
-                                              <td>RLSGROUP</td>
-                                            </tr>
-                                        </tbody>
-                                      </table>
-                                      <br/>
-                                </div>
-                            </div><!-- /naming_abd_custom -->
-
-                            <div id="naming_abd_example_div">
-                                <h3>Sample:</h3>
-                                <div class="example">
-                                    <span class="jumbo" id="naming_abd_example">&nbsp;</span>
-                                </div>
-                                <br/>
-                            </div>
-
-                        </div><!-- /naming_abd_different -->
-
-                        <div class="clearfix"></div>
-                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
-
-                    </fieldset>
-                </div><!-- /component-naming //-->
-
-                <div class="component-group clearfix">
-
-                    <div class="component-group-desc">
-                        <h3>Metadata</h3>
-                        <p>The data associated to the data. These are files associated to a TV show in the form of images and text that, when supported, will enhance the viewing experience.</p>
-                    </div>
-
-                    <fieldset class="component-group-list">
-                        <div class="field-pair clearfix">
-                            <label class="clearfix">
-                                <span class="component-title jumbo">Metadata Type:</span>
-                                <span class="component-desc">
-                                    #set $m_dict = $metadata.get_metadata_generator_dict()
-                                    <select id="metadataType" class="input-medium" >
-                                    #for ($cur_name, $cur_generator) in $m_dict.items():
-                                        <option value="$GenericMetadata.makeID($cur_name)">$cur_name</option>
-                                    #end for
-                                    </select>
-                                </span>
-                            </label>
-                            <span>Toggle the metadata options that you wish to be created. <b>Multiple targets may be used.</b></span>
-                        </div>
-
-#for ($cur_name, $cur_generator) in $m_dict.items(): 
-#set $cur_metadata_inst = $sickbeard.metadata_provider_dict[$cur_generator.name]
-#set $cur_id = $GenericMetadata.makeID($cur_name)
-<div class="metadataDiv clearfix" id="$cur_id">
-    <div class="metadata-options-wrapper">
-        <h4>Create:</h4>
-        <div class="metadata-options">
-            <label for="${cur_id}_show_metadata" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_show_metadata" #if $cur_metadata_inst.show_metadata then "checked=\"checked\"" else ""#/>&nbsp;Show Metadata</label>
-            <label for="${cur_id}_episode_metadata" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_episode_metadata" #if $cur_metadata_inst.episode_metadata then "checked=\"checked\"" else ""#/>&nbsp;Episode Metadata</label>
-            <label for="${cur_id}_fanart" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_fanart" #if $cur_metadata_inst.fanart then "checked=\"checked\"" else ""#/>&nbsp;Show Fanart Image</label>
-            <label for="${cur_id}_poster" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_poster" #if $cur_metadata_inst.poster then "checked=\"checked\"" else ""#/>&nbsp;Show Folder Image</label>
-            <label for="${cur_id}_episode_thumbnails" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_episode_thumbnails" #if $cur_metadata_inst.episode_thumbnails then "checked=\"checked\"" else ""#/>&nbsp;Episode Thumbnail</label>
-            <label for="${cur_id}_season_thumbnails" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_season_thumbnails" #if $cur_metadata_inst.season_thumbnails then "checked=\"checked\"" else ""#/>&nbsp;Season Thumbnail</label>
-        </div>
-    </div>
-    <div class="metadata-example-wrapper">
-        <h4>Results:</h4>
-        <div class="metadata-example">
-            <label for="${cur_id}_show_metadata"><span id="${cur_id}_eg_show_metadata">$cur_metadata_inst.eg_show_metadata</span></label>
-            <label for="${cur_id}_episode_metadata"><span id="${cur_id}_eg_episode_metadata">$cur_metadata_inst.eg_episode_metadata</span></label>
-            <label for="${cur_id}_fanart"><span id="${cur_id}_eg_fanart">$cur_metadata_inst.eg_fanart</span></label>
-            <label for="${cur_id}_poster"><span id="${cur_id}_eg_poster">$cur_metadata_inst.eg_poster</span></label>
-            <label for="${cur_id}_episode_thumbnails"><span id="${cur_id}_eg_episode_thumbnails">$cur_metadata_inst.eg_episode_thumbnails</span></label>
-            <label for="${cur_id}_season_thumbnails"><span id="${cur_id}_eg_season_thumbnails">$cur_metadata_inst.eg_season_thumbnails</span></label>
-        </div>
-    </div>
-
-    <input type="hidden" name="${cur_id}_data" id="${cur_id}_data" value="$cur_metadata_inst.get_config()" />
-</div>
-#end for
-
-                        <div class="field-pair clearfix">
-                            <input type="checkbox" name="use_banner" id="use_banner" #if $sickbeard.USE_BANNER then "checked=checked" else ""#/>
-                            <label class="clearfix" for="use_banner">
-                                <span class="component-title">Use Banners</span>
-                                <span class="component-desc">Use banners instead of posters for 'Show Folder Image'</span>
-                            </label>
-                        </div>
-
-                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
-
-                    </fieldset>
-                </div><!-- /component-metadata //-->
-
-            <br/><input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
-            </div><!-- /config-components -->
-
-</form>
-</div></div>
-
-<script type="text/javascript" charset="utf-8">
-<!--
-    jQuery('#tv_download_dir').fileBrowser({ title: 'Select TV Download Directory' });
-    jQuery('#torrent_download_dir').fileBrowser({ title: 'Select Torrent finished downloads Directory' });
-//-->
-</script>
-
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
+#import os.path
+#import sickbeard
+#from sickbeard.common import *
+#from sickbeard import config
+#from sickbeard import metadata
+#from sickbeard.metadata.generic import GenericMetadata
+#from sickbeard import naming
+
+#set global $title  = "Config - Post Processing"
+#set global $header = "Post Processing"
+
+#set global $sbPath="../.."
+
+#set global $topmenu="config"#
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
+
+<script type="text/javascript" src="$sbRoot/js/configPostProcessing.js?$sbPID"></script>
+<script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script>
+
+<div id="config">
+<div id="config-content">
+<h5>All non-absolute folder locations are relative to <span class="path">$sickbeard.DATA_DIR</span></h5>
+
+<form id="configForms" action="savePostProcessing" method="post">
+
+            <div id="config-components">
+
+                <div class="component-group clearfix">
+
+                    <div class="component-group-desc">
+                        <h3>NZB Post-Processing</h3>
+                        <p>Settings that dictate how Sick Beard should process completed NZB downloads.</p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+                        <div class="field-pair">
+                            <label class="nocheck clearfix" for="tv_download_dir">
+                                <span class="component-title">TV Download Dir</span>
+                                <input type="text" name="tv_download_dir" id="tv_download_dir" value="$sickbeard.TV_DOWNLOAD_DIR" size="35" />
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">The folder where your download client puts TV downloads.</span>
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc"><b>NOTE:</b> Use this only if you are not using SABnzbd+ post-processing.</span>
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">Or if SABnzbd+ and Sick Beard are on different PCs.</span>
+                            </label>
+                        </div>
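+                        <!-- Editorial illustration only (not a shipped default): on a typical setup this directory
+                             might be something like "/home/user/downloads/complete/tv" (Linux) or
+                             "D:\Downloads\TV" (Windows); both paths are assumptions given for illustration. -->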
+
+                        <div class="field-pair">
+                            <input type="checkbox" name="process_automatically" id="process_automatically" #if $sickbeard.PROCESS_AUTOMATICALLY == True then "checked=\"checked\"" else ""# />
+                            <label class="clearfix" for="process_automatically">
+                                <span class="component-title">Scan and Process</span>
+                                <span class="component-desc">Scan and post-process any files in your <i>TV Download Dir</i>?</span>
+                            </label>
+                            <label class="nocheck clearfix" for="process_automatically">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc"><b>NOTE:</b> Do not use this if you use sabToSickbeard with SABnzbd+!</span>
+                                <span class="component-desc"><b>You must restart Sick Beard once everything is configured for the processing thread to start.</b></span>
+                            </label>
+                        </div>
+
+                        <div class="clearfix"></div>
+                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
+
+                    </fieldset>
+                </div><!-- /component-pp //-->
+
+                <div class="component-group clearfix">
+
+                    <div class="component-group-desc">
+                        <h3>Torrent Post-Processing</h3>
+                        <p>Settings that dictate how Sick Beard should process completed torrent downloads.</p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+                        <div class="field-pair">
+                            <label class="nocheck clearfix" for="torrent_download_dir">
+                                <span class="component-title">Torrent Download Dir</span>
+                                <input type="text" name="torrent_download_dir" id="torrent_download_dir" value="$sickbeard.TORRENT_DOWNLOAD_DIR" size="35" />
+                            </label>
+                            <label class="nocheck clearfix">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc">The folder where your torrent client puts finished downloads.</span>
+                            </label>
+                        </div>
+                         <div class="field-pair">
+                            <input type="checkbox" name="process_automatically_torrent" id="process_automatically_torrent" #if $sickbeard.PROCESS_AUTOMATICALLY_TORRENT == True then "checked=\"checked\"" else ""# />
+                            <label class="clearfix" for="process_automatically_torrent">
+                                <span class="component-title">Scan and Process</span>
+                                <span class="component-desc">Scan and post-process any files in your <i>Torrent Download Dir</i>?</span>
+                            </label>
+                            <label class="nocheck clearfix" for="process_automatically_torrent">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc"><b>NOTE:</b> Do not use this if your Torrent Download Dir is the same as your TV Download Dir.</span>
+                                <span class="component-desc"><b>You must restart Sick Beard once everything is configured for the processing thread to start.</b></span>
+                            </label>
+                        </div>
+
+                        <div class="clearfix"></div>
+                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
+
+                    </fieldset>
+                </div><!-- /component-pp //-->
+
+                <div class="component-group clearfix">
+
+                    <div class="component-group-desc">
+                        <h3>Common Post-Processing Options</h3>
+                        <p>Settings that dictate how Sick Beard should process completed downloads.</p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+
+                        <div class="field-pair">
+                            <input type="checkbox" name="keep_processed_dir" id="keep_processed_dir" #if $sickbeard.KEEP_PROCESSED_DIR == True then "checked=\"checked\"" else ""# />
+                            <label class="clearfix" for="keep_processed_dir">
+                                <span class="component-title">Keep Original Files</span>
+                                <span class="component-desc">Keep original files after they've been processed?</span>
+                            </label>
+                        </div>
+
+                        <div class="field-pair">
+                            <input type="checkbox" name="move_associated_files" id="move_associated_files" #if $sickbeard.MOVE_ASSOCIATED_FILES == True then "checked=\"checked\"" else ""# />
+                            <label class="clearfix" for="move_associated_files">
+                                <span class="component-title">Move Associated Files</span>
+                                <span class="component-desc">Move srr/srt/sfv/etc files with the episode when processed?</span>
+                            </label>
+                            <label class="nocheck clearfix" for="move_associated_files">
+                                <span class="component-title">&nbsp;</span>
+                                <span class="component-desc"><b>NOTE:</b> <i>.nfo</i> will be renamed to <i>.nfo-orig</i> when moved.</span>
+                            </label>
+                        </div>
+
+                        <div class="field-pair">
+                            <input type="checkbox" name="rename_episodes" id="rename_episodes" #if $sickbeard.RENAME_EPISODES == True then "checked=\"checked\"" else ""# />
+                            <label class="clearfix" for="rename_episodes">
+                                <span class="component-title">Rename Episodes</span>
+                                <span class="component-desc">Rename episodes using the naming settings below?</span>
+                            </label>
+                        </div>
+
+                        <div class="clearfix"></div>
+                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
+
+                    </fieldset>
+                </div><!-- /component-pp //-->
+                
+                <div class="component-group clearfix">
+                    <div class="component-group-desc">
+                        <h3>Naming</h3>
+                        <p>How Sick Beard will name and sort your episodes.</p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+
+                        <div class="field-pair">
+                            <label class="nocheck clearfix" for="name_presets">
+                                <span class="component-title">Name Pattern:</span>
+                                <span class="component-desc">
+                                    <select id="name_presets">
+                                        #set is_custom = True
+                                        #for $cur_preset in $naming.name_presets:
+                                            #set $tmp = $naming.test_name($cur_preset)
+                                            #if $cur_preset == $sickbeard.NAMING_PATTERN:
+                                                #set is_custom = False
+                                            #end if
+                                            <option id="$cur_preset" #if $cur_preset == $sickbeard.NAMING_PATTERN then "selected=\"selected\"" else ""#>$os.path.join($tmp['dir'], $tmp['name'])</option>
+                                        #end for
+                                        <option id="$sickbeard.NAMING_PATTERN" #if $is_custom then "selected=\"selected\"" else ""#>Custom...</option>
+                                    </select>
+                                </span>
+                            </label>
+                        </div>
+
+                        <div id="naming_custom">
+                            <div class="field-pair clearfix" style="padding-top: 0;">
+                                <label class="nocheck clearfix">
+                                    <span class="component-title">
+                                        &nbsp;
+                                    </span>
+                                    <span class="component-desc">
+                                        <input type="text" size="45" name="naming_pattern" id="naming_pattern" class="custom-pattern" value="$sickbeard.NAMING_PATTERN" />
+                                        <img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_key" title="Toggle Naming Legend" style="padding: 6px 0 0 3px;" />
+                                    </span>
+                                </label>
+                            </div>
+
+                            <div id="naming_key" class="nocheck clearfix" style="display: none;">
+                                  <table class="Key">
+                                    <thead>
+                                        <tr>
+                                          <th class="align-right">Meaning</th>
+                                          <th>Pattern</th>
+                                          <th width="60%">Result</th>
+                                        </tr>
+                                    </thead>
+                                    <tfoot>
+                                        <tr>
+                                          <th colspan="3">Use lower case if you want lower case names (e.g. %sn, %e.n, %q_n, etc.)</th>
+                                        </tr>
+                                    </tfoot>
+                                    <tbody>
+                                        <tr>
+                                          <td class="align-right"><b>Show Name:</b></td>
+                                          <td>%SN</td>
+                                          <td>Show Name</td>
+                                        </tr>
+                                        <tr class="even">
+                                          <td>&nbsp;</td>
+                                          <td>%S.N</td>
+                                          <td>Show.Name</td>
+                                        </tr>
+                                        <tr>
+                                          <td>&nbsp;</td>
+                                          <td>%S_N</td>
+                                          <td>Show_Name</td>
+                                        </tr>
+                                        <tr class="even">
+                                          <td class="align-right"><b>Season Number:</b></td>
+                                          <td>%S</td>
+                                          <td>2</td>
+                                        </tr>
+                                        <tr>
+                                          <td>&nbsp;</td>
+                                          <td>%0S</td>
+                                          <td>02</td>
+                                        </tr>
+                                        <tr class="even">
+                                          <td class="align-right"><b>Episode Number:</b></td>
+                                          <td>%E</td>
+                                          <td>3</td>
+                                        </tr>
+                                        <tr>
+                                          <td>&nbsp;</td>
+                                          <td>%0E</td>
+                                          <td>03</td>
+                                        </tr>
+                                        <tr class="even">
+                                          <td class="align-right"><b>Episode Name:</b></td>
+                                          <td>%EN</td>
+                                          <td>Episode Name</td>
+                                        </tr>
+                                        <tr>
+                                          <td>&nbsp;</td>
+                                          <td>%E.N</td>
+                                          <td>Episode.Name</td>
+                                        </tr>
+                                        <tr class="even">
+                                          <td>&nbsp;</td>
+                                          <td>%E_N</td>
+                                          <td>Episode_Name</td>
+                                        </tr>
+                                        <tr>
+                                          <td class="align-right"><b>Quality:</b></td>
+                                          <td>%QN</td>
+                                          <td>720p BluRay</td>
+                                        </tr>
+                                        <tr class="even">
+                                          <td>&nbsp;</td>
+                                          <td>%Q.N</td>
+                                          <td>720p.BluRay</td>
+                                        </tr>
+                                        <tr>
+                                          <td>&nbsp;</td>
+                                          <td>%Q_N</td>
+                                          <td>720p_BluRay</td>
+                                        </tr>
+                                        <tr class="even">
+                                          <td class="align-right"><i class="icon-info-sign" title="Multi-EP style is ignored"></i> <b>Release Name:</b></td>
+                                          <td>%RN</td>
+                                          <td>Show.Name.S02E03.HDTV.XviD-RLSGROUP</td>
+                                        </tr>
+                                        <tr>
+                                          <td class="align-right"><i class="icon-info-sign" title="'SiCKBEARD' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
+                                          <td>%RG</td>
+                                          <td>RLSGROUP</td>
+                                        </tr>
+                                    </tbody>
+                                  </table>
+                                  <br/>
+                            </div>
+                        </div>
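+                        <!-- Illustrative sketch only (assumed custom pattern, not a shipped preset): combining the
+                             tokens from the legend above, a pattern such as "Season %0S/%S.N.S%0SE%0E.%E.N-%RG"
+                             would render as "Season 02/Show.Name.S02E03.Episode.Name-RLSGROUP". -->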
+
+                        <div class="field-pair">
+                            <label class="nocheck clearfix" for="naming_multi_ep">
+                                <span class="component-title">Multi-Episode Style:</span>
+                                <span class="component-desc">
+                                    <select id="naming_multi_ep" name="naming_multi_ep">
+                                    #for $cur_multi_ep in sorted($multiEpStrings.items(), key=lambda x: x[1]):
+                                        <option value="$cur_multi_ep[0]" #if $cur_multi_ep[0] == $sickbeard.NAMING_MULTI_EP then "selected=\"selected\" class=\"selected\"" else ""#>$cur_multi_ep[1]</option>
+                                    #end for
+                                    </select>
+                                </span>
+                            </label>
+                        </div>
+
+                        <div id="naming_example_div">
+                            <h3>Sample:</h3>
+                            <div class="example">
+                                <span class="jumbo" id="naming_example">&nbsp;</span>
+                            </div>
+                            <br/>
+                        </div>
+
+                        <div id="naming_example_multi_div">
+                            <h3>Multi-EP sample:</h3>
+                            <div class="example">
+                                <span class="jumbo" id="naming_example_multi">&nbsp;</span>
+                            </div>
+                            <br/><br />
+                        </div>
+
+                        <div class="field-pair clearfix">
+                            <input type="checkbox" class="enabler" id="naming_custom_abd" name="naming_custom_abd" #if $sickbeard.NAMING_CUSTOM_ABD then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="naming_custom_abd">
+                                <span class="component-title">Custom Air-By-Date</span>
+                                <span class="component-desc">Name Air-By-Date shows differently than regular shows?</span>
+                            </label>
+                        </div>
+
+                        <div id="content_naming_custom_abd">
+                            <div class="field-pair">
+                                <label class="nocheck clearfix" for="name_abd_presets">
+                                    <span class="component-title">Name Pattern:</span>
+                                    <span class="component-desc">
+                                        <select id="name_abd_presets">
+                                            #set is_abd_custom = True
+                                            #for $cur_preset in $naming.name_abd_presets:
+                                                #set $tmp = $naming.test_name($cur_preset)
+                                                #if $cur_preset == $sickbeard.NAMING_ABD_PATTERN:
+                                                    #set is_abd_custom = False
+                                                #end if
+                                                <option id="$cur_preset" #if $cur_preset == $sickbeard.NAMING_ABD_PATTERN then "selected=\"selected\"" else ""#>$os.path.join($tmp['dir'], $tmp['name'])</option>
+                                            #end for
+                                            <option id="$sickbeard.NAMING_ABD_PATTERN" #if $is_abd_custom then "selected=\"selected\"" else ""#>Custom...</option>
+                                        </select>
+                                    </span>
+                                </label>
+                            </div>
+
+                            <div id="naming_abd_custom">
+                                <div class="field-pair clearfix" style="padding-top: 0;">
+                                    <label class="nocheck clearfix">
+                                        <span class="component-title">
+                                            &nbsp;
+                                        </span>
+                                        <span class="component-desc">
+                                            <input type="text" size="45" name="naming_abd_pattern" id="naming_abd_pattern" class="custom-pattern" value="$sickbeard.NAMING_ABD_PATTERN" />
+                                            <img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_abd_key" title="Toggle ABD Naming Legend" style="padding: 6px 0 0 3px;" />
+                                        </span>
+                                    </label>
+                                </div>
+
+                                <div id="naming_abd_key" class="nocheck clearfix" style="display: none;">
+                                      <table class="Key">
+                                        <thead>
+                                            <tr>
+                                              <th class="align-right">Meaning</th>
+                                              <th>Pattern</th>
+                                              <th width="60%">Result</th>
+                                            </tr>
+                                        </thead>
+                                        <tfoot>
+                                            <tr>
+                                              <th colspan="3">Use lower case if you want lower case names (e.g. %sn, %e.n, %q_n, etc.)</th>
+                                            </tr>
+                                        </tfoot>
+                                        <tbody>
+                                            <tr>
+                                              <td class="align-right"><b>Show Name:</b></td>
+                                              <td>%SN</td>
+                                              <td>Show Name</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td>&nbsp;</td>
+                                              <td>%S.N</td>
+                                              <td>Show.Name</td>
+                                            </tr>
+                                            <tr>
+                                              <td>&nbsp;</td>
+                                              <td>%S_N</td>
+                                              <td>Show_Name</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td class="align-right"><b>Air-By-Date:</b></td>
+                                              <td>%AD</td>
+                                              <td>2010 03 09</td>
+                                            </tr>
+                                            <tr>
+                                              <td>&nbsp;</td>
+                                              <td>%A.D</td>
+                                              <td>2010.03.09</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td>&nbsp;</td>
+                                              <td>%A_D</td>
+                                              <td>2010_03_09</td>
+                                            </tr>
+                                            <tr>
+                                              <td>&nbsp;</td>
+                                              <td>%A-D</td>
+                                              <td>2010-03-09</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td class="align-right"><b>Episode Name:</b></td>
+                                              <td>%EN</td>
+                                              <td>Episode Name</td>
+                                            </tr>
+                                            <tr>
+                                              <td>&nbsp;</td>
+                                              <td>%E.N</td>
+                                              <td>Episode.Name</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td>&nbsp;</td>
+                                              <td>%E_N</td>
+                                              <td>Episode_Name</td>
+                                            </tr>
+                                            <tr>
+                                              <td class="align-right"><b>Quality:</b></td>
+                                              <td>%QN</td>
+                                              <td>720p BluRay</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td>&nbsp;</td>
+                                              <td>%Q.N</td>
+                                              <td>720p.BluRay</td>
+                                            </tr>
+                                            <tr>
+                                              <td>&nbsp;</td>
+                                              <td>%Q_N</td>
+                                              <td>720p_BluRay</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td class="align-right"><b>Year:</b></td>
+                                              <td>%Y</td>
+                                              <td>2010</td>
+                                            </tr>
+                                            <tr>
+                                              <td class="align-right"><b>Month:</b></td>
+                                              <td>%M</td>
+                                              <td>3</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td class="align-right">&nbsp;</td>
+                                              <td>%0M</td>
+                                              <td>03</td>
+                                            </tr>
+                                            <tr>
+                                              <td class="align-right"><b>Day:</b></td>
+                                              <td>%D</td>
+                                              <td>9</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td class="align-right">&nbsp;</td>
+                                              <td>%0D</td>
+                                              <td>09</td>
+                                            </tr>
+                                            <tr>
+                                              <td class="align-right"><i class="icon-info-sign" title="Multi-EP style is ignored"></i> <b>Release Name:</b></td>
+                                              <td>%RN</td>
+                                              <td>Show.Name.2010.03.09.HDTV.XviD-RLSGROUP</td>
+                                            </tr>
+                                            <tr class="even">
+                                              <td class="align-right"><i class="icon-info-sign" title="'SiCKBEARD' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
+                                              <td>%RG</td>
+                                              <td>RLSGROUP</td>
+                                            </tr>
+                                        </tbody>
+                                      </table>
+                                      <br/>
+                                </div>
+                            </div><!-- /naming_abd_custom -->
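+                            <!-- Illustrative sketch only (assumed custom pattern): per the ABD legend above, a
+                                 pattern such as "%SN - %A-D - %EN" would render as
+                                 "Show Name - 2010-03-09 - Episode Name". -->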
+
+                            <div id="naming_abd_example_div">
+                                <h3>Sample:</h3>
+                                <div class="example">
+                                    <span class="jumbo" id="naming_abd_example">&nbsp;</span>
+                                </div>
+                                <br/>
+                            </div>
+
+                        </div><!-- /naming_abd_different -->
+
+                        <div class="clearfix"></div>
+                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
+
+                    </fieldset>
+                </div><!-- /component-naming //-->
+
+                <div class="component-group clearfix">
+
+                    <div class="component-group-desc">
+                        <h3>Metadata</h3>
+                        <p>The data associated with your shows. These are files attached to a TV show, in the form of images and text, that will enhance the viewing experience where supported.</p>
+                    </div>
+
+                    <fieldset class="component-group-list">
+                        <div class="field-pair clearfix">
+                            <label class="clearfix">
+                                <span class="component-title jumbo">Metadata Type:</span>
+                                <span class="component-desc">
+                                    #set $m_dict = $metadata.get_metadata_generator_dict()
+                                    <select id="metadataType" class="input-medium" >
+                                    #for ($cur_name, $cur_generator) in $m_dict.items():
+                                        <option value="$GenericMetadata.makeID($cur_name)">$cur_name</option>
+                                    #end for
+                                    </select>
+                                </span>
+                            </label>
+                            <span>Toggle the metadata options that you wish to have created. <b>Multiple targets may be used.</b></span>
+                        </div>
+
+#for ($cur_name, $cur_generator) in $m_dict.items(): 
+#set $cur_metadata_inst = $sickbeard.metadata_provider_dict[$cur_generator.name]
+#set $cur_id = $GenericMetadata.makeID($cur_name)
+<div class="metadataDiv clearfix" id="$cur_id">
+    <div class="metadata-options-wrapper">
+        <h4>Create:</h4>
+        <div class="metadata-options">
+            <label for="${cur_id}_show_metadata" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_show_metadata" #if $cur_metadata_inst.show_metadata then "checked=\"checked\"" else ""#/>&nbsp;Show Metadata</label>
+            <label for="${cur_id}_episode_metadata" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_episode_metadata" #if $cur_metadata_inst.episode_metadata then "checked=\"checked\"" else ""#/>&nbsp;Episode Metadata</label>
+            <label for="${cur_id}_fanart" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_fanart" #if $cur_metadata_inst.fanart then "checked=\"checked\"" else ""#/>&nbsp;Show Fanart Image</label>
+            <label for="${cur_id}_poster" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_poster" #if $cur_metadata_inst.poster then "checked=\"checked\"" else ""#/>&nbsp;Show Folder Image</label>
+            <label for="${cur_id}_episode_thumbnails" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_episode_thumbnails" #if $cur_metadata_inst.episode_thumbnails then "checked=\"checked\"" else ""#/>&nbsp;Episode Thumbnail</label>
+            <label for="${cur_id}_season_thumbnails" class="clearfix"><input type="checkbox" class="float-left metadata_checkbox" id="${cur_id}_season_thumbnails" #if $cur_metadata_inst.season_thumbnails then "checked=\"checked\"" else ""#/>&nbsp;Season Thumbnail</label>
+        </div>
+    </div>
+    <div class="metadata-example-wrapper">
+        <h4>Results:</h4>
+        <div class="metadata-example">
+            <label for="${cur_id}_show_metadata"><span id="${cur_id}_eg_show_metadata">$cur_metadata_inst.eg_show_metadata</span></label>
+            <label for="${cur_id}_episode_metadata"><span id="${cur_id}_eg_episode_metadata">$cur_metadata_inst.eg_episode_metadata</span></label>
+            <label for="${cur_id}_fanart"><span id="${cur_id}_eg_fanart">$cur_metadata_inst.eg_fanart</span></label>
+            <label for="${cur_id}_poster"><span id="${cur_id}_eg_poster">$cur_metadata_inst.eg_poster</span></label>
+            <label for="${cur_id}_episode_thumbnails"><span id="${cur_id}_eg_episode_thumbnails">$cur_metadata_inst.eg_episode_thumbnails</span></label>
+            <label for="${cur_id}_season_thumbnails"><span id="${cur_id}_eg_season_thumbnails">$cur_metadata_inst.eg_season_thumbnails</span></label>
+        </div>
+    </div>
+
+    <input type="hidden" name="${cur_id}_data" id="${cur_id}_data" value="$cur_metadata_inst.get_config()" />
+</div>
+#end for
+
+                        <div class="field-pair clearfix">
+                            <input type="checkbox" name="use_banner" id="use_banner" #if $sickbeard.USE_BANNER then "checked=\"checked\"" else ""#/>
+                            <label class="clearfix" for="use_banner">
+                                <span class="component-title">Use Banners</span>
+                                <span class="component-desc">Use banners instead of posters for 'Show Folder Image'</span>
+                            </label>
+                        </div>
+
+                        <input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
+
+                    </fieldset>
+                </div><!-- /component-metadata //-->
+
+            <br/><input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
+            </div><!-- /config-components -->
+
+</form>
+</div></div>
+
+<script type="text/javascript" charset="utf-8">
+<!--
+    jQuery('#tv_download_dir').fileBrowser({ title: 'Select TV Download Directory' });
+    jQuery('#torrent_download_dir').fileBrowser({ title: 'Select Torrent Download Directory' });
+//-->
+</script>
+
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
diff --git a/data/interfaces/default/home_newShow.tmpl b/data/interfaces/default/home_newShow.tmpl
index 73f4c7631..729a731a9 100644
--- a/data/interfaces/default/home_newShow.tmpl
+++ b/data/interfaces/default/home_newShow.tmpl
@@ -1,85 +1,85 @@
-#import os.path
-#import sickbeard
-#set global $title="New Show"
-
-#set global $sbPath="../.."
-
-#set global $statpath="../.."#
-#set global $topmenu="home"#
-#import os.path
-
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
-
-<script type="text/javascript" src="$sbRoot/js/lib/formwizard.js?$sbPID"></script>
-<script type="text/javascript" src="$sbRoot/js/qualityChooser.js?$sbPID"></script>
-<script type="text/javascript" src="$sbRoot/js/newShow.js?$sbPID"></script>
-<script type="text/javascript" src="$sbRoot/js/addShowOptions.js?$sbPID"></script>
-
-<div id="displayText">aoeu</div>
-<br />
-
-<form id="addShowForm" method="post" action="$sbRoot/home/addShows/addNewShow" accept-charset="utf-8">
-
-<fieldset class="sectionwrap">
-    <legend class="legendStep">Find a show on the TVDB</legend>
-
-    <div class="stepDiv">
-        #if $use_provided_info:
-            Show retrieved from existing metadata: <a href="http://thetvdb.com/?tab=series&id=$provided_tvdb_id">$provided_tvdb_name</a>
-            <input type="hidden" name="tvdbLang" value="fr" />
-            <input type="hidden" name="whichSeries" value="$provided_tvdb_id" />
-            <input type="hidden" id="providedName" value="$provided_tvdb_name" />
-        #else:
-            <input type="text" id="nameToSearch" value="$default_show_name" />
-            <select name="tvdbLang" id="tvdbLangSelect">
-                <option value="fr" selected="selected">fr</option>
-            </select><b>*</b>
-            <input type="button" id="searchName" value="Search" class="btn" /><br /><br />
-
-            <b>*</b> This will only affect the language of the retrieved metadata file contents and episode filenames.<br />
-            This <b>DOES NOT</b> allow Sick Beard to download non-english TV episodes!<br />
-            <br />
-            <div id="searchResults" style="max-height: 225px; overflow: auto;"><br/></div>
-        #end if
-    </div>
-</fieldset>
-
-<fieldset class="sectionwrap">
-    <legend class="legendStep">Pick the parent folder</legend>
-
-    <div class="stepDiv">
-        #if $provided_show_dir:
-            Pre-chosen Destination Folder: <b>$provided_show_dir</b> <br />
-            <input type="hidden" id="fullShowPath" name="fullShowPath" value="$provided_show_dir" /><br />
-        #else
-            #include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_rootDirs.tmpl")
-        #end if
-    </div>
-</fieldset>
-
-<fieldset class="sectionwrap">
-    <legend class="legendStep">Customize options</legend>
-
-    <div class="stepDiv">
-        #include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_addShowOptions.tmpl")
-    </div>
-</fieldset>
-
-#for $curNextDir in $other_shows:
-<input type="hidden" name="other_shows" value="$curNextDir" />
-#end for
-<input type="hidden" name="skipShow" id="skipShow" value="" />
-</form>
-
-<br />
-
-<div style="width: 800px; text-align: center;">
-<input class="btn" type="button" id="addShowButton" value="Add Show" disabled="disabled" />
-#if $provided_show_dir:
-<input class="btn" type="button" id="skipShowButton" value="Skip Show" />
-#end if
-</div>
-
-<script type="text/javascript" src="$sbRoot/js/rootDirs.js?$sbPID"></script>
-
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
+#import os.path
+#import sickbeard
+#set global $title="New Show"
+
+#set global $sbPath="../.."
+
+#set global $statpath="../.."#
+#set global $topmenu="home"#
+#import os.path
+
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
+
+<script type="text/javascript" src="$sbRoot/js/lib/formwizard.js?$sbPID"></script>
+<script type="text/javascript" src="$sbRoot/js/qualityChooser.js?$sbPID"></script>
+<script type="text/javascript" src="$sbRoot/js/newShow.js?$sbPID"></script>
+<script type="text/javascript" src="$sbRoot/js/addShowOptions.js?$sbPID"></script>
+
+<div id="displayText">aoeu</div>
+<br />
+
+<form id="addShowForm" method="post" action="$sbRoot/home/addShows/addNewShow" accept-charset="utf-8">
+
+<fieldset class="sectionwrap">
+    <legend class="legendStep">Find a show on the TVDB</legend>
+
+    <div class="stepDiv">
+        #if $use_provided_info:
+            Show retrieved from existing metadata: <a href="http://thetvdb.com/?tab=series&id=$provided_tvdb_id">$provided_tvdb_name</a>
+            <input type="hidden" name="tvdbLang" value="fr" />
+            <input type="hidden" name="whichSeries" value="$provided_tvdb_id" />
+            <input type="hidden" id="providedName" value="$provided_tvdb_name" />
+        #else:
+            <input type="text" id="nameToSearch" value="$default_show_name" />
+            <select name="tvdbLang" id="tvdbLangSelect">
+                <option value="fr" selected="selected">fr</option>
+            </select><b>*</b>
+            <input type="button" id="searchName" value="Search" class="btn" /><br /><br />
+
+            <b>*</b> This will only affect the language of the retrieved metadata file contents and episode filenames.<br />
+            This <b>DOES NOT</b> allow Sick Beard to download non-English TV episodes!<br />
+            <br />
+            <div id="searchResults" style="max-height: 225px; overflow: auto;"><br/></div>
+        #end if
+    </div>
+</fieldset>
+
+<fieldset class="sectionwrap">
+    <legend class="legendStep">Pick the parent folder</legend>
+
+    <div class="stepDiv">
+        #if $provided_show_dir:
+            Pre-chosen Destination Folder: <b>$provided_show_dir</b> <br />
+            <input type="hidden" id="fullShowPath" name="fullShowPath" value="$provided_show_dir" /><br />
+        #else
+            #include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_rootDirs.tmpl")
+        #end if
+    </div>
+</fieldset>
+
+<fieldset class="sectionwrap">
+    <legend class="legendStep">Customize options</legend>
+
+    <div class="stepDiv">
+        #include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_addShowOptions.tmpl")
+    </div>
+</fieldset>
+
+#for $curNextDir in $other_shows:
+<input type="hidden" name="other_shows" value="$curNextDir" />
+#end for
+<input type="hidden" name="skipShow" id="skipShow" value="" />
+</form>
+
+<br />
+
+<div style="width: 800px; text-align: center;">
+<input class="btn" type="button" id="addShowButton" value="Add Show" disabled="disabled" />
+#if $provided_show_dir:
+<input class="btn" type="button" id="skipShowButton" value="Skip Show" />
+#end if
+</div>
+
+<script type="text/javascript" src="$sbRoot/js/rootDirs.js?$sbPID"></script>
+
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
diff --git a/data/interfaces/default/inc_top.tmpl b/data/interfaces/default/inc_top.tmpl
index 82fc9e19b..5e10f9a22 100644
--- a/data/interfaces/default/inc_top.tmpl
+++ b/data/interfaces/default/inc_top.tmpl
@@ -1,250 +1,250 @@
-#import sickbeard.version
-<!DOCTYPE HTML>
-<html>
-    <head>
-        <meta charset="utf-8">
-        <title>Sick Beard - $sickbeard.version.SICKBEARD_VERSION - $title</title>
-        <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
-        <meta name="robots" content="noindex">
-        <!--[if lt IE 9]>
-            <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
-        <![endif]-->
-
-        <link rel="shortcut icon" href="$sbRoot/images/ico/favicon.ico">
-        <link rel="apple-touch-icon-precomposed" sizes="144x144" href="$sbRoot/images/ico/apple-touch-icon-144x144-precomposed.png">
-        <link rel="apple-touch-icon-precomposed" sizes="114x114" href="$sbRoot/images/ico/apple-touch-icon-114x114-precomposed.png">
-        <link rel="apple-touch-icon-precomposed" sizes="72x72" href="$sbRoot/images/ico/apple-touch-icon-72x72-precomposed.png">
-        <link rel="apple-touch-icon-precomposed" href="$sbRoot/images/ico/apple-touch-icon-57x57-precomposed.png">
-
-        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/bootstrap.css?$sbPID"/>
-        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery.pnotify.default.css?$sbPID" />
-        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery-ui-1.8.24.custom.css?$sbPID" />
-        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery.qtip.css?$sbPID"/>
-        <link rel="stylesheet" type="text/css" href="$sbRoot/css/style.css?$sbPID"/>
-
-<style type="text/css">
-<!--
-#contentWrapper { background: url("$sbRoot/images/bg.png") repeat fixed 0 0 transparent; }
-
-[class^="icon-"], [class*=" icon-"] { background-image: url("$sbRoot/images/glyphicons-halflings.png"); }
-.icon-white { background-image: url("$sbRoot/images/glyphicons-halflings-white.png"); }
-.dropdown-menu li > a:hover > [class^="icon-"], .dropdown-menu li > a:hover > [class*=" icon-"] { background-image: url("$sbRoot/images/glyphicons-halflings-white.png"); }
-[class^="icon16-"], [class*=" icon16-"] { background-image: url("$sbRoot/images/glyphicons-config.png"); }
-
-.ui-autocomplete-loading { background: white url("$sbRoot/images/loading16.gif") right center no-repeat; }
-.browserDialog.busy .ui-dialog-buttonpane { background: url("$sbRoot/images/loading.gif") 10px 50% no-repeat !important; }
-.ui-dialog, .ui-dialog-buttonpane { background: #eceadf url("$sbRoot/css/lib/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; }
-.ui-accordion-content, .ui-tabs-panel { background: #ededed !important; background-image: none !important; }
-
-.ui-widget-content { border: 1px solid #aaaaaa; background: #dcdcdc url("$sbRoot/css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; color: #222222; }
-.ui-widget-header { border: 1px solid #aaaaaa; background: #ffffff url("$sbRoot/css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; color: #222222; font-weight: bold; }
-
-.ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { border: 1px solid #aaaaaa; background: #efefef url("$sbRoot/css/lib/images/ui-bg_highlight-soft_75_efefef_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
-.ui-state-hover, .ui-widget-content .ui-state-hover, .ui-widget-header .ui-state-hover, .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { border: 1px solid #999999; background: #dddddd url("$sbRoot/css/lib/images/ui-bg_highlight-soft_75_dddddd_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
-.ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { border: 1px solid #aaaaaa; background: #dfdfdf url("$sbRoot/css/lib/images/ui-bg_inset-soft_75_dfdfdf_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #140f06; }
-
-.ui-state-highlight, .ui-widget-content .ui-state-highlight, .ui-widget-header .ui-state-highlight {border: 1px solid #aaaaaa; background: #fbf9ee url("$sbRoot/css/lib/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; color: #363636; }
-.ui-state-error, .ui-widget-content .ui-state-error, .ui-widget-header .ui-state-error {border: 1px solid #aaaaaa; background: #fef1ec url("$sbRoot/css/lib/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; color: #8c291d; }
-
-.ui-icon { width: 16px; height: 16px; background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
-.ui-widget-content .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
-.ui-widget-header .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
-.ui-state-default .ui-icon { background-image: url("$sbRoot/css/lib/images/ui-icons_8c291d_256x240.png"); }
-.ui-state-hover .ui-icon, .ui-state-focus .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
-.ui-state-active .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_8c291d_256x240.png"); }
-.ui-state-highlight .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_2e83ff_256x240.png"); }
-.ui-state-error .ui-icon, .ui-state-error-text .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_cd0a0a_256x240.png"); }
-
-.ui-widget-overlay { background: #aaaaaa url("$sbRoot/css/lib/images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; opacity: .35;filter:Alpha(Opacity=35); }
-.ui-widget-shadow { margin: -8px 0 0 -8px; padding: 8px; background: #000000 url("$sbRoot/css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; opacity: .35;filter:Alpha(Opacity=35); -moz-border-radius: 8px; -khtml-border-radius: 8px; -webkit-border-radius: 8px; border-radius: 8px; }
-
-#if $sickbeard.NEWEST_VERSION_STRING:
-.ui-pnotify { top: 30px !important; }
-#end if
-//--> 
-</style>
-
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery-1.7.2.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery-ui-1.8.24.custom.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/bootstrap.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.cookie.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.cookiejar.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.json-2.2.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.selectboxes.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.tablesorter-2.1.19.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.tablesorter.widgets.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.qtip-2012-04-26.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.pnotify-1.2.0.min.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.expand-1.3.8.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.form-2.92.js?$sbPID"></script>
-
-    <script type="text/javascript" charset="utf-8">
-    <!--
-        sbRoot = "$sbRoot"; // needed for browser.js & ajaxNotifications.js
-        //HTML for scrolltopcontrol, which is auto wrapped in DIV w/ ID="topcontrol"
-        top_image_html = '<img src="$sbRoot/images/top.gif" style="width:31px; height:11px" alt="Jump to top" />'; 
-    //-->
-    </script>
-    <script type="text/javascript" src="$sbRoot/js/lib/jquery.scrolltopcontrol-1.1.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/browser.js?$sbPID"></script>
-    <script type="text/javascript" src="$sbRoot/js/ajaxNotifications.js?$sbPID"></script>
-    <script type="text/javascript">
-    <!--
-        \$(document).ready(function(){ 
-            \$("#NAV$topmenu").addClass("active");
-
-            \$("a.confirm").bind("click",function(e) {
-                e.preventDefault();
-                var target = \$( this ).attr('href');
-                if ( confirm("Are you sure you want to " + \$(this).text() + "?") )
-                    location.href = target;
-                return false;
-            });
-
-        });
-    //-->
-    </script>
-</head>
-
-<body>
-    <header>
-#if $sickbeard.NEWEST_VERSION_STRING:
-<div id="upgrade-notification">
-    <div>
-        <span class="notify-text">$sickbeard.NEWEST_VERSION_STRING</span>
-    </div>
-</div>
-<div id="header-fix"></div>
-#end if
-<div id="header">
-    <a name="top"></a>
-    <span id="logo"><a href="$sbRoot/home/" title="Sick Beard homepage"><img alt="Sick Beard" src="$sbRoot/images/sickbeard.png" width="150" height="90" /></a></span>
-    <span id="versiontext"><a>$sickbeard.version.SICKBEARD_VERSION</a></span>
-</div>
-
-    <div class="navbar">
-        <div class="navbar-inner">
-            <div class="container">
-                <a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse">
-                    <span class="icon-bar"></span>
-                    <span class="icon-bar"></span>
-                    <span class="icon-bar"></span>
-                </a>
-
-                <!--
-                <a class="brand" href="$sbRoot/home/" title="Sick Beard"><img alt="Sick Beard" src="$sbRoot/images/sickbeard.png" width="150" height="72" /></a>
-                -->
-
-                <div class="nav-collapse">
-                    <ul class="nav">
-                        <li class="divider-vertical"></li>
-                        <li id="NAVsystem" class="dropdown">
-                            <a data-toggle="dropdown" class="dropdown-toggle" href="#" onclick="return false;"><img src="$sbRoot/images/menu/system18.png" alt="" width="18" height="18" /> <b class="caret"></b></a>
-                            <ul class="dropdown-menu">
-                                <li><a href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="icon-check"></i> Force Version Check</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/home/restart/?pid=$sbPID" class="confirm"><i class="icon-repeat"></i> Restart</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/home/shutdown/?pid=$sbPID" class="confirm"><i class="icon-off"></i> Shutdown</a></li>
-                            </ul>
-                        </li>
-                        <li class="divider-vertical"></li>
-                        <li id="NAVhome" class="dropdown">
-                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/home/">Home <b class="caret"></b></a>
-                            <ul class="dropdown-menu">
-                                <li><a href="$sbRoot/home/"><i class="icon-home"></i> Home</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/home/addShows/"><i class="icon-plus-sign"></i> Add Shows</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/home/postprocess/"><i class="icon-random"></i> Manual Post-Processing</a></li>
-                            </ul>
-                        </li>
-                        <li class="divider-vertical"></li>
-                        <li id="NAVcomingEpisodes">
-                            <a href="$sbRoot/comingEpisodes/">Coming Episodes</a>
-                        </li>
-                        <li class="divider-vertical"></li>
-                        <li id="NAVhistory">
-                            <a href="$sbRoot/history/">History</a>
-                        </li>
-                        <li class="divider-vertical"></li>
-                        <li id="NAVmanage" class="dropdown">
-                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/manage/">Manage <b class="caret"></b></a>
-                            <ul class="dropdown-menu">
-                                <li><a href="$sbRoot/manage/"><i class="icon-tasks"></i> Mass Update</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/manage/backlogOverview/"><i class="icon-retweet"></i> Backlog Overview</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/manage/manageSearches/"><i class="icon-search"></i> Manage Searches</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/manage/episodeStatuses/"><i class="icon-list-alt"></i> Episode Status Management</a></li>
-								<li class="divider"></li>
-                                <li><a href="$sbRoot/manage/subtitleMissed/"><i class="icon-list-alt"></i> Manage Missed Subtitles</a></li>
-								<li class="divider"></li>
-                            </ul>
-                        </li>
-                        <li class="divider-vertical"></li>
-                        <li id="NAVconfig" class="dropdown">
-                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/config/">Config <b class="caret"></b></a>
-                            <ul class="dropdown-menu">
-                                <li><a href="$sbRoot/config/"><i class="icon-question-sign"></i> Help &amp; Info</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/config/general/"><i class="icon-cog"></i> General</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/config/search/"><i class="icon-cog"></i> Search Settings</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/config/providers/"><i class="icon-cog"></i> Search Providers</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/config/postProcessing/"><i class="icon-cog"></i> Post Processing</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/config/subtitles/"><i class="icon-cog"></i> Subtitles Settings</a></li>
-								<li class="divider"></li>
-                                <li><a href="$sbRoot/config/notifications/"><i class="icon-cog"></i> Notifications</a></li>
-                            </ul>
-                        </li>
-                        <li class="divider-vertical"></li>
-                        <li id="NAVerrorlogs" class="dropdown">
-                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/errorlogs/">$logPageTitle <b class="caret"></b></a>
-                            <ul class="dropdown-menu">
-                                <li><a href="$sbRoot/errorlogs/"><i class="icon-file"></i> View Log (Errors)</a></li>
-                                <li class="divider"></li>
-                                <li><a href="$sbRoot/errorlogs/viewlog/"><i class="icon-file"></i> View Log</a></li>
-                            </ul>
-                        </li>
-                        <li class="divider-vertical"></li>
-                    </ul>
-                    
-                </div><!-- /nav-collapse -->
-
-            </div><!-- /container -->
-        </div><!-- /navbar-inner -->
-    </div><!-- /navbar -->
-
-#if $varExists('submenu'):
-    <div id="SubMenu">
-        <span>
-    #set $first = True
-    #for $menuItem in $submenu:
-        #if 'requires' not in $menuItem or $menuItem.requires():
-            #if type($menuItem.path) == dict:
-                #if $first then "" else "</span>| <span>"#<b>$menuItem.title</b>
-                #set $first = False
-                #set $inner_first = True
-                #for $cur_link in $menuItem.path:
-                    #if $inner_first then "" else "&middot; "#<a class="inner" href="$sbRoot/$menuItem.path[$cur_link]">$cur_link</a>
-                    #set $inner_first = False
-                #end for
-            #else
-                #if $first then "" else "| "#<a href="$sbRoot/$menuItem.path" #if 'confirm' in $menuItem then "class=\"confirm\"" else "" #>$menuItem.title</a>
-                #set $first = False
-            #end if
-        #end if
-    #end for
-        </span>
-    </div>
-#end if
-    </header>
-
-<div id="contentWrapper">
-    <div id="content">
-    <h1 class="title">#if $varExists('header') then $header else $title#</h1>
+#import sickbeard.version
+<!DOCTYPE HTML>
+<html>
+    <head>
+        <meta charset="utf-8">
+        <title>Sick Beard - $sickbeard.version.SICKBEARD_VERSION - $title</title>
+        <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
+        <meta name="robots" content="noindex">
+        <!--[if lt IE 9]>
+            <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
+        <![endif]-->
+
+        <link rel="shortcut icon" href="$sbRoot/images/ico/favicon.ico">
+        <link rel="apple-touch-icon-precomposed" sizes="144x144" href="$sbRoot/images/ico/apple-touch-icon-144x144-precomposed.png">
+        <link rel="apple-touch-icon-precomposed" sizes="114x114" href="$sbRoot/images/ico/apple-touch-icon-114x114-precomposed.png">
+        <link rel="apple-touch-icon-precomposed" sizes="72x72" href="$sbRoot/images/ico/apple-touch-icon-72x72-precomposed.png">
+        <link rel="apple-touch-icon-precomposed" href="$sbRoot/images/ico/apple-touch-icon-57x57-precomposed.png">
+
+        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/bootstrap.css?$sbPID"/>
+        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery.pnotify.default.css?$sbPID" />
+        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery-ui-1.8.24.custom.css?$sbPID" />
+        <link rel="stylesheet" type="text/css" href="$sbRoot/css/lib/jquery.qtip.css?$sbPID"/>
+        <link rel="stylesheet" type="text/css" href="$sbRoot/css/style.css?$sbPID"/>
+
+<style type="text/css">
+<!--
+#contentWrapper { background: url("$sbRoot/images/bg.png") repeat fixed 0 0 transparent; }
+
+[class^="icon-"], [class*=" icon-"] { background-image: url("$sbRoot/images/glyphicons-halflings.png"); }
+.icon-white { background-image: url("$sbRoot/images/glyphicons-halflings-white.png"); }
+.dropdown-menu li > a:hover > [class^="icon-"], .dropdown-menu li > a:hover > [class*=" icon-"] { background-image: url("$sbRoot/images/glyphicons-halflings-white.png"); }
+[class^="icon16-"], [class*=" icon16-"] { background-image: url("$sbRoot/images/glyphicons-config.png"); }
+
+.ui-autocomplete-loading { background: white url("$sbRoot/images/loading16.gif") right center no-repeat; }
+.browserDialog.busy .ui-dialog-buttonpane { background: url("$sbRoot/images/loading.gif") 10px 50% no-repeat !important; }
+.ui-dialog, .ui-dialog-buttonpane { background: #eceadf url("$sbRoot/css/lib/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; }
+.ui-accordion-content, .ui-tabs-panel { background: #ededed !important; background-image: none !important; }
+
+.ui-widget-content { border: 1px solid #aaaaaa; background: #dcdcdc url("$sbRoot/css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; color: #222222; }
+.ui-widget-header { border: 1px solid #aaaaaa; background: #ffffff url("$sbRoot/css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; color: #222222; font-weight: bold; }
+
+.ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { border: 1px solid #aaaaaa; background: #efefef url("$sbRoot/css/lib/images/ui-bg_highlight-soft_75_efefef_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
+.ui-state-hover, .ui-widget-content .ui-state-hover, .ui-widget-header .ui-state-hover, .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { border: 1px solid #999999; background: #dddddd url("$sbRoot/css/lib/images/ui-bg_highlight-soft_75_dddddd_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #222222; }
+.ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { border: 1px solid #aaaaaa; background: #dfdfdf url("$sbRoot/css/lib/images/ui-bg_inset-soft_75_dfdfdf_1x100.png") 50% 50% repeat-x; font-weight: bold; color: #140f06; }
+
+.ui-state-highlight, .ui-widget-content .ui-state-highlight, .ui-widget-header .ui-state-highlight {border: 1px solid #aaaaaa; background: #fbf9ee url("$sbRoot/css/lib/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; color: #363636; }
+.ui-state-error, .ui-widget-content .ui-state-error, .ui-widget-header .ui-state-error {border: 1px solid #aaaaaa; background: #fef1ec url("$sbRoot/css/lib/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; color: #8c291d; }
+
+.ui-icon { width: 16px; height: 16px; background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
+.ui-widget-content .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
+.ui-widget-header .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
+.ui-state-default .ui-icon { background-image: url("$sbRoot/css/lib/images/ui-icons_8c291d_256x240.png"); }
+.ui-state-hover .ui-icon, .ui-state-focus .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_222222_256x240.png"); }
+.ui-state-active .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_8c291d_256x240.png"); }
+.ui-state-highlight .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_2e83ff_256x240.png"); }
+.ui-state-error .ui-icon, .ui-state-error-text .ui-icon {background-image: url("$sbRoot/css/lib/images/ui-icons_cd0a0a_256x240.png"); }
+
+.ui-widget-overlay { background: #aaaaaa url("$sbRoot/css/lib/images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; opacity: .35;filter:Alpha(Opacity=35); }
+.ui-widget-shadow { margin: -8px 0 0 -8px; padding: 8px; background: #000000 url("$sbRoot/css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; opacity: .35;filter:Alpha(Opacity=35); -moz-border-radius: 8px; -khtml-border-radius: 8px; -webkit-border-radius: 8px; border-radius: 8px; }
+
+#if $sickbeard.NEWEST_VERSION_STRING:
+.ui-pnotify { top: 30px !important; }
+#end if
+//--> 
+</style>
+
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery-1.7.2.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery-ui-1.8.24.custom.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/bootstrap.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.cookie.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.cookiejar.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.json-2.2.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.selectboxes.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.tablesorter-2.1.19.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.tablesorter.widgets.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.qtip-2012-04-26.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.pnotify-1.2.0.min.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.expand-1.3.8.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.form-2.92.js?$sbPID"></script>
+
+    <script type="text/javascript" charset="utf-8">
+    <!--
+        sbRoot = "$sbRoot"; // needed for browser.js & ajaxNotifications.js
+        //HTML for scrolltopcontrol, which is auto wrapped in DIV w/ ID="topcontrol"
+        top_image_html = '<img src="$sbRoot/images/top.gif" style="width:31px; height:11px" alt="Jump to top" />'; 
+    //-->
+    </script>
+    <script type="text/javascript" src="$sbRoot/js/lib/jquery.scrolltopcontrol-1.1.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/browser.js?$sbPID"></script>
+    <script type="text/javascript" src="$sbRoot/js/ajaxNotifications.js?$sbPID"></script>
+    <script type="text/javascript">
+    <!--
+        \$(document).ready(function(){ 
+            \$("#NAV$topmenu").addClass("active");
+
+            \$("a.confirm").bind("click",function(e) {
+                e.preventDefault();
+                var target = \$( this ).attr('href');
+                if ( confirm("Are you sure you want to " + \$(this).text() + "?") )
+                    location.href = target;
+                return false;
+            });
+
+        });
+    //-->
+    </script>
+</head>
+
+<body>
+    <header>
+#if $sickbeard.NEWEST_VERSION_STRING:
+<div id="upgrade-notification">
+    <div>
+        <span class="notify-text">$sickbeard.NEWEST_VERSION_STRING</span>
+    </div>
+</div>
+<div id="header-fix"></div>
+#end if
+<div id="header">
+    <a name="top"></a>
+    <span id="logo"><a href="$sbRoot/home/" title="Sick Beard homepage"><img alt="Sick Beard" src="$sbRoot/images/sickbeard.png" width="150" height="90" /></a></span>
+    <span id="versiontext"><a>$sickbeard.version.SICKBEARD_VERSION</a></span>
+</div>
+
+    <div class="navbar">
+        <div class="navbar-inner">
+            <div class="container">
+                <a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse">
+                    <span class="icon-bar"></span>
+                    <span class="icon-bar"></span>
+                    <span class="icon-bar"></span>
+                </a>
+
+                <!--
+                <a class="brand" href="$sbRoot/home/" title="Sick Beard"><img alt="Sick Beard" src="$sbRoot/images/sickbeard.png" width="150" height="72" /></a>
+                -->
+
+                <div class="nav-collapse">
+                    <ul class="nav">
+                        <li class="divider-vertical"></li>
+                        <li id="NAVsystem" class="dropdown">
+                            <a data-toggle="dropdown" class="dropdown-toggle" href="#" onclick="return false;"><img src="$sbRoot/images/menu/system18.png" alt="" width="18" height="18" /> <b class="caret"></b></a>
+                            <ul class="dropdown-menu">
+                                <li><a href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="icon-check"></i> Force Version Check</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/home/restart/?pid=$sbPID" class="confirm"><i class="icon-repeat"></i> Restart</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/home/shutdown/?pid=$sbPID" class="confirm"><i class="icon-off"></i> Shutdown</a></li>
+                            </ul>
+                        </li>
+                        <li class="divider-vertical"></li>
+                        <li id="NAVhome" class="dropdown">
+                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/home/">Home <b class="caret"></b></a>
+                            <ul class="dropdown-menu">
+                                <li><a href="$sbRoot/home/"><i class="icon-home"></i> Home</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/home/addShows/"><i class="icon-plus-sign"></i> Add Shows</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/home/postprocess/"><i class="icon-random"></i> Manual Post-Processing</a></li>
+                            </ul>
+                        </li>
+                        <li class="divider-vertical"></li>
+                        <li id="NAVcomingEpisodes">
+                            <a href="$sbRoot/comingEpisodes/">Coming Episodes</a>
+                        </li>
+                        <li class="divider-vertical"></li>
+                        <li id="NAVhistory">
+                            <a href="$sbRoot/history/">History</a>
+                        </li>
+                        <li class="divider-vertical"></li>
+                        <li id="NAVmanage" class="dropdown">
+                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/manage/">Manage <b class="caret"></b></a>
+                            <ul class="dropdown-menu">
+                                <li><a href="$sbRoot/manage/"><i class="icon-tasks"></i> Mass Update</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/manage/backlogOverview/"><i class="icon-retweet"></i> Backlog Overview</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/manage/manageSearches/"><i class="icon-search"></i> Manage Searches</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/manage/episodeStatuses/"><i class="icon-list-alt"></i> Episode Status Management</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/manage/subtitleMissed/"><i class="icon-list-alt"></i> Manage Missed Subtitles</a></li>
+                                <li class="divider"></li>
+                            </ul>
+                        </li>
+                        <li class="divider-vertical"></li>
+                        <li id="NAVconfig" class="dropdown">
+                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/config/">Config <b class="caret"></b></a>
+                            <ul class="dropdown-menu">
+                                <li><a href="$sbRoot/config/"><i class="icon-question-sign"></i> Help &amp; Info</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/config/general/"><i class="icon-cog"></i> General</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/config/search/"><i class="icon-cog"></i> Search Settings</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/config/providers/"><i class="icon-cog"></i> Search Providers</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/config/postProcessing/"><i class="icon-cog"></i> Post Processing</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/config/subtitles/"><i class="icon-cog"></i> Subtitles Settings</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/config/notifications/"><i class="icon-cog"></i> Notifications</a></li>
+                            </ul>
+                        </li>
+                        <li class="divider-vertical"></li>
+                        <li id="NAVerrorlogs" class="dropdown">
+                            <a data-toggle="dropdown" class="dropdown-toggle disabled" href="$sbRoot/errorlogs/">$logPageTitle <b class="caret"></b></a>
+                            <ul class="dropdown-menu">
+                                <li><a href="$sbRoot/errorlogs/"><i class="icon-file"></i> View Log (Errors)</a></li>
+                                <li class="divider"></li>
+                                <li><a href="$sbRoot/errorlogs/viewlog/"><i class="icon-file"></i> View Log</a></li>
+                            </ul>
+                        </li>
+                        <li class="divider-vertical"></li>
+                    </ul>
+                    
+                </div><!-- /nav-collapse -->
+
+            </div><!-- /container -->
+        </div><!-- /navbar-inner -->
+    </div><!-- /navbar -->
+
+#if $varExists('submenu'):
+    <div id="SubMenu">
+        <span>
+    #set $first = True
+    #for $menuItem in $submenu:
+        #if 'requires' not in $menuItem or $menuItem.requires():
+            #if type($menuItem.path) == dict:
+                #if $first then "" else "</span>| <span>"#<b>$menuItem.title</b>
+                #set $first = False
+                #set $inner_first = True
+                #for $cur_link in $menuItem.path:
+                    #if $inner_first then "" else "&middot; "#<a class="inner" href="$sbRoot/$menuItem.path[$cur_link]">$cur_link</a>
+                    #set $inner_first = False
+                #end for
+            #else
+                #if $first then "" else "| "#<a href="$sbRoot/$menuItem.path" #if 'confirm' in $menuItem then "class=\"confirm\"" else "" #>$menuItem.title</a>
+                #set $first = False
+            #end if
+        #end if
+    #end for
+        </span>
+    </div>
+#end if
+    </header>
+
+<div id="contentWrapper">
+    <div id="content">
+    <h1 class="title">#if $varExists('header') then $header else $title#</h1>
diff --git a/data/interfaces/default/manage_manageSearches.tmpl b/data/interfaces/default/manage_manageSearches.tmpl
index 78c3b0b4c..e8e19a0f4 100644
--- a/data/interfaces/default/manage_manageSearches.tmpl
+++ b/data/interfaces/default/manage_manageSearches.tmpl
@@ -1,40 +1,40 @@
-#import sickbeard
-#import datetime
-#from sickbeard.common import *
-#set global $title="Manage Searches"
-
-#set global $sbPath=".."
-
-#set global $topmenu="manage"#
-#import os.path
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
-
-<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?$sbPID"></script>
-
-
-<h3>Backlog Search:</h3>
-<a class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlogPaused then "0" else "1"#"><i class="#if $backlogPaused then "icon-play" else "icon-pause"#"></i> #if $backlogPaused then "Unpause" else "Pause"#</a> 
-#if not $backlogRunning:
-Not in progress<br />
-#else:
-#if $backlogPaused then "Paused: " else ""#
-Currently running<br />
-#end if
-
-<br />
-<h3>Daily Episode Search:</h3>
-<a class="btn" href="$sbRoot/manage/manageSearches/forceSearch"><i class="icon-exclamation-sign"></i> Force</a> 
-#if not $searchStatus:
-Not in progress<br />
-#else:
-In Progress<br />
-#end if
-<br />
-
-<h3>Version Check:</h3>
-<a class="btn" href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="icon-check"></i> Force Check</a>
-<br />
-
-<br />
-
-#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
+#import sickbeard
+#import datetime
+#from sickbeard.common import *
+#set global $title="Manage Searches"
+
+#set global $sbPath=".."
+
+#set global $topmenu="manage"#
+#import os.path
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_top.tmpl")
+
+<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?$sbPID"></script>
+
+
+<h3>Backlog Search:</h3>
+<a class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlogPaused then "0" else "1"#"><i class="#if $backlogPaused then "icon-play" else "icon-pause"#"></i> #if $backlogPaused then "Unpause" else "Pause"#</a> 
+#if not $backlogRunning:
+Not in progress<br />
+#else:
+#if $backlogPaused then "Paused: " else ""#
+Currently running<br />
+#end if
+
+<br />
+<h3>Daily Episode Search:</h3>
+<a class="btn" href="$sbRoot/manage/manageSearches/forceSearch"><i class="icon-exclamation-sign"></i> Force</a> 
+#if not $searchStatus:
+Not in progress<br />
+#else:
+In Progress<br />
+#end if
+<br />
+
+<h3>Version Check:</h3>
+<a class="btn" href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="icon-check"></i> Force Check</a>
+<br />
+
+<br />
+
+#include $os.path.join($sickbeard.PROG_DIR, "data/interfaces/default/inc_bottom.tmpl")
diff --git a/data/js/ajaxNotifications.js b/data/js/ajaxNotifications.js
index 186028da8..66d07b091 100644
--- a/data/js/ajaxNotifications.js
+++ b/data/js/ajaxNotifications.js
@@ -1,27 +1,27 @@
-var message_url = sbRoot + '/ui/get_messages';
-$.pnotify.defaults.pnotify_width = "340px";
-$.pnotify.defaults.styling = "jqueryui";
-$.pnotify.defaults.pnotify_history = false;
-$.pnotify.defaults.pnotify_delay = 4000;
-
-function check_notifications() {
-    $.getJSON(message_url, function (data) {
-        $.each(data, function (name, data) {
-            $.pnotify({
-                pnotify_type: data.type,
-                pnotify_hide: data.type == 'notice',
-                pnotify_title: data.title,
-                pnotify_text: data.message,
-                pnotify_shadow: false
-            });
-        });
-    });
-
-    setTimeout(check_notifications, 3000);
-}
-
-$(document).ready(function () {
-
-    check_notifications();
-
+var message_url = sbRoot + '/ui/get_messages';
+$.pnotify.defaults.pnotify_width = "340px";
+$.pnotify.defaults.styling = "jqueryui";
+$.pnotify.defaults.pnotify_history = false;
+$.pnotify.defaults.pnotify_delay = 4000;
+
+function check_notifications() {
+    $.getJSON(message_url, function (data) {
+        $.each(data, function (name, data) {
+            $.pnotify({
+                pnotify_type: data.type,
+                pnotify_hide: data.type == 'notice',
+                pnotify_title: data.title,
+                pnotify_text: data.message,
+                pnotify_shadow: false
+            });
+        });
+    });
+
+    setTimeout(check_notifications, 3000);
+}
+
+$(document).ready(function () {
+
+    check_notifications();
+
 });
\ No newline at end of file
diff --git a/data/js/configNotifications.js b/data/js/configNotifications.js
index 4b8813a1a..da3b289e4 100644
--- a/data/js/configNotifications.js
+++ b/data/js/configNotifications.js
@@ -1,205 +1,205 @@
-$(document).ready(function () {
-    var loading = '<img src="' + sbRoot + '/images/loading16.gif" height="16" width="16" />';
-
-    $('#testGrowl').click(function () {
-        $('#testGrowl-result').html(loading);
-        var growl_host = $("#growl_host").val();
-        var growl_password = $("#growl_password").val();
-        $.get(sbRoot + "/home/testGrowl", {'host': growl_host, 'password': growl_password},
-            function (data) { $('#testGrowl-result').html(data); });
-    });
-
-    $('#testProwl').click(function () {
-        $('#testProwl-result').html(loading);
-        var prowl_api = $("#prowl_api").val();
-        var prowl_priority = $("#prowl_priority").val();
-        $.get(sbRoot + "/home/testProwl", {'prowl_api': prowl_api, 'prowl_priority': prowl_priority},
-            function (data) { $('#testProwl-result').html(data); });
-    });
-
-    $('#testXBMC').click(function () {
-        $("#testXBMC").attr("disabled", true);
-        $('#testXBMC-result').html(loading);
-        var xbmc_host = $("#xbmc_host").val();
-        var xbmc_username = $("#xbmc_username").val();
-        var xbmc_password = $("#xbmc_password").val();
-        $.get(sbRoot + "/home/testXBMC", {'host': xbmc_host, 'username': xbmc_username, 'password': xbmc_password})
-            .done(function (data) {
-                $('#testXBMC-result').html(data);
-                $("#testXBMC").attr("disabled", false);
-            });
-    });
-
-    $('#testPLEX').click(function () {
-        $('#testPLEX-result').html(loading);
-        var plex_host = $("#plex_host").val();
-        var plex_username = $("#plex_username").val();
-        var plex_password = $("#plex_password").val();
-        $.get(sbRoot + "/home/testPLEX", {'host': plex_host, 'username': plex_username, 'password': plex_password},
-            function (data) { $('#testPLEX-result').html(data); });
-    });
-
-    $('#testNotifo').click(function () {
-        $('#testNotifo-result').html(loading);
-        var notifo_username = $("#notifo_username").val();
-        var notifo_apisecret = $("#notifo_apisecret").val();
-        $.get(sbRoot + "/home/testNotifo", {'username': notifo_username, 'apisecret': notifo_apisecret},
-            function (data) { $('#testNotifo-result').html(data); });
-    });
-
-    $('#testBoxcar').click(function () {
-        $('#testBoxcar-result').html(loading);
-        var boxcar_username = $("#boxcar_username").val();
-        $.get(sbRoot + "/home/testBoxcar", {'username': boxcar_username},
-            function (data) { $('#testBoxcar-result').html(data); });
-    });
-
-    $('#testPushover').click(function () {
-        $('#testPushover-result').html(loading);
-        var pushover_userkey = $("#pushover_userkey").val();
-        $.get(sbRoot + "/home/testPushover", {'userKey': pushover_userkey},
-            function (data) { $('#testPushover-result').html(data); });
-    });
-
-    $('#testLibnotify').click(function () {
-        $('#testLibnotify-result').html(loading);
-        $.get(sbRoot + "/home/testLibnotify",
-            function (data) { $('#testLibnotify-result').html(data); });
-    });
-
-    $('#twitterStep1').click(function () {
-        $('#testTwitter-result').html(loading);
-        $.get(sbRoot + "/home/twitterStep1", function (data) {window.open(data); })
-            .done(function () { $('#testTwitter-result').html('<b>Step1:</b> Confirm Authorization'); });
-    });
-
-    $('#twitterStep2').click(function () {
-        $('#testTwitter-result').html(loading);
-        var twitter_key = $("#twitter_key").val();
-        $.get(sbRoot + "/home/twitterStep2", {'key': twitter_key},
-            function (data) { $('#testTwitter-result').html(data); });
-    });
-
-    $('#testTwitter').click(function () {
-        $.get(sbRoot + "/home/testTwitter",
-            function (data) { $('#testTwitter-result').html(data); });
-    });
-
-    $('#settingsNMJ').click(function () {
-        if (!$('#nmj_host').val()) {
-            alert('Please fill in the Popcorn IP address');
-            $('#nmj_host').focus();
-            return;
-        }
-        $('#testNMJ-result').html(loading);
-        var nmj_host = $('#nmj_host').val();
-
-        $.get(sbRoot + "/home/settingsNMJ", {'host': nmj_host},
-            function (data) {
-                if (data === null) {
-                    $('#nmj_database').removeAttr('readonly');
-                    $('#nmj_mount').removeAttr('readonly');
-                }
-                var JSONData = $.parseJSON(data);
-                $('#testNMJ-result').html(JSONData.message);
-                $('#nmj_database').val(JSONData.database);
-                $('#nmj_mount').val(JSONData.mount);
-
-                if (JSONData.database) {
-                    $('#nmj_database').attr('readonly', true);
-                } else {
-                    $('#nmj_database').removeAttr('readonly');
-                }
-                if (JSONData.mount) {
-                    $('#nmj_mount').attr('readonly', true);
-                } else {
-                    $('#nmj_mount').removeAttr('readonly');
-                }
-            });
-    });
-
-    $('#testNMJ').click(function () {
-        $('#testNMJ-result').html(loading);
-        var nmj_host = $("#nmj_host").val();
-        var nmj_database = $("#nmj_database").val();
-        var nmj_mount = $("#nmj_mount").val();
-
-        $.get(sbRoot + "/home/testNMJ", {'host': nmj_host, 'database': nmj_database, 'mount': nmj_mount},
-            function (data) { $('#testNMJ-result').html(data); });
-    });
-
-    $('#settingsNMJv2').click(function () {
-        if (!$('#nmjv2_host').val()) {
-            alert('Please fill in the Popcorn IP address');
-            $('#nmjv2_host').focus();
-            return;
-        }
-        $('#testNMJv2-result').html(loading);
-        var nmjv2_host = $('#nmjv2_host').val();
-        var nmjv2_dbloc;
-        var radios = document.getElementsByName("nmjv2_dbloc");
-        for (var i = 0; i < radios.length; i++) {
-            if (radios[i].checked) {
-                nmjv2_dbloc=radios[i].value;
-                break;
-            }
-        }
-
-        var nmjv2_dbinstance=$('#NMJv2db_instance').val();
-        $.get(sbRoot + "/home/settingsNMJv2", {'host': nmjv2_host, 'dbloc': nmjv2_dbloc, 'instance': nmjv2_dbinstance},
-        function (data){
-            if (data == null) {
-                $('#nmjv2_database').removeAttr('readonly');
-            }
-            var JSONData = $.parseJSON(data);
-            $('#testNMJv2-result').html(JSONData.message);
-            $('#nmjv2_database').val(JSONData.database);
-            
-            if (JSONData.database) {
-                $('#nmjv2_database').attr('readonly', true);
-            } else {
-                $('#nmjv2_database').removeAttr('readonly');
-            }
-        });
-    });
-
-    $('#testNMJv2').click(function () {
-        $('#testNMJv2-result').html(loading);
-        var nmjv2_host = $("#nmjv2_host").val();
-        
-        $.get(sbRoot + "/home/testNMJv2", {'host': nmjv2_host},
-            function (data){ $('#testNMJv2-result').html(data); });
-    });
-
-    $('#testTrakt').click(function () {
-        $('#testTrakt-result').html(loading);
-        var trakt_api = $("#trakt_api").val();
-        var trakt_username = $("#trakt_username").val();
-        var trakt_password = $("#trakt_password").val();
-
-        $.get(sbRoot + "/home/testTrakt", {'api': trakt_api, 'username': trakt_username, 'password': trakt_password},
-            function (data) { $('#testTrakt-result').html(data); });
-    });
-
-    $('#testNMA').click(function () {
-        $('#testNMA-result').html(loading);
-        var nma_api = $("#nma_api").val();
-        var nma_priority = $("#nma_priority").val();
-        $.get(sbRoot + "/home/testNMA", {'nma_api': nma_api, 'nma_priority': nma_priority},
-            function (data) { $('#testNMA-result').html(data); });
-    });
-    
-    $('#testMail').click(function () {
-        $('#testMail-result').html(loading);
-        var mail_from = $("#mail_from").val();
-        var mail_to = $("#mail_to").val();
-        var mail_server = $("#mail_server").val();
-        var mail_ssl = $("#mail_ssl").val();
-        var mail_username = $("#mail_username").val();
-        var mail_password = $("#mail_password").val();
-           
-        $.get(sbRoot + "/home/testMail", {},
-            function (data) { $('#testMail-result').html(data); });
-    });
-    
-});
+$(document).ready(function () {
+    var loading = '<img src="' + sbRoot + '/images/loading16.gif" height="16" width="16" />';
+
+    $('#testGrowl').click(function () {
+        $('#testGrowl-result').html(loading);
+        var growl_host = $("#growl_host").val();
+        var growl_password = $("#growl_password").val();
+        $.get(sbRoot + "/home/testGrowl", {'host': growl_host, 'password': growl_password},
+            function (data) { $('#testGrowl-result').html(data); });
+    });
+
+    $('#testProwl').click(function () {
+        $('#testProwl-result').html(loading);
+        var prowl_api = $("#prowl_api").val();
+        var prowl_priority = $("#prowl_priority").val();
+        $.get(sbRoot + "/home/testProwl", {'prowl_api': prowl_api, 'prowl_priority': prowl_priority},
+            function (data) { $('#testProwl-result').html(data); });
+    });
+
+    $('#testXBMC').click(function () {
+        $("#testXBMC").attr("disabled", true);
+        $('#testXBMC-result').html(loading);
+        var xbmc_host = $("#xbmc_host").val();
+        var xbmc_username = $("#xbmc_username").val();
+        var xbmc_password = $("#xbmc_password").val();
+        $.get(sbRoot + "/home/testXBMC", {'host': xbmc_host, 'username': xbmc_username, 'password': xbmc_password})
+            .done(function (data) {
+                $('#testXBMC-result').html(data);
+                $("#testXBMC").attr("disabled", false);
+            });
+    });
+
+    $('#testPLEX').click(function () {
+        $('#testPLEX-result').html(loading);
+        var plex_host = $("#plex_host").val();
+        var plex_username = $("#plex_username").val();
+        var plex_password = $("#plex_password").val();
+        $.get(sbRoot + "/home/testPLEX", {'host': plex_host, 'username': plex_username, 'password': plex_password},
+            function (data) { $('#testPLEX-result').html(data); });
+    });
+
+    $('#testNotifo').click(function () {
+        $('#testNotifo-result').html(loading);
+        var notifo_username = $("#notifo_username").val();
+        var notifo_apisecret = $("#notifo_apisecret").val();
+        $.get(sbRoot + "/home/testNotifo", {'username': notifo_username, 'apisecret': notifo_apisecret},
+            function (data) { $('#testNotifo-result').html(data); });
+    });
+
+    $('#testBoxcar').click(function () {
+        $('#testBoxcar-result').html(loading);
+        var boxcar_username = $("#boxcar_username").val();
+        $.get(sbRoot + "/home/testBoxcar", {'username': boxcar_username},
+            function (data) { $('#testBoxcar-result').html(data); });
+    });
+
+    $('#testPushover').click(function () {
+        $('#testPushover-result').html(loading);
+        var pushover_userkey = $("#pushover_userkey").val();
+        $.get(sbRoot + "/home/testPushover", {'userKey': pushover_userkey},
+            function (data) { $('#testPushover-result').html(data); });
+    });
+
+    $('#testLibnotify').click(function () {
+        $('#testLibnotify-result').html(loading);
+        $.get(sbRoot + "/home/testLibnotify",
+            function (data) { $('#testLibnotify-result').html(data); });
+    });
+
+    $('#twitterStep1').click(function () {
+        $('#testTwitter-result').html(loading);
+        $.get(sbRoot + "/home/twitterStep1", function (data) {window.open(data); })
+            .done(function () { $('#testTwitter-result').html('<b>Step1:</b> Confirm Authorization'); });
+            .done(function () { $('#testTwitter-result').html('<b>Step 1:</b> Confirm Authorization'); });
+
+    $('#twitterStep2').click(function () {
+        $('#testTwitter-result').html(loading);
+        var twitter_key = $("#twitter_key").val();
+        $.get(sbRoot + "/home/twitterStep2", {'key': twitter_key},
+            function (data) { $('#testTwitter-result').html(data); });
+    });
+
+    $('#testTwitter').click(function () {
+        $.get(sbRoot + "/home/testTwitter",
+            function (data) { $('#testTwitter-result').html(data); });
+    });
+
+    $('#settingsNMJ').click(function () {
+        if (!$('#nmj_host').val()) {
+            alert('Please fill in the Popcorn IP address');
+            $('#nmj_host').focus();
+            return;
+        }
+        $('#testNMJ-result').html(loading);
+        var nmj_host = $('#nmj_host').val();
+
+        $.get(sbRoot + "/home/settingsNMJ", {'host': nmj_host},
+            function (data) {
+                if (data === null) {
+                    $('#nmj_database').removeAttr('readonly');
+                    $('#nmj_mount').removeAttr('readonly');
+                }
+                var JSONData = $.parseJSON(data);
+                $('#testNMJ-result').html(JSONData.message);
+                $('#nmj_database').val(JSONData.database);
+                $('#nmj_mount').val(JSONData.mount);
+
+                if (JSONData.database) {
+                    $('#nmj_database').attr('readonly', true);
+                } else {
+                    $('#nmj_database').removeAttr('readonly');
+                }
+                if (JSONData.mount) {
+                    $('#nmj_mount').attr('readonly', true);
+                } else {
+                    $('#nmj_mount').removeAttr('readonly');
+                }
+            });
+    });
+
+    $('#testNMJ').click(function () {
+        $('#testNMJ-result').html(loading);
+        var nmj_host = $("#nmj_host").val();
+        var nmj_database = $("#nmj_database").val();
+        var nmj_mount = $("#nmj_mount").val();
+
+        $.get(sbRoot + "/home/testNMJ", {'host': nmj_host, 'database': nmj_database, 'mount': nmj_mount},
+            function (data) { $('#testNMJ-result').html(data); });
+    });
+
+    $('#settingsNMJv2').click(function () {
+        if (!$('#nmjv2_host').val()) {
+            alert('Please fill in the Popcorn IP address');
+            $('#nmjv2_host').focus();
+            return;
+        }
+        $('#testNMJv2-result').html(loading);
+        var nmjv2_host = $('#nmjv2_host').val();
+        var nmjv2_dbloc;
+        var radios = document.getElementsByName("nmjv2_dbloc");
+        for (var i = 0; i < radios.length; i++) {
+            if (radios[i].checked) {
+                nmjv2_dbloc=radios[i].value;
+                break;
+            }
+        }
+
+        var nmjv2_dbinstance=$('#NMJv2db_instance').val();
+        $.get(sbRoot + "/home/settingsNMJv2", {'host': nmjv2_host, 'dbloc': nmjv2_dbloc, 'instance': nmjv2_dbinstance},
+        function (data){
+            if (data == null) {
+                $('#nmjv2_database').removeAttr('readonly');
+            }
+            var JSONData = $.parseJSON(data);
+            $('#testNMJv2-result').html(JSONData.message);
+            $('#nmjv2_database').val(JSONData.database);
+            
+            if (JSONData.database) {
+                $('#nmjv2_database').attr('readonly', true);
+            } else {
+                $('#nmjv2_database').removeAttr('readonly');
+            }
+        });
+    });
+
+    $('#testNMJv2').click(function () {
+        $('#testNMJv2-result').html(loading);
+        var nmjv2_host = $("#nmjv2_host").val();
+        
+        $.get(sbRoot + "/home/testNMJv2", {'host': nmjv2_host},
+            function (data){ $('#testNMJv2-result').html(data); });
+    });
+
+    $('#testTrakt').click(function () {
+        $('#testTrakt-result').html(loading);
+        var trakt_api = $("#trakt_api").val();
+        var trakt_username = $("#trakt_username").val();
+        var trakt_password = $("#trakt_password").val();
+
+        $.get(sbRoot + "/home/testTrakt", {'api': trakt_api, 'username': trakt_username, 'password': trakt_password},
+            function (data) { $('#testTrakt-result').html(data); });
+    });
+
+    $('#testNMA').click(function () {
+        $('#testNMA-result').html(loading);
+        var nma_api = $("#nma_api").val();
+        var nma_priority = $("#nma_priority").val();
+        $.get(sbRoot + "/home/testNMA", {'nma_api': nma_api, 'nma_priority': nma_priority},
+            function (data) { $('#testNMA-result').html(data); });
+    });
+    
+    $('#testMail').click(function () {
+        $('#testMail-result').html(loading);
+        var mail_from = $("#mail_from").val();
+        var mail_to = $("#mail_to").val();
+        var mail_server = $("#mail_server").val();
+        var mail_ssl = $("#mail_ssl").val();
+        var mail_username = $("#mail_username").val();
+        var mail_password = $("#mail_password").val();
+           
+        $.get(sbRoot + "/home/testMail", {},
+            function (data) { $('#testMail-result').html(data); });
+    });
+    
+});
diff --git a/data/js/configProviders.js b/data/js/configProviders.js
index 07817fb1c..2824378f7 100644
--- a/data/js/configProviders.js
+++ b/data/js/configProviders.js
@@ -1,211 +1,211 @@
-$(document).ready(function(){
-
-    $.fn.showHideProviders = function() {
-        $('.providerDiv').each(function(){
-            var providerName = $(this).attr('id');
-            var selectedProvider = $('#editAProvider :selected').val();
-
-            if (selectedProvider+'Div' == providerName)
-                $(this).show();
-            else
-                $(this).hide();
-
-        });
-    } 
-
-    $.fn.addProvider = function (id, name, url, key, isDefault) {
-
-        if (url.match('/$') == null)
-            url = url + '/'
-
-        var newData = [isDefault, [name, url, key]];
-        newznabProviders[id] = newData;
-
-        if (!isDefault)
-        {
-            $('#editANewznabProvider').addOption(id, name);
-            $(this).populateNewznabSection();
-        }
-
-        if ($('#providerOrderList > #'+id).length == 0) {
-            var toAdd = '<li class="ui-state-default" id="'+id+'"> <input type="checkbox" id="enable_'+id+'" class="provider_enabler" CHECKED> <a href="'+url+'" class="imgLink" target="_new"><img src="'+sbRoot+'/images/providers/newznab.gif" alt="'+name+'" width="16" height="16"></a> '+name+'</li>'
-
-            $('#providerOrderList').append(toAdd);
-            $('#providerOrderList').sortable("refresh");
-        }
-
-        $(this).makeNewznabProviderString();
-
-    }
-
-    $.fn.updateProvider = function (id, url, key) {
-
-        newznabProviders[id][1][1] = url;
-        newznabProviders[id][1][2] = key;
-
-        $(this).populateNewznabSection();
-
-        $(this).makeNewznabProviderString();
-    
-    }
-
-    $.fn.deleteProvider = function (id) {
-    
-        $('#editANewznabProvider').removeOption(id);
-        delete newznabProviders[id];
-        $(this).populateNewznabSection();
-
-        $('#providerOrderList > #'+id).remove();
-
-        $(this).makeNewznabProviderString();
-
-    }
-
-    $.fn.populateNewznabSection = function() {
-
-        var selectedProvider = $('#editANewznabProvider :selected').val();
-
-        if (selectedProvider == 'addNewznab') {
-            var data = ['','',''];
-            var isDefault = 0;
-            $('#newznab_add_div').show();
-            $('#newznab_update_div').hide();
-        } else {
-            var data = newznabProviders[selectedProvider][1];
-            var isDefault = newznabProviders[selectedProvider][0];
-            $('#newznab_add_div').hide();
-            $('#newznab_update_div').show();
-        }
-
-        $('#newznab_name').val(data[0]);
-        $('#newznab_url').val(data[1]);
-        $('#newznab_key').val(data[2]);
-        
-        if (selectedProvider == 'addNewznab') {
-            $('#newznab_name').removeAttr("disabled");
-            $('#newznab_url').removeAttr("disabled");
-        } else {
-
-            $('#newznab_name').attr("disabled", "disabled");
-
-            if (isDefault) {
-                $('#newznab_url').attr("disabled", "disabled");
-                $('#newznab_delete').attr("disabled", "disabled");
-            } else {
-                $('#newznab_url').removeAttr("disabled");
-                $('#newznab_delete').removeAttr("disabled");
-            }
-        }
-
-    }
-    
-    $.fn.makeNewznabProviderString = function() {
-
-        var provStrings = new Array();
-        
-        for (var id in newznabProviders) {
-            provStrings.push(newznabProviders[id][1].join('|'));
-        }
-
-        $('#newznab_string').val(provStrings.join('!!!'))
-
-    }
-    
-    $.fn.refreshProviderList = function() {
-            var idArr = $("#providerOrderList").sortable('toArray');
-            var finalArr = new Array();
-            $.each(idArr, function(key, val) {
-                    var checked = + $('#enable_'+val).prop('checked') ? '1' : '0';
-                    finalArr.push(val + ':' + checked);
-            });
-
-            $("#provider_order").val(finalArr.join(' '));
-    }
-
-    var newznabProviders = new Array();
-
-    $('.newznab_key').change(function(){
-
-        var provider_id = $(this).attr('id');
-        provider_id = provider_id.substring(0, provider_id.length-'_hash'.length);
-
-        var url = $('#'+provider_id+'_url').val();
-        var key = $(this).val();
-
-        $(this).updateProvider(provider_id, url, key);
-
-    });
-    
-    $('#newznab_key,#newznab_url').change(function(){
-        
-        var selectedProvider = $('#editANewznabProvider :selected').val();
-
-		if (selectedProvider == "addNewznab")
-			return;
-
-        var url = $('#newznab_url').val();
-        var key = $('#newznab_key').val();
-        
-        $(this).updateProvider(selectedProvider, url, key);
-        
-    });
-    
-    $('#editAProvider').change(function(){
-        $(this).showHideProviders();
-    });
-
-    $('#editANewznabProvider').change(function(){
-        $(this).populateNewznabSection();
-    });
-    
-    $('.provider_enabler').live('click', function(){
-        $(this).refreshProviderList();
-    }); 
-    
-
-    $('#newznab_add').click(function(){
-        
-        var selectedProvider = $('#editANewznabProvider :selected').val();
-        
-        var name = $('#newznab_name').val();
-        var url = $('#newznab_url').val();
-        var key = $('#newznab_key').val();
-        
-        var params = { name: name }
-        
-        // send to the form with ajax, get a return value
-        $.getJSON(sbRoot + '/config/providers/canAddNewznabProvider', params,
-            function(data){
-                if (data.error != undefined) {
-                    alert(data.error);
-                    return;
-                }
-
-                $(this).addProvider(data.success, name, url, key, 0);
-        });
-
-
-    });
-
-    $('.newznab_delete').click(function(){
-    
-        var selectedProvider = $('#editANewznabProvider :selected').val();
-
-        $(this).deleteProvider(selectedProvider);
-
-    });
-
-    // initialization stuff
-
-    $(this).showHideProviders();
-
-    $("#providerOrderList").sortable({
-        placeholder: 'ui-state-highlight',
-        update: function (event, ui) {
-            $(this).refreshProviderList();
-        }
-    });
-
-    $("#providerOrderList").disableSelection();
-
+$(document).ready(function(){
+
+    $.fn.showHideProviders = function() {
+        $('.providerDiv').each(function(){
+            var providerName = $(this).attr('id');
+            var selectedProvider = $('#editAProvider :selected').val();
+
+            if (selectedProvider+'Div' == providerName)
+                $(this).show();
+            else
+                $(this).hide();
+
+        });
+    } 
+
+    $.fn.addProvider = function (id, name, url, key, isDefault) {
+
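+        // Normalise the provider URL so it always ends with a trailing slash.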
+        if (url.match('/$') == null)
+            url = url + '/'
+
+        var newData = [isDefault, [name, url, key]];
+        newznabProviders[id] = newData;
+
+        if (!isDefault)
+        {
+            $('#editANewznabProvider').addOption(id, name);
+            $(this).populateNewznabSection();
+        }
+
+        if ($('#providerOrderList > #'+id).length == 0) {
+            var toAdd = '<li class="ui-state-default" id="'+id+'"> <input type="checkbox" id="enable_'+id+'" class="provider_enabler" CHECKED> <a href="'+url+'" class="imgLink" target="_new"><img src="'+sbRoot+'/images/providers/newznab.gif" alt="'+name+'" width="16" height="16"></a> '+name+'</li>'
+
+            $('#providerOrderList').append(toAdd);
+            $('#providerOrderList').sortable("refresh");
+        }
+
+        $(this).makeNewznabProviderString();
+
+    }
+
+    $.fn.updateProvider = function (id, url, key) {
+
+        newznabProviders[id][1][1] = url;
+        newznabProviders[id][1][2] = key;
+
+        $(this).populateNewznabSection();
+
+        $(this).makeNewznabProviderString();
+    
+    }
+
+    $.fn.deleteProvider = function (id) {
+    
+        $('#editANewznabProvider').removeOption(id);
+        delete newznabProviders[id];
+        $(this).populateNewznabSection();
+
+        $('#providerOrderList > #'+id).remove();
+
+        $(this).makeNewznabProviderString();
+
+    }
+
+    $.fn.populateNewznabSection = function() {
+
+        var selectedProvider = $('#editANewznabProvider :selected').val();
+
+        if (selectedProvider == 'addNewznab') {
+            var data = ['','',''];
+            var isDefault = 0;
+            $('#newznab_add_div').show();
+            $('#newznab_update_div').hide();
+        } else {
+            var data = newznabProviders[selectedProvider][1];
+            var isDefault = newznabProviders[selectedProvider][0];
+            $('#newznab_add_div').hide();
+            $('#newznab_update_div').show();
+        }
+
+        $('#newznab_name').val(data[0]);
+        $('#newznab_url').val(data[1]);
+        $('#newznab_key').val(data[2]);
+        
+        if (selectedProvider == 'addNewznab') {
+            $('#newznab_name').removeAttr("disabled");
+            $('#newznab_url').removeAttr("disabled");
+        } else {
+
+            $('#newznab_name').attr("disabled", "disabled");
+
+            if (isDefault) {
+                $('#newznab_url').attr("disabled", "disabled");
+                $('#newznab_delete').attr("disabled", "disabled");
+            } else {
+                $('#newznab_url').removeAttr("disabled");
+                $('#newznab_delete').removeAttr("disabled");
+            }
+        }
+
+    }
+    
+    $.fn.makeNewznabProviderString = function() {
+
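+        // Serialise each provider's [name, url, key] with '|' and join providers with '!!!' into the #newznab_string field.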
+        var provStrings = new Array();
+        
+        for (var id in newznabProviders) {
+            provStrings.push(newznabProviders[id][1].join('|'));
+        }
+
+        $('#newznab_string').val(provStrings.join('!!!'))
+
+    }
+    
+    $.fn.refreshProviderList = function() {
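+            // Store the current provider order as space-separated "id:enabled" pairs in the #provider_order field.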
+            var idArr = $("#providerOrderList").sortable('toArray');
+            var finalArr = new Array();
+            $.each(idArr, function(key, val) {
+                    var checked = $('#enable_'+val).prop('checked') ? '1' : '0';
+                    finalArr.push(val + ':' + checked);
+            });
+
+            $("#provider_order").val(finalArr.join(' '));
+    }
+
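+    // newznabProviders maps a provider id to [isDefault, [name, url, key]].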
+    var newznabProviders = new Array();
+
+    $('.newznab_key').change(function(){
+
+        var provider_id = $(this).attr('id');
+        provider_id = provider_id.substring(0, provider_id.length-'_hash'.length);
+
+        var url = $('#'+provider_id+'_url').val();
+        var key = $(this).val();
+
+        $(this).updateProvider(provider_id, url, key);
+
+    });
+    
+    $('#newznab_key,#newznab_url').change(function(){
+        
+        var selectedProvider = $('#editANewznabProvider :selected').val();
+
+        if (selectedProvider == "addNewznab")
+            return;
+
+        var url = $('#newznab_url').val();
+        var key = $('#newznab_key').val();
+        
+        $(this).updateProvider(selectedProvider, url, key);
+        
+    });
+    
+    $('#editAProvider').change(function(){
+        $(this).showHideProviders();
+    });
+
+    $('#editANewznabProvider').change(function(){
+        $(this).populateNewznabSection();
+    });
+    
+    $('.provider_enabler').live('click', function(){
+        $(this).refreshProviderList();
+    }); 
+    
+
+    $('#newznab_add').click(function(){
+        
+        var selectedProvider = $('#editANewznabProvider :selected').val();
+        
+        var name = $('#newznab_name').val();
+        var url = $('#newznab_url').val();
+        var key = $('#newznab_key').val();
+        
+        var params = { name: name }
+        
+        // send to the form with ajax, get a return value
+        $.getJSON(sbRoot + '/config/providers/canAddNewznabProvider', params,
+            function(data){
+                if (data.error != undefined) {
+                    alert(data.error);
+                    return;
+                }
+
+                $(this).addProvider(data.success, name, url, key, 0);
+        });
+
+
+    });
+
+    $('.newznab_delete').click(function(){
+    
+        var selectedProvider = $('#editANewznabProvider :selected').val();
+
+        $(this).deleteProvider(selectedProvider);
+
+    });
+
+    // initialization stuff
+
+    $(this).showHideProviders();
+
+    $("#providerOrderList").sortable({
+        placeholder: 'ui-state-highlight',
+        update: function (event, ui) {
+            $(this).refreshProviderList();
+        }
+    });
+
+    $("#providerOrderList").disableSelection();
+
 });
\ No newline at end of file
diff --git a/lib/tvdb_api/__init__.py b/lib/tvdb_api/__init__.py
index d3f5a12fa..8b1378917 100644
--- a/lib/tvdb_api/__init__.py
+++ b/lib/tvdb_api/__init__.py
@@ -1 +1 @@
-
+
diff --git a/sickbeard/clients/requests/packages/charade/cp949prober.py b/sickbeard/clients/requests/packages/charade/cp949prober.py
index 543501fe0..ff4272f82 100644
--- a/sickbeard/clients/requests/packages/charade/cp949prober.py
+++ b/sickbeard/clients/requests/packages/charade/cp949prober.py
@@ -1,44 +1,44 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCKRDistributionAnalysis
-from .mbcssm import CP949SMModel
-
-
-class CP949Prober(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(CP949SMModel)
-        # NOTE: CP949 is a superset of EUC-KR, so the distribution should be
-        #       not different.
-        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
-        self.reset()
-
-    def get_charset_name(self):
-        return "CP949"
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import CP949SMModel
+
+
+class CP949Prober(MultiByteCharSetProber):
+    def __init__(self):
+        MultiByteCharSetProber.__init__(self)
+        self._mCodingSM = CodingStateMachine(CP949SMModel)
+        # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
+        #       be different.
+        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+        self.reset()
+
+    def get_charset_name(self):
+        return "CP949"
diff --git a/sickbeard/clients/requests/packages/charade/langbulgarianmodel.py b/sickbeard/clients/requests/packages/charade/langbulgarianmodel.py
index ea5a60ba0..e5788fc64 100644
--- a/sickbeard/clients/requests/packages/charade/langbulgarianmodel.py
+++ b/sickbeard/clients/requests/packages/charade/langbulgarianmodel.py
@@ -1,229 +1,229 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Character Mapping Table:
-# this table is modified base on win1251BulgarianCharToOrderMap, so
-# only number <64 is sure valid
-
-Latin5_BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
-194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,  # 80
-210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,  # 90
- 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238,  # a0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # b0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56,  # c0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # d0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16,  # e0
- 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253,  # f0
-)
-
-win1251BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
-206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220,  # 80
-221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229,  # 90
- 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240,  # a0
- 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250,  # b0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # c0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56,  # d0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # e0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16,  # f0
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 96.9392%
-# first 1024 sequences:3.0618%
-# rest  sequences:     0.2992%
-# negative sequences:  0.0020%
-BulgarianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
-3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
-0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
-0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
-0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
-0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
-0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
-2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
-3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
-1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
-3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
-1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
-2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
-2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
-3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
-1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
-2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
-2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
-3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
-1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
-2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
-2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
-2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
-1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
-2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
-1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
-3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
-1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
-3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
-1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
-2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
-1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
-2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
-1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
-2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
-1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
-1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
-1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
-2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
-1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
-2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
-1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
-0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
-1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
-1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
-1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
-0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
-0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
-0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
-1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
-1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
-1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
-1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-)
-
-Latin5BulgarianModel = {
-  'charToOrderMap': Latin5_BulgarianCharToOrderMap,
-  'precedenceMatrix': BulgarianLangModel,
-  'mTypicalPositiveRatio': 0.969392,
-  'keepEnglishLetter': False,
-  'charsetName': "ISO-8859-5"
-}
-
-Win1251BulgarianModel = {
-  'charToOrderMap': win1251BulgarianCharToOrderMap,
-  'precedenceMatrix': BulgarianLangModel,
-  'mTypicalPositiveRatio': 0.969392,
-  'keepEnglishLetter': False,
-  'charsetName': "windows-1251"
-}
-
-
-# flake8: noqa
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to a word
+# 252: 0 - 9
+
+# Character Mapping Table:
+# this table is modified based on win1251BulgarianCharToOrderMap, so
+# only numbers <64 are known to be valid
+
+Latin5_BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
+194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,  # 80
+210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,  # 90
+ 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238,  # a0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # b0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56,  # c0
+  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # d0
+  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16,  # e0
+ 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253,  # f0
+)
+
+win1251BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
+206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220,  # 80
+221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229,  # 90
+ 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240,  # a0
+ 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250,  # b0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # c0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56,  # d0
+  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # e0
+  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16,  # f0
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 96.9392%
+# first 1024 sequences: 3.0618%
+# rest  sequences:     0.2992%
+# negative sequences:  0.0020%
+BulgarianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
+3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
+0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
+0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
+0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
+0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
+0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
+2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
+3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
+1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
+3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
+1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
+2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
+2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
+3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
+1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
+2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
+2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
+1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
+2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
+2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
+2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
+1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
+2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
+1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
+3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
+1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
+3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
+1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
+2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
+1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
+2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
+1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
+2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
+1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
+2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
+1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
+0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
+1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
+1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
+1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
+0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
+0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
+1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
+1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
+1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+)
+
+Latin5BulgarianModel = {
+  'charToOrderMap': Latin5_BulgarianCharToOrderMap,
+  'precedenceMatrix': BulgarianLangModel,
+  'mTypicalPositiveRatio': 0.969392,
+  'keepEnglishLetter': False,
+  'charsetName': "ISO-8859-5"
+}
+
+Win1251BulgarianModel = {
+  'charToOrderMap': win1251BulgarianCharToOrderMap,
+  'precedenceMatrix': BulgarianLangModel,
+  'mTypicalPositiveRatio': 0.969392,
+  'keepEnglishLetter': False,
+  'charsetName': "windows-1251"
+}
+
+
+# flake8: noqa
diff --git a/sickbeard/clients/requests/packages/charade/sjisprober.py b/sickbeard/clients/requests/packages/charade/sjisprober.py
index 9bb0cdcf1..b173614e6 100644
--- a/sickbeard/clients/requests/packages/charade/sjisprober.py
+++ b/sickbeard/clients/requests/packages/charade/sjisprober.py
@@ -1,91 +1,91 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import sys
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import SJISDistributionAnalysis
-from .jpcntx import SJISContextAnalysis
-from .mbcssm import SJISSMModel
-from . import constants
-
-
-class SJISProber(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(SJISSMModel)
-        self._mDistributionAnalyzer = SJISDistributionAnalysis()
-        self._mContextAnalyzer = SJISContextAnalysis()
-        self.reset()
-
-    def reset(self):
-        MultiByteCharSetProber.reset(self)
-        self._mContextAnalyzer.reset()
-
-    def get_charset_name(self):
-        return "SHIFT_JIS"
-
-    def feed(self, aBuf):
-        aLen = len(aBuf)
-        for i in range(0, aLen):
-            codingState = self._mCodingSM.next_state(aBuf[i])
-            if codingState == constants.eError:
-                if constants._debug:
-                    sys.stderr.write(self.get_charset_name()
-                                     + ' prober hit error at byte ' + str(i)
-                                     + '\n')
-                self._mState = constants.eNotMe
-                break
-            elif codingState == constants.eItsMe:
-                self._mState = constants.eFoundIt
-                break
-            elif codingState == constants.eStart:
-                charLen = self._mCodingSM.get_current_charlen()
-                if i == 0:
-                    self._mLastChar[1] = aBuf[0]
-                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
-                                                charLen)
-                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
-                else:
-                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
-                                                     - charLen], charLen)
-                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
-                                                     charLen)
-
-        self._mLastChar[0] = aBuf[aLen - 1]
-
-        if self.get_state() == constants.eDetecting:
-            if (self._mContextAnalyzer.got_enough_data() and
-               (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
-                self._mState = constants.eFoundIt
-
-        return self.get_state()
-
-    def get_confidence(self):
-        contxtCf = self._mContextAnalyzer.get_confidence()
-        distribCf = self._mDistributionAnalyzer.get_confidence()
-        return max(contxtCf, distribCf)
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import SJISDistributionAnalysis
+from .jpcntx import SJISContextAnalysis
+from .mbcssm import SJISSMModel
+from . import constants
+
+
+class SJISProber(MultiByteCharSetProber):
+    def __init__(self):
+        MultiByteCharSetProber.__init__(self)
+        self._mCodingSM = CodingStateMachine(SJISSMModel)
+        self._mDistributionAnalyzer = SJISDistributionAnalysis()
+        self._mContextAnalyzer = SJISContextAnalysis()
+        self.reset()
+
+    def reset(self):
+        MultiByteCharSetProber.reset(self)
+        self._mContextAnalyzer.reset()
+
+    def get_charset_name(self):
+        return "SHIFT_JIS"
+
+    def feed(self, aBuf):
+        aLen = len(aBuf)
+        for i in range(0, aLen):
+            codingState = self._mCodingSM.next_state(aBuf[i])
+            if codingState == constants.eError:
+                if constants._debug:
+                    sys.stderr.write(self.get_charset_name()
+                                     + ' prober hit error at byte ' + str(i)
+                                     + '\n')
+                self._mState = constants.eNotMe
+                break
+            elif codingState == constants.eItsMe:
+                self._mState = constants.eFoundIt
+                break
+            elif codingState == constants.eStart:
+                charLen = self._mCodingSM.get_current_charlen()
+                if i == 0:
+                    self._mLastChar[1] = aBuf[0]
+                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
+                                                charLen)
+                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
+                else:
+                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
+                                                     - charLen], charLen)
+                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
+                                                     charLen)
+
+        self._mLastChar[0] = aBuf[aLen - 1]
+
+        if self.get_state() == constants.eDetecting:
+            if (self._mContextAnalyzer.got_enough_data() and
+               (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
+                self._mState = constants.eFoundIt
+
+        return self.get_state()
+
+    def get_confidence(self):
+        contxtCf = self._mContextAnalyzer.get_confidence()
+        distribCf = self._mDistributionAnalyzer.get_confidence()
+        return max(contxtCf, distribCf)
diff --git a/sickbeard/clients/requests/packages/charade/universaldetector.py b/sickbeard/clients/requests/packages/charade/universaldetector.py
index 6175bfbc3..6307155d2 100644
--- a/sickbeard/clients/requests/packages/charade/universaldetector.py
+++ b/sickbeard/clients/requests/packages/charade/universaldetector.py
@@ -1,172 +1,172 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from . import constants
-import sys
-import codecs
-from .latin1prober import Latin1Prober  # windows-1252
-from .mbcsgroupprober import MBCSGroupProber  # multi-byte character sets
-from .sbcsgroupprober import SBCSGroupProber  # single-byte character sets
-from .escprober import EscCharSetProber  # ISO-2122, etc.
-import re
-
-MINIMUM_THRESHOLD = 0.20
-ePureAscii = 0
-eEscAscii = 1
-eHighbyte = 2
-
-
-class UniversalDetector:
-    def __init__(self):
-        self._highBitDetector = re.compile(b'[\x80-\xFF]')
-        self._escDetector = re.compile(b'(\033|~{)')
-        self._mEscCharSetProber = None
-        self._mCharSetProbers = []
-        self.reset()
-
-    def reset(self):
-        self.result = {'encoding': None, 'confidence': 0.0}
-        self.done = False
-        self._mStart = True
-        self._mGotData = False
-        self._mInputState = ePureAscii
-        self._mLastChar = b''
-        if self._mEscCharSetProber:
-            self._mEscCharSetProber.reset()
-        for prober in self._mCharSetProbers:
-            prober.reset()
-
-    def feed(self, aBuf):
-        if self.done:
-            return
-
-        aLen = len(aBuf)
-        if not aLen:
-            return
-
-        if not self._mGotData:
-            # If the data starts with BOM, we know it is UTF
-            if aBuf[:3] == codecs.BOM:
-                # EF BB BF  UTF-8 with BOM
-                self.result = {'encoding': "UTF-8", 'confidence': 1.0}
-            elif aBuf[:4] == codecs.BOM_UTF32_LE:
-                # FF FE 00 00  UTF-32, little-endian BOM
-                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
-            elif aBuf[:4] == codecs.BOM_UTF32_BE:
-                # 00 00 FE FF  UTF-32, big-endian BOM
-                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
-            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
-                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
-                self.result = {
-                    'encoding': "X-ISO-10646-UCS-4-3412",
-                    'confidence': 1.0
-                }
-            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
-                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
-                self.result = {
-                    'encoding': "X-ISO-10646-UCS-4-2143",
-                    'confidence': 1.0
-                }
-            elif aBuf[:2] == codecs.BOM_LE:
-                # FF FE  UTF-16, little endian BOM
-                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
-            elif aBuf[:2] == codecs.BOM_BE:
-                # FE FF  UTF-16, big endian BOM
-                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
-
-        self._mGotData = True
-        if self.result['encoding'] and (self.result['confidence'] > 0.0):
-            self.done = True
-            return
-
-        if self._mInputState == ePureAscii:
-            if self._highBitDetector.search(aBuf):
-                self._mInputState = eHighbyte
-            elif ((self._mInputState == ePureAscii) and
-                    self._escDetector.search(self._mLastChar + aBuf)):
-                self._mInputState = eEscAscii
-
-        self._mLastChar = aBuf[-1:]
-
-        if self._mInputState == eEscAscii:
-            if not self._mEscCharSetProber:
-                self._mEscCharSetProber = EscCharSetProber()
-            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
-                self.result = {
-                    'encoding': self._mEscCharSetProber.get_charset_name(),
-                    'confidence': self._mEscCharSetProber.get_confidence()
-                }
-                self.done = True
-        elif self._mInputState == eHighbyte:
-            if not self._mCharSetProbers:
-                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
-                                         Latin1Prober()]
-            for prober in self._mCharSetProbers:
-                if prober.feed(aBuf) == constants.eFoundIt:
-                    self.result = {'encoding': prober.get_charset_name(),
-                                   'confidence': prober.get_confidence()}
-                    self.done = True
-                    break
-
-    def close(self):
-        if self.done:
-            return
-        if not self._mGotData:
-            if constants._debug:
-                sys.stderr.write('no data received!\n')
-            return
-        self.done = True
-
-        if self._mInputState == ePureAscii:
-            self.result = {'encoding': 'ascii', 'confidence': 1.0}
-            return self.result
-
-        if self._mInputState == eHighbyte:
-            proberConfidence = None
-            maxProberConfidence = 0.0
-            maxProber = None
-            for prober in self._mCharSetProbers:
-                if not prober:
-                    continue
-                proberConfidence = prober.get_confidence()
-                if proberConfidence > maxProberConfidence:
-                    maxProberConfidence = proberConfidence
-                    maxProber = prober
-            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
-                self.result = {'encoding': maxProber.get_charset_name(),
-                               'confidence': maxProber.get_confidence()}
-                return self.result
-
-        if constants._debug:
-            sys.stderr.write('no probers hit minimum threshhold\n')
-            for prober in self._mCharSetProbers[0].mProbers:
-                if not prober:
-                    continue
-                sys.stderr.write('%s confidence = %s\n' %
-                                 (prober.get_charset_name(),
-                                  prober.get_confidence()))
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import sys
+import codecs
+from .latin1prober import Latin1Prober  # windows-1252
+from .mbcsgroupprober import MBCSGroupProber  # multi-byte character sets
+from .sbcsgroupprober import SBCSGroupProber  # single-byte character sets
+from .escprober import EscCharSetProber  # ISO-2022, etc.
+import re
+
+MINIMUM_THRESHOLD = 0.20
+ePureAscii = 0
+eEscAscii = 1
+eHighbyte = 2
+
+
+class UniversalDetector:
+    def __init__(self):
+        self._highBitDetector = re.compile(b'[\x80-\xFF]')
+        self._escDetector = re.compile(b'(\033|~{)')
+        self._mEscCharSetProber = None
+        self._mCharSetProbers = []
+        self.reset()
+
+    def reset(self):
+        self.result = {'encoding': None, 'confidence': 0.0}
+        self.done = False
+        self._mStart = True
+        self._mGotData = False
+        self._mInputState = ePureAscii
+        self._mLastChar = b''
+        if self._mEscCharSetProber:
+            self._mEscCharSetProber.reset()
+        for prober in self._mCharSetProbers:
+            prober.reset()
+
+    def feed(self, aBuf):
+        if self.done:
+            return
+
+        aLen = len(aBuf)
+        if not aLen:
+            return
+
+        if not self._mGotData:
+            # If the data starts with BOM, we know it is UTF
+            if aBuf[:3] == codecs.BOM_UTF8:
+                # EF BB BF  UTF-8 with BOM
+                self.result = {'encoding': "UTF-8", 'confidence': 1.0}
+            elif aBuf[:4] == codecs.BOM_UTF32_LE:
+                # FF FE 00 00  UTF-32, little-endian BOM
+                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
+            elif aBuf[:4] == codecs.BOM_UTF32_BE:
+                # 00 00 FE FF  UTF-32, big-endian BOM
+                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
+            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
+                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
+                self.result = {
+                    'encoding': "X-ISO-10646-UCS-4-3412",
+                    'confidence': 1.0
+                }
+            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
+                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
+                self.result = {
+                    'encoding': "X-ISO-10646-UCS-4-2143",
+                    'confidence': 1.0
+                }
+            elif aBuf[:2] == codecs.BOM_LE:
+                # FF FE  UTF-16, little endian BOM
+                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
+            elif aBuf[:2] == codecs.BOM_BE:
+                # FE FF  UTF-16, big endian BOM
+                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
+
+        self._mGotData = True
+        if self.result['encoding'] and (self.result['confidence'] > 0.0):
+            self.done = True
+            return
+
+        if self._mInputState == ePureAscii:
+            if self._highBitDetector.search(aBuf):
+                self._mInputState = eHighbyte
+            elif ((self._mInputState == ePureAscii) and
+                    self._escDetector.search(self._mLastChar + aBuf)):
+                self._mInputState = eEscAscii
+
+        self._mLastChar = aBuf[-1:]
+
+        if self._mInputState == eEscAscii:
+            if not self._mEscCharSetProber:
+                self._mEscCharSetProber = EscCharSetProber()
+            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
+                self.result = {
+                    'encoding': self._mEscCharSetProber.get_charset_name(),
+                    'confidence': self._mEscCharSetProber.get_confidence()
+                }
+                self.done = True
+        elif self._mInputState == eHighbyte:
+            if not self._mCharSetProbers:
+                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
+                                         Latin1Prober()]
+            for prober in self._mCharSetProbers:
+                if prober.feed(aBuf) == constants.eFoundIt:
+                    self.result = {'encoding': prober.get_charset_name(),
+                                   'confidence': prober.get_confidence()}
+                    self.done = True
+                    break
+
+    def close(self):
+        if self.done:
+            return
+        if not self._mGotData:
+            if constants._debug:
+                sys.stderr.write('no data received!\n')
+            return
+        self.done = True
+
+        if self._mInputState == ePureAscii:
+            self.result = {'encoding': 'ascii', 'confidence': 1.0}
+            return self.result
+
+        if self._mInputState == eHighbyte:
+            proberConfidence = None
+            maxProberConfidence = 0.0
+            maxProber = None
+            for prober in self._mCharSetProbers:
+                if not prober:
+                    continue
+                proberConfidence = prober.get_confidence()
+                if proberConfidence > maxProberConfidence:
+                    maxProberConfidence = proberConfidence
+                    maxProber = prober
+            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
+                self.result = {'encoding': maxProber.get_charset_name(),
+                               'confidence': maxProber.get_confidence()}
+                return self.result
+
+        if constants._debug:
+            sys.stderr.write('no probers hit minimum threshold\n')
+            for prober in self._mCharSetProbers[0].mProbers:
+                if not prober:
+                    continue
+                sys.stderr.write('%s confidence = %s\n' %
+                                 (prober.get_charset_name(),
+                                  prober.get_confidence()))
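The detector above is driven incrementally: feed() consumes raw byte chunks, checks for a BOM on the first chunk, then routes high-bit or escape-sequence input to the appropriate probers, and close() settles on a final answer. A minimal usage sketch, assuming the vendored package path this patch touches (lib/requests/packages/charade) and a made-up input file name:

    from lib.requests.packages.charade.universaldetector import UniversalDetector

    detector = UniversalDetector()
    with open('unknown.txt', 'rb') as f:                # raw bytes, never decoded text
        for chunk in iter(lambda: f.read(4096), b''):
            detector.feed(chunk)
            if detector.done:                           # a BOM or a confident prober ends the scan early
                break
    detector.close()                                    # falls back to ascii or the best-scoring prober
    print(detector.result)                              # e.g. {'encoding': 'UTF-8', 'confidence': 1.0}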
diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py
index 997b09bf2..635e8a0db 100644
--- a/sickbeard/databases/cache_db.py
+++ b/sickbeard/databases/cache_db.py
@@ -1,51 +1,51 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-from sickbeard import db
-
-# Add new migrations at the bottom of the list; subclass the previous migration.
-class InitialSchema (db.SchemaUpgrade):
-    def test(self):
-        return self.hasTable("lastUpdate")
-
-    def execute(self):
-
-        queries = [
-            ("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",),
-            ("CREATE TABLE db_version (db_version INTEGER);",),
-            ("INSERT INTO db_version (db_version) VALUES (?)", 1),
-        ]
-        for query in queries:
-            if len(query) == 1:
-                self.connection.action(query[0])
-            else:
-                self.connection.action(query[0], query[1:])
-
-class AddSceneExceptions(InitialSchema):
-    def test(self):
-        return self.hasTable("scene_exceptions")
-
-    def execute(self):
-        self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)")
-
-class AddSceneNameCache(AddSceneExceptions):
-    def test(self):
-        return self.hasTable("scene_names")
-
-    def execute(self):
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+from sickbeard import db
+
+# Add new migrations at the bottom of the list; subclass the previous migration.
+class InitialSchema (db.SchemaUpgrade):
+    def test(self):
+        return self.hasTable("lastUpdate")
+
+    def execute(self):
+
+        queries = [
+            ("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",),
+            ("CREATE TABLE db_version (db_version INTEGER);",),
+            ("INSERT INTO db_version (db_version) VALUES (?)", 1),
+        ]
+        for query in queries:
+            if len(query) == 1:
+                self.connection.action(query[0])
+            else:
+                self.connection.action(query[0], query[1:])
+
+class AddSceneExceptions(InitialSchema):
+    def test(self):
+        return self.hasTable("scene_exceptions")
+
+    def execute(self):
+        self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)")
+
+class AddSceneNameCache(AddSceneExceptions):
+    def test(self):
+        return self.hasTable("scene_names")
+
+    def execute(self):
         self.connection.action("CREATE TABLE scene_names (tvdb_id INTEGER, name TEXT)")
\ No newline at end of file
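The migration chain above works by each upgrade subclassing the previous one, so the schema checks and statements run in order. A sketch of how the next migration would be appended, using only the helpers visible here (hasTable and connection.action); the class and table names are hypothetical:

    class AddNetworkTimezones(AddSceneNameCache):
        def test(self):
            return self.hasTable("network_timezones")

        def execute(self):
            self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)")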
diff --git a/sickbeard/encodingKludge.py b/sickbeard/encodingKludge.py
index cdd95f29d..9f24fe328 100644
--- a/sickbeard/encodingKludge.py
+++ b/sickbeard/encodingKludge.py
@@ -1,69 +1,69 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import os
-
-from sickbeard import logger
-import sickbeard
-
-# This module tries to deal with the apparently random behavior of python when dealing with unicode <-> utf-8
-# encodings. It tries to just use unicode, but if that fails then it tries forcing it to utf-8. Any functions
-# which return something should always return unicode.
-
-def fixStupidEncodings(x, silent=False):
-    if type(x) == str:
-        try:
-            return x.decode(sickbeard.SYS_ENCODING)
-        except UnicodeDecodeError:
-            logger.log(u"Unable to decode value: "+repr(x), logger.ERROR)
-            return None
-    elif type(x) == unicode:
-        return x
-    else:
-        logger.log(u"Unknown value passed in, ignoring it: "+str(type(x))+" ("+repr(x)+":"+repr(type(x))+")", logger.DEBUG if silent else logger.ERROR)
-        return None
-
-    return None
-
-def fixListEncodings(x):
-    if type(x) != list and type(x) != tuple:
-        return x
-    else:
-        return filter(lambda x: x != None, map(fixStupidEncodings, x))
-
-def callPeopleStupid(x):
-    try:
-        return x.encode(sickbeard.SYS_ENCODING)
-    except UnicodeEncodeError:
-        logger.log(u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: "+repr(x)+", "+sickbeard.SYS_ENCODING, logger.ERROR)
-        return x.encode(sickbeard.SYS_ENCODING, 'ignore')
-
-def ek(func, *args):
-    result = None
-
-    if os.name == 'nt':
-        result = func(*args)
-    else:
-        result = func(*[callPeopleStupid(x) if type(x) in (str, unicode) else x for x in args])
-
-    if type(result) in (list, tuple):
-        return fixListEncodings(result)
-    elif type(result) == str:
-        return fixStupidEncodings(result)
-    else:
-        return result
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+
+from sickbeard import logger
+import sickbeard
+
+# This module tries to deal with the apparently random behavior of python when dealing with unicode <-> utf-8
+# encodings. It tries to just use unicode, but if that fails then it tries forcing it to utf-8. Any functions
+# which return something should always return unicode.
+
+def fixStupidEncodings(x, silent=False):
+    if type(x) == str:
+        try:
+            return x.decode(sickbeard.SYS_ENCODING)
+        except UnicodeDecodeError:
+            logger.log(u"Unable to decode value: "+repr(x), logger.ERROR)
+            return None
+    elif type(x) == unicode:
+        return x
+    else:
+        logger.log(u"Unknown value passed in, ignoring it: "+str(type(x))+" ("+repr(x)+":"+repr(type(x))+")", logger.DEBUG if silent else logger.ERROR)
+        return None
+
+    return None
+
+def fixListEncodings(x):
+    if type(x) != list and type(x) != tuple:
+        return x
+    else:
+        return filter(lambda x: x != None, map(fixStupidEncodings, x))
+
+def callPeopleStupid(x):
+    try:
+        return x.encode(sickbeard.SYS_ENCODING)
+    except UnicodeEncodeError:
+        logger.log(u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: "+repr(x)+", "+sickbeard.SYS_ENCODING, logger.ERROR)
+        return x.encode(sickbeard.SYS_ENCODING, 'ignore')
+
+def ek(func, *args):
+    result = None
+
+    if os.name == 'nt':
+        result = func(*args)
+    else:
+        result = func(*[callPeopleStupid(x) if type(x) in (str, unicode) else x for x in args])
+
+    if type(result) in (list, tuple):
+        return fixListEncodings(result)
+    elif type(result) == str:
+        return fixStupidEncodings(result)
+    else:
+        return result
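Callers are expected to route filesystem calls through ek() instead of calling them directly, so that path arguments get re-encoded with the system encoding on non-Windows platforms and any str results come back as unicode. An illustrative sketch, assuming sickbeard.SYS_ENCODING has already been set at startup (the paths are made up):

    import os

    from sickbeard import encodingKludge as ek

    names = ek.ek(os.listdir, u'/tv/Épisode spécial')            # args encoded before the real call
    full_path = ek.ek(os.path.join, u'/tv', u'Épisode spécial')  # str result decoded back to unicode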
diff --git a/sickbeard/generic_queue.py b/sickbeard/generic_queue.py
index eb1d3801c..fd72911eb 100644
--- a/sickbeard/generic_queue.py
+++ b/sickbeard/generic_queue.py
@@ -1,134 +1,134 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import datetime
-import threading
-
-from sickbeard import logger
-
-class QueuePriorities:
-    LOW = 10
-    NORMAL = 20
-    HIGH = 30
-
-class GenericQueue(object):
-
-    def __init__(self):
-
-        self.currentItem = None
-        self.queue = []
-
-        self.thread = None
-
-        self.queue_name = "QUEUE"
-
-        self.min_priority = 0
-        
-        self.currentItem = None
-
-    def pause(self):
-        logger.log(u"Pausing queue")
-        self.min_priority = 999999999999
-    
-    def unpause(self):
-        logger.log(u"Unpausing queue")
-        self.min_priority = 0
-
-    def add_item(self, item):
-        item.added = datetime.datetime.now()
-        self.queue.append(item)
-        
-        return item
-
-    def run(self):
-
-        # only start a new task if one isn't already going
-        if self.thread == None or self.thread.isAlive() == False:
-
-            # if the thread is dead then the current item should be finished
-            if self.currentItem != None:
-                self.currentItem.finish()
-                self.currentItem = None
-
-            # if there's something in the queue then run it in a thread and take it out of the queue
-            if len(self.queue) > 0:
-
-                # sort by priority
-                def sorter(x,y):
-                    """
-                    Sorts by priority descending then time ascending
-                    """
-                    if x.priority == y.priority:
-                        if y.added == x.added:
-                            return 0
-                        elif y.added < x.added:
-                            return 1
-                        elif y.added > x.added:
-                            return -1
-                    else:
-                        return y.priority-x.priority
-
-                self.queue.sort(cmp=sorter)
-                
-                queueItem = self.queue[0]
-
-                if queueItem.priority < self.min_priority:
-                    return
-
-                # launch the queue item in a thread
-                # TODO: improve thread name
-                threadName = self.queue_name + '-' + queueItem.get_thread_name()
-                self.thread = threading.Thread(None, queueItem.execute, threadName)
-                self.thread.start()
-
-                self.currentItem = queueItem
-
-                # take it out of the queue
-                del self.queue[0]
-
-class QueueItem:
-    def __init__(self, name, action_id = 0):
-        self.name = name
-
-        self.inProgress = False
-
-        self.priority = QueuePriorities.NORMAL
-
-        self.thread_name = None
-
-        self.action_id = action_id
-        
-        self.added = None
-
-    def get_thread_name(self):
-        if self.thread_name:
-            return self.thread_name
-        else:
-            return self.name.replace(" ","-").upper()
-
-    def execute(self):
-        """Implementing classes should call this"""
-
-        self.inProgress = True
-
-    def finish(self):
-        """Implementing Classes should call this"""
-
-        self.inProgress = False
-
-
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import datetime
+import threading
+
+from sickbeard import logger
+
+class QueuePriorities:
+    LOW = 10
+    NORMAL = 20
+    HIGH = 30
+
+class GenericQueue(object):
+
+    def __init__(self):
+
+        self.currentItem = None
+        self.queue = []
+
+        self.thread = None
+
+        self.queue_name = "QUEUE"
+
+        self.min_priority = 0
+
+    def pause(self):
+        logger.log(u"Pausing queue")
+        self.min_priority = 999999999999
+    
+    def unpause(self):
+        logger.log(u"Unpausing queue")
+        self.min_priority = 0
+
+    def add_item(self, item):
+        item.added = datetime.datetime.now()
+        self.queue.append(item)
+        
+        return item
+
+    def run(self):
+
+        # only start a new task if one isn't already going
+        if self.thread == None or self.thread.isAlive() == False:
+
+            # if the thread is dead then the current item should be finished
+            if self.currentItem != None:
+                self.currentItem.finish()
+                self.currentItem = None
+
+            # if there's something in the queue then run it in a thread and take it out of the queue
+            if len(self.queue) > 0:
+
+                # sort by priority
+                def sorter(x,y):
+                    """
+                    Sorts by priority descending then time ascending
+                    """
+                    if x.priority == y.priority:
+                        if y.added == x.added:
+                            return 0
+                        elif y.added < x.added:
+                            return 1
+                        elif y.added > x.added:
+                            return -1
+                    else:
+                        return y.priority-x.priority
+
+                self.queue.sort(cmp=sorter)
+                
+                queueItem = self.queue[0]
+
+                if queueItem.priority < self.min_priority:
+                    return
+
+                # launch the queue item in a thread
+                # TODO: improve thread name
+                threadName = self.queue_name + '-' + queueItem.get_thread_name()
+                self.thread = threading.Thread(None, queueItem.execute, threadName)
+                self.thread.start()
+
+                self.currentItem = queueItem
+
+                # take it out of the queue
+                del self.queue[0]
+
+class QueueItem:
+    def __init__(self, name, action_id = 0):
+        self.name = name
+
+        self.inProgress = False
+
+        self.priority = QueuePriorities.NORMAL
+
+        self.thread_name = None
+
+        self.action_id = action_id
+        
+        self.added = None
+
+    def get_thread_name(self):
+        if self.thread_name:
+            return self.thread_name
+        else:
+            return self.name.replace(" ","-").upper()
+
+    def execute(self):
+        """Implementing classes should call this"""
+
+        self.inProgress = True
+
+    def finish(self):
+        """Implementing classes should call this"""
+
+        self.inProgress = False
+
+
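The contract above: add_item() timestamps the item, run() (invoked periodically by the scheduler) sorts pending items by priority then age, starts the winner's execute() on a worker thread, and calls finish() on it once that thread dies. A hypothetical concrete item, shown only to illustrate the intended subclassing pattern:

    class ExampleQueueItem(QueueItem):
        def __init__(self, name):
            QueueItem.__init__(self, "Example-" + name, action_id=1)

        def execute(self):
            QueueItem.execute(self)        # marks the item as in progress
            # ... the actual work runs here, on the queue's worker thread ...

    example_queue = GenericQueue()
    example_queue.queue_name = "EXAMPLE"
    example_queue.add_item(ExampleQueueItem("demo"))
    example_queue.run()                    # normally called by the scheduler; invoked directly here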
diff --git a/sickbeard/gh_api.py b/sickbeard/gh_api.py
index 3ed976288..481bbc019 100644
--- a/sickbeard/gh_api.py
+++ b/sickbeard/gh_api.py
@@ -1,59 +1,59 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-try:
-    import json
-except ImportError:
-    from lib import simplejson as json
-
-import urllib
-
-class GitHub(object):
-    """
-    Simple api wrapper for the Github API v3. Currently only supports the small thing that SB
-    needs it for - list of cimmots.
-    """
-    
-    def _access_API(self, path, params=None):
-        """
-        Access the API at the path given and with the optional params given.
-        
-        path: A list of the path elements to use (eg. ['repos', 'midgetspy', 'Sick-Beard', 'commits'])
-        params: Optional dict of name/value pairs for extra params to send. (eg. {'per_page': 10})
-        
-        Returns a deserialized json object of the result. Doesn't do any error checking (hope it works).
-        """
-        
-        url = 'https://api.github.com/' + '/'.join(path)
-        
-        if params and type(params) is dict:
-            url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params.keys()])
-        
-        return json.load(urllib.urlopen(url)) 
-    
-    def commits(self, user, repo, branch='master'):
-        """
-        Uses the API to get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD.
-        
-        user: The github username of the person whose repo you're querying
-        repo: The repo name to query
-        branch: Optional, the branch name to show commits from
-        
-        Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
-        """
-        return self._access_API(['repos', user, repo, 'commits'], {'per_page': 100, 'sha': branch})
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+try:
+    import json
+except ImportError:
+    from lib import simplejson as json
+
+import urllib
+
+class GitHub(object):
+    """
+    Simple API wrapper for the GitHub API v3. Currently only supports the one small thing that SB
+    needs it for - a list of commits.
+    """
+    
+    def _access_API(self, path, params=None):
+        """
+        Access the API at the path given and with the optional params given.
+        
+        path: A list of the path elements to use (eg. ['repos', 'midgetspy', 'Sick-Beard', 'commits'])
+        params: Optional dict of name/value pairs for extra params to send. (eg. {'per_page': 10})
+        
+        Returns a deserialized json object of the result. Doesn't do any error checking (hope it works).
+        """
+        
+        url = 'https://api.github.com/' + '/'.join(path)
+        
+        if params and type(params) is dict:
+            url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params.keys()])
+        
+        return json.load(urllib.urlopen(url)) 
+    
+    def commits(self, user, repo, branch='master'):
+        """
+        Uses the API to get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD.
+        
+        user: The github username of the person whose repo you're querying
+        repo: The repo name to query
+        branch: Optional, the branch name to show commits from
+        
+        Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
+        """
+        return self._access_API(['repos', user, repo, 'commits'], {'per_page': 100, 'sha': branch})
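A usage sketch for the wrapper above. The user/repo pair is the upstream repository already used as the example in _access_API's docstring, and the fields read from each entry follow the GitHub v3 commit response format:

    from sickbeard.gh_api import GitHub

    gh = GitHub()
    for commit in gh.commits('midgetspy', 'Sick-Beard', branch='master'):
        # each entry is a plain dict deserialized from the API's JSON
        print(commit['sha'][:7] + ' ' + commit['commit']['message'].splitlines()[0])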
diff --git a/sickbeard/image_cache.py b/sickbeard/image_cache.py
index 0a485a5a2..fecc41de4 100644
--- a/sickbeard/image_cache.py
+++ b/sickbeard/image_cache.py
@@ -1,225 +1,225 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import os.path
-
-import sickbeard
-
-from sickbeard import helpers, logger, exceptions
-from sickbeard import encodingKludge as ek
-
-from sickbeard.metadata.generic import GenericMetadata
-
-from lib.hachoir_parser import createParser
-from lib.hachoir_metadata import extractMetadata
-
-class ImageCache:
-    
-    def __init__(self):
-        pass
-    
-    def _cache_dir(self):
-        """
-        Builds up the full path to the image cache directory
-        """
-        return ek.ek(os.path.abspath, ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images'))
-
-    def poster_path(self, tvdb_id):
-        """
-        Builds up the path to a poster cache for a given tvdb id
-
-        returns: a full path to the cached poster file for the given tvdb id 
-        
-        tvdb_id: ID of the show to use in the file name
-        """
-        poster_file_name = str(tvdb_id) + '.poster.jpg'
-        return ek.ek(os.path.join, self._cache_dir(), poster_file_name)
-    
-    def banner_path(self, tvdb_id):
-        """
-        Builds up the path to a banner cache for a given tvdb id
-
-        returns: a full path to the cached banner file for the given tvdb id 
-        
-        tvdb_id: ID of the show to use in the file name
-        """
-        banner_file_name = str(tvdb_id) + '.banner.jpg'
-        return ek.ek(os.path.join, self._cache_dir(), banner_file_name)
-
-    def has_poster(self, tvdb_id):
-        """
-        Returns true if a cached poster exists for the given tvdb id
-        """
-        poster_path = self.poster_path(tvdb_id)
-        logger.log(u"Checking if file "+str(poster_path)+" exists", logger.DEBUG)
-        return ek.ek(os.path.isfile, poster_path)
-
-    def has_banner(self, tvdb_id):
-        """
-        Returns true if a cached banner exists for the given tvdb id
-        """
-        banner_path = self.banner_path(tvdb_id)
-        logger.log(u"Checking if file "+str(banner_path)+" exists", logger.DEBUG)
-        return ek.ek(os.path.isfile, banner_path)
-
-    BANNER = 1
-    POSTER = 2
-    
-    def which_type(self, path):
-        """
-        Analyzes the image provided and attempts to determine whether it is a poster or banner.
-        
-        returns: BANNER, POSTER if it concluded one or the other, or None if the image was neither (or didn't exist)
-        
-        path: full path to the image
-        """
-
-        if not ek.ek(os.path.isfile, path):
-            logger.log(u"Couldn't check the type of "+str(path)+" cause it doesn't exist", logger.WARNING)
-            return None
-
-        # use hachoir to parse the image for us
-        img_parser = createParser(path)
-        img_metadata = extractMetadata(img_parser)
-
-        if not img_metadata:
-            logger.log(u"Unable to get metadata from "+str(path)+", not using your existing image", logger.DEBUG)
-            return None
-        
-        img_ratio = float(img_metadata.get('width'))/float(img_metadata.get('height'))
-
-        img_parser.stream._input.close()
-
-        # most posters are around 0.68 width/height ratio (eg. 680/1000)
-        if 0.55 < img_ratio < 0.8:
-            return self.POSTER
-        
-        # most banners are around 5.4 width/height ratio (eg. 758/140)
-        elif 5 < img_ratio < 6:
-            return self.BANNER
-        else:
-            logger.log(u"Image has size ratio of "+str(img_ratio)+", unknown type", logger.WARNING)
-            return None
-    
-    def _cache_image_from_file(self, image_path, img_type, tvdb_id):
-        """
-        Takes the image provided and copies it to the cache folder
-        
-        returns: bool representing success
-        
-        image_path: path to the image we're caching
-        img_type: BANNER or POSTER
-        tvdb_id: id of the show this image belongs to
-        """
-
-        # generate the path based on the type & tvdb_id
-        if img_type == self.POSTER:
-            dest_path = self.poster_path(tvdb_id)
-        elif img_type == self.BANNER:
-            dest_path = self.banner_path(tvdb_id)
-        else:
-            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
-            return False
-
-        # make sure the cache folder exists before we try copying to it
-        if not ek.ek(os.path.isdir, self._cache_dir()):
-            logger.log(u"Image cache dir didn't exist, creating it at "+str(self._cache_dir()))
-            ek.ek(os.makedirs, self._cache_dir())
-
-        logger.log(u"Copying from "+image_path+" to "+dest_path)
-        helpers.copyFile(image_path, dest_path)
-        
-        return True
-
-    def _cache_image_from_tvdb(self, show_obj, img_type):
-        """
-        Retrieves an image of the type specified from TVDB and saves it to the cache folder
-        
-        returns: bool representing success
-        
-        show_obj: TVShow object that we want to cache an image for
-        img_type: BANNER or POSTER
-        """
-
-        # generate the path based on the type & tvdb_id
-        if img_type == self.POSTER:
-            img_type_name = 'poster'
-            dest_path = self.poster_path(show_obj.tvdbid)
-        elif img_type == self.BANNER:
-            img_type_name = 'banner'
-            dest_path = self.banner_path(show_obj.tvdbid)
-        else:
-            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
-            return False
-
-        # retrieve the image from TVDB using the generic metadata class
-        #TODO: refactor
-        metadata_generator = GenericMetadata()
-        img_data = metadata_generator._retrieve_show_image(img_type_name, show_obj)
-        result = metadata_generator._write_image(img_data, dest_path)
-
-        return result
-    
-    def fill_cache(self, show_obj):
-        """
-        Caches all images for the given show. Copies them from the show dir if possible, or
-        downloads them from TVDB if they aren't in the show dir.
-        
-        show_obj: TVShow object to cache images for
-        """
-
-        logger.log(u"Checking if we need any cache images for show "+str(show_obj.tvdbid), logger.DEBUG)
-
-        # check if the images are already cached or not
-        need_images = {self.POSTER: not self.has_poster(show_obj.tvdbid),
-                       self.BANNER: not self.has_banner(show_obj.tvdbid),
-                       }
-        
-        if not need_images[self.POSTER] and not need_images[self.BANNER]:
-            logger.log(u"No new cache images needed, not retrieving new ones")
-            return
-        
-        # check the show dir for images and use them
-        try:
-            for cur_provider in sickbeard.metadata_provider_dict.values():
-                logger.log(u"Checking if we can use the show image from the "+cur_provider.name+" metadata", logger.DEBUG)
-                if ek.ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
-                    cur_file_name = os.path.abspath(cur_provider.get_poster_path(show_obj))
-                    cur_file_type = self.which_type(cur_file_name)
-                    
-                    if cur_file_type == None:
-                        logger.log(u"Unable to retrieve image type, not using the image from "+str(cur_file_name), logger.WARNING)
-                        continue
-
-                    logger.log(u"Checking if image "+cur_file_name+" (type "+str(cur_file_type)+" needs metadata: "+str(need_images[cur_file_type]), logger.DEBUG)
-                    
-                    if cur_file_type in need_images and need_images[cur_file_type]:
-                        logger.log(u"Found an image in the show dir that doesn't exist in the cache, caching it: "+cur_file_name+", type "+str(cur_file_type), logger.DEBUG)
-                        self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.tvdbid)
-                        need_images[cur_file_type] = False
-        except exceptions.ShowDirNotFoundException:
-            logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)
-                    
-        # download from TVDB for missing ones
-        for cur_image_type in [self.POSTER, self.BANNER]:
-            logger.log(u"Seeing if we still need an image of type "+str(cur_image_type)+": "+str(need_images[cur_image_type]), logger.DEBUG)
-            if cur_image_type in need_images and need_images[cur_image_type]:
-                self._cache_image_from_tvdb(show_obj, cur_image_type)
-        
-
-        logger.log(u"Done cache check")
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import os.path
+
+import sickbeard
+
+from sickbeard import helpers, logger, exceptions
+from sickbeard import encodingKludge as ek
+
+from sickbeard.metadata.generic import GenericMetadata
+
+from lib.hachoir_parser import createParser
+from lib.hachoir_metadata import extractMetadata
+
+class ImageCache:
+    
+    def __init__(self):
+        pass
+    
+    def _cache_dir(self):
+        """
+        Builds up the full path to the image cache directory
+        """
+        return ek.ek(os.path.abspath, ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images'))
+
+    def poster_path(self, tvdb_id):
+        """
+        Builds up the path to a poster cache for a given tvdb id
+
+        returns: a full path to the cached poster file for the given tvdb id 
+        
+        tvdb_id: ID of the show to use in the file name
+        """
+        poster_file_name = str(tvdb_id) + '.poster.jpg'
+        return ek.ek(os.path.join, self._cache_dir(), poster_file_name)
+    
+    def banner_path(self, tvdb_id):
+        """
+        Builds up the path to a banner cache for a given tvdb id
+
+        returns: a full path to the cached banner file for the given tvdb id 
+        
+        tvdb_id: ID of the show to use in the file name
+        """
+        banner_file_name = str(tvdb_id) + '.banner.jpg'
+        return ek.ek(os.path.join, self._cache_dir(), banner_file_name)
+
+    def has_poster(self, tvdb_id):
+        """
+        Returns true if a cached poster exists for the given tvdb id
+        """
+        poster_path = self.poster_path(tvdb_id)
+        logger.log(u"Checking if file "+str(poster_path)+" exists", logger.DEBUG)
+        return ek.ek(os.path.isfile, poster_path)
+
+    def has_banner(self, tvdb_id):
+        """
+        Returns true if a cached banner exists for the given tvdb id
+        """
+        banner_path = self.banner_path(tvdb_id)
+        logger.log(u"Checking if file "+str(banner_path)+" exists", logger.DEBUG)
+        return ek.ek(os.path.isfile, banner_path)
+
+    BANNER = 1
+    POSTER = 2
+    
+    def which_type(self, path):
+        """
+        Analyzes the image provided and attempts to determine whether it is a poster or banner.
+        
+        returns: BANNER or POSTER if it concluded one or the other, or None if the image was neither (or didn't exist)
+        
+        path: full path to the image
+        """
+
+        if not ek.ek(os.path.isfile, path):
+            logger.log(u"Couldn't check the type of "+str(path)+" because it doesn't exist", logger.WARNING)
+            return None
+
+        # use hachoir to parse the image for us
+        img_parser = createParser(path)
+        img_metadata = extractMetadata(img_parser)
+
+        if not img_metadata:
+            logger.log(u"Unable to get metadata from "+str(path)+", not using your existing image", logger.DEBUG)
+            return None
+        
+        img_ratio = float(img_metadata.get('width'))/float(img_metadata.get('height'))
+
+        img_parser.stream._input.close()
+
+        # most posters are around 0.68 width/height ratio (eg. 680/1000)
+        if 0.55 < img_ratio < 0.8:
+            return self.POSTER
+        
+        # most banners are around 5.4 width/height ratio (eg. 758/140)
+        elif 5 < img_ratio < 6:
+            return self.BANNER
+        else:
+            logger.log(u"Image has size ratio of "+str(img_ratio)+", unknown type", logger.WARNING)
+            return None
+    
+    def _cache_image_from_file(self, image_path, img_type, tvdb_id):
+        """
+        Takes the image provided and copies it to the cache folder
+        
+        returns: bool representing success
+        
+        image_path: path to the image we're caching
+        img_type: BANNER or POSTER
+        tvdb_id: id of the show this image belongs to
+        """
+
+        # generate the path based on the type & tvdb_id
+        if img_type == self.POSTER:
+            dest_path = self.poster_path(tvdb_id)
+        elif img_type == self.BANNER:
+            dest_path = self.banner_path(tvdb_id)
+        else:
+            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
+            return False
+
+        # make sure the cache folder exists before we try copying to it
+        if not ek.ek(os.path.isdir, self._cache_dir()):
+            logger.log(u"Image cache dir didn't exist, creating it at "+str(self._cache_dir()))
+            ek.ek(os.makedirs, self._cache_dir())
+
+        logger.log(u"Copying from "+image_path+" to "+dest_path)
+        helpers.copyFile(image_path, dest_path)
+        
+        return True
+
+    def _cache_image_from_tvdb(self, show_obj, img_type):
+        """
+        Retrieves an image of the type specified from TVDB and saves it to the cache folder
+        
+        returns: bool representing success
+        
+        show_obj: TVShow object that we want to cache an image for
+        img_type: BANNER or POSTER
+        """
+
+        # generate the path based on the type & tvdb_id
+        if img_type == self.POSTER:
+            img_type_name = 'poster'
+            dest_path = self.poster_path(show_obj.tvdbid)
+        elif img_type == self.BANNER:
+            img_type_name = 'banner'
+            dest_path = self.banner_path(show_obj.tvdbid)
+        else:
+            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
+            return False
+
+        # retrieve the image from TVDB using the generic metadata class
+        #TODO: refactor
+        metadata_generator = GenericMetadata()
+        img_data = metadata_generator._retrieve_show_image(img_type_name, show_obj)
+        result = metadata_generator._write_image(img_data, dest_path)
+
+        return result
+    
+    def fill_cache(self, show_obj):
+        """
+        Caches all images for the given show. Copies them from the show dir if possible, or
+        downloads them from TVDB if they aren't in the show dir.
+        
+        show_obj: TVShow object to cache images for
+        """
+
+        logger.log(u"Checking if we need any cache images for show "+str(show_obj.tvdbid), logger.DEBUG)
+
+        # check if the images are already cached or not
+        need_images = {self.POSTER: not self.has_poster(show_obj.tvdbid),
+                       self.BANNER: not self.has_banner(show_obj.tvdbid),
+                       }
+        
+        if not need_images[self.POSTER] and not need_images[self.BANNER]:
+            logger.log(u"No new cache images needed, not retrieving new ones")
+            return
+        
+        # check the show dir for images and use them
+        try:
+            for cur_provider in sickbeard.metadata_provider_dict.values():
+                logger.log(u"Checking if we can use the show image from the "+cur_provider.name+" metadata", logger.DEBUG)
+                if ek.ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
+                    cur_file_name = os.path.abspath(cur_provider.get_poster_path(show_obj))
+                    cur_file_type = self.which_type(cur_file_name)
+                    
+                    if cur_file_type == None:
+                        logger.log(u"Unable to retrieve image type, not using the image from "+str(cur_file_name), logger.WARNING)
+                        continue
+
+                    logger.log(u"Checking if image "+cur_file_name+" (type "+str(cur_file_type)+") needs metadata: "+str(need_images[cur_file_type]), logger.DEBUG)
+                    
+                    if cur_file_type in need_images and need_images[cur_file_type]:
+                        logger.log(u"Found an image in the show dir that doesn't exist in the cache, caching it: "+cur_file_name+", type "+str(cur_file_type), logger.DEBUG)
+                        self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.tvdbid)
+                        need_images[cur_file_type] = False
+        except exceptions.ShowDirNotFoundException:
+            logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)
+                    
+        # download from TVDB for missing ones
+        for cur_image_type in [self.POSTER, self.BANNER]:
+            logger.log(u"Seeing if we still need an image of type "+str(cur_image_type)+": "+str(need_images[cur_image_type]), logger.DEBUG)
+            if cur_image_type in need_images and need_images[cur_image_type]:
+                self._cache_image_from_tvdb(show_obj, cur_image_type)
+        
+
+        logger.log(u"Done cache check")
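ImageCache is driven from show-processing code with a TVShow object: fill_cache() prefers images already in the show directory (classified by the width/height ratio heuristic in which_type) and only downloads from TVDB for whatever is still missing. Illustrative only; show_obj stands in for a real TVShow instance with a tvdbid attribute:

    from sickbeard.image_cache import ImageCache

    cache = ImageCache()
    cache.fill_cache(show_obj)                       # copy from the show dir, or pull from TVDB
    if cache.has_poster(show_obj.tvdbid):
        print(cache.poster_path(show_obj.tvdbid))    # <CACHE_DIR>/images/<tvdbid>.poster.jpg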
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index 6c2953e6d..cb776d2ba 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -1,185 +1,185 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-from __future__ import with_statement 
-
-import os
-import threading
-
-import logging
-
-import sickbeard
-
-from sickbeard import classes
-
-
-# number of log files to keep
-NUM_LOGS = 3
-
-# log size in bytes
-LOG_SIZE = 10000000 # 10 megs
-
-ERROR = logging.ERROR
-WARNING = logging.WARNING
-MESSAGE = logging.INFO
-DEBUG = logging.DEBUG
-
-reverseNames = {u'ERROR': ERROR,
-                u'WARNING': WARNING,
-                u'INFO': MESSAGE,
-                u'DEBUG': DEBUG}
-
-class SBRotatingLogHandler(object):
-
-    def __init__(self, log_file, num_files, num_bytes):
-        self.num_files = num_files
-        self.num_bytes = num_bytes
-        
-        self.log_file = log_file
-        self.cur_handler = None
-
-        self.writes_since_check = 0
-
-        self.log_lock = threading.Lock()
-
-    def initLogging(self, consoleLogging=True):
-    
-        self.log_file = os.path.join(sickbeard.LOG_DIR, self.log_file)
-    
-        self.cur_handler = self._config_handler()
-    
-        logging.getLogger('sickbeard').addHandler(self.cur_handler)
-        logging.getLogger('subliminal').addHandler(self.cur_handler)
-        
-        # define a Handler which writes INFO messages or higher to the sys.stderr
-        if consoleLogging:
-            console = logging.StreamHandler()
-    
-            console.setLevel(logging.INFO)
-    
-            # set a format which is simpler for console use
-            console.setFormatter(logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'))
-    
-            # add the handler to the root logger
-            logging.getLogger('sickbeard').addHandler(console)
-            logging.getLogger('subliminal').addHandler(console)
-            
-        logging.getLogger('sickbeard').setLevel(logging.DEBUG)
-        logging.getLogger('subliminal').setLevel(logging.ERROR)
-        
-    def _config_handler(self):
-        """
-        Configure a file handler to log at file_name and return it.
-        """
-    
-        file_handler = logging.FileHandler(self.log_file)
-        file_handler.setLevel(logging.DEBUG)
-        file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%b-%d %H:%M:%S'))
-        return file_handler
-
-    def _log_file_name(self, i):
-        """
-        Returns a numbered log file name depending on i. If i==0 it just uses logName, if not it appends
-        it to the extension (blah.log.3 for i == 3)
-        
-        i: Log number to ues
-        """
-        return self.log_file + ('.' + str(i) if i else '')
-    
-    def _num_logs(self):
-        """
-        Scans the log folder and figures out how many log files there are already on disk
-        
-        Returns: The number of the last used file (eg. mylog.log.3 would return 3). If there are no logs it returns -1
-        """
-        cur_log = 0
-        while os.path.isfile(self._log_file_name(cur_log)):
-            cur_log += 1
-        return cur_log - 1
-    
-    def _rotate_logs(self):
-        
-        sb_logger = logging.getLogger('sickbeard')
-        subli_logger = logging.getLogger('subliminal')
-        
-        # delete the old handler
-        if self.cur_handler:
-            self.cur_handler.flush()
-            self.cur_handler.close()
-            sb_logger.removeHandler(self.cur_handler)
-            subli_logger.removeHandler(self.cur_handler)
-    
-        # rename or delete all the old log files
-        for i in range(self._num_logs(), -1, -1):
-            cur_file_name = self._log_file_name(i)
-            try:
-                if i >= NUM_LOGS:
-                    os.remove(cur_file_name)
-                else:
-                    os.rename(cur_file_name, self._log_file_name(i+1))
-            except WindowsError:
-                pass
-        
-        # the new log handler will always be on the un-numbered .log file
-        new_file_handler = self._config_handler()
-        
-        self.cur_handler = new_file_handler
-        
-        sb_logger.addHandler(new_file_handler)
-        subli_logger.addHandler(new_file_handler)
-
-    def log(self, toLog, logLevel=MESSAGE):
-    
-        with self.log_lock:
-    
-            # check the size and see if we need to rotate
-            if self.writes_since_check >= 10:
-                if os.path.isfile(self.log_file) and os.path.getsize(self.log_file) >= LOG_SIZE:
-                    self._rotate_logs()
-                self.writes_since_check = 0
-            else:
-                self.writes_since_check += 1
-    
-            meThread = threading.currentThread().getName()
-            message = meThread + u" :: " + toLog
-        
-            out_line = message.encode('utf-8')
-        
-            sb_logger = logging.getLogger('sickbeard')
-    
-            try:
-                if logLevel == DEBUG:
-                    sb_logger.debug(out_line)
-                elif logLevel == MESSAGE:
-                    sb_logger.info(out_line)
-                elif logLevel == WARNING:
-                    sb_logger.warning(out_line)
-                elif logLevel == ERROR:
-                    sb_logger.error(out_line)
-            
-                    # add errors to the UI logger
-                    classes.ErrorViewer.add(classes.UIError(message))
-                else:
-                    sb_logger.log(logLevel, out_line)
-            except ValueError:
-                pass
-
-sb_log_instance = SBRotatingLogHandler('sickbeard.log', NUM_LOGS, LOG_SIZE)
-
-def log(toLog, logLevel=MESSAGE):
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import with_statement 
+
+import os
+import threading
+
+import logging
+
+import sickbeard
+
+from sickbeard import classes
+
+
+# number of log files to keep
+NUM_LOGS = 3
+
+# log size in bytes
+LOG_SIZE = 10000000 # 10 megs
+
+ERROR = logging.ERROR
+WARNING = logging.WARNING
+MESSAGE = logging.INFO
+DEBUG = logging.DEBUG
+
+reverseNames = {u'ERROR': ERROR,
+                u'WARNING': WARNING,
+                u'INFO': MESSAGE,
+                u'DEBUG': DEBUG}
+
+class SBRotatingLogHandler(object):
+
+    def __init__(self, log_file, num_files, num_bytes):
+        self.num_files = num_files
+        self.num_bytes = num_bytes
+        
+        self.log_file = log_file
+        self.cur_handler = None
+
+        self.writes_since_check = 0
+
+        self.log_lock = threading.Lock()
+
+    def initLogging(self, consoleLogging=True):
+    
+        self.log_file = os.path.join(sickbeard.LOG_DIR, self.log_file)
+    
+        self.cur_handler = self._config_handler()
+    
+        logging.getLogger('sickbeard').addHandler(self.cur_handler)
+        logging.getLogger('subliminal').addHandler(self.cur_handler)
+        
+        # define a Handler which writes INFO messages or higher to sys.stderr
+        if consoleLogging:
+            console = logging.StreamHandler()
+    
+            console.setLevel(logging.INFO)
+    
+            # set a format which is simpler for console use
+            console.setFormatter(logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'))
+    
+            # add the handler to the root logger
+            logging.getLogger('sickbeard').addHandler(console)
+            logging.getLogger('subliminal').addHandler(console)
+            
+        logging.getLogger('sickbeard').setLevel(logging.DEBUG)
+        logging.getLogger('subliminal').setLevel(logging.ERROR)
+        
+    def _config_handler(self):
+        """
+        Configure a file handler that logs to self.log_file and return it.
+        """
+    
+        file_handler = logging.FileHandler(self.log_file)
+        file_handler.setLevel(logging.DEBUG)
+        file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%b-%d %H:%M:%S'))
+        return file_handler
+
+    def _log_file_name(self, i):
+        """
+        Returns a numbered log file name depending on i. If i == 0 it just uses the base log file name, otherwise it
+        appends the number to the extension (blah.log.3 for i == 3)
+        
+        i: Log number to use
+        """
+        return self.log_file + ('.' + str(i) if i else '')
+    
+    def _num_logs(self):
+        """
+        Scans the log folder and figures out how many log files there are already on disk
+        
+        Returns: The number of the last used file (e.g. mylog.log.3 would return 3). If there are no logs it returns -1
+        """
+        cur_log = 0
+        while os.path.isfile(self._log_file_name(cur_log)):
+            cur_log += 1
+        return cur_log - 1
+    
+    def _rotate_logs(self):
+        
+        sb_logger = logging.getLogger('sickbeard')
+        subli_logger = logging.getLogger('subliminal')
+        
+        # delete the old handler
+        if self.cur_handler:
+            self.cur_handler.flush()
+            self.cur_handler.close()
+            sb_logger.removeHandler(self.cur_handler)
+            subli_logger.removeHandler(self.cur_handler)
+    
+        # rename or delete all the old log files
+        for i in range(self._num_logs(), -1, -1):
+            cur_file_name = self._log_file_name(i)
+            try:
+                if i >= NUM_LOGS:
+                    os.remove(cur_file_name)
+                else:
+                    os.rename(cur_file_name, self._log_file_name(i+1))
+            except WindowsError:
+                pass
+        
+        # the new log handler will always be on the un-numbered .log file
+        new_file_handler = self._config_handler()
+        
+        self.cur_handler = new_file_handler
+        
+        sb_logger.addHandler(new_file_handler)
+        subli_logger.addHandler(new_file_handler)
+
+    def log(self, toLog, logLevel=MESSAGE):
+    
+        with self.log_lock:
+    
+            # check the size and see if we need to rotate
+            if self.writes_since_check >= 10:
+                if os.path.isfile(self.log_file) and os.path.getsize(self.log_file) >= LOG_SIZE:
+                    self._rotate_logs()
+                self.writes_since_check = 0
+            else:
+                self.writes_since_check += 1
+    
+            meThread = threading.currentThread().getName()
+            message = meThread + u" :: " + toLog
+        
+            out_line = message.encode('utf-8')
+        
+            sb_logger = logging.getLogger('sickbeard')
+    
+            try:
+                if logLevel == DEBUG:
+                    sb_logger.debug(out_line)
+                elif logLevel == MESSAGE:
+                    sb_logger.info(out_line)
+                elif logLevel == WARNING:
+                    sb_logger.warning(out_line)
+                elif logLevel == ERROR:
+                    sb_logger.error(out_line)
+            
+                    # add errors to the UI logger
+                    classes.ErrorViewer.add(classes.UIError(message))
+                else:
+                    sb_logger.log(logLevel, out_line)
+            except ValueError:
+                pass
+
+sb_log_instance = SBRotatingLogHandler('sickbeard.log', NUM_LOGS, LOG_SIZE)
+
+def log(toLog, logLevel=MESSAGE):
     sb_log_instance.log(toLog, logLevel)
\ No newline at end of file
diff --git a/sickbeard/naming.py b/sickbeard/naming.py
index fd0a3b528..ceedc9af3 100644
--- a/sickbeard/naming.py
+++ b/sickbeard/naming.py
@@ -1,179 +1,179 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import datetime
-import os
-
-import sickbeard
-from sickbeard import encodingKludge as ek
-from sickbeard import tv
-from sickbeard import common
-from sickbeard import logger
-from sickbeard.name_parser.parser import NameParser, InvalidNameException
-
-from common import Quality, DOWNLOADED
-
-name_presets = ('%SN - %Sx%0E - %EN',
-                '%S.N.S%0SE%0E.%E.N',
-                '%Sx%0E - %EN',
-                'S%0SE%0E - %EN',
-                'Season %0S/%S.N.S%0SE%0E.%Q.N-%RG'
-                )
-
-name_abd_presets = ('%SN - %A-D - %EN',
-                    '%S.N.%A.D.%E.N.%Q.N',
-                    '%Y/%0M/%S.N.%A.D.%E.N-%RG'
-                    )
-
-class TVShow():
-    def __init__(self):
-        self.name = "Show Name"
-        self.genre = "Comedy"
-        self.air_by_date = 0
-
-class TVEpisode(tv.TVEpisode):
-    def __init__(self, season, episode, name):
-        self.relatedEps = []
-        self._name = name
-        self._season = season
-        self._episode = episode
-        self._airdate = datetime.date(2010, 3, 9)
-        self.show = TVShow()
-        self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV)
-        self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
-
-def check_force_season_folders(pattern=None, multi=None):
-    """
-    Checks if the name can still be parsed if you strip off the folders to determine if we need to force season folders
-    to be enabled or not.
-    
-    Returns true if season folders need to be forced on or false otherwise.
-    """
-    if pattern == None:
-        pattern = sickbeard.NAMING_PATTERN
-    
-    valid = not validate_name(pattern, None, file_only=True) 
-    
-    if multi != None:
-        valid = valid or not validate_name(pattern, multi, file_only=True)
-
-    return valid
-
-def check_valid_naming(pattern=None, multi=None):
-    """
-    Checks if the name is can be parsed back to its original form for both single and multi episodes.
-    
-    Returns true if the naming is valid, false if not.
-    """
-    if pattern == None:
-        pattern = sickbeard.NAMING_PATTERN
-        
-    logger.log(u"Checking whether the pattern "+pattern+" is valid for a single episode", logger.DEBUG)
-    valid = validate_name(pattern, None)
-
-    if multi != None:
-        logger.log(u"Checking whether the pattern "+pattern+" is valid for a multi episode", logger.DEBUG)
-        valid = valid and validate_name(pattern, multi)
-
-    return valid
-
-def check_valid_abd_naming(pattern=None):
-    """
-    Checks if the name is can be parsed back to its original form for an air-by-date format.
-    
-    Returns true if the naming is valid, false if not.
-    """
-    if pattern == None:
-        pattern = sickbeard.NAMING_PATTERN
-        
-    logger.log(u"Checking whether the pattern "+pattern+" is valid for an air-by-date episode", logger.DEBUG)
-    valid = validate_name(pattern, abd=True)
-
-    return valid
-
-
-def validate_name(pattern, multi=None, file_only=False, abd=False):
-    ep = _generate_sample_ep(multi, abd)
-
-    parser = NameParser(True)
-
-    new_name = ep.formatted_filename(pattern, multi) + '.ext'
-    new_path = ep.formatted_dir(pattern, multi)
-    if not file_only:
-        new_name = ek.ek(os.path.join, new_path, new_name)
-
-    if not new_name:
-        logger.log(u"Unable to create a name out of "+pattern, logger.DEBUG)
-        return False
-
-    logger.log(u"Trying to parse "+new_name, logger.DEBUG)
-
-    try:
-        result = parser.parse(new_name)
-    except InvalidNameException:
-        logger.log(u"Unable to parse "+new_name+", not valid", logger.DEBUG)
-        return False
-    
-    logger.log("The name "+new_name + " parsed into " + str(result), logger.DEBUG)
-
-    if abd:
-        if result.air_date != ep.airdate:
-            logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
-            return False
-    else:
-        if result.season_number != ep.season:
-            logger.log(u"Season incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
-            return False
-        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
-            logger.log(u"Episode incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
-            return False
-
-    return True
-
-def _generate_sample_ep(multi=None, abd=False):
-    # make a fake episode object
-    ep = TVEpisode(2,3,"Ep Name")
-    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
-    ep._airdate = datetime.date(2011, 3, 9)
-    if abd:
-        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
-    else:
-        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
-
-    if multi != None:
-        ep._name = "Ep Name (1)"
-        ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'
-
-        secondEp = TVEpisode(2,4,"Ep Name (2)")
-        secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
-        secondEp._release_name = ep._release_name
-
-        thirdEp = TVEpisode(2,5,"Ep Name (3)")
-        thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
-        thirdEp._release_name = ep._release_name
-
-        ep.relatedEps.append(secondEp)
-        ep.relatedEps.append(thirdEp)
-
-    return ep
-
-def test_name(pattern, multi=None, abd=False):
-
-    ep = _generate_sample_ep(multi, abd)
-
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import datetime
+import os
+
+import sickbeard
+from sickbeard import encodingKludge as ek
+from sickbeard import tv
+from sickbeard import common
+from sickbeard import logger
+from sickbeard.name_parser.parser import NameParser, InvalidNameException
+
+from common import Quality, DOWNLOADED
+
+name_presets = ('%SN - %Sx%0E - %EN',
+                '%S.N.S%0SE%0E.%E.N',
+                '%Sx%0E - %EN',
+                'S%0SE%0E - %EN',
+                'Season %0S/%S.N.S%0SE%0E.%Q.N-%RG'
+                )
+
+name_abd_presets = ('%SN - %A-D - %EN',
+                    '%S.N.%A.D.%E.N.%Q.N',
+                    '%Y/%0M/%S.N.%A.D.%E.N-%RG'
+                    )
+
+class TVShow():
+    def __init__(self):
+        self.name = "Show Name"
+        self.genre = "Comedy"
+        self.air_by_date = 0
+
+class TVEpisode(tv.TVEpisode):
+    def __init__(self, season, episode, name):
+        self.relatedEps = []
+        self._name = name
+        self._season = season
+        self._episode = episode
+        self._airdate = datetime.date(2010, 3, 9)
+        self.show = TVShow()
+        self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV)
+        self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
+
+def check_force_season_folders(pattern=None, multi=None):
+    """
+    Checks whether the name can still be parsed when the folders are stripped off, to determine whether season
+    folders need to be forced on.
+    
+    Returns true if season folders need to be forced on or false otherwise.
+    """
+    if pattern == None:
+        pattern = sickbeard.NAMING_PATTERN
+    
+    valid = not validate_name(pattern, None, file_only=True) 
+    
+    if multi != None:
+        valid = valid or not validate_name(pattern, multi, file_only=True)
+
+    return valid
+
+def check_valid_naming(pattern=None, multi=None):
+    """
+    Checks if the name can be parsed back to its original form for both single and multi episodes.
+    
+    Returns true if the naming is valid, false if not.
+    """
+    if pattern == None:
+        pattern = sickbeard.NAMING_PATTERN
+        
+    logger.log(u"Checking whether the pattern "+pattern+" is valid for a single episode", logger.DEBUG)
+    valid = validate_name(pattern, None)
+
+    if multi != None:
+        logger.log(u"Checking whether the pattern "+pattern+" is valid for a multi episode", logger.DEBUG)
+        valid = valid and validate_name(pattern, multi)
+
+    return valid
+
+def check_valid_abd_naming(pattern=None):
+    """
+    Checks if the name can be parsed back to its original form for an air-by-date format.
+    
+    Returns true if the naming is valid, false if not.
+    """
+    if pattern == None:
+        pattern = sickbeard.NAMING_PATTERN
+        
+    logger.log(u"Checking whether the pattern "+pattern+" is valid for an air-by-date episode", logger.DEBUG)
+    valid = validate_name(pattern, abd=True)
+
+    return valid
+
+
+def validate_name(pattern, multi=None, file_only=False, abd=False):
+    ep = _generate_sample_ep(multi, abd)
+
+    parser = NameParser(True)
+
+    new_name = ep.formatted_filename(pattern, multi) + '.ext'
+    new_path = ep.formatted_dir(pattern, multi)
+    if not file_only:
+        new_name = ek.ek(os.path.join, new_path, new_name)
+
+    if not new_name:
+        logger.log(u"Unable to create a name out of "+pattern, logger.DEBUG)
+        return False
+
+    logger.log(u"Trying to parse "+new_name, logger.DEBUG)
+
+    try:
+        result = parser.parse(new_name)
+    except InvalidNameException:
+        logger.log(u"Unable to parse "+new_name+", not valid", logger.DEBUG)
+        return False
+    
+    logger.log("The name "+new_name + " parsed into " + str(result), logger.DEBUG)
+
+    if abd:
+        if result.air_date != ep.airdate:
+            logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
+            return False
+    else:
+        if result.season_number != ep.season:
+            logger.log(u"Season incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
+            return False
+        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
+            logger.log(u"Episode incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
+            return False
+
+    return True
+
+def _generate_sample_ep(multi=None, abd=False):
+    # make a fake episode object
+    ep = TVEpisode(2,3,"Ep Name")
+    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+    ep._airdate = datetime.date(2011, 3, 9)
+    if abd:
+        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
+    else:
+        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
+
+    if multi != None:
+        ep._name = "Ep Name (1)"
+        ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'
+
+        secondEp = TVEpisode(2,4,"Ep Name (2)")
+        secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+        secondEp._release_name = ep._release_name
+
+        thirdEp = TVEpisode(2,5,"Ep Name (3)")
+        thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+        thirdEp._release_name = ep._release_name
+
+        ep.relatedEps.append(secondEp)
+        ep.relatedEps.append(thirdEp)
+
+    return ep
+
+def test_name(pattern, multi=None, abd=False):
+
+    ep = _generate_sample_ep(multi, abd)
+
     return {'name': ep.formatted_filename(pattern, multi), 'dir': ep.formatted_dir(pattern, multi)}
\ No newline at end of file
diff --git a/sickbeard/notifiers/nma.py b/sickbeard/notifiers/nma.py
index 447a7968f..1c67990f9 100644
--- a/sickbeard/notifiers/nma.py
+++ b/sickbeard/notifiers/nma.py
@@ -1,56 +1,56 @@
-import sickbeard
-
-from sickbeard import logger, common
-from lib.pynma import pynma
-
-class NMA_Notifier:
-    
-    def test_notify(self, nma_api, nma_priority):
-        return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard", force=True)
-
-    def notify_snatch(self, ep_name):
-        if sickbeard.NMA_NOTIFY_ONSNATCH:
-            self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
-
-    def notify_download(self, ep_name):
-        if sickbeard.NMA_NOTIFY_ONDOWNLOAD:
-            self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
-
-    def notify_subtitle_download(self, ep_name, lang):
-        if sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD:
-            self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
-        
-    def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False):
-    
-        title = 'Sick-Beard'
-    
-        if not sickbeard.USE_NMA and not force:
-            return False
-        
-        if nma_api == None:
-            nma_api = sickbeard.NMA_API
-            
-        if nma_priority == None:
-            nma_priority = sickbeard.NMA_PRIORITY
-    
-        logger.log(u"NMA title: " + title, logger.DEBUG)
-        logger.log(u"NMA event: " + event, logger.DEBUG)
-        logger.log(u"NMA message: " + message, logger.DEBUG)
-        
-        batch = False
-        
-        p = pynma.PyNMA()
-        keys = nma_api.split(',')
-        p.addkey(keys)
-        
-        if len(keys) > 1: batch = True
-        
-        response = p.push(title, event, message, priority=nma_priority, batch_mode=batch)
-               
-        if not response[nma_api][u'code'] == u'200':
-            logger.log(u'Could not send notification to NotifyMyAndroid', logger.ERROR)
-            return False
-        else:
-            return True
-                        
+import sickbeard
+
+from sickbeard import logger, common
+from lib.pynma import pynma
+
+class NMA_Notifier:
+    
+    def test_notify(self, nma_api, nma_priority):
+        return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard", force=True)
+
+    def notify_snatch(self, ep_name):
+        if sickbeard.NMA_NOTIFY_ONSNATCH:
+            self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
+
+    def notify_download(self, ep_name):
+        if sickbeard.NMA_NOTIFY_ONDOWNLOAD:
+            self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
+
+    def notify_subtitle_download(self, ep_name, lang):
+        if sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD:
+            self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
+        
+    def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False):
+    
+        title = 'Sick-Beard'
+    
+        if not sickbeard.USE_NMA and not force:
+            return False
+        
+        if nma_api == None:
+            nma_api = sickbeard.NMA_API
+            
+        if nma_priority == None:
+            nma_priority = sickbeard.NMA_PRIORITY
+    
+        logger.log(u"NMA title: " + title, logger.DEBUG)
+        logger.log(u"NMA event: " + event, logger.DEBUG)
+        logger.log(u"NMA message: " + message, logger.DEBUG)
+        
+        batch = False
+        
+        p = pynma.PyNMA()
+        keys = nma_api.split(',')
+        p.addkey(keys)
+        
+        if len(keys) > 1: batch = True
+        
+        response = p.push(title, event, message, priority=nma_priority, batch_mode=batch)
+               
+        if not response[nma_api][u'code'] == u'200':
+            logger.log(u'Could not send notification to NotifyMyAndroid', logger.ERROR)
+            return False
+        else:
+            return True
+                        
 notifier = NMA_Notifier
\ No newline at end of file
diff --git a/sickbeard/notifiers/nmj.py b/sickbeard/notifiers/nmj.py
index 2a2d8dc2c..ef0ce1426 100644
--- a/sickbeard/notifiers/nmj.py
+++ b/sickbeard/notifiers/nmj.py
@@ -1,183 +1,183 @@
-# Author: Nico Berlee http://nico.berlee.nl/
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import urllib, urllib2
-import sickbeard
-import telnetlib
-import re
-
-from sickbeard import logger
-
-try:
-    import xml.etree.cElementTree as etree
-except ImportError:
-    import xml.etree.ElementTree as etree
-
-
-class NMJNotifier:
-    def notify_settings(self, host):
-        """
-        Retrieves the settings from a NMJ/Popcorn hour
-        
-        host: The hostname/IP of the Popcorn Hour server
-        
-        Returns: True if the settings were retrieved successfully, False otherwise
-        """
-        
-        # establish a terminal session to the PC
-        terminal = False
-        try:
-            terminal = telnetlib.Telnet(host)
-        except Exception:
-            logger.log(u"Warning: unable to get a telnet session to %s" % (host), logger.ERROR)
-            return False
-
-        # tell the terminal to output the necessary info to the screen so we can search it later
-        logger.log(u"Connected to %s via telnet" % (host), logger.DEBUG)
-        terminal.read_until("sh-3.00# ")
-        terminal.write("cat /tmp/source\n")
-        terminal.write("cat /tmp/netshare\n")
-        terminal.write("exit\n")
-        tnoutput = terminal.read_all()
-
-        database = ""
-        device = ""
-        match = re.search(r"(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)", tnoutput)
-
-        # if we found the database in the terminal output then save that database to the config
-        if match:
-            database = match.group(1)
-            device = match.group(2)
-            logger.log(u"Found NMJ database %s on device %s" % (database, device), logger.DEBUG)
-            sickbeard.NMJ_DATABASE = database
-        else:
-            logger.log(u"Could not get current NMJ database on %s, NMJ is probably not running!" % (host), logger.ERROR)
-            return False
-        
-        # if the device is a remote host then try to parse the mounting URL and save it to the config
-        if device.startswith("NETWORK_SHARE/"):
-            match = re.search(".*(?=\r\n?%s)" % (re.escape(device[14:])), tnoutput)
-
-            if match:
-                mount = match.group().replace("127.0.0.1", host)
-                logger.log(u"Found mounting url on the Popcorn Hour in configuration: %s" % (mount), logger.DEBUG)
-                sickbeard.NMJ_MOUNT = mount
-            else:
-                logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url", logger.DEBUG)
-                return False
-
-        return True
-    
-    def notify_snatch(self, ep_name):
-        return False
-        #Not implemented: Start the scanner when snatched does not make any sense
-
-    def notify_download(self, ep_name):
-        if sickbeard.USE_NMJ:
-            self._notifyNMJ()
-
-    def notify_subtitle_download(self, ep_name, lang):
-        if sickbeard.USE_NMJ:
-            self._notifyNMJ()
-    
-    def test_notify(self, host, database, mount):
-        return self._sendNMJ(host, database, mount)
-
-    def _sendNMJ(self, host, database, mount=None):
-        """
-        Sends a NMJ update command to the specified machine
-        
-        host: The hostname/IP to send the request to (no port)
-        database: The database to send the requst to
-        mount: The mount URL to use (optional)
-        
-        Returns: True if the request succeeded, False otherwise
-        """
-        
-        # if a mount URL is provided then attempt to open a handle to that URL
-        if mount:
-            try:
-                req = urllib2.Request(mount)
-                logger.log(u"Try to mount network drive via url: %s" % (mount), logger.DEBUG)
-                handle = urllib2.urlopen(req)
-            except IOError, e:
-                logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e))
-                return False
-
-        # build up the request URL and parameters
-        UPDATE_URL = "http://%(host)s:8008/metadata_database?%(params)s"
-        params = {
-            "arg0": "scanner_start",
-            "arg1": database,
-            "arg2": "background",
-            "arg3": ""}
-        params = urllib.urlencode(params)
-        updateUrl = UPDATE_URL % {"host": host, "params": params}
-
-        # send the request to the server
-        try:
-            req = urllib2.Request(updateUrl)
-            logger.log(u"Sending NMJ scan update command via url: %s" % (updateUrl), logger.DEBUG)
-            handle = urllib2.urlopen(req)
-            response = handle.read()
-        except IOError, e:
-            logger.log(u"Warning: Couldn't contact Popcorn Hour on host %s: %s" % (host, e))
-            return False
-
-        # try to parse the resulting XML
-        try:
-            et = etree.fromstring(response)
-            result = et.findtext("returnValue")
-        except SyntaxError, e:
-            logger.log(u"Unable to parse XML returned from the Popcorn Hour: %s" % (e), logger.ERROR)
-            return False
-        
-        # if the result was a number then consider that an error
-        if int(result) > 0:
-            logger.log(u"Popcorn Hour returned an errorcode: %s" % (result))
-            return False
-        else:
-            logger.log(u"NMJ started background scan")
-            return True
-
-    def _notifyNMJ(self, host=None, database=None, mount=None, force=False):
-        """
-        Sends a NMJ update command based on the SB config settings
-        
-        host: The host to send the command to (optional, defaults to the host in the config)
-        database: The database to use (optional, defaults to the database in the config)
-        mount: The mount URL (optional, defaults to the mount URL in the config)
-        force: If True then the notification will be sent even if NMJ is disabled in the config
-        """
-        if not sickbeard.USE_NMJ and not force:
-            logger.log("Notification for NMJ scan update not enabled, skipping this notification", logger.DEBUG)
-            return False
-
-        # fill in omitted parameters
-        if not host:
-            host = sickbeard.NMJ_HOST
-        if not database:
-            database = sickbeard.NMJ_DATABASE
-        if not mount:
-            mount = sickbeard.NMJ_MOUNT
-
-        logger.log(u"Sending scan command for NMJ ", logger.DEBUG)
-
-        return self._sendNMJ(host, database, mount)
-
-notifier = NMJNotifier
+# Author: Nico Berlee http://nico.berlee.nl/
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import urllib, urllib2
+import sickbeard
+import telnetlib
+import re
+
+from sickbeard import logger
+
+try:
+    import xml.etree.cElementTree as etree
+except ImportError:
+    import xml.etree.ElementTree as etree
+
+
+class NMJNotifier:
+    def notify_settings(self, host):
+        """
+        Retrieves the settings from an NMJ/Popcorn Hour device
+        
+        host: The hostname/IP of the Popcorn Hour server
+        
+        Returns: True if the settings were retrieved successfully, False otherwise
+        """
+        
+        # establish a telnet session to the Popcorn Hour
+        terminal = False
+        try:
+            terminal = telnetlib.Telnet(host)
+        except Exception:
+            logger.log(u"Warning: unable to get a telnet session to %s" % (host), logger.ERROR)
+            return False
+
+        # tell the terminal to output the necessary info to the screen so we can search it later
+        logger.log(u"Connected to %s via telnet" % (host), logger.DEBUG)
+        terminal.read_until("sh-3.00# ")
+        terminal.write("cat /tmp/source\n")
+        terminal.write("cat /tmp/netshare\n")
+        terminal.write("exit\n")
+        tnoutput = terminal.read_all()
+
+        database = ""
+        device = ""
+        match = re.search(r"(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)", tnoutput)
+
+        # if we found the database in the terminal output then save that database to the config
+        if match:
+            database = match.group(1)
+            device = match.group(2)
+            logger.log(u"Found NMJ database %s on device %s" % (database, device), logger.DEBUG)
+            sickbeard.NMJ_DATABASE = database
+        else:
+            logger.log(u"Could not get current NMJ database on %s, NMJ is probably not running!" % (host), logger.ERROR)
+            return False
+        
+        # if the device is a remote host then try to parse the mounting URL and save it to the config
+        if device.startswith("NETWORK_SHARE/"):
+            match = re.search(".*(?=\r\n?%s)" % (re.escape(device[14:])), tnoutput)
+
+            if match:
+                mount = match.group().replace("127.0.0.1", host)
+                logger.log(u"Found mounting url on the Popcorn Hour in configuration: %s" % (mount), logger.DEBUG)
+                sickbeard.NMJ_MOUNT = mount
+            else:
+                logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url", logger.DEBUG)
+                return False
+
+        return True
+    
+    def notify_snatch(self, ep_name):
+        return False
+        # Not implemented: starting the scanner on snatch does not make any sense
+
+    def notify_download(self, ep_name):
+        if sickbeard.USE_NMJ:
+            self._notifyNMJ()
+
+    def notify_subtitle_download(self, ep_name, lang):
+        if sickbeard.USE_NMJ:
+            self._notifyNMJ()
+    
+    def test_notify(self, host, database, mount):
+        return self._sendNMJ(host, database, mount)
+
+    def _sendNMJ(self, host, database, mount=None):
+        """
+        Sends an NMJ update command to the specified machine
+        
+        host: The hostname/IP to send the request to (no port)
+        database: The database to send the request to
+        mount: The mount URL to use (optional)
+        
+        Returns: True if the request succeeded, False otherwise
+        """
+        
+        # if a mount URL is provided then attempt to open a handle to that URL
+        if mount:
+            try:
+                req = urllib2.Request(mount)
+                logger.log(u"Try to mount network drive via url: %s" % (mount), logger.DEBUG)
+                handle = urllib2.urlopen(req)
+            except IOError, e:
+                logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e))
+                return False
+
+        # build up the request URL and parameters
+        UPDATE_URL = "http://%(host)s:8008/metadata_database?%(params)s"
+        params = {
+            "arg0": "scanner_start",
+            "arg1": database,
+            "arg2": "background",
+            "arg3": ""}
+        params = urllib.urlencode(params)
+        updateUrl = UPDATE_URL % {"host": host, "params": params}
+
+        # send the request to the server
+        try:
+            req = urllib2.Request(updateUrl)
+            logger.log(u"Sending NMJ scan update command via url: %s" % (updateUrl), logger.DEBUG)
+            handle = urllib2.urlopen(req)
+            response = handle.read()
+        except IOError, e:
+            logger.log(u"Warning: Couldn't contact Popcorn Hour on host %s: %s" % (host, e))
+            return False
+
+        # try to parse the resulting XML
+        try:
+            et = etree.fromstring(response)
+            result = et.findtext("returnValue")
+        except SyntaxError, e:
+            logger.log(u"Unable to parse XML returned from the Popcorn Hour: %s" % (e), logger.ERROR)
+            return False
+        
+        # if the result was a number then consider that an error
+        if int(result) > 0:
+            logger.log(u"Popcorn Hour returned an errorcode: %s" % (result))
+            return False
+        else:
+            logger.log(u"NMJ started background scan")
+            return True
+
+    def _notifyNMJ(self, host=None, database=None, mount=None, force=False):
+        """
+        Sends a NMJ update command based on the SB config settings
+        
+        host: The host to send the command to (optional, defaults to the host in the config)
+        database: The database to use (optional, defaults to the database in the config)
+        mount: The mount URL (optional, defaults to the mount URL in the config)
+        force: If True then the notification will be sent even if NMJ is disabled in the config
+        """
+        if not sickbeard.USE_NMJ and not force:
+            logger.log("Notification for NMJ scan update not enabled, skipping this notification", logger.DEBUG)
+            return False
+
+        # fill in omitted parameters
+        if not host:
+            host = sickbeard.NMJ_HOST
+        if not database:
+            database = sickbeard.NMJ_DATABASE
+        if not mount:
+            mount = sickbeard.NMJ_MOUNT
+
+        logger.log(u"Sending scan command for NMJ ", logger.DEBUG)
+
+        return self._sendNMJ(host, database, mount)
+
+notifier = NMJNotifier
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index 1e4d2f869..ccf4957ec 100755
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -1,129 +1,129 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-__all__ = ['ezrss',
-           'tvtorrents',
-           'torrentleech',
-           'nzbsrus',
-           'womble',
-           'btn',
-           'nzbx',
-           'omgwtfnzbs',
-           'binnewz',
-           't411',
-           'cpasbien',
-           'piratebay',
-           'gks',
-           'kat',
-           ]
-
-import sickbeard
-
-from os import sys
-
-
-def sortedProviderList():
-
-    initialList = sickbeard.providerList + sickbeard.newznabProviderList
-    providerDict = dict(zip([x.getID() for x in initialList], initialList))
-
-    newList = []
-
-    # add all modules in the priority list, in order
-    for curModule in sickbeard.PROVIDER_ORDER:
-        if curModule in providerDict:
-            newList.append(providerDict[curModule])
-
-    # add any modules that are missing from that list
-    for curModule in providerDict:
-        if providerDict[curModule] not in newList:
-            newList.append(providerDict[curModule])
-
-    return newList
-
-
-def makeProviderList():
-
-    return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
-
-
-def getNewznabProviderList(data):
-
-    defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
-    providerList = filter(lambda x: x, [makeNewznabProvider(x) for x in data.split('!!!')])
-
-    providerDict = dict(zip([x.name for x in providerList], providerList))
-
-    for curDefault in defaultList:
-        if not curDefault:
-            continue
-
-        # a 0 in the key spot indicates that no key is needed, so set this on the object
-        if curDefault.key == '0':
-            curDefault.key = ''
-            curDefault.needs_auth = False
-
-        if curDefault.name not in providerDict:
-            curDefault.default = True
-            providerList.append(curDefault)
-        else:
-            providerDict[curDefault.name].default = True
-            providerDict[curDefault.name].name = curDefault.name
-            providerDict[curDefault.name].url = curDefault.url
-            providerDict[curDefault.name].needs_auth = curDefault.needs_auth
-
-    return filter(lambda x: x, providerList)
-
-
-def makeNewznabProvider(configString):
-
-    if not configString:
-        return None
-
-    name, url, key, enabled = configString.split('|')
-
-    newznab = sys.modules['sickbeard.providers.newznab']
-
-    newProvider = newznab.NewznabProvider(name, url)
-    newProvider.key = key
-    newProvider.enabled = enabled == '1'
-
-    return newProvider
-
-
-def getDefaultNewznabProviders():
-    return 'Sick Beard Index|http://lolo.sickbeard.com/|0|0!!!NZBs.org|http://nzbs.org/||0!!!Usenet-Crawler|http://www.usenet-crawler.com/||0'
-
-
-def getProviderModule(name):
-    name = name.lower()
-    prefix = "sickbeard.providers."
-    if name in __all__ and prefix + name in sys.modules:
-        return sys.modules[prefix + name]
-    else:
-        raise Exception("Can't find " + prefix + name + " in " + repr(sys.modules))
-
-
-def getProviderClass(providerID):
-
-    providerMatch = [x for x in sickbeard.providerList + sickbeard.newznabProviderList if x.getID() == providerID]
-
-    if len(providerMatch) != 1:
-        return None
-    else:
-        return providerMatch[0]
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+__all__ = ['ezrss',
+           'tvtorrents',
+           'torrentleech',
+           'nzbsrus',
+           'womble',
+           'btn',
+           'nzbx',
+           'omgwtfnzbs',
+           'binnewz',
+           't411',
+           'cpasbien',
+           'piratebay',
+           'gks',
+           'kat',
+           ]
+
+import sickbeard
+
+import sys
+
+
+def sortedProviderList():
+
+    initialList = sickbeard.providerList + sickbeard.newznabProviderList
+    providerDict = dict(zip([x.getID() for x in initialList], initialList))
+
+    newList = []
+
+    # add all modules in the priority list, in order
+    for curModule in sickbeard.PROVIDER_ORDER:
+        if curModule in providerDict:
+            newList.append(providerDict[curModule])
+
+    # add any modules that are missing from that list
+    for curModule in providerDict:
+        if providerDict[curModule] not in newList:
+            newList.append(providerDict[curModule])
+
+    return newList
+
+
+def makeProviderList():
+
+    return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
+
+
+def getNewznabProviderList(data):
+
+    defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
+    providerList = filter(lambda x: x, [makeNewznabProvider(x) for x in data.split('!!!')])
+
+    providerDict = dict(zip([x.name for x in providerList], providerList))
+
+    for curDefault in defaultList:
+        if not curDefault:
+            continue
+
+        # a 0 in the key spot indicates that no key is needed, so set this on the object
+        if curDefault.key == '0':
+            curDefault.key = ''
+            curDefault.needs_auth = False
+
+        if curDefault.name not in providerDict:
+            curDefault.default = True
+            providerList.append(curDefault)
+        else:
+            providerDict[curDefault.name].default = True
+            providerDict[curDefault.name].name = curDefault.name
+            providerDict[curDefault.name].url = curDefault.url
+            providerDict[curDefault.name].needs_auth = curDefault.needs_auth
+
+    return filter(lambda x: x, providerList)
+
+
+def makeNewznabProvider(configString):
+
+    if not configString:
+        return None
+
+    name, url, key, enabled = configString.split('|')
+
+    newznab = sys.modules['sickbeard.providers.newznab']
+
+    newProvider = newznab.NewznabProvider(name, url)
+    newProvider.key = key
+    newProvider.enabled = enabled == '1'
+
+    return newProvider
+
+
+def getDefaultNewznabProviders():
+    return 'Sick Beard Index|http://lolo.sickbeard.com/|0|0!!!NZBs.org|http://nzbs.org/||0!!!Usenet-Crawler|http://www.usenet-crawler.com/||0'
+
+
+def getProviderModule(name):
+    name = name.lower()
+    prefix = "sickbeard.providers."
+    if name in __all__ and prefix + name in sys.modules:
+        return sys.modules[prefix + name]
+    else:
+        raise Exception("Can't find " + prefix + name + " in " + repr(sys.modules))
+
+
+def getProviderClass(providerID):
+
+    providerMatch = [x for x in sickbeard.providerList + sickbeard.newznabProviderList if x.getID() == providerID]
+
+    if len(providerMatch) != 1:
+        return None
+    else:
+        return providerMatch[0]
diff --git a/sickbeard/providers/binnewz/nzbdownloader.py b/sickbeard/providers/binnewz/nzbdownloader.py
index 21e5e45de..d1e602b74 100644
--- a/sickbeard/providers/binnewz/nzbdownloader.py
+++ b/sickbeard/providers/binnewz/nzbdownloader.py
@@ -1,96 +1,96 @@
-# Author: Guillaume Serre <guillaume.serre@gmail.com>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import urllib2
-from StringIO import StringIO
-import gzip
-import cookielib
-import time
-
-class NZBDownloader(object):
-
-    def __init__( self ):
-        self.cj = cookielib.CookieJar()
-        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))        
-        self.lastRequestTime = None
-        
-    def waitBeforeNextRequest(self):
-        if self.lastRequestTime and self.lastRequestTime > ( time.mktime(time.localtime()) - 3):
-            time.sleep( 3 )
-        self.lastRequestTime = time.gmtime()
-        
-    def open(self, request):
-        self.waitBeforeNextRequest()
-        return self.opener.open(request)
-        
-class NZBSearchResult(object):
-    
-    def __init__(self, downloader, sizeInMegs, refererURL):
-        self.downloader = downloader
-        self.refererURL = refererURL
-        self.sizeInMegs = sizeInMegs
-        
-    def readRequest(self, request):
-        request.add_header('Accept-encoding', 'gzip')
-        request.add_header('Referer', self.refererURL)
-        request.add_header('Accept-Encoding', 'gzip')
-        request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.57 Safari/537.17')
-
-        response = self.downloader.open(request)
-        if response.info().get('Content-Encoding') == 'gzip':
-            buf = StringIO( response.read())
-            f = gzip.GzipFile(fileobj=buf)
-            return f.read()
-        else:
-            return response.read()      
-        
-    def getNZB(self):
-        pass          
-        
-class NZBGetURLSearchResult( NZBSearchResult ):
-
-    def __init__(self, downloader, nzburl, sizeInMegs, refererURL):
-        NZBSearchResult.__init__(self, downloader, sizeInMegs, refererURL)
-        self.nzburl = nzburl
-        
-    def getNZB(self):
-        request = urllib2.Request( self.nzburl )
-        self.nzbdata = NZBSearchResult.readRequest( self, request )
-        return self.nzbdata
-
-class NZBPostURLSearchResult( NZBSearchResult ):
-
-    def __init__(self, downloader, nzburl, postData, sizeInMegs, refererURL):
-        NZBSearchResult.__init__(self, downloader, sizeInMegs, refererURL)
-        self.nzburl = nzburl
-        self.postData = postData
-        
-    def getNZB(self):
-        request = urllib2.Request( self.nzburl, self.postData )
-        self.nzbdata = NZBSearchResult.readRequest( self, request )
-        return self.nzbdata
-
-class NZBDataSearchResult( NZBSearchResult ):
-
-    def __init__(self, nzbdata, sizeInMegs, refererURL):
-        NZBSearchResult.__init__(self, None, refererURL)
-        self.nzbdata = nzbdata
-
-    def getNZB(self):
-        return self.nzbdata
+# Author: Guillaume Serre <guillaume.serre@gmail.com>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import urllib2
+from StringIO import StringIO
+import gzip
+import cookielib
+import time
+
+class NZBDownloader(object):
+
+    def __init__( self ):
+        self.cj = cookielib.CookieJar()
+        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))        
+        self.lastRequestTime = None
+        
+    def waitBeforeNextRequest(self):
+        if self.lastRequestTime and self.lastRequestTime > ( time.time() - 3):
+            time.sleep( 3 )
+        self.lastRequestTime = time.time()
+        
+    def open(self, request):
+        self.waitBeforeNextRequest()
+        return self.opener.open(request)
+        
+class NZBSearchResult(object):
+    
+    def __init__(self, downloader, sizeInMegs, refererURL):
+        self.downloader = downloader
+        self.refererURL = refererURL
+        self.sizeInMegs = sizeInMegs
+        
+    def readRequest(self, request):
+        request.add_header('Accept-Encoding', 'gzip')
+        request.add_header('Referer', self.refererURL)
+        request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.57 Safari/537.17')
+
+        response = self.downloader.open(request)
+        if response.info().get('Content-Encoding') == 'gzip':
+            buf = StringIO( response.read())
+            f = gzip.GzipFile(fileobj=buf)
+            return f.read()
+        else:
+            return response.read()      
+        
+    def getNZB(self):
+        pass          
+        
+class NZBGetURLSearchResult( NZBSearchResult ):
+
+    def __init__(self, downloader, nzburl, sizeInMegs, refererURL):
+        NZBSearchResult.__init__(self, downloader, sizeInMegs, refererURL)
+        self.nzburl = nzburl
+        
+    def getNZB(self):
+        request = urllib2.Request( self.nzburl )
+        self.nzbdata = NZBSearchResult.readRequest( self, request )
+        return self.nzbdata
+
+class NZBPostURLSearchResult( NZBSearchResult ):
+
+    def __init__(self, downloader, nzburl, postData, sizeInMegs, refererURL):
+        NZBSearchResult.__init__(self, downloader, sizeInMegs, refererURL)
+        self.nzburl = nzburl
+        self.postData = postData
+        
+    def getNZB(self):
+        request = urllib2.Request( self.nzburl, self.postData )
+        self.nzbdata = NZBSearchResult.readRequest( self, request )
+        return self.nzbdata
+
+class NZBDataSearchResult( NZBSearchResult ):
+
+    def __init__(self, nzbdata, sizeInMegs, refererURL):
+        NZBSearchResult.__init__(self, None, sizeInMegs, refererURL)
+        self.nzbdata = nzbdata
+
+    def getNZB(self):
+        return self.nzbdata
         
\ No newline at end of file
diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py
index 0794f087e..956bd8431 100644
--- a/sickbeard/providers/cpasbien.py
+++ b/sickbeard/providers/cpasbien.py
@@ -1,154 +1,154 @@
-# -*- coding: latin-1 -*-
-# Author: Guillaume Serre <guillaume.serre@gmail.com>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-from bs4 import BeautifulSoup
-from sickbeard import logger, classes, show_name_helpers
-from sickbeard.common import Quality
-from sickbeard.exceptions import ex
-import cookielib
-import generic
-import sickbeard
-import urllib
-import urllib2
-
-
-class CpasbienProvider(generic.TorrentProvider):
-
-    def __init__(self):
-        
-        generic.TorrentProvider.__init__(self, "Cpasbien")
-
-        self.supportsBacklog = True
-        
-        self.cj = cookielib.CookieJar()
-        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
-        
-        self.url = "http://www.cpasbien.me"
-        
-        
-    def isEnabled(self):
-        return sickbeard.Cpasbien
-
-    def _get_season_search_strings(self, show, season):
-
-        showNames = show_name_helpers.allPossibleShowNames(show)
-        result = []
-        for showName in showNames:
-            result.append( showName + " S%02d" % season )
-        return result
-
-    def _get_episode_search_strings(self, ep_obj):
-
-        strings = []
-
-        showNames = show_name_helpers.allPossibleShowNames(ep_obj.show)
-        for showName in showNames:
-            strings.append("%s S%02dE%02d" % ( showName, ep_obj.season, ep_obj.episode) )
-            strings.append("%s %dx%d" % ( showName, ep_obj.season, ep_obj.episode ) )
-
-        return strings
-    
-    def _get_title_and_url(self, item):
-        return (item.title, item.url)
-    
-    def getQuality(self, item):
-        return item.getQuality()
-        
-    def _doSearch(self, searchString, show=None, season=None):
-
-        results = []
-        searchUrl = self.url + '/recherche/'
-
-        data = urllib.urlencode({'champ_recherche': searchString})
-
-        try:
-            soup = BeautifulSoup( urllib2.urlopen(searchUrl, data) )
-        except Exception, e:
-            logger.log(u"Error trying to load cpasbien response: "+ex(e), logger.ERROR)
-            return []
-
-        rows = soup.findAll(attrs = {'class' : ["color0", "color1"]})
-
-        for row in rows:
-            link = row.find("a", title=True)
-            title = str(link.text).lower().strip()  
-            pageURL = link['href']
-
-            if "vostfr" in title and (not show.subtitles) and show.audio_lang == "fr":
-                continue
-
-            torrentPage = self.opener.open( pageURL )
-            torrentSoup = BeautifulSoup( torrentPage )
-
-            downloadTorrentLink = torrentSoup.find("a", title=u"Cliquer ici pour t�l�charger ce torrent")
-            if downloadTorrentLink:
-                
-                downloadURL = downloadTorrentLink['href']
-
-                if "720p" in title:
-                    if "bluray" in title:
-                        quality = Quality.HDBLURAY
-                    elif "web-dl" in title.lower() or "web.dl" in title.lower():
-                        quality = Quality.HDWEBDL
-                    else:
-                        quality = Quality.HDTV
-                elif "1080p" in title:
-                    quality = Quality.FULLHDBLURAY
-                elif "hdtv" in title:
-                    if "720p" in title:
-                        quality = Quality.HDTV
-                    elif "1080p" in title:
-                        quality = Quality.FULLHDTV
-                    else:
-                        quality = Quality.SDTV
-                else:
-                    quality = Quality.SDTV
-
-                if show:
-                    results.append( CpasbienSearchResult( self.opener, title, downloadURL, quality, str(show.audio_lang) ) )
-                else:
-                    results.append( CpasbienSearchResult( self.opener, title, downloadURL, quality ) )
-
-        return results
-    
-    def getResult(self, episodes):
-        """
-        Returns a result of the correct type for this provider
-        """
-        result = classes.TorrentDataSearchResult(episodes)
-        result.provider = self
-
-        return result    
-    
-class CpasbienSearchResult:
-    
-    def __init__(self, opener, title, url, quality, audio_langs=None):
-        self.opener = opener
-        self.title = title
-        self.url = url
-        self.quality = quality
-        self.audio_langs=audio_langs
-        
-    def getNZB(self):
-        return self.opener.open( self.url , 'wb').read()
-
-    def getQuality(self):
-        return self.quality
-
+# -*- coding: latin-1 -*-
+# Author: Guillaume Serre <guillaume.serre@gmail.com>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+from bs4 import BeautifulSoup
+from sickbeard import logger, classes, show_name_helpers
+from sickbeard.common import Quality
+from sickbeard.exceptions import ex
+import cookielib
+import generic
+import sickbeard
+import urllib
+import urllib2
+
+
+class CpasbienProvider(generic.TorrentProvider):
+
+    def __init__(self):
+        
+        generic.TorrentProvider.__init__(self, "Cpasbien")
+
+        self.supportsBacklog = True
+        
+        self.cj = cookielib.CookieJar()
+        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
+        
+        self.url = "http://www.cpasbien.me"
+        
+        
+    def isEnabled(self):
+        return sickbeard.Cpasbien
+
+    def _get_season_search_strings(self, show, season):
+
+        showNames = show_name_helpers.allPossibleShowNames(show)
+        result = []
+        for showName in showNames:
+            result.append( showName + " S%02d" % season )
+        return result
+
+    def _get_episode_search_strings(self, ep_obj):
+
+        strings = []
+
+        showNames = show_name_helpers.allPossibleShowNames(ep_obj.show)
+        for showName in showNames:
+            strings.append("%s S%02dE%02d" % ( showName, ep_obj.season, ep_obj.episode) )
+            strings.append("%s %dx%d" % ( showName, ep_obj.season, ep_obj.episode ) )
+
+        return strings
+    
+    def _get_title_and_url(self, item):
+        return (item.title, item.url)
+    
+    def getQuality(self, item):
+        return item.getQuality()
+        
+    def _doSearch(self, searchString, show=None, season=None):
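+        # POST the search term to the Cpasbien search form and parse the result rows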
+
+        results = []
+        searchUrl = self.url + '/recherche/'
+
+        data = urllib.urlencode({'champ_recherche': searchString})
+
+        try:
+            soup = BeautifulSoup( urllib2.urlopen(searchUrl, data) )
+        except Exception, e:
+            logger.log(u"Error trying to load cpasbien response: "+ex(e), logger.ERROR)
+            return []
+
+        rows = soup.findAll(attrs = {'class' : ["color0", "color1"]})
+
+        for row in rows:
+            link = row.find("a", title=True)
+            title = str(link.text).lower().strip()  
+            pageURL = link['href']
+
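+            # skip VOSTFR releases when the show uses French audio and subtitles are disabled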
+            if "vostfr" in title and (not show.subtitles) and show.audio_lang == "fr":
+                continue
+
+            torrentPage = self.opener.open( pageURL )
+            torrentSoup = BeautifulSoup( torrentPage )
+
+            downloadTorrentLink = torrentSoup.find("a", title=u"Cliquer ici pour télécharger ce torrent")
+            if downloadTorrentLink:
+                
+                downloadURL = downloadTorrentLink['href']
+
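+                # infer the release quality from keywords in the title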
+                if "720p" in title:
+                    if "bluray" in title:
+                        quality = Quality.HDBLURAY
+                    elif "web-dl" in title.lower() or "web.dl" in title.lower():
+                        quality = Quality.HDWEBDL
+                    else:
+                        quality = Quality.HDTV
+                elif "1080p" in title:
+                    quality = Quality.FULLHDBLURAY
+                elif "hdtv" in title:
+                    if "720p" in title:
+                        quality = Quality.HDTV
+                    elif "1080p" in title:
+                        quality = Quality.FULLHDTV
+                    else:
+                        quality = Quality.SDTV
+                else:
+                    quality = Quality.SDTV
+
+                if show:
+                    results.append( CpasbienSearchResult( self.opener, title, downloadURL, quality, str(show.audio_lang) ) )
+                else:
+                    results.append( CpasbienSearchResult( self.opener, title, downloadURL, quality ) )
+
+        return results
+    
+    def getResult(self, episodes):
+        """
+        Returns a result of the correct type for this provider
+        """
+        result = classes.TorrentDataSearchResult(episodes)
+        result.provider = self
+
+        return result    
+    
+class CpasbienSearchResult:
+    
+    def __init__(self, opener, title, url, quality, audio_langs=None):
+        self.opener = opener
+        self.title = title
+        self.url = url
+        self.quality = quality
+        self.audio_langs=audio_langs
+        
+    def getNZB(self):
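+        # fetch the .torrent file behind this result's URL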
+        return self.opener.open(self.url).read()
+
+    def getQuality(self):
+        return self.quality
+
 provider = CpasbienProvider()
\ No newline at end of file
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
index 34b833fd0..25b0c0394 100644
--- a/sickbeard/providers/newzbin.py
+++ b/sickbeard/providers/newzbin.py
@@ -1,384 +1,384 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import re
-import sys
-import time
-import urllib
-
-from xml.dom.minidom import parseString
-from datetime import datetime, timedelta
-
-import sickbeard
-import generic
-
-import sickbeard.encodingKludge as ek
-from sickbeard import classes, logger, helpers, exceptions, show_name_helpers
-from sickbeard import tvcache
-from sickbeard.common import Quality
-from sickbeard.exceptions import ex
-from lib.dateutil.parser import parse as parseDate
-
-class NewzbinDownloader(urllib.FancyURLopener):
-
-    def __init__(self):
-        urllib.FancyURLopener.__init__(self)
-
-    def http_error_default(self, url, fp, errcode, errmsg, headers):
-
-        # if newzbin is throttling us, wait seconds and try again
-        if errcode == 400:
-
-            newzbinErrCode = int(headers.getheader('X-DNZB-RCode'))
-
-            if newzbinErrCode == 450:
-                rtext = str(headers.getheader('X-DNZB-RText'))
-                result = re.search("wait (\d+) seconds", rtext)
-
-            elif newzbinErrCode == 401:
-                raise exceptions.AuthException("Newzbin username or password incorrect")
-
-            elif newzbinErrCode == 402:
-                raise exceptions.AuthException("Newzbin account not premium status, can't download NZBs")
-
-            logger.log("Newzbin throttled our NZB downloading, pausing for " + result.group(1) + "seconds")
-
-            time.sleep(int(result.group(1)))
-
-            raise exceptions.NewzbinAPIThrottled()
-
-class NewzbinProvider(generic.NZBProvider):
-
-    def __init__(self):
-
-        generic.NZBProvider.__init__(self, "Newzbin")
-
-        self.supportsBacklog = True
-
-        self.cache = NewzbinCache(self)
-
-        self.url = 'https://www.newzbin2.es/'
-
-        self.NEWZBIN_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S %Z'
-
-    def isEnabled(self):
-        return sickbeard.NEWZBIN
-
-    def getQuality(self, item):
-        attributes = item.getElementsByTagName('report:attributes')[0]
-        attr_dict = {}
-
-        for attribute in attributes.getElementsByTagName('report:attribute'):
-            cur_attr = attribute.getAttribute('type')
-            cur_attr_value = helpers.get_xml_text(attribute)
-            if cur_attr not in attr_dict:
-                attr_dict[cur_attr] = [cur_attr_value]
-            else:
-                attr_dict[cur_attr].append(cur_attr_value)
-
-        logger.log("Finding quality of item based on attributes "+str(attr_dict), logger.DEBUG)
-
-        if self._is_SDTV(attr_dict):
-            quality = Quality.SDTV
-        elif self._is_SDDVD(attr_dict):
-            quality = Quality.SDDVD
-        elif self._is_HDTV(attr_dict):
-            quality = Quality.HDTV
-        elif self._is_WEBDL(attr_dict):
-            quality = Quality.HDWEBDL
-        elif self._is_720pBluRay(attr_dict):
-            quality = Quality.HDBLURAY
-        elif self._is_1080pBluRay(attr_dict):
-            quality = Quality.FULLHDBLURAY
-        else:
-            quality = Quality.UNKNOWN
-
-        logger.log("Resulting quality: "+str(quality), logger.DEBUG)
-
-        return quality
-
-    def _is_SDTV(self, attrs):
-
-        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
-        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
-                            and ('720p' not in attrs['Video Fmt']) \
-                            and ('1080p' not in attrs['Video Fmt']) \
-                            and ('1080i' not in attrs['Video Fmt'])
-
-        # Source: TV Cap or HDTV or (None)
-        source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_SDDVD(self, attrs):
-
-        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
-        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
-                            and ('720p' not in attrs['Video Fmt']) \
-                            and ('1080p' not in attrs['Video Fmt']) \
-                            and ('1080i' not in attrs['Video Fmt'])
-    						
-        # Source: DVD
-        source = 'Source' in attrs and 'DVD' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_HDTV(self, attrs):
-        # Video Fmt: H.264/x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                            and ('720p' in attrs['Video Fmt'])
-
-        # Source: TV Cap or HDTV or (None)
-        source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_WEBDL(self, attrs):
-
-        # Video Fmt: H.264/x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                            and ('720p' in attrs['Video Fmt'])
-
-        # Source: WEB-DL
-        source = 'Source' in attrs and 'WEB-DL' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_720pBluRay(self, attrs):
-
-        # Video Fmt: H.264/x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                            and ('720p' in attrs['Video Fmt'])
-
-        # Source: Blu-ray or HD-DVD
-        source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
-
-        return video_fmt and source
-
-    def _is_1080pBluRay(self, attrs):
-
-        # Video Fmt: H.264/x264, 1080p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                            and ('1080p' in attrs['Video Fmt'])
-
-        # Source: Blu-ray or HD-DVD
-        source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
-
-        return video_fmt and source
-
-
-    def getIDFromURL(self, url):
-        id_regex = re.escape(self.url) + 'browse/post/(\d+)/'
-        id_match = re.match(id_regex, url)
-        if not id_match:
-            return None
-        else:
-            return id_match.group(1)
-
-    def downloadResult(self, nzb):
-
-        id = self.getIDFromURL(nzb.url)
-        if not id:
-            logger.log("Unable to get an ID from "+str(nzb.url)+", can't download from Newzbin's API", logger.ERROR)
-            return False
-
-        logger.log("Downloading an NZB from newzbin with id "+id)
-
-        fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name)+'.nzb')
-        logger.log("Saving to " + fileName)
-
-        urllib._urlopener = NewzbinDownloader()
-
-        params = urllib.urlencode({"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id})
-        try:
-            urllib.urlretrieve(self.url+"api/dnzb/", fileName, data=params)
-        except exceptions.NewzbinAPIThrottled:
-            logger.log("Done waiting for Newzbin API throttle limit, starting downloads again")
-            self.downloadResult(nzb)
-        except (urllib.ContentTooShortError, IOError), e:
-            logger.log("Error downloading NZB: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
-            return False
-
-        return True
-
-    def getURL(self, url):
-
-        myOpener = classes.AuthURLOpener(sickbeard.NEWZBIN_USERNAME, sickbeard.NEWZBIN_PASSWORD)
-        try:
-            f = myOpener.openit(url)
-        except (urllib.ContentTooShortError, IOError), e:
-            logger.log("Error loading search results: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
-            return None
-
-        data = f.read()
-        f.close()
-
-        return data
-
-    def _get_season_search_strings(self, show, season):
-
-        nameList = set(show_name_helpers.allPossibleShowNames(show))
-
-        if show.air_by_date:
-            suffix = ''
-        else:
-            suffix = 'x'
-        searchTerms = ['^"'+x+' - '+str(season)+suffix+'"' for x in nameList]
-        #searchTerms += ['^"'+x+' - Season '+str(season)+'"' for x in nameList]
-        searchStr = " OR ".join(searchTerms)
-
-        searchStr += " -subpack -extras"
-
-        logger.log("Searching newzbin for string "+searchStr, logger.DEBUG)
-        
-        return [searchStr]
-
-    def _get_episode_search_strings(self, ep_obj):
-
-        nameList = set(show_name_helpers.allPossibleShowNames(ep_obj.show))
-        if not ep_obj.show.air_by_date:
-            searchStr = " OR ".join(['^"'+x+' - %dx%02d"'%(ep_obj.season, ep_obj.episode) for x in nameList])
-        else:
-            searchStr = " OR ".join(['^"'+x+' - '+str(ep_obj.airdate)+'"' for x in nameList])
-        return [searchStr]
-
-    def _doSearch(self, searchStr, show=None):
-
-        data = self._getRSSData(searchStr.encode('utf-8'))
-        
-        item_list = []
-
-        try:
-            parsedXML = parseString(data)
-            items = parsedXML.getElementsByTagName('item')
-        except Exception, e:
-            logger.log("Error trying to load Newzbin RSS feed: "+ex(e), logger.ERROR)
-            return []
-
-        for cur_item in items:
-            title = helpers.get_xml_text(cur_item.getElementsByTagName('title')[0])
-            if title == 'Feeds Error':
-                raise exceptions.AuthException("The feed wouldn't load, probably because of invalid auth info")
-            if sickbeard.USENET_RETENTION is not None:
-                try:
-                    dateString = helpers.get_xml_text(cur_item.getElementsByTagName('report:postdate')[0])
-                    # use the parse (imported as parseDate) function from the dateutil lib
-                    # and we have to remove the timezone info from it because the retention_date will not have one
-                    # and a comparison of them is not possible
-                    post_date = parseDate(dateString).replace(tzinfo=None)
-                    retention_date = datetime.now() - timedelta(days=sickbeard.USENET_RETENTION)
-                    if post_date < retention_date:
-                        logger.log(u"Date "+str(post_date)+" is out of retention range, skipping", logger.DEBUG)
-                        continue
-                except Exception, e:
-                    logger.log("Error parsing date from Newzbin RSS feed: " + str(e), logger.ERROR)
-                    continue
-
-            item_list.append(cur_item)
-
-        return item_list
-
-
-    def _getRSSData(self, search=None):
-
-        params = {
-                'searchaction': 'Search',
-                'fpn': 'p',
-                'category': 8,
-                'u_nfo_posts_only': 0,
-                'u_url_posts_only': 0,
-                'u_comment_posts_only': 0,
-                'u_show_passworded': 0,
-                'u_v3_retention': 0,
-                'ps_rb_video_format': 3082257,
-                'ps_rb_language': 4096,
-                'sort': 'date',
-                'order': 'desc',
-                'u_post_results_amt': 50,
-                'feed': 'rss',
-                'hauth': 1,
-        }
-
-        if search:
-            params['q'] = search + " AND "
-        else:
-            params['q'] = ''
-
-        params['q'] += 'Attr:Lang~Eng AND NOT Attr:VideoF=DVD'
-
-        url = self.url + "search/?%s" % urllib.urlencode(params)
-        logger.log("Newzbin search URL: " + url, logger.DEBUG)
-
-        data = self.getURL(url)
-
-        return data
-
-    def _checkAuth(self):
-        if sickbeard.NEWZBIN_USERNAME in (None, "") or sickbeard.NEWZBIN_PASSWORD in (None, ""):
-            raise exceptions.AuthException("Newzbin authentication details are empty, check your config")
-
-class NewzbinCache(tvcache.TVCache):
-
-    def __init__(self, provider):
-
-        tvcache.TVCache.__init__(self, provider)
-
-        # only poll Newzbin every 10 mins max
-        self.minTime = 1
-
-    def _getRSSData(self):
-
-        data = self.provider._getRSSData()
-
-        return data
-
-    def _parseItem(self, item):
-
-        (title, url) = self.provider._get_title_and_url(item)
-
-        if title == 'Feeds Error':
-            logger.log("There's an error in the feed, probably bad auth info", logger.DEBUG)
-            raise exceptions.AuthException("Invalid Newzbin username/password")
-
-        if not title or not url:
-            logger.log("The XML returned from the "+self.provider.name+" feed is incomplete, this result is unusable", logger.ERROR)
-            return
-
-        quality = self.provider.getQuality(item)
-
-        logger.log("Found quality "+str(quality), logger.DEBUG)
-
-        logger.log("Adding item from RSS to cache: "+title, logger.DEBUG)
-
-        self._addCacheEntry(title, url, quality=quality)
-
-
-provider = NewzbinProvider()
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import re
+import sys
+import time
+import urllib
+
+from xml.dom.minidom import parseString
+from datetime import datetime, timedelta
+
+import sickbeard
+import generic
+
+import sickbeard.encodingKludge as ek
+from sickbeard import classes, logger, helpers, exceptions, show_name_helpers
+from sickbeard import tvcache
+from sickbeard.common import Quality
+from sickbeard.exceptions import ex
+from lib.dateutil.parser import parse as parseDate
+
+class NewzbinDownloader(urllib.FancyURLopener):
+
+    def __init__(self):
+        urllib.FancyURLopener.__init__(self)
+
+    def http_error_default(self, url, fp, errcode, errmsg, headers):
+
+        # if newzbin is throttling us, wait the requested number of seconds and try again
+        if errcode == 400:
+
+            newzbinErrCode = int(headers.getheader('X-DNZB-RCode'))
+
+            if newzbinErrCode == 450:
+                rtext = str(headers.getheader('X-DNZB-RText'))
+                result = re.search("wait (\d+) seconds", rtext)
+
+            elif newzbinErrCode == 401:
+                raise exceptions.AuthException("Newzbin username or password incorrect")
+
+            elif newzbinErrCode == 402:
+                raise exceptions.AuthException("Newzbin account not premium status, can't download NZBs")
+
+            logger.log("Newzbin throttled our NZB downloading, pausing for " + result.group(1) + "seconds")
+
+            time.sleep(int(result.group(1)))
+
+            raise exceptions.NewzbinAPIThrottled()
+
+class NewzbinProvider(generic.NZBProvider):
+
+    def __init__(self):
+
+        generic.NZBProvider.__init__(self, "Newzbin")
+
+        self.supportsBacklog = True
+
+        self.cache = NewzbinCache(self)
+
+        self.url = 'https://www.newzbin2.es/'
+
+        self.NEWZBIN_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S %Z'
+
+    def isEnabled(self):
+        return sickbeard.NEWZBIN
+
+    def getQuality(self, item):
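+        # collect every report:attribute value into a dict keyed by attribute type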
+        attributes = item.getElementsByTagName('report:attributes')[0]
+        attr_dict = {}
+
+        for attribute in attributes.getElementsByTagName('report:attribute'):
+            cur_attr = attribute.getAttribute('type')
+            cur_attr_value = helpers.get_xml_text(attribute)
+            if cur_attr not in attr_dict:
+                attr_dict[cur_attr] = [cur_attr_value]
+            else:
+                attr_dict[cur_attr].append(cur_attr_value)
+
+        logger.log("Finding quality of item based on attributes "+str(attr_dict), logger.DEBUG)
+
+        if self._is_SDTV(attr_dict):
+            quality = Quality.SDTV
+        elif self._is_SDDVD(attr_dict):
+            quality = Quality.SDDVD
+        elif self._is_HDTV(attr_dict):
+            quality = Quality.HDTV
+        elif self._is_WEBDL(attr_dict):
+            quality = Quality.HDWEBDL
+        elif self._is_720pBluRay(attr_dict):
+            quality = Quality.HDBLURAY
+        elif self._is_1080pBluRay(attr_dict):
+            quality = Quality.FULLHDBLURAY
+        else:
+            quality = Quality.UNKNOWN
+
+        logger.log("Resulting quality: "+str(quality), logger.DEBUG)
+
+        return quality
+
+    def _is_SDTV(self, attrs):
+
+        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
+        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
+                            and ('720p' not in attrs['Video Fmt']) \
+                            and ('1080p' not in attrs['Video Fmt']) \
+                            and ('1080i' not in attrs['Video Fmt'])
+
+        # Source: TV Cap or HDTV or (None)
+        source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
+
+        # Subtitles: (None)
+        subs = 'Subtitles' not in attrs
+
+        return video_fmt and source and subs
+
+    def _is_SDDVD(self, attrs):
+
+        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
+        video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
+                            and ('720p' not in attrs['Video Fmt']) \
+                            and ('1080p' not in attrs['Video Fmt']) \
+                            and ('1080i' not in attrs['Video Fmt'])
+    						
+        # Source: DVD
+        source = 'Source' in attrs and 'DVD' in attrs['Source']
+
+        # Subtitles: (None)
+        subs = 'Subtitles' not in attrs
+
+        return video_fmt and source and subs
+
+    def _is_HDTV(self, attrs):
+        # Video Fmt: H.264/x264, 720p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
+                            and ('720p' in attrs['Video Fmt'])
+
+        # Source: TV Cap or HDTV or (None)
+        source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
+
+        # Subtitles: (None)
+        subs = 'Subtitles' not in attrs
+
+        return video_fmt and source and subs
+
+    def _is_WEBDL(self, attrs):
+
+        # Video Fmt: H.264/x264, 720p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
+                            and ('720p' in attrs['Video Fmt'])
+
+        # Source: WEB-DL
+        source = 'Source' in attrs and 'WEB-DL' in attrs['Source']
+
+        # Subtitles: (None)
+        subs = 'Subtitles' not in attrs
+
+        return video_fmt and source and subs
+
+    def _is_720pBluRay(self, attrs):
+
+        # Video Fmt: H.264/x264, 720p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
+                            and ('720p' in attrs['Video Fmt'])
+
+        # Source: Blu-ray or HD-DVD
+        source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
+
+        return video_fmt and source
+
+    def _is_1080pBluRay(self, attrs):
+
+        # Video Fmt: H.264/x264, 1080p
+        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
+                            and ('1080p' in attrs['Video Fmt'])
+
+        # Source: Blu-ray or HD-DVD
+        source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
+
+        return video_fmt and source
+
+
+    def getIDFromURL(self, url):
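+        # pull the numeric report ID out of a browse/post/<id>/ URL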
+        id_regex = re.escape(self.url) + 'browse/post/(\d+)/'
+        id_match = re.match(id_regex, url)
+        if not id_match:
+            return None
+        else:
+            return id_match.group(1)
+
+    def downloadResult(self, nzb):
+
+        id = self.getIDFromURL(nzb.url)
+        if not id:
+            logger.log("Unable to get an ID from "+str(nzb.url)+", can't download from Newzbin's API", logger.ERROR)
+            return False
+
+        logger.log("Downloading an NZB from newzbin with id "+id)
+
+        fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name)+'.nzb')
+        logger.log("Saving to " + fileName)
+
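+        # install our throttle-aware opener so urlretrieve handles Newzbin HTTP 400 responses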
+        urllib._urlopener = NewzbinDownloader()
+
+        params = urllib.urlencode({"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id})
+        try:
+            urllib.urlretrieve(self.url+"api/dnzb/", fileName, data=params)
+        except exceptions.NewzbinAPIThrottled:
+            logger.log("Done waiting for Newzbin API throttle limit, starting downloads again")
+            self.downloadResult(nzb)
+        except (urllib.ContentTooShortError, IOError), e:
+            logger.log("Error downloading NZB: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
+            return False
+
+        return True
+
+    def getURL(self, url):
+
+        myOpener = classes.AuthURLOpener(sickbeard.NEWZBIN_USERNAME, sickbeard.NEWZBIN_PASSWORD)
+        try:
+            f = myOpener.openit(url)
+        except (urllib.ContentTooShortError, IOError), e:
+            logger.log("Error loading search results: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
+            return None
+
+        data = f.read()
+        f.close()
+
+        return data
+
+    def _get_season_search_strings(self, show, season):
+
+        nameList = set(show_name_helpers.allPossibleShowNames(show))
+
+        if show.air_by_date:
+            suffix = ''
+        else:
+            suffix = 'x'
+        searchTerms = ['^"'+x+' - '+str(season)+suffix+'"' for x in nameList]
+        #searchTerms += ['^"'+x+' - Season '+str(season)+'"' for x in nameList]
+        searchStr = " OR ".join(searchTerms)
+
+        searchStr += " -subpack -extras"
+
+        logger.log("Searching newzbin for string "+searchStr, logger.DEBUG)
+        
+        return [searchStr]
+
+    def _get_episode_search_strings(self, ep_obj):
+
+        nameList = set(show_name_helpers.allPossibleShowNames(ep_obj.show))
+        if not ep_obj.show.air_by_date:
+            searchStr = " OR ".join(['^"'+x+' - %dx%02d"'%(ep_obj.season, ep_obj.episode) for x in nameList])
+        else:
+            searchStr = " OR ".join(['^"'+x+' - '+str(ep_obj.airdate)+'"' for x in nameList])
+        return [searchStr]
+
+    def _doSearch(self, searchStr, show=None):
+
+        data = self._getRSSData(searchStr.encode('utf-8'))
+        
+        item_list = []
+
+        try:
+            parsedXML = parseString(data)
+            items = parsedXML.getElementsByTagName('item')
+        except Exception, e:
+            logger.log("Error trying to load Newzbin RSS feed: "+ex(e), logger.ERROR)
+            return []
+
+        for cur_item in items:
+            title = helpers.get_xml_text(cur_item.getElementsByTagName('title')[0])
+            if title == 'Feeds Error':
+                raise exceptions.AuthException("The feed wouldn't load, probably because of invalid auth info")
+            if sickbeard.USENET_RETENTION is not None:
+                try:
+                    dateString = helpers.get_xml_text(cur_item.getElementsByTagName('report:postdate')[0])
+                    # parse the date with dateutil's parse (imported as parseDate) and
+                    # strip the timezone, since retention_date is naive and the two
+                    # could not be compared otherwise
+                    post_date = parseDate(dateString).replace(tzinfo=None)
+                    retention_date = datetime.now() - timedelta(days=sickbeard.USENET_RETENTION)
+                    if post_date < retention_date:
+                        logger.log(u"Date "+str(post_date)+" is out of retention range, skipping", logger.DEBUG)
+                        continue
+                except Exception, e:
+                    logger.log("Error parsing date from Newzbin RSS feed: " + str(e), logger.ERROR)
+                    continue
+
+            item_list.append(cur_item)
+
+        return item_list
+
+
+    def _getRSSData(self, search=None):
+
+        params = {
+                'searchaction': 'Search',
+                'fpn': 'p',
+                'category': 8,
+                'u_nfo_posts_only': 0,
+                'u_url_posts_only': 0,
+                'u_comment_posts_only': 0,
+                'u_show_passworded': 0,
+                'u_v3_retention': 0,
+                'ps_rb_video_format': 3082257,
+                'ps_rb_language': 4096,
+                'sort': 'date',
+                'order': 'desc',
+                'u_post_results_amt': 50,
+                'feed': 'rss',
+                'hauth': 1,
+        }
+
+        if search:
+            params['q'] = search + " AND "
+        else:
+            params['q'] = ''
+
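+        # always restrict results to English-language, non-DVD posts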
+        params['q'] += 'Attr:Lang~Eng AND NOT Attr:VideoF=DVD'
+
+        url = self.url + "search/?%s" % urllib.urlencode(params)
+        logger.log("Newzbin search URL: " + url, logger.DEBUG)
+
+        data = self.getURL(url)
+
+        return data
+
+    def _checkAuth(self):
+        if sickbeard.NEWZBIN_USERNAME in (None, "") or sickbeard.NEWZBIN_PASSWORD in (None, ""):
+            raise exceptions.AuthException("Newzbin authentication details are empty, check your config")
+
+class NewzbinCache(tvcache.TVCache):
+
+    def __init__(self, provider):
+
+        tvcache.TVCache.__init__(self, provider)
+
+        # only poll Newzbin once a minute at most
+        self.minTime = 1
+
+    def _getRSSData(self):
+
+        data = self.provider._getRSSData()
+
+        return data
+
+    def _parseItem(self, item):
+
+        (title, url) = self.provider._get_title_and_url(item)
+
+        if title == 'Feeds Error':
+            logger.log("There's an error in the feed, probably bad auth info", logger.DEBUG)
+            raise exceptions.AuthException("Invalid Newzbin username/password")
+
+        if not title or not url:
+            logger.log("The XML returned from the "+self.provider.name+" feed is incomplete, this result is unusable", logger.ERROR)
+            return
+
+        quality = self.provider.getQuality(item)
+
+        logger.log("Found quality "+str(quality), logger.DEBUG)
+
+        logger.log("Adding item from RSS to cache: "+title, logger.DEBUG)
+
+        self._addCacheEntry(title, url, quality=quality)
+
+
+provider = NewzbinProvider()
diff --git a/sickbeard/providers/nzbmatrix.py b/sickbeard/providers/nzbmatrix.py
index daa02df65..dd622d722 100644
--- a/sickbeard/providers/nzbmatrix.py
+++ b/sickbeard/providers/nzbmatrix.py
@@ -1,181 +1,181 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import re
-import time
-import urllib
-import datetime
-
-from xml.dom.minidom import parseString
-
-import sickbeard
-import generic
-
-from sickbeard import classes, logger, show_name_helpers, helpers
-from sickbeard import tvcache
-from sickbeard.exceptions import ex
-
-class NZBMatrixProvider(generic.NZBProvider):
-
-    def __init__(self):
-
-        generic.NZBProvider.__init__(self, "NZBMatrix")
-
-        self.supportsBacklog = True
-
-        self.cache = NZBMatrixCache(self)
-
-        self.url = 'http://www.nzbmatrix.com/'
-
-    def isEnabled(self):
-        return sickbeard.NZBMATRIX
-
-    def _get_season_search_strings(self, show, season):
-        sceneSearchStrings = set(show_name_helpers.makeSceneSeasonSearchString(show, season, "nzbmatrix"))
-
-        # search for all show names and episode numbers like ("a","b","c") in a single search
-        return [' '.join(sceneSearchStrings)]
-
-    def _get_episode_search_strings(self, ep_obj):
-
-        sceneSearchStrings = set(show_name_helpers.makeSceneSearchString(ep_obj))
-
-        # search for all show names and episode numbers like ("a","b","c") in a single search
-        return ['("' + '","'.join(sceneSearchStrings) + '")']
-
-    def _doSearch(self, curString, quotes=False, show=None):
-
-        term =  re.sub('[\.\-]', ' ', curString).encode('utf-8')
-        if quotes:
-            term = "\""+term+"\""
-
-        params = {"term": term,
-                  "maxage": sickbeard.USENET_RETENTION,
-                  "page": "download",
-                  "username": sickbeard.NZBMATRIX_USERNAME,
-                  "apikey": sickbeard.NZBMATRIX_APIKEY,
-                  "subcat": "6,41",
-                  "english": 1,
-                  "ssl": 1,
-                  "scenename": 1}
-
-        # don't allow it to be missing
-        if not params['maxage']:
-            params['maxage'] = '0'
-
-        # if the show is a documentary use those cats on nzbmatrix
-        if show and show.genre and 'documentary' in show.genre.lower():
-            params['subcat'] = params['subcat'] + ',53,9' 
-
-        searchURL = "https://rss.nzbmatrix.com/rss.php?" + urllib.urlencode(params)
-
-        logger.log(u"Search string: " + searchURL, logger.DEBUG)
-
-        logger.log(u"Sleeping 10 seconds to respect NZBMatrix's rules")
-        time.sleep(10)
-
-        searchResult = self.getURL(searchURL)
-
-        if not searchResult:
-            return []
-
-        try:
-            parsedXML = parseString(searchResult)
-            items = parsedXML.getElementsByTagName('item')
-        except Exception, e:
-            logger.log(u"Error trying to load NZBMatrix RSS feed: "+ex(e), logger.ERROR)
-            return []
-
-        results = []
-
-        for curItem in items:
-            (title, url) = self._get_title_and_url(curItem)
-
-            if title == 'Error: No Results Found For Your Search':
-                continue
-
-            if not title or not url:
-                logger.log(u"The XML returned from the NZBMatrix RSS feed is incomplete, this result is unusable", logger.ERROR)
-                continue
-
-            results.append(curItem)
-
-        return results
-
-
-    def findPropers(self, date=None):
-
-        results = []
-
-        for curResult in self._doSearch("(PROPER,REPACK)"):
-
-            (title, url) = self._get_title_and_url(curResult)
-
-            description_node = curResult.getElementsByTagName('description')[0]
-            descriptionStr = helpers.get_xml_text(description_node)
-
-            dateStr = re.search('<b>Added:</b> (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d)', descriptionStr).group(1)
-            if not dateStr:
-                logger.log(u"Unable to figure out the date for entry "+title+", skipping it")
-                continue
-            else:
-                resultDate = datetime.datetime.strptime(dateStr, "%Y-%m-%d %H:%M:%S")
-
-            if date == None or resultDate > date:
-                results.append(classes.Proper(title, url, resultDate))
-
-        return results
-
-
-class NZBMatrixCache(tvcache.TVCache):
-
-    def __init__(self, provider):
-
-        tvcache.TVCache.__init__(self, provider)
-
-        # only poll NZBMatrix every 25 minutes max
-        self.minTime = 25
-
-
-    def _getRSSData(self):
-        # get all records since the last timestamp
-        url = "https://rss.nzbmatrix.com/rss.php?"
-
-        urlArgs = {'page': 'download',
-                   'username': sickbeard.NZBMATRIX_USERNAME,
-                   'apikey': sickbeard.NZBMATRIX_APIKEY,
-                   'maxage': sickbeard.USENET_RETENTION,
-                   'english': 1,
-                   'ssl': 1,
-                   'scenename': 1,
-                   'subcat': '6,41'}
-
-        # don't allow it to be missing
-        if not urlArgs['maxage']:
-            urlArgs['maxage'] = '0'
-
-        url += urllib.urlencode(urlArgs)
-
-        logger.log(u"NZBMatrix cache update URL: "+ url, logger.DEBUG)
-
-        data = self.provider.getURL(url)
-
-        return data
-
-
-provider = NZBMatrixProvider()
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import time
+import urllib
+import datetime
+
+from xml.dom.minidom import parseString
+
+import sickbeard
+import generic
+
+from sickbeard import classes, logger, show_name_helpers, helpers
+from sickbeard import tvcache
+from sickbeard.exceptions import ex
+
+class NZBMatrixProvider(generic.NZBProvider):
+
+    def __init__(self):
+
+        generic.NZBProvider.__init__(self, "NZBMatrix")
+
+        self.supportsBacklog = True
+
+        self.cache = NZBMatrixCache(self)
+
+        self.url = 'http://www.nzbmatrix.com/'
+
+    def isEnabled(self):
+        return sickbeard.NZBMATRIX
+
+    def _get_season_search_strings(self, show, season):
+        sceneSearchStrings = set(show_name_helpers.makeSceneSeasonSearchString(show, season, "nzbmatrix"))
+
+        # join all possible show name/season strings into a single search
+        return [' '.join(sceneSearchStrings)]
+
+    def _get_episode_search_strings(self, ep_obj):
+
+        sceneSearchStrings = set(show_name_helpers.makeSceneSearchString(ep_obj))
+
+        # search for all show names and episode numbers like ("a","b","c") in a single search
+        return ['("' + '","'.join(sceneSearchStrings) + '")']
+
+    def _doSearch(self, curString, quotes=False, show=None):
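+        # normalise the search term: dots and dashes become spaces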
+
+        term =  re.sub('[\.\-]', ' ', curString).encode('utf-8')
+        if quotes:
+            term = "\""+term+"\""
+
+        params = {"term": term,
+                  "maxage": sickbeard.USENET_RETENTION,
+                  "page": "download",
+                  "username": sickbeard.NZBMATRIX_USERNAME,
+                  "apikey": sickbeard.NZBMATRIX_APIKEY,
+                  "subcat": "6,41",
+                  "english": 1,
+                  "ssl": 1,
+                  "scenename": 1}
+
+        # don't allow it to be missing
+        if not params['maxage']:
+            params['maxage'] = '0'
+
+        # if the show is a documentary, also include NZBMatrix's documentary categories
+        if show and show.genre and 'documentary' in show.genre.lower():
+            params['subcat'] = params['subcat'] + ',53,9' 
+
+        searchURL = "https://rss.nzbmatrix.com/rss.php?" + urllib.urlencode(params)
+
+        logger.log(u"Search string: " + searchURL, logger.DEBUG)
+
+        logger.log(u"Sleeping 10 seconds to respect NZBMatrix's rules")
+        time.sleep(10)
+
+        searchResult = self.getURL(searchURL)
+
+        if not searchResult:
+            return []
+
+        try:
+            parsedXML = parseString(searchResult)
+            items = parsedXML.getElementsByTagName('item')
+        except Exception, e:
+            logger.log(u"Error trying to load NZBMatrix RSS feed: "+ex(e), logger.ERROR)
+            return []
+
+        results = []
+
+        for curItem in items:
+            (title, url) = self._get_title_and_url(curItem)
+
+            if title == 'Error: No Results Found For Your Search':
+                continue
+
+            if not title or not url:
+                logger.log(u"The XML returned from the NZBMatrix RSS feed is incomplete, this result is unusable", logger.ERROR)
+                continue
+
+            results.append(curItem)
+
+        return results
+
+
+    def findPropers(self, date=None):
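+        # search for PROPER/REPACK releases and keep those newer than the supplied date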
+
+        results = []
+
+        for curResult in self._doSearch("(PROPER,REPACK)"):
+
+            (title, url) = self._get_title_and_url(curResult)
+
+            description_node = curResult.getElementsByTagName('description')[0]
+            descriptionStr = helpers.get_xml_text(description_node)
+
+            dateStr = re.search('<b>Added:</b> (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d)', descriptionStr).group(1)
+            if not dateStr:
+                logger.log(u"Unable to figure out the date for entry "+title+", skipping it")
+                continue
+            else:
+                resultDate = datetime.datetime.strptime(dateStr, "%Y-%m-%d %H:%M:%S")
+
+            if date is None or resultDate > date:
+                results.append(classes.Proper(title, url, resultDate))
+
+        return results
+
+
+class NZBMatrixCache(tvcache.TVCache):
+
+    def __init__(self, provider):
+
+        tvcache.TVCache.__init__(self, provider)
+
+        # only poll NZBMatrix every 25 minutes max
+        self.minTime = 25
+
+
+    def _getRSSData(self):
+        # fetch the most recent entries from the NZBMatrix RSS feed
+        url = "https://rss.nzbmatrix.com/rss.php?"
+
+        urlArgs = {'page': 'download',
+                   'username': sickbeard.NZBMATRIX_USERNAME,
+                   'apikey': sickbeard.NZBMATRIX_APIKEY,
+                   'maxage': sickbeard.USENET_RETENTION,
+                   'english': 1,
+                   'ssl': 1,
+                   'scenename': 1,
+                   'subcat': '6,41'}
+
+        # don't allow it to be missing
+        if not urlArgs['maxage']:
+            urlArgs['maxage'] = '0'
+
+        url += urllib.urlencode(urlArgs)
+
+        logger.log(u"NZBMatrix cache update URL: "+ url, logger.DEBUG)
+
+        data = self.provider.getURL(url)
+
+        return data
+
+
+provider = NZBMatrixProvider()
diff --git a/sickbeard/providers/nzbsrus.py b/sickbeard/providers/nzbsrus.py
index 9d140c568..84fcbb9e6 100644
--- a/sickbeard/providers/nzbsrus.py
+++ b/sickbeard/providers/nzbsrus.py
@@ -1,122 +1,122 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import urllib
-import generic
-import sickbeard
-
-try:
-    import xml.etree.cElementTree as etree
-except ImportError:
-    import xml.etree.ElementTree as etree
-
-from sickbeard import exceptions, logger
-from sickbeard import tvcache, show_name_helpers
-
-
-class NZBsRUSProvider(generic.NZBProvider):
-
-    def __init__(self):
-        generic.NZBProvider.__init__(self, "NZBs'R'US")
-        self.cache = NZBsRUSCache(self)
-        self.url = 'https://www.nzbsrus.com/'
-        self.supportsBacklog = True
-
-    def isEnabled(self):
-        return sickbeard.NZBSRUS
-
-    def _checkAuth(self):
-        if sickbeard.NZBSRUS_UID in (None, "") or sickbeard.NZBSRUS_HASH in (None, ""):
-            raise exceptions.AuthException("NZBs'R'US authentication details are empty, check your config")
-
-    def _get_season_search_strings(self, show, season):
-        return [x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)]
-
-    def _get_episode_search_strings(self, ep_obj):
-        return [x for x in show_name_helpers.makeSceneSearchString(ep_obj)]
-
-    def _doSearch(self, search, show=None, season=None):
-        params = {'uid': sickbeard.NZBSRUS_UID,
-                  'key': sickbeard.NZBSRUS_HASH,
-                  'xml': 1,
-                  'age': sickbeard.USENET_RETENTION,
-                  'lang0': 1,   # English only from CouchPotato
-                  'lang1': 1,
-                  'lang3': 1,
-                  'c91': 1,     # TV:HD
-                  'c104': 1,    # TV:SD-x264
-                  'c75': 1,     # TV:XviD
-                  'searchtext': search}
-
-        if not params['age']:
-            params['age'] = 500
-
-        searchURL = self.url + 'api.php?' + urllib.urlencode(params)
-        logger.log(u"NZBS'R'US search url: " + searchURL, logger.DEBUG)
-
-        data = self.getURL(searchURL)
-        if not data:
-            return []
-
-        if not data.startswith('<?xml'):  # Error will be a single line of text
-            logger.log(u"NZBs'R'US error: " + data, logger.ERROR)
-            return []
-
-        root = etree.fromstring(data)
-        if root is None:
-            logger.log(u"Error trying to parse NZBS'R'US XML data.", logger.ERROR)
-            logger.log(u"RSS data: " + data, logger.DEBUG)
-            return []
-        return root.findall('./results/result')
-
-    def _get_title_and_url(self, element):
-        if element.find('title'):  # RSS feed
-            title = element.find('title').text
-            url = element.find('link').text.replace('&amp;', '&')
-        else:  # API item
-            title = element.find('name').text
-            nzbID = element.find('id').text
-            key = element.find('key').text
-            url = self.url + 'nzbdownload_rss.php' + '/' + \
-                nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/'
-        return (title, url)
-
-
-class NZBsRUSCache(tvcache.TVCache):
-
-    def __init__(self, provider):
-        tvcache.TVCache.__init__(self, provider)
-        # only poll NZBs'R'US every 15 minutes max
-        self.minTime = 15
-
-    def _getRSSData(self):
-        url = self.provider.url + 'rssfeed.php?'
-        urlArgs = {'cat': '91,75,104',  # HD,XviD,SD-x264
-                   'i': sickbeard.NZBSRUS_UID,
-                   'h': sickbeard.NZBSRUS_HASH}
-
-        url += urllib.urlencode(urlArgs)
-        logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG)
-
-        data = self.provider.getURL(url)
-        return data
-
-    def _checkAuth(self, data):
-        return data != 'Invalid Link'
-
-provider = NZBsRUSProvider()
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import urllib
+import generic
+import sickbeard
+
+try:
+    import xml.etree.cElementTree as etree
+except ImportError:
+    import xml.etree.ElementTree as etree
+
+from sickbeard import exceptions, logger
+from sickbeard import tvcache, show_name_helpers
+
+
+class NZBsRUSProvider(generic.NZBProvider):
+
+    def __init__(self):
+        generic.NZBProvider.__init__(self, "NZBs'R'US")
+        self.cache = NZBsRUSCache(self)
+        self.url = 'https://www.nzbsrus.com/'
+        self.supportsBacklog = True
+
+    def isEnabled(self):
+        return sickbeard.NZBSRUS
+
+    def _checkAuth(self):
+        if sickbeard.NZBSRUS_UID in (None, "") or sickbeard.NZBSRUS_HASH in (None, ""):
+            raise exceptions.AuthException("NZBs'R'US authentication details are empty, check your config")
+
+    def _get_season_search_strings(self, show, season):
+        return [x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)]
+
+    def _get_episode_search_strings(self, ep_obj):
+        return [x for x in show_name_helpers.makeSceneSearchString(ep_obj)]
+
+    def _doSearch(self, search, show=None, season=None):
+        params = {'uid': sickbeard.NZBSRUS_UID,
+                  'key': sickbeard.NZBSRUS_HASH,
+                  'xml': 1,
+                  'age': sickbeard.USENET_RETENTION,
+                  'lang0': 1,   # English only (language filter values taken from CouchPotato)
+                  'lang1': 1,
+                  'lang3': 1,
+                  'c91': 1,     # TV:HD
+                  'c104': 1,    # TV:SD-x264
+                  'c75': 1,     # TV:XviD
+                  'searchtext': search}
+
+        if not params['age']:
+            params['age'] = 500
+
+        searchURL = self.url + 'api.php?' + urllib.urlencode(params)
+        logger.log(u"NZBS'R'US search url: " + searchURL, logger.DEBUG)
+
+        data = self.getURL(searchURL)
+        if not data:
+            return []
+
+        if not data.startswith('<?xml'):  # Error will be a single line of text
+            logger.log(u"NZBs'R'US error: " + data, logger.ERROR)
+            return []
+
+        root = etree.fromstring(data)
+        if root is None:
+            logger.log(u"Error trying to parse NZBS'R'US XML data.", logger.ERROR)
+            logger.log(u"RSS data: " + data, logger.DEBUG)
+            return []
+        return root.findall('./results/result')
+
+    def _get_title_and_url(self, element):
+        if element.find('title') is not None:  # RSS feed
+            title = element.find('title').text
+            url = element.find('link').text.replace('&amp;', '&')
+        else:  # API item
+            title = element.find('name').text
+            nzbID = element.find('id').text
+            key = element.find('key').text
+            url = self.url + 'nzbdownload_rss.php' + '/' + \
+                nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/'
+        return (title, url)
+
+
+class NZBsRUSCache(tvcache.TVCache):
+
+    def __init__(self, provider):
+        tvcache.TVCache.__init__(self, provider)
+        # only poll NZBs'R'US every 15 minutes max
+        self.minTime = 15
+
+    def _getRSSData(self):
+        url = self.provider.url + 'rssfeed.php?'
+        urlArgs = {'cat': '91,75,104',  # HD,XviD,SD-x264
+                   'i': sickbeard.NZBSRUS_UID,
+                   'h': sickbeard.NZBSRUS_HASH}
+
+        url += urllib.urlencode(urlArgs)
+        logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG)
+
+        data = self.provider.getURL(url)
+        return data
+
+    def _checkAuth(self, data):
+        return data != 'Invalid Link'
+
+provider = NZBsRUSProvider()
diff --git a/sickbeard/providers/piratebay/__init__.py b/sickbeard/providers/piratebay/__init__.py
index d1f2dd85d..f1fd78e6e 100644
--- a/sickbeard/providers/piratebay/__init__.py
+++ b/sickbeard/providers/piratebay/__init__.py
@@ -1,404 +1,404 @@
-# Author: Mr_Orange <mr_orange@hotmail.it>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#  GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import re
-import urllib, urllib2
-import sys
-import os
-
-import sickbeard
-from sickbeard.providers import generic
-from sickbeard.common import Quality
-from sickbeard.name_parser.parser import NameParser, InvalidNameException
-from sickbeard import logger
-from sickbeard import tvcache
-from sickbeard import helpers
-from sickbeard import show_name_helpers
-from sickbeard.common import Overview 
-from sickbeard.exceptions import ex
-from sickbeard import encodingKludge as ek
-
-proxy_dict = {
-              'Getprivate.eu (NL)' : 'http://getprivate.eu/',
-              '15bb51.info (US)' : 'http://15bb51.info/',
-              'Hideme.nl (NL)' : 'http://hideme.nl/',
-              'Rapidproxy.us (GB)' : 'http://rapidproxy.us/',
-              'Proxite.eu (DE)' :'http://proxite.eu/',
-              'Shieldmagic.com (GB)' : 'http://www.shieldmagic.com/',
-              'Webproxy.cz (CZ)' : 'http://webproxy.cz/',
-              'Freeproxy.cz (CZ)' : 'http://www.freeproxy.cz/',
-             }
-
-class ThePirateBayProvider(generic.TorrentProvider):
-
-    def __init__(self):
-
-        generic.TorrentProvider.__init__(self, "PirateBay")
-        
-        self.supportsBacklog = True
-
-        self.cache = ThePirateBayCache(self)
-        
-        self.proxy = ThePirateBayWebproxy() 
-        
-        self.url = 'http://thepiratebay.se/'
-
-        self.searchurl = self.url+'search/%s/0/7/200'  # order by seed       
-
-        self.re_title_url =  '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>'
-
-    def isEnabled(self):
-        return sickbeard.THEPIRATEBAY
-        
-    def imageName(self):
-        return 'piratebay.png'
-    
-    def getQuality(self, item):
-        
-        quality = Quality.nameQuality(item[0])
-        return quality    
-
-    def _reverseQuality(self,quality):
-
-        quality_string = ''
-
-        if quality == Quality.SDTV:
-            quality_string = 'HDTV x264'
-        if quality == Quality.SDDVD:
-            quality_string = 'DVDRIP'    
-        elif quality == Quality.HDTV:    
-            quality_string = '720p HDTV x264'
-        elif quality == Quality.FULLHDTV:
-            quality_string = '1080p HDTV x264'        
-        elif quality == Quality.RAWHDTV:
-            quality_string = '1080i HDTV mpeg2'
-        elif quality == Quality.HDWEBDL:
-            quality_string = '720p WEB-DL'
-        elif quality == Quality.FULLHDWEBDL:
-            quality_string = '1080p WEB-DL'            
-        elif quality == Quality.HDBLURAY:
-            quality_string = '720p Bluray x264'
-        elif quality == Quality.FULLHDBLURAY:
-            quality_string = '1080p Bluray x264'  
-        
-        return quality_string
-
-    def _find_season_quality(self,title,torrent_id):
-        """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
-
-        mediaExtensions = ['avi', 'mkv', 'wmv', 'divx',
-                           'vob', 'dvr-ms', 'wtv', 'ts'
-                           'ogv', 'rar', 'zip'] 
-        
-        quality = Quality.UNKNOWN        
-        
-        fileName = None
-        
-        fileURL = self.proxy._buildURL(self.url+'ajax_details_filelist.php?id='+str(torrent_id))
-      
-        data = self.getURL(fileURL)
-        
-        if not data:
-            return None
-        
-        filesList = re.findall('<td.+>(.*?)</td>',data) 
-        
-        if not filesList: 
-            logger.log(u"Unable to get the torrent file list for "+title, logger.ERROR)
-            
-        for fileName in filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList):
-            quality = Quality.nameQuality(os.path.basename(fileName))
-            if quality != Quality.UNKNOWN: break
-
-        if fileName!=None and quality == Quality.UNKNOWN:
-            quality = Quality.assumeQuality(os.path.basename(fileName))            
-
-        if quality == Quality.UNKNOWN:
-            logger.log(u"No Season quality for "+title, logger.DEBUG)
-            return None
-
-        try:
-            myParser = NameParser()
-            parse_result = myParser.parse(fileName)
-        except InvalidNameException:
-            return None
-        
-        logger.log(u"Season quality for "+title+" is "+Quality.qualityStrings[quality], logger.DEBUG)
-        
-        if parse_result.series_name and parse_result.season_number: 
-            title = parse_result.series_name+' S%02d' % int(parse_result.season_number)+' '+self._reverseQuality(quality)
-        
-        return title
-
-    def _get_season_search_strings(self, show, season=None):
-
-        search_string = {'Episode': []}
-    
-        if not show:
-            return []
-
-        seasonEp = show.getAllEpisodes(season)
-
-        wantedEp = [x for x in seasonEp if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)]          
-
-        #If Every episode in Season is a wanted Episode then search for Season first
-        if wantedEp == seasonEp and not show.air_by_date:
-            search_string = {'Season': [], 'Episode': []}
-            for show_name in set(show_name_helpers.allPossibleShowNames(show)):
-                ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX   
-                search_string['Season'].append(ep_string)
-                      
-                ep_string = show_name+' Season '+str(season)+' -Ep*' #2) ShowName Season X  
-                search_string['Season'].append(ep_string)
-
-        #Building the search string with the episodes we need         
-        for ep_obj in wantedEp:
-            search_string['Episode'] += self._get_episode_search_strings(ep_obj)[0]['Episode']
-        
-        #If no Episode is needed then return an empty list
-        if not search_string['Episode']:
-            return []
-        
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj):
-       
-        search_string = {'Episode': []}
-       
-        if not ep_obj:
-            return []
-                
-        if ep_obj.show.air_by_date:
-            for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ str(ep_obj.airdate)
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-                sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
-                sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
-                sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} \
-
-                search_string['Episode'].append(ep_string)
-    
-        return [search_string]
-
-    def _doSearch(self, search_params, show=None, season=None):
-    
-        results = []
-        items = {'Season': [], 'Episode': []}
-
-        for mode in search_params.keys():
-            for search_string in search_params[mode]:
-
-                searchURL = self.proxy._buildURL(self.searchurl %(urllib.quote(search_string.encode("utf-8"))))    
-        
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
-        
-                data = self.getURL(searchURL)
-                if not data:
-                    return []
-        
-                re_title_url = self.proxy._buildRE(self.re_title_url)
-                
-                #Extracting torrent information from data returned by searchURL                   
-                match = re.compile(re_title_url, re.DOTALL ).finditer(urllib.unquote(data))
-                for torrent in match:
-
-                    title = torrent.group('title').replace('_','.')#Do not know why but SickBeard skip release with '_' in name
-                    url = torrent.group('url')
-                    id = int(torrent.group('id'))
-                    seeders = int(torrent.group('seeders'))
-                    leechers = int(torrent.group('leechers'))
-
-                    #Filter unseeded torrent
-                    if seeders == 0 or not title \
-                    or not show_name_helpers.filterBadReleases(title):
-                        continue 
-                   
-                    #Accept Torrent only from Good People for every Episode Search
-                    if sickbeard.THEPIRATEBAY_TRUSTED and re.search('(VIP|Trusted|Helper)',torrent.group(0))== None:
-                        logger.log(u"ThePirateBay Provider found result "+torrent.group('title')+" but that doesn't seem like a trusted result so I'm ignoring it",logger.DEBUG)
-                        continue
-
-                    #Try to find the real Quality for full season torrent analyzing files in torrent 
-                    if mode == 'Season' and Quality.nameQuality(title) == Quality.UNKNOWN:     
-                        if not self._find_season_quality(title,id): continue
-                        
-                    item = title, url, id, seeders, leechers
-                    
-                    items[mode].append(item)    
-
-            #For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[3], reverse=True)        
-
-            results += items[mode]  
-                
-        return results
-
-    def _get_title_and_url(self, item):
-        
-        title, url, id, seeders, leechers = item
-        
-        if url:
-            url = url.replace('&amp;','&')
-
-        return (title, url)
-
-    def getURL(self, url, headers=None):
-
-        if not headers:
-            headers = []
-
-        # Glype Proxies does not support Direct Linking.
-        # We have to fake a search on the proxy site to get data
-        if self.proxy.isEnabled():
-            headers.append(('Referer', self.proxy.getProxyURL()))
-            
-        result = None
-
-        try:
-            result = helpers.getURL(url, headers)
-        except (urllib2.HTTPError, IOError), e:
-            logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
-            return None
-
-        return result
-
-    def downloadResult(self, result):
-        """
-        Save the result to disk.
-        """
-        
-        #Hack for rtorrent user (it will not work for other torrent client)
-        if sickbeard.TORRENT_METHOD == "blackhole" and result.url.startswith('magnet'): 
-            magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
-            magnetFileContent = 'd10:magnet-uri' + `len(result.url)` + ':' + result.url + 'e'
-
-            try:
-                fileOut = open(magnetFileName, 'wb')
-                fileOut.write(magnetFileContent)
-                fileOut.close()
-                helpers.chmodAsParent(magnetFileName)
-            except IOError, e:
-                logger.log("Unable to save the file: "+ex(e), logger.ERROR)
-                return False
-            logger.log(u"Saved magnet link to "+magnetFileName+" ", logger.MESSAGE)
-            return True
-
-class ThePirateBayCache(tvcache.TVCache):
-
-    def __init__(self, provider):
-
-        tvcache.TVCache.__init__(self, provider)
-
-        # only poll ThePirateBay every 10 minutes max
-        self.minTime = 20
-
-    def updateCache(self):
-
-        re_title_url = self.provider.proxy._buildRE(self.provider.re_title_url)
-                
-        if not self.shouldUpdate():
-            return
-
-        data = self._getData()
-
-        # as long as the http request worked we count this as an update
-        if data:
-            self.setLastUpdate()
-        else:
-            return []
-
-        # now that we've loaded the current RSS feed lets delete the old cache
-        logger.log(u"Clearing "+self.provider.name+" cache and updating with new information")
-        self._clearCache()
-
-        match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data))
-        if not match:
-            logger.log(u"The Data returned from the ThePirateBay is incomplete, this result is unusable", logger.ERROR)
-            return []
-                
-        for torrent in match:
-
-            title = torrent.group('title').replace('_','.')#Do not know why but SickBeard skip release with '_' in name
-            url = torrent.group('url')
-           
-            #accept torrent only from Trusted people
-            if sickbeard.THEPIRATEBAY_TRUSTED and re.search('(VIP|Trusted|Helper)',torrent.group(0))== None:
-                logger.log(u"ThePirateBay Provider found result "+torrent.group('title')+" but that doesn't seem like a trusted result so I'm ignoring it",logger.DEBUG)
-                continue
-           
-            item = (title,url)
-
-            self._parseItem(item)
-
-    def _getData(self):
-       
-        #url for the last 50 tv-show
-        url = self.provider.proxy._buildURL(self.provider.url+'tv/latest/')
-
-        logger.log(u"ThePirateBay cache update URL: "+ url, logger.DEBUG)
-
-        data = self.provider.getURL(url)
-
-        return data
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return
-
-        logger.log(u"Adding item to cache: "+title, logger.DEBUG)
-
-        self._addCacheEntry(title, url)
-
-class ThePirateBayWebproxy:
-    
-    def __init__(self):
-        self.Type   = 'GlypeProxy'
-        self.param  = 'browse.php?u='
-        self.option = '&b=32'
-        
-    def isEnabled(self):
-        """ Return True if we Choose to call TPB via Proxy """ 
-        return sickbeard.THEPIRATEBAY_PROXY
-    
-    def getProxyURL(self):
-        """ Return the Proxy URL Choosen via Provider Setting """
-        return str(sickbeard.THEPIRATEBAY_PROXY_URL)
-    
-    def _buildURL(self,url):
-        """ Return the Proxyfied URL of the page """ 
-        if self.isEnabled():
-            url = self.getProxyURL() + self.param + url + self.option
-        
-        return url      
-
-    def _buildRE(self,regx):
-        """ Return the Proxyfied RE string """
-        if self.isEnabled():
-            regx = re.sub('//1',self.option,regx).replace('&','&amp;')
-        else:
-            regx = re.sub('//1','',regx)  
-
-        return regx    
-    
+# Author: Mr_Orange <mr_orange@hotmail.it>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import urllib, urllib2
+import sys
+import os
+
+import sickbeard
+from sickbeard.providers import generic
+from sickbeard.common import Quality
+from sickbeard.name_parser.parser import NameParser, InvalidNameException
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import helpers
+from sickbeard import show_name_helpers
+from sickbeard.common import Overview 
+from sickbeard.exceptions import ex
+from sickbeard import encodingKludge as ek
+
+proxy_dict = {
+              'Getprivate.eu (NL)' : 'http://getprivate.eu/',
+              '15bb51.info (US)' : 'http://15bb51.info/',
+              'Hideme.nl (NL)' : 'http://hideme.nl/',
+              'Rapidproxy.us (GB)' : 'http://rapidproxy.us/',
+              'Proxite.eu (DE)' :'http://proxite.eu/',
+              'Shieldmagic.com (GB)' : 'http://www.shieldmagic.com/',
+              'Webproxy.cz (CZ)' : 'http://webproxy.cz/',
+              'Freeproxy.cz (CZ)' : 'http://www.freeproxy.cz/',
+             }
+
+class ThePirateBayProvider(generic.TorrentProvider):
+
+    def __init__(self):
+
+        generic.TorrentProvider.__init__(self, "PirateBay")
+        
+        self.supportsBacklog = True
+
+        self.cache = ThePirateBayCache(self)
+        
+        self.proxy = ThePirateBayWebproxy() 
+        
+        self.url = 'http://thepiratebay.se/'
+
+        self.searchurl = self.url+'search/%s/0/7/200'  # order by seed       
+
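+        # '//1' in the regex below is a placeholder that the proxy helper's _buildRE()
+        # either substitutes with the proxy option string or strips for direct access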
+        self.re_title_url =  '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>'
+
+    def isEnabled(self):
+        return sickbeard.THEPIRATEBAY
+        
+    def imageName(self):
+        return 'piratebay.png'
+    
+    def getQuality(self, item):
+        
+        quality = Quality.nameQuality(item[0])
+        return quality    
+
+    def _reverseQuality(self,quality):
+
+        quality_string = ''
+
+        if quality == Quality.SDTV:
+            quality_string = 'HDTV x264'
+        elif quality == Quality.SDDVD:
+            quality_string = 'DVDRIP'    
+        elif quality == Quality.HDTV:    
+            quality_string = '720p HDTV x264'
+        elif quality == Quality.FULLHDTV:
+            quality_string = '1080p HDTV x264'        
+        elif quality == Quality.RAWHDTV:
+            quality_string = '1080i HDTV mpeg2'
+        elif quality == Quality.HDWEBDL:
+            quality_string = '720p WEB-DL'
+        elif quality == Quality.FULLHDWEBDL:
+            quality_string = '1080p WEB-DL'            
+        elif quality == Quality.HDBLURAY:
+            quality_string = '720p Bluray x264'
+        elif quality == Quality.FULLHDBLURAY:
+            quality_string = '1080p Bluray x264'  
+        
+        return quality_string
+
+    def _find_season_quality(self,title,torrent_id):
+        """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
+
+        mediaExtensions = ['avi', 'mkv', 'wmv', 'divx',
+                           'vob', 'dvr-ms', 'wtv', 'ts',
+                           'ogv', 'rar', 'zip']
+        
+        quality = Quality.UNKNOWN        
+        
+        fileName = None
+        
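+        # the filelist endpoint appears to return an HTML fragment; the <td> regex below scrapes the file names out of it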
+        fileURL = self.proxy._buildURL(self.url+'ajax_details_filelist.php?id='+str(torrent_id))
+      
+        data = self.getURL(fileURL)
+        
+        if not data:
+            return None
+        
+        filesList = re.findall('<td.+>(.*?)</td>',data) 
+        
+        if not filesList: 
+            logger.log(u"Unable to get the torrent file list for "+title, logger.ERROR)
+            
+        for fileName in filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList):
+            quality = Quality.nameQuality(os.path.basename(fileName))
+            if quality != Quality.UNKNOWN: break
+
+        if fileName is not None and quality == Quality.UNKNOWN:
+            quality = Quality.assumeQuality(os.path.basename(fileName))            
+
+        if quality == Quality.UNKNOWN:
+            logger.log(u"No Season quality for "+title, logger.DEBUG)
+            return None
+
+        try:
+            myParser = NameParser()
+            parse_result = myParser.parse(fileName)
+        except InvalidNameException:
+            return None
+        
+        logger.log(u"Season quality for "+title+" is "+Quality.qualityStrings[quality], logger.DEBUG)
+        
+        if parse_result.series_name and parse_result.season_number: 
+            title = parse_result.series_name+' S%02d' % int(parse_result.season_number)+' '+self._reverseQuality(quality)
+        
+        return title
+
+    def _get_season_search_strings(self, show, season=None):
+
+        search_string = {'Episode': []}
+    
+        if not show:
+            return []
+
+        seasonEp = show.getAllEpisodes(season)
+
+        wantedEp = [x for x in seasonEp if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)]          
+
+        #If Every episode in Season is a wanted Episode then search for Season first
+        if wantedEp == seasonEp and not show.air_by_date:
+            search_string = {'Season': [], 'Episode': []}
+            for show_name in set(show_name_helpers.allPossibleShowNames(show)):
+                ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX   
+                search_string['Season'].append(ep_string)
+                      
+                ep_string = show_name+' Season '+str(season)+' -Ep*' #2) ShowName Season X  
+                search_string['Season'].append(ep_string)
+
+        #Building the search string with the episodes we need         
+        for ep_obj in wantedEp:
+            search_string['Episode'] += self._get_episode_search_strings(ep_obj)[0]['Episode']
+        
+        #If no Episode is needed then return an empty list
+        if not search_string['Episode']:
+            return []
+        
+        return [search_string]
+
+    def _get_episode_search_strings(self, ep_obj):
+       
+        search_string = {'Episode': []}
+       
+        if not ep_obj:
+            return []
+                
+        if ep_obj.show.air_by_date:
+            for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ str(ep_obj.airdate)
+                search_string['Episode'].append(ep_string)
+        else:
+            for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
+                sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
+                sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
+                sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
+
+                search_string['Episode'].append(ep_string)
+    
+        return [search_string]
+
+    def _doSearch(self, search_params, show=None, season=None):
+    
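+        # search_params maps a mode ('Season' or 'Episode') to the list of query
+        # strings built by the _get_*_search_strings helpers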
+        results = []
+        items = {'Season': [], 'Episode': []}
+
+        for mode in search_params.keys():
+            for search_string in search_params[mode]:
+
+                searchURL = self.proxy._buildURL(self.searchurl %(urllib.quote(search_string.encode("utf-8"))))    
+        
+                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+        
+                data = self.getURL(searchURL)
+                if not data:
+                    continue
+        
+                re_title_url = self.proxy._buildRE(self.re_title_url)
+                
+                #Extracting torrent information from data returned by searchURL                   
+                match = re.compile(re_title_url, re.DOTALL ).finditer(urllib.unquote(data))
+                for torrent in match:
+
+                    # SickBeard skips releases with '_' in the name, so normalize underscores to dots
+                    title = torrent.group('title').replace('_', '.')
+                    url = torrent.group('url')
+                    id = int(torrent.group('id'))
+                    seeders = int(torrent.group('seeders'))
+                    leechers = int(torrent.group('leechers'))
+
+                    #Filter unseeded torrent
+                    if seeders == 0 or not title \
+                    or not show_name_helpers.filterBadReleases(title):
+                        continue 
+                   
+                    # only accept torrents from trusted uploaders (VIP/Trusted/Helper)
+                    if sickbeard.THEPIRATEBAY_TRUSTED and re.search('(VIP|Trusted|Helper)', torrent.group(0)) is None:
+                        logger.log(u"ThePirateBay Provider found result "+torrent.group('title')+" but that doesn't seem like a trusted result so I'm ignoring it",logger.DEBUG)
+                        continue
+
+                    # for full-season torrents, try to infer the real quality by inspecting the files in the torrent
+                    if mode == 'Season' and Quality.nameQuality(title) == Quality.UNKNOWN:
+                        title = self._find_season_quality(title, id)
+                        if not title:
+                            continue
+                        
+                    item = title, url, id, seeders, leechers
+                    
+                    items[mode].append(item)    
+
+            #For each search mode sort all the items by seeders
+            items[mode].sort(key=lambda tup: tup[3], reverse=True)        
+
+            results += items[mode]  
+                
+        return results
+
+    def _get_title_and_url(self, item):
+        
+        title, url, id, seeders, leechers = item
+        
+        if url:
+            url = url.replace('&amp;','&')
+
+        return (title, url)
+
+    def getURL(self, url, headers=None):
+
+        if not headers:
+            headers = []
+
+        # Glype proxies do not support direct linking,
+        # so fake a search on the proxy site to get the data
+        if self.proxy.isEnabled():
+            headers.append(('Referer', self.proxy.getProxyURL()))
+            
+        result = None
+
+        try:
+            result = helpers.getURL(url, headers)
+        except (urllib2.HTTPError, IOError), e:
+            logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
+            return None
+
+        return result
+
+    def downloadResult(self, result):
+        """
+        Save the result to disk.
+        """
+        
+        # hack for rTorrent users (it will not work with other torrent clients)
+        if sickbeard.TORRENT_METHOD == "blackhole" and result.url.startswith('magnet'): 
+            magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
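+            # 'd10:magnet-uri<len>:<url>e' is a minimal bencoded dict ({'magnet-uri': url})
+            # that rTorrent picks up from its watch (blackhole) directory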
+            magnetFileContent = 'd10:magnet-uri' + str(len(result.url)) + ':' + result.url + 'e'
+
+            try:
+                fileOut = open(magnetFileName, 'wb')
+                fileOut.write(magnetFileContent)
+                fileOut.close()
+                helpers.chmodAsParent(magnetFileName)
+            except IOError, e:
+                logger.log("Unable to save the file: "+ex(e), logger.ERROR)
+                return False
+            logger.log(u"Saved magnet link to "+magnetFileName+" ", logger.MESSAGE)
+            return True
+
+class ThePirateBayCache(tvcache.TVCache):
+
+    def __init__(self, provider):
+
+        tvcache.TVCache.__init__(self, provider)
+
+        # only poll ThePirateBay every 20 minutes max
+        self.minTime = 20
+
+    def updateCache(self):
+
+        re_title_url = self.provider.proxy._buildRE(self.provider.re_title_url)
+                
+        if not self.shouldUpdate():
+            return
+
+        data = self._getData()
+
+        # as long as the http request worked we count this as an update
+        if data:
+            self.setLastUpdate()
+        else:
+            return []
+
+        # now that we've loaded the current RSS feed let's delete the old cache
+        logger.log(u"Clearing "+self.provider.name+" cache and updating with new information")
+        self._clearCache()
+
+        # finditer() returns a lazy iterator, so materialize it to allow the emptiness check below
+        match = list(re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data)))
+        if not match:
+            logger.log(u"The data returned from ThePirateBay is incomplete, this result is unusable", logger.ERROR)
+            return []
+                
+        for torrent in match:
+
+            # SickBeard skips releases with '_' in the name, so normalize underscores to dots
+            title = torrent.group('title').replace('_', '.')
+            url = torrent.group('url')
+           
+            # only accept torrents from trusted uploaders (VIP/Trusted/Helper)
+            if sickbeard.THEPIRATEBAY_TRUSTED and re.search('(VIP|Trusted|Helper)', torrent.group(0)) is None:
+                logger.log(u"ThePirateBay Provider found result "+torrent.group('title')+" but that doesn't seem like a trusted result so I'm ignoring it",logger.DEBUG)
+                continue
+           
+            item = (title,url)
+
+            self._parseItem(item)
+
+    def _getData(self):
+       
+        # URL for the latest TV torrents (last 50 entries)
+        url = self.provider.proxy._buildURL(self.provider.url+'tv/latest/')
+
+        logger.log(u"ThePirateBay cache update URL: "+ url, logger.DEBUG)
+
+        data = self.provider.getURL(url)
+
+        return data
+
+    def _parseItem(self, item):
+
+        (title, url) = item
+
+        if not title or not url:
+            return
+
+        logger.log(u"Adding item to cache: "+title, logger.DEBUG)
+
+        self._addCacheEntry(title, url)
+
+class ThePirateBayWebproxy:
+    
+    def __init__(self):
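+        # Glype-style proxy: pages are fetched as <proxy>/browse.php?u=<target url>&b=<option flags>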
+        self.Type   = 'GlypeProxy'
+        self.param  = 'browse.php?u='
+        self.option = '&b=32'
+        
+    def isEnabled(self):
+        """ Return True if we Choose to call TPB via Proxy """ 
+        return sickbeard.THEPIRATEBAY_PROXY
+    
+    def getProxyURL(self):
+        """ Return the Proxy URL Choosen via Provider Setting """
+        return str(sickbeard.THEPIRATEBAY_PROXY_URL)
+    
+    def _buildURL(self,url):
+        """ Return the Proxyfied URL of the page """ 
+        if self.isEnabled():
+            url = self.getProxyURL() + self.param + url + self.option
+        
+        return url      
+
+    def _buildRE(self,regx):
+        """ Return the Proxyfied RE string """
+        if self.isEnabled():
+            regx = re.sub('//1',self.option,regx).replace('&','&amp;')
+        else:
+            regx = re.sub('//1','',regx)  
+
+        return regx    
+    
 provider = ThePirateBayProvider()
\ No newline at end of file
diff --git a/sickbeard/scene_exceptions.py b/sickbeard/scene_exceptions.py
index 759029ef6..22011a7b7 100644
--- a/sickbeard/scene_exceptions.py
+++ b/sickbeard/scene_exceptions.py
@@ -1,118 +1,118 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import re
-
-from sickbeard import helpers
-from sickbeard import name_cache
-from sickbeard import logger
-from sickbeard import db
-
-def get_scene_exceptions(tvdb_id):
-    """
-    Given a tvdb_id, return a list of all the scene exceptions.
-    """
-
-    myDB = db.DBConnection("cache.db")
-    exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE tvdb_id = ?", [tvdb_id])
-    return [cur_exception["show_name"] for cur_exception in exceptions]
-
-
-def get_scene_exception_by_name(show_name):
-    """
-    Given a show name, return the tvdbid of the exception, None if no exception
-    is present.
-    """
-
-    myDB = db.DBConnection("cache.db")
-
-    # try the obvious case first
-    exception_result = myDB.select("SELECT tvdb_id FROM scene_exceptions WHERE LOWER(show_name) = ?", [show_name.lower()])
-    if exception_result:
-        return int(exception_result[0]["tvdb_id"])
-
-    all_exception_results = myDB.select("SELECT show_name, tvdb_id FROM scene_exceptions")
-    for cur_exception in all_exception_results:
-
-        cur_exception_name = cur_exception["show_name"]
-        cur_tvdb_id = int(cur_exception["tvdb_id"])
-
-        if show_name.lower() in (cur_exception_name.lower(), helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
-            logger.log(u"Scene exception lookup got tvdb id "+str(cur_tvdb_id)+u", using that", logger.DEBUG)
-            return cur_tvdb_id
-
-    return None
-
-
-def retrieve_exceptions():
-    """
-    Looks up the exceptions on github, parses them into a dict, and inserts them into the
-    scene_exceptions table in cache.db. Also clears the scene name cache.
-    """
-
-    exception_dict = {}
-
-    # exceptions are stored on github pages
-    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'
-
-    logger.log(u"Check scene exceptions update")
-    url_data = helpers.getURL(url)
-
-    if url_data is None:
-        # When urlData is None, trouble connecting to github
-        logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
-        return
-
-    else:
-        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
-        for cur_line in url_data.splitlines():
-            cur_line = cur_line.decode('utf-8')
-            tvdb_id, sep, aliases = cur_line.partition(':') #@UnusedVariable
-
-            if not aliases:
-                continue
-
-            tvdb_id = int(tvdb_id)
-
-            # regex out the list of shows, taking \' into account
-            alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
-
-            exception_dict[tvdb_id] = alias_list
-
-        myDB = db.DBConnection("cache.db")
-
-        changed_exceptions = False
-
-        # write all the exceptions we got off the net into the database
-        for cur_tvdb_id in exception_dict:
-
-            # get a list of the existing exceptions for this ID
-            existing_exceptions = [x["show_name"] for x in myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [cur_tvdb_id])]
-
-            for cur_exception in exception_dict[cur_tvdb_id]:
-                # if this exception isn't already in the DB then add it
-                if cur_exception not in existing_exceptions:
-                    myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)", [cur_tvdb_id, cur_exception])
-                    changed_exceptions = True
-
-        # since this could invalidate the results of the cache we clear it out after updating
-        if changed_exceptions:
-            logger.log(u"Updated scene exceptions")
-            name_cache.clearCache()
-        else:
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import re
+
+from sickbeard import helpers
+from sickbeard import name_cache
+from sickbeard import logger
+from sickbeard import db
+
+def get_scene_exceptions(tvdb_id):
+    """
+    Given a tvdb_id, return a list of all the scene exceptions.
+    """
+
+    myDB = db.DBConnection("cache.db")
+    exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE tvdb_id = ?", [tvdb_id])
+    return [cur_exception["show_name"] for cur_exception in exceptions]
+
+
+def get_scene_exception_by_name(show_name):
+    """
+    Given a show name, return the tvdbid of the exception, None if no exception
+    is present.
+    """
+
+    myDB = db.DBConnection("cache.db")
+
+    # try the obvious case first
+    exception_result = myDB.select("SELECT tvdb_id FROM scene_exceptions WHERE LOWER(show_name) = ?", [show_name.lower()])
+    if exception_result:
+        return int(exception_result[0]["tvdb_id"])
+
+    all_exception_results = myDB.select("SELECT show_name, tvdb_id FROM scene_exceptions")
+    for cur_exception in all_exception_results:
+
+        cur_exception_name = cur_exception["show_name"]
+        cur_tvdb_id = int(cur_exception["tvdb_id"])
+
+        if show_name.lower() in (cur_exception_name.lower(), helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
+            logger.log(u"Scene exception lookup got tvdb id "+str(cur_tvdb_id)+u", using that", logger.DEBUG)
+            return cur_tvdb_id
+
+    return None
+
+
+def retrieve_exceptions():
+    """
+    Looks up the exceptions on github, parses them into a dict, and inserts them into the
+    scene_exceptions table in cache.db. Also clears the scene name cache.
+    """
+
+    exception_dict = {}
+
+    # exceptions are stored on github pages
+    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'
+
+    logger.log(u"Check scene exceptions update")
+    url_data = helpers.getURL(url)
+
+    if url_data is None:
+        # url_data is None when we had trouble connecting to github
+        logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
+        return
+
+    else:
+        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
+        for cur_line in url_data.splitlines():
+            cur_line = cur_line.decode('utf-8')
+            tvdb_id, sep, aliases = cur_line.partition(':') #@UnusedVariable
+
+            if not aliases:
+                continue
+
+            tvdb_id = int(tvdb_id)
+
+            # regex out the list of shows, taking \' into account
+            alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
+
+            exception_dict[tvdb_id] = alias_list
+
+        myDB = db.DBConnection("cache.db")
+
+        changed_exceptions = False
+
+        # write all the exceptions we got off the net into the database
+        for cur_tvdb_id in exception_dict:
+
+            # get a list of the existing exceptions for this ID
+            existing_exceptions = [x["show_name"] for x in myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [cur_tvdb_id])]
+
+            for cur_exception in exception_dict[cur_tvdb_id]:
+                # if this exception isn't already in the DB then add it
+                if cur_exception not in existing_exceptions:
+                    myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)", [cur_tvdb_id, cur_exception])
+                    changed_exceptions = True
+
+        # since this could invalidate the results of the cache we clear it out after updating
+        if changed_exceptions:
+            logger.log(u"Updated scene exceptions")
+            name_cache.clearCache()
+        else:
             logger.log(u"No scene exceptions update needed")
\ No newline at end of file
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index a6cb51cb0..cac6be7e3 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1,1912 +1,1912 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-from __future__ import with_statement
-
-import os.path
-import datetime
-import threading
-import re
-import glob
-
-import sickbeard
-
-import xml.etree.cElementTree as etree
-
-from name_parser.parser import NameParser, InvalidNameException
-
-from lib import subliminal
-
-from lib.tvdb_api import tvdb_api, tvdb_exceptions
-
-from sickbeard import db
-from sickbeard import helpers, exceptions, logger
-from sickbeard.exceptions import ex
-from sickbeard import tvrage
-from sickbeard import image_cache
-from sickbeard import notifiers
-from sickbeard import postProcessor
-from sickbeard import subtitles
-from sickbeard import history
-
-from sickbeard import encodingKludge as ek
-
-from common import Quality, Overview
-from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN
-from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, NAMING_LIMITED_EXTEND_E_PREFIXED
-
-class TVShow(object):
-
-    def __init__ (self, tvdbid, lang="", audio_lang=""):
-
-        self.tvdbid = tvdbid
-
-        self._location = ""
-        self.name = ""
-        self.tvrid = 0
-        self.tvrname = ""
-        self.network = ""
-        self.genre = ""
-        self.runtime = 0
-        self.quality = int(sickbeard.QUALITY_DEFAULT)
-        self.flatten_folders = int(sickbeard.FLATTEN_FOLDERS_DEFAULT)
-
-        self.status = ""
-        self.airs = ""
-        self.startyear = 0
-        self.paused = 0
-        self.air_by_date = 0
-        self.subtitles = int(sickbeard.SUBTITLES_DEFAULT)
-        self.lang = lang
-        self.audio_lang = audio_lang
-        self.custom_search_names = ""
-
-        self.lock = threading.Lock()
-        self._isDirGood = False
-
-        self.episodes = {}
-        
-        otherShow = helpers.findCertainShow(sickbeard.showList, self.tvdbid)
-        if otherShow != None:
-            raise exceptions.MultipleShowObjectsException("Can't create a show if it already exists")
-
-        self.loadFromDB()
-
-        self.saveToDB()
-
-    def _getLocation(self):
-        # no dir check needed if missing show dirs are created during post-processing
-        if sickbeard.CREATE_MISSING_SHOW_DIRS:
-            return self._location
-        
-        if ek.ek(os.path.isdir, self._location):
-            return self._location
-        else:
-            raise exceptions.ShowDirNotFoundException("Show folder doesn't exist, you shouldn't be using it")
-
-        if self._isDirGood:
-            return self._location
-        else:
-            raise exceptions.NoNFOException("Show folder doesn't exist, you shouldn't be using it")
-
-    def _setLocation(self, newLocation):
-        logger.log(u"Setter sets location to " + newLocation, logger.DEBUG)
-        # Don't validate dir if user wants to add shows without creating a dir
-        if sickbeard.ADD_SHOWS_WO_DIR or ek.ek(os.path.isdir, newLocation):
-            self._location = newLocation
-            self._isDirGood = True
-        else:
-            raise exceptions.NoNFOException("Invalid folder for the show!")
-
-    location = property(_getLocation, _setLocation)
-
-    # delete references to anything that's not in the internal lists
-    def flushEpisodes(self):
-
-        for curSeason in self.episodes:
-            for curEp in self.episodes[curSeason]:
-                myEp = self.episodes[curSeason][curEp]
-                self.episodes[curSeason][curEp] = None
-                del myEp
-
-    def getAllEpisodes(self, season=None, has_location=False):
-
-        myDB = db.DBConnection()
-
-        sql_selection = "SELECT season, episode, "
-
-        # subselection to detect multi-episodes early, share_location > 0
-        sql_selection = sql_selection + " (SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid AND season = tve.season AND location != '' AND location = tve.location AND episode != tve.episode) AS share_location "
-
-        sql_selection = sql_selection + " FROM tv_episodes tve WHERE showid = " + str(self.tvdbid)
-
-        if season is not None:
-            sql_selection = sql_selection + " AND season = " + str(season)
-        if has_location:
-            sql_selection = sql_selection + " AND location != '' "
-
-        # need ORDER episode ASC to rename multi-episodes in order S01E01-02
-        sql_selection = sql_selection + " ORDER BY season ASC, episode ASC"
-
-        results = myDB.select(sql_selection)
-
-        ep_list = []
-        for cur_result in results:
-            cur_ep = self.getEpisode(int(cur_result["season"]), int(cur_result["episode"]))
-            if cur_ep:
-                if cur_ep.location:
-                    # if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps
-                    if cur_result["share_location"] > 0:
-                        related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC", [self.tvdbid, cur_ep.season, cur_ep.location, cur_ep.episode])
-                        for cur_related_ep in related_eps_result:
-                            related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
-                            if related_ep not in cur_ep.relatedEps:
-                                cur_ep.relatedEps.append(related_ep)
-                ep_list.append(cur_ep)
-
-        return ep_list
-
-
-    def getEpisode(self, season, episode, file=None, noCreate=False):
-
-        #return TVEpisode(self, season, episode)
-
-        if not season in self.episodes:
-            self.episodes[season] = {}
-
-        ep = None
-
-        if not episode in self.episodes[season] or self.episodes[season][episode] == None:
-            if noCreate:
-                return None
-
-            logger.log(str(self.tvdbid) + ": An object for episode " + str(season) + "x" + str(episode) + " didn't exist in the cache, trying to create it", logger.DEBUG)
-
-            if file != None:
-                ep = TVEpisode(self, season, episode, file)
-            else:
-                ep = TVEpisode(self, season, episode)
-
-            if ep != None:
-                self.episodes[season][episode] = ep
-
-        return self.episodes[season][episode]
-
-    def writeShowNFO(self):
-
-        result = False
-
-        if not ek.ek(os.path.isdir, self._location):
-            logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation")
-            return False
-
-        for cur_provider in sickbeard.metadata_provider_dict.values():
-            result = cur_provider.create_show_metadata(self) or result
-
-        return result
-
-    def writeMetadata(self, show_only=False):
-
-        if not ek.ek(os.path.isdir, self._location):
-            logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation")
-            return
-
-        self.getImages()
-
-        self.writeShowNFO()
-        
-        if not show_only:
-            self.writeEpisodeNFOs()
-
-    def writeEpisodeNFOs (self):
-
-        if not ek.ek(os.path.isdir, self._location):
-            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, skipping NFO generation")
-            return
-
-        logger.log(str(self.tvdbid) + ": Writing NFOs for all episodes")
-
-        myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
-
-        for epResult in sqlResults:
-            logger.log(str(self.tvdbid) + ": Retrieving/creating episode " + str(epResult["season"]) + "x" + str(epResult["episode"]), logger.DEBUG)
-            curEp = self.getEpisode(epResult["season"], epResult["episode"])
-            curEp.createMetaFiles()
-
-
-    # find all media files in the show folder and create episodes for as many as possible
-    def loadEpisodesFromDir (self):
-
-        if not ek.ek(os.path.isdir, self._location):
-            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, not loading episodes from disk")
-            return
-
-        logger.log(str(self.tvdbid) + ": Loading all episodes from the show directory " + self._location)
-
-        # get file list
-        mediaFiles = helpers.listMediaFiles(self._location)
-
-        # create TVEpisodes from each media file (if possible)
-        for mediaFile in mediaFiles:
-
-            curEpisode = None
-
-            logger.log(str(self.tvdbid) + ": Creating episode from " + mediaFile, logger.DEBUG)
-            try:
-                curEpisode = self.makeEpFromFile(ek.ek(os.path.join, self._location, mediaFile))
-            except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException), e:
-                logger.log(u"Episode "+mediaFile+" returned an exception: "+ex(e), logger.ERROR)
-                continue
-            except exceptions.EpisodeDeletedException:
-                logger.log(u"The episode deleted itself when I tried making an object for it", logger.DEBUG)
-
-            if curEpisode is None:
-                continue
-
-            # see if we should save the release name in the db
-            ep_file_name = ek.ek(os.path.basename, curEpisode.location)
-            ep_file_name = ek.ek(os.path.splitext, ep_file_name)[0]
-            
-            parse_result = None
-            try:
-                np = NameParser(False)
-                parse_result = np.parse(ep_file_name)
-            except InvalidNameException:
-                pass
-        
-            if not ' ' in ep_file_name and parse_result and parse_result.release_group:
-                logger.log(u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid", logger.DEBUG)
-                curEpisode.release_name = ep_file_name
-
-            # store the reference in the show
-            if curEpisode != None:
-                if self.subtitles:
-                    try:
-                        curEpisode.refreshSubtitles()
-                    except:
-                        logger.log(str(self.tvdbid) + ": Could not refresh subtitles", logger.ERROR)
-                        logger.log(traceback.format_exc(), logger.DEBUG)
-                curEpisode.saveToDB()
-
-
-    def loadEpisodesFromDB(self):
-
-        logger.log(u"Loading all episodes from the DB")
-
-        myDB = db.DBConnection()
-        sql = "SELECT * FROM tv_episodes WHERE showid = ?"
-        sqlResults = myDB.select(sql, [self.tvdbid])
-
-        scannedEps = {}
-
-        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
-
-        if self.lang:
-            ltvdb_api_parms['language'] = self.lang
-
-        t = tvdb_api.Tvdb(**ltvdb_api_parms)
-
-        cachedShow = t[self.tvdbid]
-        cachedSeasons = {}
-
-        for curResult in sqlResults:
-
-            deleteEp = False
-                    
-            curSeason = int(curResult["season"])
-            curEpisode = int(curResult["episode"])
-            if curSeason not in cachedSeasons:
-                try:
-                    cachedSeasons[curSeason] = cachedShow[curSeason]
-                except tvdb_exceptions.tvdb_seasonnotfound, e:
-                    logger.log(u"Error when trying to load the episode from TVDB: "+e.message, logger.WARNING)
-                    deleteEp = True
-
-            if not curSeason in scannedEps:
-                scannedEps[curSeason] = {}
-
-            logger.log(u"Loading episode "+str(curSeason)+"x"+str(curEpisode)+" from the DB", logger.DEBUG)
-
-            try:
-                curEp = self.getEpisode(curSeason, curEpisode)
-                
-                # if we found out that the ep is no longer on TVDB then delete it from our database too
-                if deleteEp:
-                    curEp.deleteEpisode()
-                
-                curEp.loadFromDB(curSeason, curEpisode)
-                curEp.loadFromTVDB(tvapi=t, cachedSeason=cachedSeasons[curSeason])
-                scannedEps[curSeason][curEpisode] = True
-            except exceptions.EpisodeDeletedException:
-                logger.log(u"Tried loading an episode from the DB that should have been deleted, skipping it", logger.DEBUG)
-                continue
-
-        return scannedEps
-
-
-    def loadEpisodesFromTVDB(self, cache=True):
-
-        # There's gotta be a better way of doing this but we don't wanna
-        # change the cache value elsewhere
-        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
-
-        if not cache:
-            ltvdb_api_parms['cache'] = False
-
-        if self.lang:
-            ltvdb_api_parms['language'] = self.lang
-
-        try:
-            t = tvdb_api.Tvdb(**ltvdb_api_parms)
-            showObj = t[self.tvdbid]
-        except tvdb_exceptions.tvdb_error:
-            logger.log(u"TVDB timed out, unable to update episodes from TVDB", logger.ERROR)
-            return None
-
-        logger.log(str(self.tvdbid) + ": Loading all episodes from theTVDB...")
-
-        scannedEps = {}
-
-        for season in showObj:
-            scannedEps[season] = {}
-            for episode in showObj[season]:
-                # need some examples of wtf episode 0 means to decide if we want it or not
-                if episode == 0:
-                    continue
-                try:
-                    #ep = TVEpisode(self, season, episode)
-                    ep = self.getEpisode(season, episode)
-                except exceptions.EpisodeNotFoundException:
-                    logger.log(str(self.tvdbid) + ": TVDB object for " + str(season) + "x" + str(episode) + " is incomplete, skipping this episode")
-                    continue
-                else:
-                    try:
-                        ep.loadFromTVDB(tvapi=t)
-                    except exceptions.EpisodeDeletedException:
-                        logger.log(u"The episode was deleted, skipping the rest of the load")
-                        continue
-
-                with ep.lock:
-                    logger.log(str(self.tvdbid) + ": Loading info from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
-                    ep.loadFromTVDB(season, episode, tvapi=t)
-                    if ep.dirty:
-                        ep.saveToDB()
-
-                scannedEps[season][episode] = True
-
-        return scannedEps
-
-    def setTVRID(self, force=False):
-
-        if self.tvrid != 0 and not force:
-            logger.log(u"No need to get the TVRage ID, it's already populated", logger.DEBUG)
-            return
-
-        logger.log(u"Attempting to retrieve the TVRage ID", logger.DEBUG)
-
-        try:
-            # load the tvrage object, it will set the ID in its constructor if possible
-            tvrage.TVRage(self)
-            self.saveToDB()
-        except exceptions.TVRageException, e:
-            logger.log(u"Couldn't get TVRage ID because we're unable to sync TVDB and TVRage: "+ex(e), logger.DEBUG)
-            return
-
-    def getImages(self, fanart=None, poster=None):
-
-        poster_result = fanart_result = season_thumb_result = False
-
-        for cur_provider in sickbeard.metadata_provider_dict.values():
-            logger.log("Running season folders for "+cur_provider.name, logger.DEBUG)
-            poster_result = cur_provider.create_poster(self) or poster_result
-            fanart_result = cur_provider.create_fanart(self) or fanart_result
-            season_thumb_result = cur_provider.create_season_thumbs(self) or season_thumb_result
-
-        return poster_result or fanart_result or season_thumb_result
-
-    def loadLatestFromTVRage(self):
-
-        try:
-            # load the tvrage object
-            tvr = tvrage.TVRage(self)
-
-            newEp = tvr.findLatestEp()
-
-            if newEp != None:
-                logger.log(u"TVRage gave us an episode object - saving it for now", logger.DEBUG)
-                newEp.saveToDB()
-
-            # make an episode out of it
-        except exceptions.TVRageException, e:
-            logger.log(u"Unable to add TVRage info: " + ex(e), logger.WARNING)
-
-
-
-    # make a TVEpisode object from a media file
-    def makeEpFromFile(self, file):
-
-        if not ek.ek(os.path.isfile, file):
-            logger.log(str(self.tvdbid) + ": That isn't even a real file dude... " + file)
-            return None
-
-        logger.log(str(self.tvdbid) + ": Creating episode object from " + file, logger.DEBUG)
-
-        try:
-            myParser = NameParser()
-            parse_result = myParser.parse(file)
-        except InvalidNameException:
-            logger.log(u"Unable to parse the filename "+file+" into a valid episode", logger.ERROR)
-            return None
-
-        if len(parse_result.episode_numbers) == 0 and not parse_result.air_by_date:
-            logger.log("parse_result: "+str(parse_result))
-            logger.log(u"No episode number found in "+file+", ignoring it", logger.ERROR)
-            return None
-
-        # for now lets assume that any episode in the show dir belongs to that show
-        season = parse_result.season_number if parse_result.season_number != None else 1
-        episodes = parse_result.episode_numbers
-        rootEp = None
-
-        # if we have an air-by-date show then get the real season/episode numbers
-        if parse_result.air_by_date:
-            try:
-                # There's gotta be a better way of doing this but we don't wanna
-                # change the cache value elsewhere
-                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
-
-                if self.lang:
-                    ltvdb_api_parms['language'] = self.lang
-
-                t = tvdb_api.Tvdb(**ltvdb_api_parms)
-
-                epObj = t[self.tvdbid].airedOn(parse_result.air_date)[0]
-                season = int(epObj["seasonnumber"])
-                episodes = [int(epObj["episodenumber"])]
-            except tvdb_exceptions.tvdb_episodenotfound:
-                logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + self.name + ", skipping", logger.WARNING)
-                return None
-            except tvdb_exceptions.tvdb_error, e:
-                logger.log(u"Unable to contact TVDB: "+ex(e), logger.WARNING)
-                return None
-
-        for curEpNum in episodes:
-
-            episode = int(curEpNum)
-
-            logger.log(str(self.tvdbid) + ": " + file + " parsed to " + self.name + " " + str(season) + "x" + str(episode), logger.DEBUG)
-
-            checkQualityAgain = False
-            same_file = False
-            curEp = self.getEpisode(season, episode)
-
-            if curEp == None:
-                try:
-                    curEp = self.getEpisode(season, episode, file)
-                except exceptions.EpisodeNotFoundException:
-                    logger.log(str(self.tvdbid) + ": Unable to figure out what this file is, skipping", logger.ERROR)
-                    continue
-
-            else:
-                # if there is a new file associated with this ep then re-check the quality
-                if curEp.location and ek.ek(os.path.normpath, curEp.location) != ek.ek(os.path.normpath, file):
-                    logger.log(u"The old episode had a different file associated with it, I will re-check the quality based on the new filename "+file, logger.DEBUG)
-                    checkQualityAgain = True
-
-                with curEp.lock:
-                    old_size = curEp.file_size
-                    curEp.location = file
-                    # if the sizes are the same then it's probably the same file
-                    if old_size and curEp.file_size == old_size:
-                        same_file = True
-                    else:
-                        same_file = False
-
-                    curEp.checkForMetaFiles()
-                
-
-            if rootEp == None:
-                rootEp = curEp
-            else:
-                if curEp not in rootEp.relatedEps:
-                    rootEp.relatedEps.append(curEp)
-
-            # if it's a new file then clear the stored release name
-            if not same_file:
-                curEp.release_name = ''
-
-            # if they replace a file on me I'll make some attempt at re-checking the quality unless I know it's the same file
-            if checkQualityAgain and not same_file:
-                newQuality = Quality.nameQuality(file)
-                logger.log(u"Since this file has been renamed, I checked "+file+" and found quality "+Quality.qualityStrings[newQuality], logger.DEBUG)
-                if newQuality != Quality.UNKNOWN:
-                    curEp.status = Quality.compositeStatus(DOWNLOADED, newQuality)
-
-
-            # check for status/quality changes as long as it's a new file
-            elif not same_file and sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + [ARCHIVED, IGNORED]:
-
-                oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status)
-                newQuality = Quality.nameQuality(file)
-                if newQuality == Quality.UNKNOWN:
-                    newQuality = Quality.assumeQuality(file)
-
-                newStatus = None
-
-                # if it was snatched and now exists then set the status correctly
-                if oldStatus == SNATCHED and oldQuality <= newQuality:
-                    logger.log(u"STATUS: this ep used to be snatched with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG)
-                    newStatus = DOWNLOADED
-
-                # if it was snatched proper and we found a higher quality one then allow the status change
-                elif oldStatus == SNATCHED_PROPER and oldQuality < newQuality:
-                    logger.log(u"STATUS: this ep used to be snatched proper with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG)
-                    newStatus = DOWNLOADED
-
-                elif oldStatus not in (SNATCHED, SNATCHED_PROPER):
-                    newStatus = DOWNLOADED
-
-                if newStatus != None:
-                    with curEp.lock:
-                        logger.log(u"STATUS: we have an associated file, so setting the status from "+str(curEp.status)+" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG)
-                        curEp.status = Quality.compositeStatus(newStatus, newQuality)
-
-            with curEp.lock:
-                curEp.saveToDB()
-
-        # creating metafiles on the root should be good enough
-        if rootEp != None:
-            with rootEp.lock:
-                rootEp.createMetaFiles()
-
-        return rootEp
-
-
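makeEpFromFile() hinges on NameParser turning a media filename into season and episode numbers before anything else can happen. The snippet below is only an illustration of that idea with a hand-rolled regex and invented filenames; it is not the project's NameParser and the pattern is an assumption for the example.

    import re

    def parse_episode_numbers(filename):
        # toy pattern: SxxEyy with an optional second episode (SxxEyyEzz)
        match = re.search(r"[Ss](\d{1,2})[Ee](\d{1,2})(?:[Ee-](\d{1,2}))?", filename)
        if not match:
            return None, []
        season = int(match.group(1))
        episodes = [int(match.group(2))]
        if match.group(3):
            episodes.append(int(match.group(3)))
        return season, episodes

    assert parse_episode_numbers("Show.Name.S01E02.HDTV.x264-GRP.mkv") == (1, [2])
    assert parse_episode_numbers("Show.Name.S01E02E03.mkv") == (1, [2, 3])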
-    def loadFromDB(self, skipNFO=False):
-
-        logger.log(str(self.tvdbid) + ": Loading show info from database")
-
-        myDB = db.DBConnection()
-
-        sqlResults = myDB.select("SELECT * FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid])
-
-        if len(sqlResults) > 1:
-            raise exceptions.MultipleDBShowsException()
-        elif len(sqlResults) == 0:
-            logger.log(str(self.tvdbid) + ": Unable to find the show in the database")
-            return
-        else:
-            if self.name == "":
-                self.name = sqlResults[0]["show_name"]
-            self.tvrname = sqlResults[0]["tvr_name"]
-            if self.network == "":
-                self.network = sqlResults[0]["network"]
-            if self.genre == "":
-                self.genre = sqlResults[0]["genre"]
-
-            self.runtime = sqlResults[0]["runtime"]
-
-            self.status = sqlResults[0]["status"]
-            if self.status == None:
-                self.status = ""
-            self.airs = sqlResults[0]["airs"]
-            if self.airs == None:
-                self.airs = ""
-            self.startyear = sqlResults[0]["startyear"]
-            if self.startyear == None:
-                self.startyear = 0
-
-            self.air_by_date = sqlResults[0]["air_by_date"]
-            if self.air_by_date == None:
-                self.air_by_date = 0
-            
-            self.subtitles = sqlResults[0]["subtitles"]
-            if self.subtitles:
-                self.subtitles = 1
-            else:
-                self.subtitles = 0    
-
-            self.quality = int(sqlResults[0]["quality"])
-            self.flatten_folders = int(sqlResults[0]["flatten_folders"])
-            self.paused = int(sqlResults[0]["paused"])
-
-            self._location = sqlResults[0]["location"]
-
-            if self.tvrid == 0:
-                self.tvrid = int(sqlResults[0]["tvr_id"])
-
-            if self.lang == "":
-                self.lang = sqlResults[0]["lang"]
-                
-            if self.audio_lang == "":
-                self.audio_lang = sqlResults[0]["audio_lang"]                
-
-            if self.custom_search_names == "":
-                self.custom_search_names = sqlResults[0]["custom_search_names"]                
-
-    def loadFromTVDB(self, cache=True, tvapi=None, cachedSeason=None):
-
-        logger.log(str(self.tvdbid) + ": Loading show info from theTVDB")
-
-        # There's gotta be a better way of doing this but we don't wanna
-        # change the cache value elsewhere
-        if tvapi is None:
-            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
-
-            if not cache:
-                ltvdb_api_parms['cache'] = False
-            
-            if self.lang:
-                ltvdb_api_parms['language'] = self.lang
-
-            t = tvdb_api.Tvdb(**ltvdb_api_parms)
-
-        else:
-            t = tvapi
-
-        myEp = t[self.tvdbid]
-
-        self.name = myEp["seriesname"]
-
-        self.genre = myEp['genre']
-        self.network = myEp['network']
-
-        if myEp["airs_dayofweek"] != None and myEp["airs_time"] != None:
-            self.airs = myEp["airs_dayofweek"] + " " + myEp["airs_time"]
-
-        if myEp["firstaired"] != None and myEp["firstaired"]:
-            self.startyear = int(myEp["firstaired"].split('-')[0])
-
-        if self.airs == None:
-            self.airs = ""
-
-        if myEp["status"] != None:
-            self.status = myEp["status"]
-
-        if self.status == None:
-            self.status = ""
-
-        self.saveToDB()
-
-
-    def loadNFO (self):
-
-        if not os.path.isdir(self._location):
-            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't load NFO")
-            raise exceptions.NoNFOException("The show dir doesn't exist, no NFO could be loaded")
-
-        logger.log(str(self.tvdbid) + ": Loading show info from NFO")
-
-        xmlFile = os.path.join(self._location, "tvshow.nfo")
-
-        try:
-            xmlFileObj = open(xmlFile, 'r')
-            showXML = etree.ElementTree(file = xmlFileObj)
-
-            if showXML.findtext('title') == None or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None):
-                raise exceptions.NoNFOException("Invalid info in tvshow.nfo (missing name or id):" \
-                    + str(showXML.findtext('title')) + " " \
-                    + str(showXML.findtext('tvdbid')) + " " \
-                    + str(showXML.findtext('id')))
-
-            self.name = showXML.findtext('title')
-            if showXML.findtext('tvdbid') != None:
-                self.tvdbid = int(showXML.findtext('tvdbid'))
-            elif showXML.findtext('id'):
-                self.tvdbid = int(showXML.findtext('id'))
-            else:
-                raise exceptions.NoNFOException("Empty <id> or <tvdbid> field in NFO")
-
-        except (exceptions.NoNFOException, SyntaxError, ValueError), e:
-            logger.log(u"There was an error parsing your existing tvshow.nfo file: " + ex(e), logger.ERROR)
-            logger.log(u"Attempting to rename it to tvshow.nfo.old", logger.DEBUG)
-
-            try:
-                xmlFileObj.close()
-                ek.ek(os.rename, xmlFile, xmlFile + ".old")
-            except Exception, e:
-                logger.log(u"Failed to rename your tvshow.nfo file - you need to delete it or fix it: " + ex(e), logger.ERROR)
-            raise exceptions.NoNFOException("Invalid info in tvshow.nfo")
-
-        if showXML.findtext('studio') != None:
-            self.network = showXML.findtext('studio')
-        if self.network == None and showXML.findtext('network') != None:
-            self.network = ""
-        if showXML.findtext('genre') != None:
-            self.genre = showXML.findtext('genre')
-        else:
-            self.genre = ""
-
-        # TODO: need to validate the input, I'm assuming it's good until then
-
-
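loadNFO() reads only two things from tvshow.nfo: the show title and a tvdbid (or id) element. A minimal, self-contained sketch of that parsing with xml.etree, using an invented NFO snippet:

    import xml.etree.ElementTree as etree
    try:
        from StringIO import StringIO   # Python 2
    except ImportError:
        from io import StringIO         # Python 3

    nfo = StringIO("<tvshow><title>Example Show</title><tvdbid>12345</tvdbid></tvshow>")
    showXML = etree.ElementTree(file=nfo)

    # mirror the checks above: a title plus either <tvdbid> or <id> must be present
    assert showXML.findtext('title') == 'Example Show'
    assert int(showXML.findtext('tvdbid') or showXML.findtext('id')) == 12345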
-    def nextEpisode(self):
-
-        logger.log(str(self.tvdbid) + ": Finding the episode which airs next", logger.DEBUG)
-
-        myDB = db.DBConnection()
-        innerQuery = "SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status = ? ORDER BY airdate ASC LIMIT 1"
-        innerParams = [self.tvdbid, datetime.date.today().toordinal(), UNAIRED]
-        query = "SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= (" + innerQuery + ") and status = ?"
-        params = [self.tvdbid, datetime.date.today().toordinal()] + innerParams + [UNAIRED]
-        sqlResults = myDB.select(query, params)
-
-        if sqlResults == None or len(sqlResults) == 0:
-            logger.log(str(self.tvdbid) + ": No episode found... need to implement tvrage and also show status", logger.DEBUG)
-            return []
-        else:
-            logger.log(str(self.tvdbid) + ": Found episode " + str(sqlResults[0]["season"]) + "x" + str(sqlResults[0]["episode"]), logger.DEBUG)
-            foundEps = []
-            for sqlEp in sqlResults:
-                curEp = self.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
-                foundEps.append(curEp)
-            return foundEps
-
-        # if we didn't get an episode then try getting one from tvrage
-
-        # load tvrage info
-
-        # extract NextEpisode info
-
-        # verify that we don't have it in the DB somehow (ep mismatch)
-
-
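The nested query in nextEpisode() is the interesting part: the inner SELECT finds the earliest future airdate for the show, and the outer SELECT returns every still-unaired episode on or before that date, so episodes sharing the next air date come back together. A small in-memory sqlite3 sketch of that query shape, with invented rows and an assumed placeholder value for UNAIRED:

    import datetime
    import sqlite3

    UNAIRED = 1  # assumed placeholder value for the example only

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE tv_episodes (showid, season, episode, airdate, status)")
    today = datetime.date.today().toordinal()
    conn.executemany("INSERT INTO tv_episodes VALUES (?, ?, ?, ?, ?)", [
        (1, 1, 1, today - 7, 4),          # already aired
        (1, 1, 2, today + 3, UNAIRED),    # airs next
        (1, 1, 3, today + 10, UNAIRED),   # airs later, should not be returned
    ])

    innerQuery = ("SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? "
                  "AND status = ? ORDER BY airdate ASC LIMIT 1")
    query = ("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate >= ? "
             "AND airdate <= (" + innerQuery + ") AND status = ?")
    params = [1, today] + [1, today, UNAIRED] + [UNAIRED]

    rows = conn.execute(query, params).fetchall()
    assert rows == [(1, 2)]   # only the episode airing soonest comes back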
-    def deleteShow(self):
-
-        myDB = db.DBConnection()
-        myDB.action("DELETE FROM tv_episodes WHERE showid = ?", [self.tvdbid])
-        myDB.action("DELETE FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid])
-
-        # remove self from show list
-        sickbeard.showList = [x for x in sickbeard.showList if x.tvdbid != self.tvdbid]
-        
-        # clear the cache
-        image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
-        for cache_file in ek.ek(glob.glob, ek.ek(os.path.join, image_cache_dir, str(self.tvdbid)+'.*')):
-            logger.log(u"Deleting cache file "+cache_file)
-            os.remove(cache_file)
-
-    def populateCache(self):
-        cache_inst = image_cache.ImageCache()
-        
-        logger.log(u"Checking & filling cache for show "+self.name)
-        cache_inst.fill_cache(self)
-
-    def refreshDir(self):
-
-        # make sure the show dir is where we think it is unless dirs are created on the fly
-        if not ek.ek(os.path.isdir, self._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
-            return False
-
-        # load from dir
-        self.loadEpisodesFromDir()
-
-        # run through all locations from DB, check that they exist
-        logger.log(str(self.tvdbid) + ": Loading all episodes with a location from the database")
-
-        myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
-
-        for ep in sqlResults:
-            curLoc = os.path.normpath(ep["location"])
-            season = int(ep["season"])
-            episode = int(ep["episode"])
-
-            try:
-                curEp = self.getEpisode(season, episode)
-            except exceptions.EpisodeDeletedException:
-                logger.log(u"The episode was deleted while we were refreshing it, moving on to the next one", logger.DEBUG)
-                continue
-
-            # if the path doesn't exist or if it's not in our show dir
-            if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith(os.path.normpath(self.location)):
-
-                with curEp.lock:
-                    # if it used to have a file associated with it and it doesn't anymore then set it to IGNORED
-                    if curEp.location and curEp.status in Quality.DOWNLOADED:
-                        logger.log(str(self.tvdbid) + ": Location for " + str(season) + "x" + str(episode) + " doesn't exist, removing it and changing our status to IGNORED", logger.DEBUG)
-                        curEp.status = IGNORED
-                        curEp.subtitles = list()
-                        curEp.subtitles_searchcount = 0
-                        curEp.subtitles_lastsearch = str(datetime.datetime.min)
-                    curEp.location = ''
-                    curEp.hasnfo = False
-                    curEp.hastbn = False
-                    curEp.release_name = ''
-                    curEp.saveToDB()
-
-
-    def downloadSubtitles(self):
-        #TODO: Add support for force option
-        if not ek.ek(os.path.isdir, self._location):
-            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't download subtitles", logger.DEBUG)
-            return
-        logger.log(str(self.tvdbid) + ": Downloading subtitles", logger.DEBUG)
-        
-        try:
-            episodes = db.DBConnection().select("SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE '' ORDER BY season DESC, episode DESC", [self.tvdbid])
-            for episodeLoc in episodes:
-                episode = self.makeEpFromFile(episodeLoc['location'])
-                subtitles = episode.downloadSubtitles()
-                
-        except Exception as e:
-            logger.log("Error occurred when downloading subtitles: " + str(e), logger.DEBUG)
-            return
-
-
-    def saveToDB(self):
-        logger.log(str(self.tvdbid) + ": Saving show info to database", logger.DEBUG)
-
-        myDB = db.DBConnection()
-
-        controlValueDict = {"tvdb_id": self.tvdbid}
-        newValueDict = {"show_name": self.name,
-                        "tvr_id": self.tvrid,
-                        "location": self._location,
-                        "network": self.network,
-                        "genre": self.genre,
-                        "runtime": self.runtime,
-                        "quality": self.quality,
-                        "airs": self.airs,
-                        "status": self.status,
-                        "flatten_folders": self.flatten_folders,
-                        "paused": self.paused,
-                        "air_by_date": self.air_by_date,
-                        "subtitles": self.subtitles,
-                        "startyear": self.startyear,
-                        "tvr_name": self.tvrname,
-                        "lang": self.lang,
-                        "audio_lang": self.audio_lang,
-                        "custom_search_names": self.custom_search_names
-                        }
-
-        myDB.upsert("tv_shows", newValueDict, controlValueDict)
-
-
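saveToDB() follows a control-dict/value-dict convention: controlValueDict identifies the row (here the tvdb_id), newValueDict carries the columns to write, and myDB.upsert() updates the row if it exists or inserts it otherwise. The helper below is only a sketch of that convention on top of sqlite3, under the assumption that upsert means "UPDATE, then INSERT if nothing matched"; it is not a copy of db.DBConnection.

    import sqlite3

    def upsert(conn, table, newValueDict, controlValueDict):
        # try an UPDATE first; fall back to INSERT when nothing matched
        set_clause = ", ".join(k + " = ?" for k in newValueDict)
        where_clause = " AND ".join(k + " = ?" for k in controlValueDict)
        cur = conn.execute("UPDATE " + table + " SET " + set_clause + " WHERE " + where_clause,
                           list(newValueDict.values()) + list(controlValueDict.values()))
        if cur.rowcount == 0:
            cols = list(newValueDict) + list(controlValueDict)
            marks = ", ".join("?" for _ in cols)
            conn.execute("INSERT INTO " + table + " (" + ", ".join(cols) + ") VALUES (" + marks + ")",
                         list(newValueDict.values()) + list(controlValueDict.values()))

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE tv_shows (tvdb_id, show_name)")
    upsert(conn, "tv_shows", {"show_name": "Example Show"}, {"tvdb_id": 12345})
    upsert(conn, "tv_shows", {"show_name": "Example Show (renamed)"}, {"tvdb_id": 12345})
    assert conn.execute("SELECT COUNT(*) FROM tv_shows").fetchone()[0] == 1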
-    def __str__(self):
-        toReturn = ""
-        toReturn += "name: " + self.name + "\n"
-        toReturn += "location: " + self._location + "\n"
-        toReturn += "tvdbid: " + str(self.tvdbid) + "\n"
-        if self.network != None:
-            toReturn += "network: " + self.network + "\n"
-        if self.airs != None:
-            toReturn += "airs: " + self.airs + "\n"
-        if self.status != None:
-            toReturn += "status: " + self.status + "\n"
-        toReturn += "startyear: " + str(self.startyear) + "\n"
-        toReturn += "genre: " + self.genre + "\n"
-        toReturn += "runtime: " + str(self.runtime) + "\n"
-        toReturn += "quality: " + str(self.quality) + "\n"
-        return toReturn
-
-
-    def wantEpisode(self, season, episode, quality, manualSearch=False):
-
-        logger.log(u"Checking if we want episode "+str(season)+"x"+str(episode)+" at quality "+Quality.qualityStrings[quality], logger.DEBUG)
-
-        # if the quality isn't one we want under any circumstances then just say no
-        anyQualities, bestQualities = Quality.splitQuality(self.quality)
-        logger.log(u"any,best = "+str(anyQualities)+" "+str(bestQualities)+" and we are "+str(quality), logger.DEBUG)
-
-        if quality not in anyQualities + bestQualities:
-            logger.log(u"I know for sure I don't want this episode, saying no", logger.DEBUG)
-            return False
-
-        myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.tvdbid, season, episode])
-
-        if not sqlResults or not len(sqlResults):
-            logger.log(u"Unable to find the episode", logger.DEBUG)
-            return False
-
-        epStatus = int(sqlResults[0]["status"])
-
-        logger.log(u"current episode status: "+str(epStatus), logger.DEBUG)
-
-        # if we know we don't want it then just say no
-        if epStatus in (SKIPPED, IGNORED, ARCHIVED) and not manualSearch:
-            logger.log(u"Ep is skipped, not bothering", logger.DEBUG)
-            return False
-
-        # if it's one of these then we want it as long as it's in our allowed initial qualities
-        if quality in anyQualities + bestQualities:
-            if epStatus in (WANTED, UNAIRED, SKIPPED):
-                logger.log(u"Ep is wanted/unaired/skipped, definitely get it", logger.DEBUG)
-                return True
-            elif manualSearch:
-                logger.log(u"Usually I would ignore this ep but because you forced the search I'm overriding the default and allowing the quality", logger.DEBUG)
-                return True
-            else:
-                logger.log(u"This quality looks like something we might want but I don't know for sure yet", logger.DEBUG)
-
-        curStatus, curQuality = Quality.splitCompositeStatus(epStatus)
-
-        # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have
-        if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER and quality in bestQualities and quality > curQuality:
-            logger.log(u"We already have this ep but the new one is better quality, saying yes", logger.DEBUG)
-            return True
-
-        logger.log(u"None of the conditions were met so I'm just saying no", logger.DEBUG)
-        return False
-
-
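The re-download decision at the end of wantEpisode() only makes sense once the composite status is split back into a plain status plus a quality, so the new candidate's quality can be compared with what is already on disk. The sketch below uses an assumed status-plus-100*quality encoding and made-up constants purely to illustrate that comparison; it is not a quote of Quality's internals.

    # assumed example constants, not the real values from sickbeard.common
    SNATCHED, DOWNLOADED = 2, 4
    SDTV, HDTV = 1, 4

    def compositeStatus(status, quality):
        return status + quality * 100

    def splitCompositeStatus(composite):
        return composite % 100, composite // 100

    current = compositeStatus(DOWNLOADED, SDTV)          # an SD copy is on disk
    curStatus, curQuality = splitCompositeStatus(current)

    # same rule as above: re-grab only if the candidate quality beats what we have
    assert curStatus == DOWNLOADED and HDTV > curQuality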
-    def getOverview(self, epStatus):
-
-        if epStatus == WANTED:
-            return Overview.WANTED
-        elif epStatus in (UNAIRED, UNKNOWN):
-            return Overview.UNAIRED
-        elif epStatus in (SKIPPED, IGNORED):
-            return Overview.SKIPPED
-        elif epStatus == ARCHIVED:
-            return Overview.GOOD
-        elif epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER:
-
-            anyQualities, bestQualities = Quality.splitQuality(self.quality) #@UnusedVariable
-            if bestQualities:
-                maxBestQuality = max(bestQualities)
-            else:
-                maxBestQuality = None
-
-            epStatus, curQuality = Quality.splitCompositeStatus(epStatus)
-
-            if epStatus in (SNATCHED, SNATCHED_PROPER):
-                return Overview.SNATCHED
-            # if they don't want re-downloads then we call it good if they have anything
-            elif maxBestQuality == None:
-                return Overview.GOOD
-            # if they have one but it's not the best they want then mark it as qual
-            elif curQuality < maxBestQuality:
-                return Overview.QUAL
-            # if it's >= maxBestQuality then it's good
-            else:
-                return Overview.GOOD
-
-def dirty_setter(attr_name):
-    def wrapper(self, val):
-        if getattr(self, attr_name) != val:
-            setattr(self, attr_name, val)
-            self.dirty = True
-    return wrapper
-
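dirty_setter() is a small property-setter factory: the closure it returns compares the incoming value with the stored attribute and flips self.dirty only on a real change, which is what lets saveToDB() skip records that have not changed. A standalone sketch of the same pattern (the Record class is invented for the example):

    def dirty_setter(attr_name):
        def wrapper(self, val):
            if getattr(self, attr_name) != val:
                setattr(self, attr_name, val)
                self.dirty = True
        return wrapper

    class Record(object):
        def __init__(self):
            self._name = ""
            self.dirty = False

        name = property(lambda self: self._name, dirty_setter("_name"))

    r = Record()
    r.name = "Pilot"      # value changed, so the record becomes dirty
    assert r.dirty is True
    r.dirty = False
    r.name = "Pilot"      # same value again, so it stays clean
    assert r.dirty is False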
-class TVEpisode(object):
-
-    def __init__(self, show, season, episode, file=""):
-
-        self._name = ""
-        self._season = season
-        self._episode = episode
-        self._description = ""
-        self._subtitles = list()
-        self._subtitles_searchcount = 0
-        self._subtitles_lastsearch = str(datetime.datetime.min)
-        self._airdate = datetime.date.fromordinal(1)
-        self._hasnfo = False
-        self._hastbn = False
-        self._status = UNKNOWN
-        self._tvdbid = 0
-        self._file_size = 0
-        self._audio_langs = ''
-        self._release_name = ''
-
-        # setting any of the above sets the dirty flag
-        self.dirty = True
-
-        self.show = show
-        self._location = file
-
-        self.lock = threading.Lock()
-
-        self.specifyEpisode(self.season, self.episode)
-
-        self.relatedEps = []
-
-        self.checkForMetaFiles()
-
-    name = property(lambda self: self._name, dirty_setter("_name"))
-    season = property(lambda self: self._season, dirty_setter("_season"))
-    episode = property(lambda self: self._episode, dirty_setter("_episode"))
-    description = property(lambda self: self._description, dirty_setter("_description"))
-    subtitles = property(lambda self: self._subtitles, dirty_setter("_subtitles"))
-    subtitles_searchcount = property(lambda self: self._subtitles_searchcount, dirty_setter("_subtitles_searchcount"))
-    subtitles_lastsearch = property(lambda self: self._subtitles_lastsearch, dirty_setter("_subtitles_lastsearch"))
-    airdate = property(lambda self: self._airdate, dirty_setter("_airdate"))
-    hasnfo = property(lambda self: self._hasnfo, dirty_setter("_hasnfo"))
-    hastbn = property(lambda self: self._hastbn, dirty_setter("_hastbn"))
-    status = property(lambda self: self._status, dirty_setter("_status"))
-    tvdbid = property(lambda self: self._tvdbid, dirty_setter("_tvdbid"))
-    #location = property(lambda self: self._location, dirty_setter("_location"))
-    file_size = property(lambda self: self._file_size, dirty_setter("_file_size"))
-    audio_langs = property(lambda self: self._audio_langs, dirty_setter("_audio_langs"))
-    release_name = property(lambda self: self._release_name, dirty_setter("_release_name"))
-
-    def _set_location(self, new_location):
-        logger.log(u"Setter sets location to " + new_location, logger.DEBUG)
-        
-        #self._location = newLocation
-        dirty_setter("_location")(self, new_location)
-
-        if new_location and ek.ek(os.path.isfile, new_location):
-            self.file_size = ek.ek(os.path.getsize, new_location)
-        else:
-            self.file_size = 0
-
-    location = property(lambda self: self._location, _set_location)
-    def refreshSubtitles(self):
-        """Look for subtitles files and refresh the subtitles property"""
-        self.subtitles = subtitles.subtitlesLanguages(self.location)
-
-    def downloadSubtitles(self):
-        #TODO: Add support for force option
-        if not ek.ek(os.path.isfile, self.location):
-            logger.log(str(self.show.tvdbid) + ": Episode file doesn't exist, can't download subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
-            return
-        logger.log(str(self.show.tvdbid) + ": Downloading subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
-        
-        previous_subtitles = self.subtitles
-
-        try:
-
-            need_languages = set(sickbeard.SUBTITLES_LANGUAGES) - set(self.subtitles)
-            subtitles = subliminal.download_subtitles([self.location], languages=need_languages, services=sickbeard.subtitles.getEnabledServiceList(), force=False, multi=True, cache_dir=sickbeard.CACHE_DIR)
-            
-        except Exception as e:
-            logger.log("Error occurred when downloading subtitles: " + str(e), logger.DEBUG)
-            return
-
-        self.refreshSubtitles()
-        self.subtitles_searchcount = self.subtitles_searchcount + 1
-        self.subtitles_lastsearch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        self.saveToDB()
-        
-        newsubtitles = set(self.subtitles).difference(set(previous_subtitles))
-        
-        if newsubtitles:
-            subtitleList = ", ".join(subliminal.language.Language(x).name for x in newsubtitles)
-            logger.log(str(self.show.tvdbid) + ": Downloaded " + subtitleList + " subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
-            
-            notifiers.notify_subtitle_download(self.prettyName(), subtitleList)
-
-        else:
-            logger.log(str(self.show.tvdbid) + ": No subtitles downloaded for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
-
-        if sickbeard.SUBTITLES_HISTORY:
-            for video in subtitles:
-                for subtitle in subtitles.get(video):
-                    history.logSubtitle(self.show.tvdbid, self.season, self.episode, self.status, subtitle)
-        if sickbeard.SUBTITLES_DIR:
-            for video in subtitles:
-                subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
-                if not ek.ek(os.path.isdir, subs_new_path):
-                    ek.ek(os.mkdir, subs_new_path)
-
-                for subtitle in subtitles.get(video):
-                    new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
-                    helpers.moveFile(subtitle.path, new_file_path)
-                    if sickbeard.SUBSNOLANG:
-                        helpers.copyFile(new_file_path, new_file_path[:-6] + "srt")
-
-        elif sickbeard.SUBTITLES_DIR_SUB:
-            for video in subtitles:
-                subs_new_path = os.path.join(os.path.dirname(video.path), "Subs")
-                if not os.path.isdir(subs_new_path):
-                    os.makedirs(subs_new_path)
-
-                for subtitle in subtitles.get(video):
-                    new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
-                    helpers.moveFile(subtitle.path, new_file_path)
-                    subtitle.path = new_file_path
-                    if sickbeard.SUBSNOLANG:
-                        helpers.copyFile(new_file_path, new_file_path[:-6] + "srt")
-                        subtitle.path = new_file_path
-        else:
-            for video in subtitles:
-                for subtitle in subtitles.get(video):
-                    if sickbeard.SUBSNOLANG:
-                        helpers.copyFile(subtitle.path, subtitle.path[:-6] + "srt")
-                        helpers.chmodAsParent(subtitle.path[:-6] + "srt")
-                    helpers.chmodAsParent(subtitle.path)
-        return subtitles
-
-
-    def checkForMetaFiles(self):
-
-        oldhasnfo = self.hasnfo
-        oldhastbn = self.hastbn
-
-        cur_nfo = False
-        cur_tbn = False
-
-        # check for nfo and tbn
-        if ek.ek(os.path.isfile, self.location):
-            for cur_provider in sickbeard.metadata_provider_dict.values():
-                if cur_provider.episode_metadata:
-                    new_result = cur_provider._has_episode_metadata(self)
-                else:
-                    new_result = False
-                cur_nfo = new_result or cur_nfo
-                
-                if cur_provider.episode_thumbnails:
-                    new_result = cur_provider._has_episode_thumb(self)
-                else:
-                    new_result = False
-                cur_tbn = new_result or cur_tbn
-
-        self.hasnfo = cur_nfo
-        self.hastbn = cur_tbn
-
-        # if either setting has changed return true, if not return false
-        return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn
-
-    def specifyEpisode(self, season, episode):
-
-        sqlResult = self.loadFromDB(season, episode)
-
-        if not sqlResult:
-            # only load from NFO if we didn't load from DB
-            if ek.ek(os.path.isfile, self.location):
-                try:
-                    self.loadFromNFO(self.location)
-                except exceptions.NoNFOException:
-                    logger.log(str(self.show.tvdbid) + ": There was an error loading the NFO for episode " + str(season) + "x" + str(episode), logger.ERROR)
-                    pass
-
-                # if we tried loading it from NFO and didn't find the NFO, use TVDB
-                if self.hasnfo == False:
-                    try:
-                        result = self.loadFromTVDB(season, episode)
-                    except exceptions.EpisodeDeletedException:
-                        result = False
-
-                    # if we failed SQL *and* NFO, TVDB then fail
-                    if result == False:
-                        raise exceptions.EpisodeNotFoundException("Couldn't find episode " + str(season) + "x" + str(episode))
-        
-        # don't update if not needed
-        if self.dirty:
-            self.saveToDB()
-
-    def loadFromDB(self, season, episode):
-
-        logger.log(str(self.show.tvdbid) + ": Loading episode details from DB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
-
-        myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.show.tvdbid, season, episode])
-
-        if len(sqlResults) > 1:
-            raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.")
-        elif len(sqlResults) == 0:
-            logger.log(str(self.show.tvdbid) + ": Episode " + str(self.season) + "x" + str(self.episode) + " not found in the database", logger.DEBUG)
-            return False
-        else:
-            #NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"]))
-            if sqlResults[0]["name"] != None:
-                self.name = sqlResults[0]["name"]
-            self.season = season
-            self.episode = episode
-            self.description = sqlResults[0]["description"]
-            if self.description == None:
-                self.description = ""
-            if sqlResults[0]["subtitles"] != None and sqlResults[0]["subtitles"] != '':
-                self.subtitles = sqlResults[0]["subtitles"].split(",")
-            self.subtitles_searchcount = sqlResults[0]["subtitles_searchcount"]
-            self.subtitles_lastsearch = sqlResults[0]["subtitles_lastsearch"]
-            self.airdate = datetime.date.fromordinal(int(sqlResults[0]["airdate"]))
-            #logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG)
-            self.status = int(sqlResults[0]["status"])
-
-            # don't overwrite my location
-            if sqlResults[0]["location"] != "" and sqlResults[0]["location"] != None:
-                self.location = os.path.normpath(sqlResults[0]["location"])
-            if sqlResults[0]["file_size"]:
-                self.file_size = int(sqlResults[0]["file_size"])
-            else:
-                self.file_size = 0
-
-            self.tvdbid = int(sqlResults[0]["tvdbid"])
-
-            if sqlResults[0]["audio_langs"] != None:
-                self.audio_langs = sqlResults[0]["audio_langs"]
-            
-            if sqlResults[0]["release_name"] != None:
-                self.release_name = sqlResults[0]["release_name"]
-
-            self.dirty = False
-            return True
-
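Worth noting from loadFromDB() and saveToDB(): air dates are persisted as proleptic Gregorian ordinals, so the round trip is just toordinal()/fromordinal(), and ordinal 1 (0001-01-01) doubles as the "no airdate" sentinel used elsewhere in this class. A quick illustration:

    import datetime

    airdate = datetime.date(2013, 5, 24)
    stored = airdate.toordinal()                  # integer column in tv_episodes
    assert datetime.date.fromordinal(stored) == airdate

    no_airdate = datetime.date.fromordinal(1)     # sentinel meaning "no air date"
    assert no_airdate == datetime.date(1, 1, 1)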
-    def loadFromTVDB(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None):
-
-        if season == None:
-            season = self.season
-        if episode == None:
-            episode = self.episode
-
-        logger.log(str(self.show.tvdbid) + ": Loading episode details from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
-
-        tvdb_lang = self.show.lang
-
-        try:
-            if cachedSeason is None:
-                if tvapi is None:
-                    # There's gotta be a better way of doing this but we don't wanna
-                    # change the cache value elsewhere
-                    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
-
-                    if not cache:
-                        ltvdb_api_parms['cache'] = False
-
-                    if tvdb_lang:
-                        ltvdb_api_parms['language'] = tvdb_lang
-
-                    t = tvdb_api.Tvdb(**ltvdb_api_parms)
-                else:
-                    t = tvapi
-                myEp = t[self.show.tvdbid][season][episode]
-            else:
-                myEp = cachedSeason[episode]
-
-        except (tvdb_exceptions.tvdb_error, IOError), e:
-            logger.log(u"TVDB threw up an error: "+ex(e), logger.DEBUG)
-            # if the episode is already valid just log it, if not throw it up
-            if self.name:
-                logger.log(u"TVDB timed out but we have enough info from other sources, allowing the error", logger.DEBUG)
-                return
-            else:
-                logger.log(u"TVDB timed out, unable to create the episode", logger.ERROR)
-                return False
-        except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound):
-            logger.log(u"Unable to find the episode on tvdb... has it been removed? Should I delete from db?", logger.DEBUG)
-            # if I'm no longer on TVDB but I once was then delete myself from the DB
-            if self.tvdbid != -1:
-                self.deleteEpisode()
-            return
-
-
-        if not myEp["firstaired"] or myEp["firstaired"] == "0000-00-00":
-            myEp["firstaired"] = str(datetime.date.fromordinal(1))
-
-        if myEp["episodename"] == None or myEp["episodename"] == "":
-            logger.log(u"This episode ("+self.show.name+" - "+str(season)+"x"+str(episode)+") has no name on TVDB")
-            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
-            if self.tvdbid != -1:
-                self.deleteEpisode()
-            return False
-
-        #NAMEIT logger.log(u"BBBBBBBB from " + str(self.season)+"x"+str(self.episode) + " -" +self.name+" to "+myEp["episodename"])
-        self.name = myEp["episodename"]
-        self.season = season
-        self.episode = episode
-        tmp_description = myEp["overview"]
-        if tmp_description == None:
-            self.description = ""
-        else:
-            self.description = tmp_description
-        rawAirdate = [int(x) for x in myEp["firstaired"].split("-")]
-        try:
-            self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
-        except ValueError:
-            logger.log(u"Malformed air date retrieved from TVDB ("+self.show.name+" - "+str(season)+"x"+str(episode)+")", logger.ERROR)
-            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
-            if self.tvdbid != -1:
-                self.deleteEpisode()
-            return False
-        
-        #early conversion to int so that episode doesn't get marked dirty
-        self.tvdbid = int(myEp["id"])
-        
-        #don't update show status if show dir is missing, unless missing show dirs are created during post-processing
-        if not ek.ek(os.path.isdir, self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
-            logger.log(u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid")
-            return
-
-        logger.log(str(self.show.tvdbid) + ": Setting status for " + str(season) + "x" + str(episode) + " based on status " + str(self.status) + " and existence of " + self.location, logger.DEBUG)
-
-        if not ek.ek(os.path.isfile, self.location):
-
-            # if we don't have the file
-            if self.airdate >= datetime.date.today() and self.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER:
-                # and it hasn't aired yet set the status to UNAIRED
-                logger.log(u"Episode airs in the future, changing status from " + str(self.status) + " to " + str(UNAIRED), logger.DEBUG)
-                self.status = UNAIRED
-            # if there's no airdate then set it to skipped (and respect ignored)
-            elif self.airdate == datetime.date.fromordinal(1):
-                if self.status == IGNORED:
-                    logger.log(u"Episode has no air date, but it's already marked as ignored", logger.DEBUG)
-                else:
-                    logger.log(u"Episode has no air date, automatically marking it skipped", logger.DEBUG)
-                    self.status = SKIPPED
-            # if we don't have the file and the airdate is in the past
-            else:
-                if self.status == UNAIRED:
-                    self.status = WANTED
-
-                # if we somehow are still UNKNOWN then just skip it
-                elif self.status == UNKNOWN:
-                    self.status = SKIPPED
-
-                else:
-                    logger.log(u"Not touching status because we have no ep file, the airdate is in the past, and the status is "+str(self.status), logger.DEBUG)
-
-        # if we have a media file then it's downloaded
-        elif sickbeard.helpers.isMediaFile(self.location):
-            # leave propers alone, you have to either post-process them or manually change them back
-            if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED]:
-                logger.log(u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG)
-                self.status = Quality.statusFromName(self.location)
-
-        # shouldn't get here probably
-        else:
-            logger.log(u"6 Status changes from " + str(self.status) + " to " + str(UNKNOWN), logger.DEBUG)
-            self.status = UNKNOWN
-
-
-        # hasnfo, hastbn, status?
-
-
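When no file is present, the status logic above boils down to a small decision tree: a future airdate means UNAIRED, no airdate at all means SKIPPED (unless already IGNORED), and a past airdate promotes UNAIRED to WANTED, demotes UNKNOWN to SKIPPED, and leaves everything else alone. The sketch below condenses just that branch with assumed placeholder constants and ignores the snatched-status guard from the full code.

    import datetime

    # assumed placeholder constants for the example only
    UNKNOWN, UNAIRED, SKIPPED, WANTED, IGNORED = range(5)

    def status_without_file(airdate, status):
        if airdate >= datetime.date.today():
            return UNAIRED
        if airdate == datetime.date.fromordinal(1):
            return status if status == IGNORED else SKIPPED
        if status == UNAIRED:
            return WANTED
        if status == UNKNOWN:
            return SKIPPED
        return status   # leave anything else untouched

    today = datetime.date.today()
    assert status_without_file(today + datetime.timedelta(days=1), UNKNOWN) == UNAIRED
    assert status_without_file(datetime.date.fromordinal(1), UNKNOWN) == SKIPPED
    assert status_without_file(today - datetime.timedelta(days=7), UNAIRED) == WANTED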
-    def loadFromNFO(self, location):
-
-        if not os.path.isdir(self.show._location):
-            logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try loading the episode NFO")
-            return
-
-        logger.log(str(self.show.tvdbid) + ": Loading episode details from the NFO file associated with " + location, logger.DEBUG)
-
-        self.location = location
-
-        if self.location != "":
-
-            if self.status == UNKNOWN:
-                if sickbeard.helpers.isMediaFile(self.location):
-                    logger.log(u"7 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG)
-                    self.status = Quality.statusFromName(self.location)
-
-            nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo")
-            logger.log(str(self.show.tvdbid) + ": Using NFO name " + nfoFile, logger.DEBUG)
-
-            if ek.ek(os.path.isfile, nfoFile):
-                try:
-                    showXML = etree.ElementTree(file = nfoFile)
-                except (SyntaxError, ValueError), e:
-                    logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e), logger.ERROR) #TODO: figure out what's wrong and fix it
-                    try:
-                        ek.ek(os.rename, nfoFile, nfoFile + ".old")
-                    except Exception, e:
-                        logger.log(u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e), logger.ERROR)
-                    raise exceptions.NoNFOException("Error in NFO format")
-
-                for epDetails in showXML.getiterator('episodedetails'):
-                    if epDetails.findtext('season') == None or int(epDetails.findtext('season')) != self.season or \
-                       epDetails.findtext('episode') == None or int(epDetails.findtext('episode')) != self.episode:
-                        logger.log(str(self.show.tvdbid) + ": NFO has an <episodedetails> block for a different episode - wanted " + str(self.season) + "x" + str(self.episode) + " but got " + str(epDetails.findtext('season')) + "x" + str(epDetails.findtext('episode')), logger.DEBUG)
-                        continue
-
-                    if epDetails.findtext('title') == None or epDetails.findtext('aired') == None:
-                        raise exceptions.NoNFOException("Error in NFO format (missing episode title or airdate)")
-
-                    self.name = epDetails.findtext('title')
-                    self.episode = int(epDetails.findtext('episode'))
-                    self.season = int(epDetails.findtext('season'))
-
-                    self.description = epDetails.findtext('plot')
-                    if self.description == None:
-                        self.description = ""
-
-                    if epDetails.findtext('aired'):
-                        rawAirdate = [int(x) for x in epDetails.findtext('aired').split("-")]
-                        self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
-                    else:
-                        self.airdate = datetime.date.fromordinal(1)
-
-                    self.hasnfo = True
-            else:
-                self.hasnfo = False
-
-            if ek.ek(os.path.isfile, sickbeard.helpers.replaceExtension(nfoFile, "tbn")):
-                self.hastbn = True
-            else:
-                self.hastbn = False
-
-    def __str__ (self):
-
-        toReturn = ""
-        toReturn += str(self.show.name) + " - " + str(self.season) + "x" + str(self.episode) + " - " + str(self.name) + "\n"
-        toReturn += "location: " + str(self.location) + "\n"
-        toReturn += "description: " + str(self.description) + "\n"
-        toReturn += "subtitles: " + str(",".join(self.subtitles)) + "\n"
-        toReturn += "subtitles_searchcount: " + str(self.subtitles_searchcount) + "\n"
-        toReturn += "subtitles_lastsearch: " + str(self.subtitles_lastsearch) + "\n"
-        toReturn += "airdate: " + str(self.airdate.toordinal()) + " (" + str(self.airdate) + ")\n"
-        toReturn += "hasnfo: " + str(self.hasnfo) + "\n"
-        toReturn += "hastbn: " + str(self.hastbn) + "\n"
-        toReturn += "status: " + str(self.status) + "\n"
-        toReturn += "languages: " + str(self.audio_langs) + "\n"
-        return toReturn
-
-    def createMetaFiles(self, force=False):
-
-        if not ek.ek(os.path.isdir, self.show._location):
-            logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try to create metadata")
-            return
-
-        self.createNFO(force)
-        self.createThumbnail(force)
-
-        if self.checkForMetaFiles():
-            self.saveToDB()
-
-    def createNFO(self, force=False):
-
-        result = False
-
-        for cur_provider in sickbeard.metadata_provider_dict.values():
-            result = cur_provider.create_episode_metadata(self) or result
-
-        return result
-
-    def createThumbnail(self, force=False):
-
-        result = False
-
-        for cur_provider in sickbeard.metadata_provider_dict.values():
-            result = cur_provider.create_episode_thumb(self) or result
-
-        return result
-
-    def deleteEpisode(self):
-
-        logger.log(u"Deleting "+self.show.name+" "+str(self.season)+"x"+str(self.episode)+" from the DB", logger.DEBUG)
-
-        # remove myself from the show dictionary
-        if self.show.getEpisode(self.season, self.episode, noCreate=True) == self:
-            logger.log(u"Removing myself from my show's list", logger.DEBUG)
-            del self.show.episodes[self.season][self.episode]
-
-        # delete myself from the DB
-        logger.log(u"Deleting myself from the database", logger.DEBUG)
-        myDB = db.DBConnection()
-        sql = "DELETE FROM tv_episodes WHERE showid="+str(self.show.tvdbid)+" AND season="+str(self.season)+" AND episode="+str(self.episode)
-        myDB.action(sql)
-
-        raise exceptions.EpisodeDeletedException()
-
-    def saveToDB(self, forceSave=False):
-        """
-        Saves this episode to the database if any of its data has been changed since the last save.
-        
-        forceSave: If True it will save to the database even if no data has been changed since the
-                    last save (aka if the record is not dirty).
-        """
-        
-        if not self.dirty and not forceSave:
-            logger.log(str(self.show.tvdbid) + ": Not saving episode to db - record is not dirty", logger.DEBUG)
-            return
-
-        logger.log(str(self.show.tvdbid) + ": Saving episode details to database", logger.DEBUG)
-
-        logger.log(u"STATUS IS " + str(self.status), logger.DEBUG)
-
-        myDB = db.DBConnection()
-        
-        newValueDict = {"tvdbid": self.tvdbid,
-                        "name": self.name,
-                        "description": self.description,
-                        "subtitles": ",".join([sub for sub in self.subtitles]),
-                        "subtitles_searchcount": self.subtitles_searchcount,
-                        "subtitles_lastsearch": self.subtitles_lastsearch,
-                        "airdate": self.airdate.toordinal(),
-                        "hasnfo": self.hasnfo,
-                        "hastbn": self.hastbn,
-                        "status": self.status,
-                        "location": self.location,
-                        "audio_langs": self.audio_langs,
-                        "file_size": self.file_size,
-                        "release_name": self.release_name}
-        controlValueDict = {"showid": self.show.tvdbid,
-                            "season": self.season,
-                            "episode": self.episode}
-
-        # use a custom update/insert method to get the data into the DB
-        myDB.upsert("tv_episodes", newValueDict, controlValueDict)
-
-    def fullPath (self):
-        if self.location == None or self.location == "":
-            return None
-        else:
-            return ek.ek(os.path.join, self.show.location, self.location)
-
-    def prettyName(self):
-        """
-        Returns the name of this episode in a "pretty" human-readable format. Used for logging
-        and notifications and such.
-        
-        Returns: A string representing the episode's name and season/ep numbers 
-        """
-
-        return self._format_pattern('%SN - %Sx%0E - %EN')
-
-    def _ep_name(self):
-        """
-        Returns the name of the episode to use during renaming. Combines the names of related episodes.
-        Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
-            "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"
-        """
-        
-        multiNameRegex = "(.*) \(\d\)"
-
-        self.relatedEps = sorted(self.relatedEps, key=lambda x: x.episode)
-
-        if len(self.relatedEps) == 0:
-            goodName = self.name
-
-        else:
-            goodName = ''
-
-            singleName = True
-            curGoodName = None
-
-            for curName in [self.name] + [x.name for x in self.relatedEps]:
-                match = re.match(multiNameRegex, curName)
-                if not match:
-                    singleName = False
-                    break
-
-                if curGoodName == None:
-                    curGoodName = match.group(1)
-                elif curGoodName != match.group(1):
-                    singleName = False
-                    break
-
-            if singleName:
-                goodName = curGoodName
-            else:
-                goodName = self.name
-                for relEp in self.relatedEps:
-                    goodName += " & " + relEp.name
-
-        return goodName
-
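In other words, _ep_name() collapses multi-part names like "Ep Name (1)" / "Ep Name (2)" into the shared base name and otherwise joins the names with " & ". A compact standalone sketch of that merging rule, with invented sample names:

    import re

    def merge_names(names):
        multiNameRegex = r"(.*) \(\d\)"
        base = None
        for name in names:
            match = re.match(multiNameRegex, name)
            if not match or (base is not None and match.group(1) != base):
                return " & ".join(names)    # fall back to joining everything
            base = match.group(1)
        return base

    assert merge_names(["Ep Name (1)", "Ep Name (2)"]) == "Ep Name"
    assert merge_names(["Ep Name", "Other Ep Name"]) == "Ep Name & Other Ep Name"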
-    def _replace_map(self):
-        """
-        Generates a replacement map for this episode which maps all possible custom naming patterns to the correct
-        value for this episode.
-        
-        Returns: A dict with patterns as the keys and their replacement values as the values.
-        """
-        
-        ep_name = self._ep_name()
-        
-        def dot(name):
-            return helpers.sanitizeSceneName(name)
-        
-        def us(name):
-            return re.sub('[ -]','_', name)
-
-        def release_name(name):
-            if name and name.lower().endswith('.nzb'):
-                name = name.rpartition('.')[0]
-            return name
-
-        def release_group(name):
-            if not name:
-                return ''
-
-            np = NameParser(name)
-
-            try:
-                parse_result = np.parse(name)
-            except InvalidNameException, e:
-                logger.log(u"Unable to parse release_group: " + ex(e), logger.DEBUG)
-                return ''
-
-            if not parse_result.release_group:
-                return ''
-            return parse_result.release_group
-
-        epStatus, epQual = Quality.splitCompositeStatus(self.status) #@UnusedVariable
-        
-        return {
-                   '%SN': self.show.name,
-                   '%S.N': dot(self.show.name),
-                   '%S_N': us(self.show.name),
-                   '%EN': ep_name,
-                   '%E.N': dot(ep_name),
-                   '%E_N': us(ep_name),
-                   '%QN': Quality.qualityStrings[epQual],
-                   '%Q.N': dot(Quality.qualityStrings[epQual]),
-                   '%Q_N': us(Quality.qualityStrings[epQual]),
-                   '%S': str(self.season),
-                   '%0S': '%02d' % self.season,
-                   '%E': str(self.episode),
-                   '%0E': '%02d' % self.episode,
-                   '%RN': release_name(self.release_name),
-                   '%RG': release_group(self.release_name),
-                   '%AD': str(self.airdate).replace('-', ' '),
-                   '%A.D': str(self.airdate).replace('-', '.'),
-                   '%A_D': us(str(self.airdate)),
-                   '%A-D': str(self.airdate),
-                   '%Y': str(self.airdate.year),
-                   '%M': str(self.airdate.month),
-                   '%D': str(self.airdate.day),
-                   '%0M': '%02d' % self.airdate.month,
-                   '%0D': '%02d' % self.airdate.day,
-                   }
-
-    def _format_string(self, pattern, replace_map):
-        """
-        Replaces all template strings with the correct value
-        """
-
-        result_name = pattern
-
-        # do the replacements
-        for cur_replacement in sorted(replace_map.keys(), reverse=True):
-            result_name = result_name.replace(cur_replacement, helpers.sanitizeFileName(replace_map[cur_replacement]))
-            result_name = result_name.replace(cur_replacement.lower(), helpers.sanitizeFileName(replace_map[cur_replacement].lower()))
-
-        return result_name
-
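Taken together, _replace_map() plus _format_string() amount to a longest-key-first template substitution: keys are applied in reverse-sorted order so that, for example, '%SN' and '%EN' are consumed before the bare '%S' and '%E', and each lower-cased key maps to the lower-cased value. A minimal sketch with an invented replacement map (the real code also runs every value through helpers.sanitizeFileName, which is omitted here):

    replace_map = {
        '%SN': 'Show Name',
        '%EN': 'Episode Name',
        '%S':  '2',
        '%0S': '02',
        '%E':  '3',
        '%0E': '03',
    }

    def format_string(pattern, replace_map):
        result_name = pattern
        for cur_replacement in sorted(replace_map.keys(), reverse=True):
            result_name = result_name.replace(cur_replacement, replace_map[cur_replacement])
            result_name = result_name.replace(cur_replacement.lower(),
                                              replace_map[cur_replacement].lower())
        return result_name

    assert format_string('%SN - %Sx%0E - %EN', replace_map) == 'Show Name - 2x03 - Episode Name'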
-    def _format_pattern(self, pattern=None, multi=None):
-        """
-        Manipulates an episode naming pattern and then fills the template in
-        """
-        
-        if pattern == None:
-            pattern = sickbeard.NAMING_PATTERN
-        
-        if multi == None:
-            multi = sickbeard.NAMING_MULTI_EP
-        
-        replace_map = self._replace_map()
-
-        result_name = pattern
-        
-        # if there's no release group then replace it with a reasonable facsimile
-        if not replace_map['%RN']:
-            if self.show.air_by_date:
-                result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-SiCKBEARD')
-                result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-sickbeard')
-            else:
-                result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-SiCKBEARD')
-                result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-sickbeard')
-
-            result_name = result_name.replace('%RG', 'SiCKBEARD')
-            result_name = result_name.replace('%rg', 'sickbeard')
-            logger.log(u"Episode has no release name, replacing it with a generic one: "+result_name, logger.DEBUG)
-        
-        # split off ep name part only
-        name_groups = re.split(r'[\\/]', result_name)
-        
-        # figure out the double-ep numbering style for each group, if applicable
-        for cur_name_group in name_groups:
-        
-            season_format = sep = ep_sep = ep_format = None
-        
-            season_ep_regex = '''
-                                (?P<pre_sep>[ _.-]*)
-                                ((?:s(?:eason|eries)?\s*)?%0?S(?![._]?N))
-                                (.*?)
-                                (%0?E(?![._]?N))
-                                (?P<post_sep>[ _.-]*)
-                              '''
-            ep_only_regex = '(E?%0?E(?![._]?N))'
-        
-            # try the normal way
-            season_ep_match = re.search(season_ep_regex, cur_name_group, re.I|re.X)
-            ep_only_match = re.search(ep_only_regex, cur_name_group, re.I|re.X)
-            
-            # if we have a season and episode then collect the necessary data
-            if season_ep_match:
-                season_format = season_ep_match.group(2)
-                ep_sep = season_ep_match.group(3)
-                ep_format = season_ep_match.group(4)
-                sep = season_ep_match.group('pre_sep')
-                if not sep:
-                    sep = season_ep_match.group('post_sep')
-                if not sep:
-                    sep = ' '
-
-                # force 2-3-4 format if they chose to extend
-                if multi in (NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED):
-                    ep_sep = '-'
-                
-                regex_used = season_ep_regex
-
-            # if there's no season then there's not much choice so we'll just force them to use 03-04-05 style
-            elif ep_only_match:
-                season_format = ''
-                ep_sep = '-'
-                ep_format = ep_only_match.group(1)
-                sep = ''
-                regex_used = ep_only_regex
-
-            else:
-                continue
-
-            # we need at least this much info to continue
-            if not ep_sep or not ep_format:
-                continue
-            
-            # start with the ep string, eg. E03
-            ep_string = self._format_string(ep_format.upper(), replace_map)
-            for other_ep in self.relatedEps:
-                
-                # for limited extend we only append the last ep
-                if multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED) and other_ep != self.relatedEps[-1]:
-                    continue
-                
-                elif multi == NAMING_DUPLICATE:
-                    # add " - S01"
-                    ep_string += sep + season_format
-                
-                elif multi == NAMING_SEPARATED_REPEAT:
-                    ep_string += sep
-
-                # add "E04"
-                ep_string += ep_sep
-
-                if multi == NAMING_LIMITED_EXTEND_E_PREFIXED:
-                    ep_string += 'E'
-
-                ep_string += other_ep._format_string(ep_format.upper(), other_ep._replace_map())
-
-            if season_ep_match:
-                regex_replacement = r'\g<pre_sep>\g<2>\g<3>' + ep_string + r'\g<post_sep>'
-            elif ep_only_match:
-                regex_replacement = ep_string
-
-            # fill out the template for this piece and then insert this piece into the actual pattern
-            cur_name_group_result = re.sub('(?i)(?x)'+regex_used, regex_replacement, cur_name_group)
-            #cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
-            #logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG)
-            result_name = result_name.replace(cur_name_group, cur_name_group_result)
-
-        result_name = self._format_string(result_name, replace_map)
-
-        logger.log(u"formatting pattern: "+pattern+" -> "+result_name, logger.DEBUG)
-        
-        
-        return result_name
-
-    def proper_path(self):
-        """    
-        Figures out the path where this episode SHOULD live according to the renaming rules, relative from the show dir
-        """
-        
-        result = self.formatted_filename()
-
-        # if they want us to flatten it and we're allowed to flatten it then we will
-        if self.show.flatten_folders and not sickbeard.NAMING_FORCE_FOLDERS:
-            return result
-        
-        # if not we append the folder on and use that
-        else:
-            result = ek.ek(os.path.join, self.formatted_dir(), result)
-        
-        return result
-        
-
-    def formatted_dir(self, pattern=None, multi=None):
-        """
-        Just the folder name of the episode
-        """
-
-        if pattern == None:
-            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
-            if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps:
-                pattern = sickbeard.NAMING_ABD_PATTERN
-            else:
-                pattern = sickbeard.NAMING_PATTERN
-        
-        # split off the dirs only, if they exist
-        name_groups = re.split(r'[\\/]', pattern)
-        
-        if len(name_groups) == 1:
-            return ''
-        else:
-            return self._format_pattern(os.sep.join(name_groups[:-1]), multi)
-
-
-    def formatted_filename(self, pattern=None, multi=None):
-        """
-        Just the filename of the episode, formatted based on the naming settings
-        """
-        
-        if pattern == None:
-            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
-            if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps:
-                pattern = sickbeard.NAMING_ABD_PATTERN
-            else:
-                pattern = sickbeard.NAMING_PATTERN
-            
-        # split off the filename only, if they exist
-        name_groups = re.split(r'[\\/]', pattern)
-        
-        return self._format_pattern(name_groups[-1], multi)
-
-    def rename(self):
-        """
-        Renames an episode file and all related files to the location and filename as specified
-        in the naming settings.
-        """
-
-        if not ek.ek(os.path.isfile, self.location):
-            logger.log(u"Can't perform rename on " + self.location + " when it doesn't exist, skipping", logger.WARNING)
-            return
-
-        proper_path = self.proper_path()
-        absolute_proper_path = ek.ek(os.path.join, self.show.location, proper_path)
-        absolute_current_path_no_ext, file_ext = os.path.splitext(self.location)
-        
-        related_subs = []
-
-        current_path = absolute_current_path_no_ext
-
-        if absolute_current_path_no_ext.startswith(self.show.location):
-            current_path = absolute_current_path_no_ext[len(self.show.location):]
-
-        logger.log(u"Renaming/moving episode from the base path " + self.location + " to " + absolute_proper_path, logger.DEBUG)
-
-        # if it's already named correctly then don't do anything
-        if proper_path == current_path:
-            logger.log(str(self.tvdbid) + ": File " + self.location + " is already named correctly, skipping", logger.DEBUG)
-            return
-
-        related_files = postProcessor.PostProcessor(self.location)._list_associated_files(self.location)
-
-        if self.show.subtitles and sickbeard.SUBTITLES_DIR != '':
-            related_subs = postProcessor.PostProcessor(self.location)._list_associated_files(sickbeard.SUBTITLES_DIR, subtitles_only=True)
-            absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename())
-            
-        if self.show.subtitles and sickbeard.SUBTITLES_DIR_SUB:
-            related_subs = postProcessor.PostProcessor(self.location)._list_associated_files(os.path.dirname(self.location)+"\\Subs", subtitles_only=True)
-            absolute_proper_subs_path = ek.ek(os.path.join, os.path.dirname(self.location)+"\\Subs", self.formatted_filename())
-            
-        logger.log(u"Files associated to " + self.location + ": " + str(related_files), logger.DEBUG)
-
-        # move the ep file
-        result = helpers.rename_ep_file(self.location, absolute_proper_path)
-
-        # move related files
-        for cur_related_file in related_files:
-            cur_result = helpers.rename_ep_file(cur_related_file, absolute_proper_path)
-            if cur_result == False:
-                logger.log(str(self.tvdbid) + ": Unable to rename file " + cur_related_file, logger.ERROR)
-
-        for cur_related_sub in related_subs:
-            cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path)
-            if cur_result == False:
-                logger.log(str(self.tvdbid) + ": Unable to rename file " + cur_related_sub, logger.ERROR)
-
-        # save the ep
-        with self.lock:
-            if result != False:
-                self.location = absolute_proper_path + file_ext
-                for relEp in self.relatedEps:
-                    relEp.location = absolute_proper_path + file_ext
-
-        # in case something changed with the metadata just do a quick check
-        for curEp in [self] + self.relatedEps:
-            curEp.checkForMetaFiles()
-
-        # save any changes to the database
-        with self.lock:
-            self.saveToDB()
-            for relEp in self.relatedEps:
-                relEp.saveToDB()
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import with_statement
+
+import os.path
+import datetime
+import threading
+import re
+import traceback
+import glob
+
+import sickbeard
+
+import xml.etree.cElementTree as etree
+
+from name_parser.parser import NameParser, InvalidNameException
+
+from lib import subliminal
+
+from lib.tvdb_api import tvdb_api, tvdb_exceptions
+
+from sickbeard import db
+from sickbeard import helpers, exceptions, logger
+from sickbeard.exceptions import ex
+from sickbeard import tvrage
+from sickbeard import image_cache
+from sickbeard import notifiers
+from sickbeard import postProcessor
+from sickbeard import subtitles
+from sickbeard import history
+
+from sickbeard import encodingKludge as ek
+
+from common import Quality, Overview
+from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN
+from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, NAMING_LIMITED_EXTEND_E_PREFIXED
+
+class TVShow(object):
+
+    def __init__ (self, tvdbid, lang="", audio_lang=""):
+
+        self.tvdbid = tvdbid
+
+        self._location = ""
+        self.name = ""
+        self.tvrid = 0
+        self.tvrname = ""
+        self.network = ""
+        self.genre = ""
+        self.runtime = 0
+        self.quality = int(sickbeard.QUALITY_DEFAULT)
+        self.flatten_folders = int(sickbeard.FLATTEN_FOLDERS_DEFAULT)
+
+        self.status = ""
+        self.airs = ""
+        self.startyear = 0
+        self.paused = 0
+        self.air_by_date = 0
+        self.subtitles = int(sickbeard.SUBTITLES_DEFAULT)
+        self.lang = lang
+        self.audio_lang = audio_lang
+        self.custom_search_names = ""
+
+        self.lock = threading.Lock()
+        self._isDirGood = False
+
+        self.episodes = {}
+        
+        otherShow = helpers.findCertainShow(sickbeard.showList, self.tvdbid)
+        if otherShow != None:
+            raise exceptions.MultipleShowObjectsException("Can't create a show if it already exists")
+
+        self.loadFromDB()
+
+        self.saveToDB()
+
+    def _getLocation(self):
+        # no dir check needed if missing show dirs are created during post-processing
+        if sickbeard.CREATE_MISSING_SHOW_DIRS:
+            return self._location
+        
+        if ek.ek(os.path.isdir, self._location):
+            return self._location
+        else:
+            raise exceptions.ShowDirNotFoundException("Show folder doesn't exist, you shouldn't be using it")
+
+    def _setLocation(self, newLocation):
+        logger.log(u"Setter sets location to " + newLocation, logger.DEBUG)
+        # Don't validate dir if user wants to add shows without creating a dir
+        if sickbeard.ADD_SHOWS_WO_DIR or ek.ek(os.path.isdir, newLocation):
+            self._location = newLocation
+            self._isDirGood = True
+        else:
+            raise exceptions.NoNFOException("Invalid folder for the show!")
+
+    location = property(_getLocation, _setLocation)
+
+    # delete references to anything that's not in the internal lists
+    def flushEpisodes(self):
+
+        for curSeason in self.episodes:
+            for curEp in self.episodes[curSeason]:
+                myEp = self.episodes[curSeason][curEp]
+                self.episodes[curSeason][curEp] = None
+                del myEp
+
+    def getAllEpisodes(self, season=None, has_location=False):
+
+        myDB = db.DBConnection()
+
+        sql_selection = "SELECT season, episode, "
+
+        # subselection to detect multi-episodes early, share_location > 0
+        sql_selection = sql_selection + " (SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid AND season = tve.season AND location != '' AND location = tve.location AND episode != tve.episode) AS share_location "
+
+        sql_selection = sql_selection + " FROM tv_episodes tve WHERE showid = " + str(self.tvdbid)
+
+        if season is not None:
+            sql_selection = sql_selection + " AND season = " + str(season)
+        if has_location:
+            sql_selection = sql_selection + " AND location != '' "
+
+        # need ORDER episode ASC to rename multi-episodes in order S01E01-02
+        sql_selection = sql_selection + " ORDER BY season ASC, episode ASC"
+
+        results = myDB.select(sql_selection)
+
+        ep_list = []
+        for cur_result in results:
+            cur_ep = self.getEpisode(int(cur_result["season"]), int(cur_result["episode"]))
+            if cur_ep:
+                if cur_ep.location:
+                    # if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps
+                    if cur_result["share_location"] > 0:
+                        related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC", [self.tvdbid, cur_ep.season, cur_ep.location, cur_ep.episode])
+                        for cur_related_ep in related_eps_result:
+                            related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
+                            if related_ep not in cur_ep.relatedEps:
+                                cur_ep.relatedEps.append(related_ep)
+                ep_list.append(cur_ep)
+
+        return ep_list
+
+
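+    # getEpisode acts as a lazy cache: episodes live in self.episodes[season][episode] and a
+    # TVEpisode object is only built (optionally from a file) on the first request for it,
+    # unless noCreate is set, in which case a cache miss simply returns None.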
+    def getEpisode(self, season, episode, file=None, noCreate=False):
+
+        #return TVEpisode(self, season, episode)
+
+        if not season in self.episodes:
+            self.episodes[season] = {}
+
+        ep = None
+
+        if not episode in self.episodes[season] or self.episodes[season][episode] == None:
+            if noCreate:
+                return None
+
+            logger.log(str(self.tvdbid) + ": An object for episode " + str(season) + "x" + str(episode) + " didn't exist in the cache, trying to create it", logger.DEBUG)
+
+            if file != None:
+                ep = TVEpisode(self, season, episode, file)
+            else:
+                ep = TVEpisode(self, season, episode)
+
+            if ep != None:
+                self.episodes[season][episode] = ep
+
+        return self.episodes[season][episode]
+
+    def writeShowNFO(self):
+
+        result = False
+
+        if not ek.ek(os.path.isdir, self._location):
+            logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation")
+            return False
+
+        for cur_provider in sickbeard.metadata_provider_dict.values():
+            result = cur_provider.create_show_metadata(self) or result
+
+        return result
+
+    def writeMetadata(self, show_only=False):
+
+        if not ek.ek(os.path.isdir, self._location):
+            logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation")
+            return
+
+        self.getImages()
+
+        self.writeShowNFO()
+        
+        if not show_only:
+            self.writeEpisodeNFOs()
+
+    def writeEpisodeNFOs (self):
+
+        if not ek.ek(os.path.isdir, self._location):
+            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, skipping NFO generation")
+            return
+
+        logger.log(str(self.tvdbid) + ": Writing NFOs for all episodes")
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
+
+        for epResult in sqlResults:
+            logger.log(str(self.tvdbid) + ": Retrieving/creating episode " + str(epResult["season"]) + "x" + str(epResult["episode"]), logger.DEBUG)
+            curEp = self.getEpisode(epResult["season"], epResult["episode"])
+            curEp.createMetaFiles()
+
+
+    # find all media files in the show folder and create episodes for as many as possible
+    def loadEpisodesFromDir (self):
+
+        if not ek.ek(os.path.isdir, self._location):
+            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, not loading episodes from disk")
+            return
+
+        logger.log(str(self.tvdbid) + ": Loading all episodes from the show directory " + self._location)
+
+        # get file list
+        mediaFiles = helpers.listMediaFiles(self._location)
+
+        # create TVEpisodes from each media file (if possible)
+        for mediaFile in mediaFiles:
+
+            curEpisode = None
+
+            logger.log(str(self.tvdbid) + ": Creating episode from " + mediaFile, logger.DEBUG)
+            try:
+                curEpisode = self.makeEpFromFile(ek.ek(os.path.join, self._location, mediaFile))
+            except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException), e:
+                logger.log(u"Episode "+mediaFile+" returned an exception: "+ex(e), logger.ERROR)
+                continue
+            except exceptions.EpisodeDeletedException:
+                logger.log(u"The episode deleted itself when I tried making an object for it", logger.DEBUG)
+
+            if curEpisode is None:
+                continue
+
+            # see if we should save the release name in the db
+            ep_file_name = ek.ek(os.path.basename, curEpisode.location)
+            ep_file_name = ek.ek(os.path.splitext, ep_file_name)[0]
+            
+            parse_result = None
+            try:
+                np = NameParser(False)
+                parse_result = np.parse(ep_file_name)
+            except InvalidNameException:
+                pass
+        
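+            # Only keep the file name as the release name if it still looks like a scene
+            # release: no spaces and a release group that the name parser recognises.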
+            if not ' ' in ep_file_name and parse_result and parse_result.release_group:
+                logger.log(u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid", logger.DEBUG)
+                curEpisode.release_name = ep_file_name
+
+            # store the reference in the show
+            if curEpisode != None:
+                if self.subtitles:
+                    try:
+                        curEpisode.refreshSubtitles()
+                    except:
+                        logger.log(str(self.tvdbid) + ": Could not refresh subtitles", logger.ERROR)
+                        logger.log(traceback.format_exc(), logger.DEBUG)
+                curEpisode.saveToDB()
+
+
+    def loadEpisodesFromDB(self):
+
+        logger.log(u"Loading all episodes from the DB")
+
+        myDB = db.DBConnection()
+        sql = "SELECT * FROM tv_episodes WHERE showid = ?"
+        sqlResults = myDB.select(sql, [self.tvdbid])
+
+        scannedEps = {}
+
+        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
+
+        if self.lang:
+            ltvdb_api_parms['language'] = self.lang
+
+        t = tvdb_api.Tvdb(**ltvdb_api_parms)
+
+        cachedShow = t[self.tvdbid]
+        cachedSeasons = {}
+
+        for curResult in sqlResults:
+
+            deleteEp = False
+                    
+            curSeason = int(curResult["season"])
+            curEpisode = int(curResult["episode"])
+            if curSeason not in cachedSeasons:
+                try:
+                    cachedSeasons[curSeason] = cachedShow[curSeason]
+                except tvdb_exceptions.tvdb_seasonnotfound, e:
+                    logger.log(u"Error when trying to load the episode from TVDB: "+e.message, logger.WARNING)
+                    deleteEp = True
+
+            if not curSeason in scannedEps:
+                scannedEps[curSeason] = {}
+
+            logger.log(u"Loading episode "+str(curSeason)+"x"+str(curEpisode)+" from the DB", logger.DEBUG)
+
+            try:
+                curEp = self.getEpisode(curSeason, curEpisode)
+                
+                # if we found out that the ep is no longer on TVDB then delete it from our database too
+                if deleteEp:
+                    curEp.deleteEpisode()
+                
+                curEp.loadFromDB(curSeason, curEpisode)
+                curEp.loadFromTVDB(tvapi=t, cachedSeason=cachedSeasons[curSeason])
+                scannedEps[curSeason][curEpisode] = True
+            except exceptions.EpisodeDeletedException:
+                logger.log(u"Tried loading an episode from the DB that should have been deleted, skipping it", logger.DEBUG)
+                continue
+
+        return scannedEps
+
+
+    def loadEpisodesFromTVDB(self, cache=True):
+
+        # There's gotta be a better way of doing this but we don't wanna
+        # change the cache value elsewhere
+        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
+
+        if not cache:
+            ltvdb_api_parms['cache'] = False
+
+        if self.lang:
+            ltvdb_api_parms['language'] = self.lang
+
+        try:
+            t = tvdb_api.Tvdb(**ltvdb_api_parms)
+            showObj = t[self.tvdbid]
+        except tvdb_exceptions.tvdb_error:
+            logger.log(u"TVDB timed out, unable to update episodes from TVDB", logger.ERROR)
+            return None
+
+        logger.log(str(self.tvdbid) + ": Loading all episodes from theTVDB...")
+
+        scannedEps = {}
+
+        for season in showObj:
+            scannedEps[season] = {}
+            for episode in showObj[season]:
+                # need some examples of wtf episode 0 means to decide if we want it or not
+                if episode == 0:
+                    continue
+                try:
+                    #ep = TVEpisode(self, season, episode)
+                    ep = self.getEpisode(season, episode)
+                except exceptions.EpisodeNotFoundException:
+                    logger.log(str(self.tvdbid) + ": TVDB object for " + str(season) + "x" + str(episode) + " is incomplete, skipping this episode")
+                    continue
+                else:
+                    try:
+                        ep.loadFromTVDB(tvapi=t)
+                    except exceptions.EpisodeDeletedException:
+                        logger.log(u"The episode was deleted, skipping the rest of the load")
+                        continue
+
+                with ep.lock:
+                    logger.log(str(self.tvdbid) + ": Loading info from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
+                    ep.loadFromTVDB(season, episode, tvapi=t)
+                    if ep.dirty:
+                        ep.saveToDB()
+
+                scannedEps[season][episode] = True
+
+        return scannedEps
+
+    def setTVRID(self, force=False):
+
+        if self.tvrid != 0 and not force:
+            logger.log(u"No need to get the TVRage ID, it's already populated", logger.DEBUG)
+            return
+
+        logger.log(u"Attempting to retrieve the TVRage ID", logger.DEBUG)
+
+        try:
+            # load the tvrage object, it will set the ID in its constructor if possible
+            tvrage.TVRage(self)
+            self.saveToDB()
+        except exceptions.TVRageException, e:
+            logger.log(u"Couldn't get TVRage ID because we're unable to sync TVDB and TVRage: "+ex(e), logger.DEBUG)
+            return
+
+    def getImages(self, fanart=None, poster=None):
+
+        poster_result = fanart_result = season_thumb_result = False
+
+        for cur_provider in sickbeard.metadata_provider_dict.values():
+            logger.log("Running season folders for "+cur_provider.name, logger.DEBUG)
+            poster_result = cur_provider.create_poster(self) or poster_result
+            fanart_result = cur_provider.create_fanart(self) or fanart_result
+            season_thumb_result = cur_provider.create_season_thumbs(self) or season_thumb_result
+
+        return poster_result or fanart_result or season_thumb_result
+
+    def loadLatestFromTVRage(self):
+
+        try:
+            # load the tvrage object
+            tvr = tvrage.TVRage(self)
+
+            newEp = tvr.findLatestEp()
+
+            if newEp != None:
+                logger.log(u"TVRage gave us an episode object - saving it for now", logger.DEBUG)
+                newEp.saveToDB()
+
+            # make an episode out of it
+        except exceptions.TVRageException, e:
+            logger.log(u"Unable to add TVRage info: " + ex(e), logger.WARNING)
+
+
+
+    # make a TVEpisode object from a media file
+    def makeEpFromFile(self, file):
+
+        if not ek.ek(os.path.isfile, file):
+            logger.log(str(self.tvdbid) + ": That isn't even a real file dude... " + file)
+            return None
+
+        logger.log(str(self.tvdbid) + ": Creating episode object from " + file, logger.DEBUG)
+
+        try:
+            myParser = NameParser()
+            parse_result = myParser.parse(file)
+        except InvalidNameException:
+            logger.log(u"Unable to parse the filename "+file+" into a valid episode", logger.ERROR)
+            return None
+
+        if len(parse_result.episode_numbers) == 0 and not parse_result.air_by_date:
+            logger.log("parse_result: "+str(parse_result))
+            logger.log(u"No episode number found in "+file+", ignoring it", logger.ERROR)
+            return None
+
+        # for now lets assume that any episode in the show dir belongs to that show
+        season = parse_result.season_number if parse_result.season_number != None else 1
+        episodes = parse_result.episode_numbers
+        rootEp = None
+
+        # if we have an air-by-date show then get the real season/episode numbers
+        if parse_result.air_by_date:
+            try:
+                # There's gotta be a better way of doing this but we don't wanna
+                # change the cache value elsewhere
+                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
+
+                if self.lang:
+                    ltvdb_api_parms['language'] = self.lang
+
+                t = tvdb_api.Tvdb(**ltvdb_api_parms)
+
+                epObj = t[self.tvdbid].airedOn(parse_result.air_date)[0]
+                season = int(epObj["seasonnumber"])
+                episodes = [int(epObj["episodenumber"])]
+            except tvdb_exceptions.tvdb_episodenotfound:
+                logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + self.name + ", skipping", logger.WARNING)
+                return None
+            except tvdb_exceptions.tvdb_error, e:
+                logger.log(u"Unable to contact TVDB: "+ex(e), logger.WARNING)
+                return None
+
+        for curEpNum in episodes:
+
+            episode = int(curEpNum)
+
+            logger.log(str(self.tvdbid) + ": " + file + " parsed to " + self.name + " " + str(season) + "x" + str(episode), logger.DEBUG)
+
+            checkQualityAgain = False
+            same_file = False
+            curEp = self.getEpisode(season, episode)
+
+            if curEp == None:
+                try:
+                    curEp = self.getEpisode(season, episode, file)
+                except exceptions.EpisodeNotFoundException:
+                    logger.log(str(self.tvdbid) + ": Unable to figure out what this file is, skipping", logger.ERROR)
+                    continue
+
+            else:
+                # if there is a new file associated with this ep then re-check the quality
+                if curEp.location and ek.ek(os.path.normpath, curEp.location) != ek.ek(os.path.normpath, file):
+                    logger.log(u"The old episode had a different file associated with it, I will re-check the quality based on the new filename "+file, logger.DEBUG)
+                    checkQualityAgain = True
+
+                with curEp.lock:
+                    old_size = curEp.file_size
+                    curEp.location = file
+                    # if the sizes are the same then it's probably the same file
+                    if old_size and curEp.file_size == old_size:
+                        same_file = True
+                    else:
+                        same_file = False
+
+                    curEp.checkForMetaFiles()
+                
+
+            if rootEp == None:
+                rootEp = curEp
+            else:
+                if curEp not in rootEp.relatedEps:
+                    rootEp.relatedEps.append(curEp)
+
+            # if it's a new file then 
+            if not same_file:
+                curEp.release_name = ''
+
+            # if they replace a file on me I'll make some attempt at re-checking the quality unless I know it's the same file
+            if checkQualityAgain and not same_file:
+                newQuality = Quality.nameQuality(file)
+                logger.log(u"Since this file has been renamed, I checked "+file+" and found quality "+Quality.qualityStrings[newQuality], logger.DEBUG)
+                if newQuality != Quality.UNKNOWN:
+                    curEp.status = Quality.compositeStatus(DOWNLOADED, newQuality)
+
+
+            # check for status/quality changes as long as it's a new file
+            elif not same_file and sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + [ARCHIVED, IGNORED]:
+
+                oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status)
+                newQuality = Quality.nameQuality(file)
+                if newQuality == Quality.UNKNOWN:
+                    newQuality = Quality.assumeQuality(file)
+
+                newStatus = None
+
+                # if it was snatched and now exists then set the status correctly
+                if oldStatus == SNATCHED and oldQuality <= newQuality:
+                    logger.log(u"STATUS: this ep used to be snatched with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG)
+                    newStatus = DOWNLOADED
+
+                # if it was snatched proper and we found a higher quality one then allow the status change
+                elif oldStatus == SNATCHED_PROPER and oldQuality < newQuality:
+                    logger.log(u"STATUS: this ep used to be snatched proper with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG)
+                    newStatus = DOWNLOADED
+
+                elif oldStatus not in (SNATCHED, SNATCHED_PROPER):
+                    newStatus = DOWNLOADED
+
+                if newStatus != None:
+                    with curEp.lock:
+                        logger.log(u"STATUS: we have an associated file, so setting the status from "+str(curEp.status)+" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG)
+                        curEp.status = Quality.compositeStatus(newStatus, newQuality)
+
+            with curEp.lock:
+                curEp.saveToDB()
+
+        # creating metafiles on the root should be good enough
+        if rootEp != None:
+            with rootEp.lock:
+                rootEp.createMetaFiles()
+
+        return rootEp
+
+
+    def loadFromDB(self, skipNFO=False):
+
+        logger.log(str(self.tvdbid) + ": Loading show info from database")
+
+        myDB = db.DBConnection()
+
+        sqlResults = myDB.select("SELECT * FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid])
+
+        if len(sqlResults) > 1:
+            raise exceptions.MultipleDBShowsException()
+        elif len(sqlResults) == 0:
+            logger.log(str(self.tvdbid) + ": Unable to find the show in the database")
+            return
+        else:
+            if self.name == "":
+                self.name = sqlResults[0]["show_name"]
+            self.tvrname = sqlResults[0]["tvr_name"]
+            if self.network == "":
+                self.network = sqlResults[0]["network"]
+            if self.genre == "":
+                self.genre = sqlResults[0]["genre"]
+
+            self.runtime = sqlResults[0]["runtime"]
+
+            self.status = sqlResults[0]["status"]
+            if self.status == None:
+                self.status = ""
+            self.airs = sqlResults[0]["airs"]
+            if self.airs == None:
+                self.airs = ""
+            self.startyear = sqlResults[0]["startyear"]
+            if self.startyear == None:
+                self.startyear = 0
+
+            self.air_by_date = sqlResults[0]["air_by_date"]
+            if self.air_by_date == None:
+                self.air_by_date = 0
+            
+            self.subtitles = sqlResults[0]["subtitles"]
+            if self.subtitles:
+                self.subtitles = 1
+            else:
+                self.subtitles = 0    
+
+            self.quality = int(sqlResults[0]["quality"])
+            self.flatten_folders = int(sqlResults[0]["flatten_folders"])
+            self.paused = int(sqlResults[0]["paused"])
+
+            self._location = sqlResults[0]["location"]
+
+            if self.tvrid == 0:
+                self.tvrid = int(sqlResults[0]["tvr_id"])
+
+            if self.lang == "":
+                self.lang = sqlResults[0]["lang"]
+                
+            if self.audio_lang == "":
+                self.audio_lang = sqlResults[0]["audio_lang"]                
+
+            if self.custom_search_names == "":
+                self.custom_search_names = sqlResults[0]["custom_search_names"]                
+
+    def loadFromTVDB(self, cache=True, tvapi=None, cachedSeason=None):
+
+        logger.log(str(self.tvdbid) + ": Loading show info from theTVDB")
+
+        # There's gotta be a better way of doing this but we don't wanna
+        # change the cache value elsewhere
+        if tvapi is None:
+            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
+
+            if not cache:
+                ltvdb_api_parms['cache'] = False
+            
+            if self.lang:
+                ltvdb_api_parms['language'] = self.lang
+
+            t = tvdb_api.Tvdb(**ltvdb_api_parms)
+
+        else:
+            t = tvapi
+
+        myEp = t[self.tvdbid]
+
+        self.name = myEp["seriesname"]
+
+        self.genre = myEp['genre']
+        self.network = myEp['network']
+
+        if myEp["airs_dayofweek"] != None and myEp["airs_time"] != None:
+            self.airs = myEp["airs_dayofweek"] + " " + myEp["airs_time"]
+
+        if myEp["firstaired"] != None and myEp["firstaired"]:
+            self.startyear = int(myEp["firstaired"].split('-')[0])
+
+        if self.airs == None:
+            self.airs = ""
+
+        if myEp["status"] != None:
+            self.status = myEp["status"]
+
+        if self.status == None:
+            self.status = ""
+
+        self.saveToDB()
+
+
+    def loadNFO (self):
+
+        if not os.path.isdir(self._location):
+            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't load NFO")
+            raise exceptions.NoNFOException("The show dir doesn't exist, no NFO could be loaded")
+
+        logger.log(str(self.tvdbid) + ": Loading show info from NFO")
+
+        xmlFile = os.path.join(self._location, "tvshow.nfo")
+
+        try:
+            xmlFileObj = open(xmlFile, 'r')
+            showXML = etree.ElementTree(file = xmlFileObj)
+
+            if showXML.findtext('title') == None or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None):
+                raise exceptions.NoNFOException("Invalid info in tvshow.nfo (missing name or id):" \
+                    + str(showXML.findtext('title')) + " " \
+                    + str(showXML.findtext('tvdbid')) + " " \
+                    + str(showXML.findtext('id')))
+
+            self.name = showXML.findtext('title')
+            if showXML.findtext('tvdbid') != None:
+                self.tvdbid = int(showXML.findtext('tvdbid'))
+            elif showXML.findtext('id'):
+                self.tvdbid = int(showXML.findtext('id'))
+            else:
+                raise exceptions.NoNFOException("Empty <id> or <tvdbid> field in NFO")
+
+        except (exceptions.NoNFOException, SyntaxError, ValueError), e:
+            logger.log(u"There was an error parsing your existing tvshow.nfo file: " + ex(e), logger.ERROR)
+            logger.log(u"Attempting to rename it to tvshow.nfo.old", logger.DEBUG)
+
+            try:
+                xmlFileObj.close()
+                ek.ek(os.rename, xmlFile, xmlFile + ".old")
+            except Exception, e:
+                logger.log(u"Failed to rename your tvshow.nfo file - you need to delete it or fix it: " + ex(e), logger.ERROR)
+            raise exceptions.NoNFOException("Invalid info in tvshow.nfo")
+
+        if showXML.findtext('studio') != None:
+            self.network = showXML.findtext('studio')
+        if self.network == None and showXML.findtext('network') != None:
+            self.network = ""
+        if showXML.findtext('genre') != None:
+            self.genre = showXML.findtext('genre')
+        else:
+            self.genre = ""
+
+        # TODO: need to validate the input, I'm assuming it's good until then
+
+
+    def nextEpisode(self):
+
+        logger.log(str(self.tvdbid) + ": Finding the episode which airs next", logger.DEBUG)
+
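+        # The inner query finds the earliest unaired airdate from today onwards; the outer
+        # query then returns every unaired episode airing between today and that date, so
+        # all episodes sharing that airdate are reported together.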
+        myDB = db.DBConnection()
+        innerQuery = "SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status = ? ORDER BY airdate ASC LIMIT 1"
+        innerParams = [self.tvdbid, datetime.date.today().toordinal(), UNAIRED]
+        query = "SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= (" + innerQuery + ") and status = ?"
+        params = [self.tvdbid, datetime.date.today().toordinal()] + innerParams + [UNAIRED]
+        sqlResults = myDB.select(query, params)
+
+        if sqlResults == None or len(sqlResults) == 0:
+            logger.log(str(self.tvdbid) + ": No episode found... need to implement tvrage and also show status", logger.DEBUG)
+            return []
+        else:
+            logger.log(str(self.tvdbid) + ": Found episode " + str(sqlResults[0]["season"]) + "x" + str(sqlResults[0]["episode"]), logger.DEBUG)
+            foundEps = []
+            for sqlEp in sqlResults:
+                curEp = self.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
+                foundEps.append(curEp)
+            return foundEps
+
+        # if we didn't get an episode then try getting one from tvrage
+
+        # load tvrage info
+
+        # extract NextEpisode info
+
+        # verify that we don't have it in the DB somehow (ep mismatch)
+
+
+    def deleteShow(self):
+
+        myDB = db.DBConnection()
+        myDB.action("DELETE FROM tv_episodes WHERE showid = ?", [self.tvdbid])
+        myDB.action("DELETE FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid])
+
+        # remove self from show list
+        sickbeard.showList = [x for x in sickbeard.showList if x.tvdbid != self.tvdbid]
+        
+        # clear the cache
+        image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
+        for cache_file in ek.ek(glob.glob, ek.ek(os.path.join, image_cache_dir, str(self.tvdbid)+'.*')):
+            logger.log(u"Deleting cache file "+cache_file)
+            os.remove(cache_file)
+
+    def populateCache(self):
+        cache_inst = image_cache.ImageCache()
+        
+        logger.log(u"Checking & filling cache for show "+self.name)
+        cache_inst.fill_cache(self)
+
+    def refreshDir(self):
+
+        # make sure the show dir is where we think it is unless dirs are created on the fly
+        if not ek.ek(os.path.isdir, self._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
+            return False
+
+        # load from dir
+        self.loadEpisodesFromDir()
+
+        # run through all locations from DB, check that they exist
+        logger.log(str(self.tvdbid) + ": Loading all episodes with a location from the database")
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
+
+        for ep in sqlResults:
+            curLoc = os.path.normpath(ep["location"])
+            season = int(ep["season"])
+            episode = int(ep["episode"])
+
+            try:
+                curEp = self.getEpisode(season, episode)
+            except exceptions.EpisodeDeletedException:
+                logger.log(u"The episode was deleted while we were refreshing it, moving on to the next one", logger.DEBUG)
+                continue
+
+            # if the path doesn't exist or if it's not in our show dir
+            if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith(os.path.normpath(self.location)):
+
+                with curEp.lock:
+                    # if it used to have a file associated with it and it doesn't anymore then set it to IGNORED
+                    if curEp.location and curEp.status in Quality.DOWNLOADED:
+                        logger.log(str(self.tvdbid) + ": Location for " + str(season) + "x" + str(episode) + " doesn't exist, removing it and changing our status to IGNORED", logger.DEBUG)
+                        curEp.status = IGNORED
+                        curEp.subtitles = list()
+                        curEp.subtitles_searchcount = 0
+                        curEp.subtitles_lastsearch = str(datetime.datetime.min)
+                    curEp.location = ''
+                    curEp.hasnfo = False
+                    curEp.hastbn = False
+                    curEp.release_name = ''
+                    curEp.saveToDB()
+
+
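+    # Walk every episode that has a file on disk (newest first) and delegate the actual
+    # subtitle fetching to TVEpisode.downloadSubtitles().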
+    def downloadSubtitles(self):
+        #TODO: Add support for force option
+        if not ek.ek(os.path.isdir, self._location):
+            logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't download subtitles", logger.DEBUG)
+            return
+        logger.log(str(self.tvdbid) + ": Downloading subtitles", logger.DEBUG)
+        
+        try:
+            episodes = db.DBConnection().select("SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE '' ORDER BY season DESC, episode DESC", [self.tvdbid])
+            for episodeLoc in episodes:
+                episode = self.makeEpFromFile(episodeLoc['location'])
+                subtitles = episode.downloadSubtitles()
+                
+        except Exception as e:
+            logger.log("Error occurred when downloading subtitles: " + str(e), logger.DEBUG)
+            return
+
+
+    def saveToDB(self):
+        logger.log(str(self.tvdbid) + ": Saving show info to database", logger.DEBUG)
+
+        myDB = db.DBConnection()
+
+        controlValueDict = {"tvdb_id": self.tvdbid}
+        newValueDict = {"show_name": self.name,
+                        "tvr_id": self.tvrid,
+                        "location": self._location,
+                        "network": self.network,
+                        "genre": self.genre,
+                        "runtime": self.runtime,
+                        "quality": self.quality,
+                        "airs": self.airs,
+                        "status": self.status,
+                        "flatten_folders": self.flatten_folders,
+                        "paused": self.paused,
+                        "air_by_date": self.air_by_date,
+                        "subtitles": self.subtitles,
+                        "startyear": self.startyear,
+                        "tvr_name": self.tvrname,
+                        "lang": self.lang,
+                        "audio_lang": self.audio_lang,
+                        "custom_search_names": self.custom_search_names
+                        }
+
+        myDB.upsert("tv_shows", newValueDict, controlValueDict)
+
+
+    def __str__(self):
+        toReturn = ""
+        toReturn += "name: " + self.name + "\n"
+        toReturn += "location: " + self._location + "\n"
+        toReturn += "tvdbid: " + str(self.tvdbid) + "\n"
+        if self.network != None:
+            toReturn += "network: " + self.network + "\n"
+        if self.airs != None:
+            toReturn += "airs: " + self.airs + "\n"
+        if self.status != None:
+            toReturn += "status: " + self.status + "\n"
+        toReturn += "startyear: " + str(self.startyear) + "\n"
+        toReturn += "genre: " + self.genre + "\n"
+        toReturn += "runtime: " + str(self.runtime) + "\n"
+        toReturn += "quality: " + str(self.quality) + "\n"
+        return toReturn
+
+
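+    # Decide whether a result at the given quality is worth grabbing: the quality has to be in
+    # the show's allowed (any/best) lists, the episode must not be skipped/ignored/archived
+    # unless the search was forced, and a re-download is only taken when it is in the "best"
+    # list and improves on the quality we already have.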
+    def wantEpisode(self, season, episode, quality, manualSearch=False):
+
+        logger.log(u"Checking if we want episode "+str(season)+"x"+str(episode)+" at quality "+Quality.qualityStrings[quality], logger.DEBUG)
+
+        # if the quality isn't one we want under any circumstances then just say no
+        anyQualities, bestQualities = Quality.splitQuality(self.quality)
+        logger.log(u"any,best = "+str(anyQualities)+" "+str(bestQualities)+" and we are "+str(quality), logger.DEBUG)
+
+        if quality not in anyQualities + bestQualities:
+            logger.log(u"I know for sure I don't want this episode, saying no", logger.DEBUG)
+            return False
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.tvdbid, season, episode])
+
+        if not sqlResults or not len(sqlResults):
+            logger.log(u"Unable to find the episode", logger.DEBUG)
+            return False
+
+        epStatus = int(sqlResults[0]["status"])
+
+        logger.log(u"current episode status: "+str(epStatus), logger.DEBUG)
+
+        # if we know we don't want it then just say no
+        if epStatus in (SKIPPED, IGNORED, ARCHIVED) and not manualSearch:
+            logger.log(u"Ep is skipped, not bothering", logger.DEBUG)
+            return False
+
+        # if it's one of these then we want it as long as it's in our allowed initial qualities
+        if quality in anyQualities + bestQualities:
+            if epStatus in (WANTED, UNAIRED, SKIPPED):
+                logger.log(u"Ep is wanted/unaired/skipped, definitely get it", logger.DEBUG)
+                return True
+            elif manualSearch:
+                logger.log(u"Usually I would ignore this ep but because you forced the search I'm overriding the default and allowing the quality", logger.DEBUG)
+                return True
+            else:
+                logger.log(u"This quality looks like something we might want but I don't know for sure yet", logger.DEBUG)
+
+        curStatus, curQuality = Quality.splitCompositeStatus(epStatus)
+
+        # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have
+        if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER and quality in bestQualities and quality > curQuality:
+            logger.log(u"We already have this ep but the new one is better quality, saying yes", logger.DEBUG)
+            return True
+
+        logger.log(u"None of the conditions were met so I'm just saying no", logger.DEBUG)
+        return False
+
+
+    def getOverview(self, epStatus):
+
+        if epStatus == WANTED:
+            return Overview.WANTED
+        elif epStatus in (UNAIRED, UNKNOWN):
+            return Overview.UNAIRED
+        elif epStatus in (SKIPPED, IGNORED):
+            return Overview.SKIPPED
+        elif epStatus == ARCHIVED:
+            return Overview.GOOD
+        elif epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER:
+
+            anyQualities, bestQualities = Quality.splitQuality(self.quality) #@UnusedVariable
+            if bestQualities:
+                maxBestQuality = max(bestQualities)
+            else:
+                maxBestQuality = None
+
+            epStatus, curQuality = Quality.splitCompositeStatus(epStatus)
+
+            if epStatus in (SNATCHED, SNATCHED_PROPER):
+                return Overview.SNATCHED
+            # if they don't want re-downloads then we call it good if they have anything
+            elif maxBestQuality == None:
+                return Overview.GOOD
+            # if they have one but it's not the best they want then mark it as qual
+            elif curQuality < maxBestQuality:
+                return Overview.QUAL
+            # if it's >= maxBestQuality then it's good
+            else:
+                return Overview.GOOD
+
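+# Factory for property setters used by TVEpisode below: the returned wrapper only assigns the
+# backing attribute when the value actually changes and then flags the object as dirty, so
+# callers can check ep.dirty before deciding to hit the database.
+# Illustrative usage (not part of the original code): ep.name = "Pilot" goes through
+# dirty_setter("_name") and sets ep.dirty = True only if the new name differs from the old one.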
+def dirty_setter(attr_name):
+    def wrapper(self, val):
+        if getattr(self, attr_name) != val:
+            setattr(self, attr_name, val)
+            self.dirty = True
+    return wrapper
+
+class TVEpisode(object):
+
+    def __init__(self, show, season, episode, file=""):
+
+        self._name = ""
+        self._season = season
+        self._episode = episode
+        self._description = ""
+        self._subtitles = list()
+        self._subtitles_searchcount = 0
+        self._subtitles_lastsearch = str(datetime.datetime.min)
+        self._airdate = datetime.date.fromordinal(1)
+        self._hasnfo = False
+        self._hastbn = False
+        self._status = UNKNOWN
+        self._tvdbid = 0
+        self._file_size = 0
+        self._audio_langs = ''
+        self._release_name = ''
+
+        # setting any of the above sets the dirty flag
+        self.dirty = True
+
+        self.show = show
+        self._location = file
+
+        self.lock = threading.Lock()
+
+        self.specifyEpisode(self.season, self.episode)
+
+        self.relatedEps = []
+
+        self.checkForMetaFiles()
+
+    name = property(lambda self: self._name, dirty_setter("_name"))
+    season = property(lambda self: self._season, dirty_setter("_season"))
+    episode = property(lambda self: self._episode, dirty_setter("_episode"))
+    description = property(lambda self: self._description, dirty_setter("_description"))
+    subtitles = property(lambda self: self._subtitles, dirty_setter("_subtitles"))
+    subtitles_searchcount = property(lambda self: self._subtitles_searchcount, dirty_setter("_subtitles_searchcount"))
+    subtitles_lastsearch = property(lambda self: self._subtitles_lastsearch, dirty_setter("_subtitles_lastsearch"))
+    airdate = property(lambda self: self._airdate, dirty_setter("_airdate"))
+    hasnfo = property(lambda self: self._hasnfo, dirty_setter("_hasnfo"))
+    hastbn = property(lambda self: self._hastbn, dirty_setter("_hastbn"))
+    status = property(lambda self: self._status, dirty_setter("_status"))
+    tvdbid = property(lambda self: self._tvdbid, dirty_setter("_tvdbid"))
+    #location = property(lambda self: self._location, dirty_setter("_location"))
+    file_size = property(lambda self: self._file_size, dirty_setter("_file_size"))
+    audio_langs = property(lambda self: self._audio_langs, dirty_setter("_audio_langs"))
+    release_name = property(lambda self: self._release_name, dirty_setter("_release_name"))
+
+    def _set_location(self, new_location):
+        logger.log(u"Setter sets location to " + new_location, logger.DEBUG)
+        
+        #self._location = newLocation
+        dirty_setter("_location")(self, new_location)
+
+        if new_location and ek.ek(os.path.isfile, new_location):
+            self.file_size = ek.ek(os.path.getsize, new_location)
+        else:
+            self.file_size = 0
+
+    location = property(lambda self: self._location, _set_location)
+
+    def refreshSubtitles(self):
+        """Look for subtitles files and refresh the subtitles property"""
+        self.subtitles = subtitles.subtitlesLanguages(self.location)
+
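+    # Subtitle download flow: ask subliminal only for the languages that are still missing,
+    # refresh the local subtitle list, then move/copy the new files according to the
+    # SUBTITLES_DIR, SUBTITLES_DIR_SUB and SUBSNOLANG settings.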
+    def downloadSubtitles(self):
+        #TODO: Add support for force option
+        if not ek.ek(os.path.isfile, self.location):
+            logger.log(str(self.show.tvdbid) + ": Episode file doesn't exist, can't download subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
+            return
+        logger.log(str(self.show.tvdbid) + ": Downloading subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
+        
+        previous_subtitles = self.subtitles
+
+        try:
+                                    
+            need_languages = set(sickbeard.SUBTITLES_LANGUAGES) - set(self.subtitles)
+            subtitles = subliminal.download_subtitles([self.location], languages=need_languages, services=sickbeard.subtitles.getEnabledServiceList(), force=False, multi=True, cache_dir=sickbeard.CACHE_DIR)
+            
+        except Exception as e:
+            logger.log("Error occurred when downloading subtitles: " + str(e), logger.DEBUG)
+            return
+
+        self.refreshSubtitles()
+        self.subtitles_searchcount = self.subtitles_searchcount + 1
+        self.subtitles_lastsearch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        self.saveToDB()
+        
+        newsubtitles = set(self.subtitles).difference(set(previous_subtitles))
+        
+        if newsubtitles:
+            subtitleList = ", ".join(subliminal.language.Language(x).name for x in newsubtitles)
+            logger.log(str(self.show.tvdbid) + ": Downloaded " + subtitleList + " subtitles for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
+            
+            notifiers.notify_subtitle_download(self.prettyName(), subtitleList)
+
+        else:
+            logger.log(str(self.show.tvdbid) + ": No subtitles downloaded for episode " + str(self.season) + "x" + str(self.episode), logger.DEBUG)
+
+        if sickbeard.SUBTITLES_HISTORY:
+            for video in subtitles:
+                for subtitle in subtitles.get(video):
+                    history.logSubtitle(self.show.tvdbid, self.season, self.episode, self.status, subtitle)
+        if sickbeard.SUBTITLES_DIR:
+            for video in subtitles:
+                subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
+                if not ek.ek(os.path.isdir, subs_new_path):
+                    ek.ek(os.mkdir, subs_new_path)
+                        
+                for subtitle in subtitles.get(video):
+                    new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
+                    helpers.moveFile(subtitle.path, new_file_path)
+                    if sickbeard.SUBSNOLANG:
+                                helpers.copyFile(new_file_path,new_file_path[:-6]+"srt")
+                       
+        elif sickbeard.SUBTITLES_DIR_SUB:
+            for video in subtitles:
+                subs_new_path = os.path.join(os.path.dirname(video.path), "Subs")
+                if not os.path.isdir(subs_new_path):
+                    os.makedirs(subs_new_path)
+                        
+                for subtitle in subtitles.get(video):
+                    new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
+                    helpers.moveFile(subtitle.path, new_file_path)
+                    subtitle.path = new_file_path
+                    if sickbeard.SUBSNOLANG:
+                        helpers.copyFile(new_file_path, new_file_path[:-6] + "srt")
+                        subtitle.path = new_file_path
+        else:
+            for video in subtitles:
+                for subtitle in subtitles.get(video):
+                    if sickbeard.SUBSNOLANG:
+                        helpers.copyFile(subtitle.path, subtitle.path[:-6] + "srt")
+                        helpers.chmodAsParent(subtitle.path[:-6] + "srt")
+                    helpers.chmodAsParent(subtitle.path)
+        return subtitles
+
+
+    def checkForMetaFiles(self):
+
+        oldhasnfo = self.hasnfo
+        oldhastbn = self.hastbn
+
+        cur_nfo = False
+        cur_tbn = False
+
+        # check for nfo and tbn
+        if ek.ek(os.path.isfile, self.location):
+            for cur_provider in sickbeard.metadata_provider_dict.values():
+                if cur_provider.episode_metadata:
+                    new_result = cur_provider._has_episode_metadata(self)
+                else:
+                    new_result = False
+                cur_nfo = new_result or cur_nfo
+                
+                if cur_provider.episode_thumbnails:
+                    new_result = cur_provider._has_episode_thumb(self)
+                else:
+                    new_result = False
+                cur_tbn = new_result or cur_tbn
+
+        self.hasnfo = cur_nfo
+        self.hastbn = cur_tbn
+
+        # if either setting has changed return true, if not return false
+        return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn
+
+    def specifyEpisode(self, season, episode):
+
+        sqlResult = self.loadFromDB(season, episode)
+
+        if not sqlResult:
+            # only load from NFO if we didn't load from DB
+            if ek.ek(os.path.isfile, self.location):
+                try:
+                    self.loadFromNFO(self.location)
+                except exceptions.NoNFOException:
+                    logger.log(str(self.show.tvdbid) + ": There was an error loading the NFO for episode " + str(season) + "x" + str(episode), logger.ERROR)
+                    pass
+
+                # if we tried loading it from NFO and didn't find the NFO, use TVDB
+                if self.hasnfo == False:
+                    try:
+                        result = self.loadFromTVDB(season, episode)
+                    except exceptions.EpisodeDeletedException:
+                        result = False
+
+                    # if we failed SQL *and* NFO, TVDB then fail
+                    if result == False:
+                        raise exceptions.EpisodeNotFoundException("Couldn't find episode " + str(season) + "x" + str(episode))
+        
+        # don't update if not needed
+        if self.dirty:
+            self.saveToDB()
+
+    def loadFromDB(self, season, episode):
+
+        logger.log(str(self.show.tvdbid) + ": Loading episode details from DB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.show.tvdbid, season, episode])
+
+        if len(sqlResults) > 1:
+            raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.")
+        elif len(sqlResults) == 0:
+            logger.log(str(self.show.tvdbid) + ": Episode " + str(self.season) + "x" + str(self.episode) + " not found in the database", logger.DEBUG)
+            return False
+        else:
+            #NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"]))
+            if sqlResults[0]["name"] != None:
+                self.name = sqlResults[0]["name"]
+            self.season = season
+            self.episode = episode
+            self.description = sqlResults[0]["description"]
+            if self.description == None:
+                self.description = ""
+            if sqlResults[0]["subtitles"] != None and sqlResults[0]["subtitles"] != '':
+                self.subtitles = sqlResults[0]["subtitles"].split(",")
+            self.subtitles_searchcount = sqlResults[0]["subtitles_searchcount"]
+            self.subtitles_lastsearch = sqlResults[0]["subtitles_lastsearch"]
+            self.airdate = datetime.date.fromordinal(int(sqlResults[0]["airdate"]))
+            #logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG)
+            self.status = int(sqlResults[0]["status"])
+
+            # don't overwrite my location
+            if sqlResults[0]["location"] != "" and sqlResults[0]["location"] != None:
+                self.location = os.path.normpath(sqlResults[0]["location"])
+            if sqlResults[0]["file_size"]:
+                self.file_size = int(sqlResults[0]["file_size"])
+            else:
+                self.file_size = 0
+
+            self.tvdbid = int(sqlResults[0]["tvdbid"])
+
+            if sqlResults[0]["audio_langs"] != None:
+                self.audio_langs = sqlResults[0]["audio_langs"]
+            
+            if sqlResults[0]["release_name"] != None:
+                self.release_name = sqlResults[0]["release_name"]
+
+            self.dirty = False
+            return True
+
+    def loadFromTVDB(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None):
+
+        if season == None:
+            season = self.season
+        if episode == None:
+            episode = self.episode
+
+        logger.log(str(self.show.tvdbid) + ": Loading episode details from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
+
+        tvdb_lang = self.show.lang
+
+        try:
+            if cachedSeason is None:
+                if tvapi is None:
+                    # There's gotta be a better way of doing this but we don't wanna
+                    # change the cache value elsewhere
+                    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
+
+                    if not cache:
+                        ltvdb_api_parms['cache'] = False
+
+                    if tvdb_lang:
+                        ltvdb_api_parms['language'] = tvdb_lang
+
+                    t = tvdb_api.Tvdb(**ltvdb_api_parms)
+                else:
+                    t = tvapi
+                myEp = t[self.show.tvdbid][season][episode]
+            else:
+                myEp = cachedSeason[episode]
+
+        except (tvdb_exceptions.tvdb_error, IOError), e:
+            logger.log(u"TVDB threw up an error: "+ex(e), logger.DEBUG)
+            # if the episode is already valid just log it, if not throw it up
+            if self.name:
+                logger.log(u"TVDB timed out but we have enough info from other sources, allowing the error", logger.DEBUG)
+                return
+            else:
+                logger.log(u"TVDB timed out, unable to create the episode", logger.ERROR)
+                return False
+        except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound):
+            logger.log(u"Unable to find the episode on tvdb... has it been removed? Should I delete from db?", logger.DEBUG)
+            # if I'm no longer on TVDB but I once was then delete myself from the DB
+            if self.tvdbid != -1:
+                self.deleteEpisode()
+            return
+
+
+        if not myEp["firstaired"] or myEp["firstaired"] == "0000-00-00":
+            myEp["firstaired"] = str(datetime.date.fromordinal(1))
+
+        if myEp["episodename"] == None or myEp["episodename"] == "":
+            logger.log(u"This episode ("+self.show.name+" - "+str(season)+"x"+str(episode)+") has no name on TVDB")
+            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
+            if self.tvdbid != -1:
+                self.deleteEpisode()
+            return False
+
+        #NAMEIT logger.log(u"BBBBBBBB from " + str(self.season)+"x"+str(self.episode) + " -" +self.name+" to "+myEp["episodename"])
+        self.name = myEp["episodename"]
+        self.season = season
+        self.episode = episode
+        tmp_description = myEp["overview"]
+        if tmp_description == None:
+            self.description = ""
+        else:
+            self.description = tmp_description
+        rawAirdate = [int(x) for x in myEp["firstaired"].split("-")]
+        try:
+            self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
+        except ValueError:
+            logger.log(u"Malformed air date retrieved from TVDB ("+self.show.name+" - "+str(season)+"x"+str(episode)+")", logger.ERROR)
+            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
+            if self.tvdbid != -1:
+                self.deleteEpisode()
+            return False
+        
+        #early conversion to int so that episode doesn't get marked dirty
+        self.tvdbid = int(myEp["id"])
+        
+        #don't update show status if show dir is missing, unless missing show dirs are created during post-processing
+        if not ek.ek(os.path.isdir, self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
+            logger.log(u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid")
+            return
+
+        logger.log(str(self.show.tvdbid) + ": Setting status for " + str(season) + "x" + str(episode) + " based on status " + str(self.status) + " and existence of " + self.location, logger.DEBUG)
+
+        if not ek.ek(os.path.isfile, self.location):
+
+            # if we don't have the file
+            if self.airdate >= datetime.date.today() and self.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER:
+                # and it hasn't aired yet set the status to UNAIRED
+                logger.log(u"Episode airs in the future, changing status from " + str(self.status) + " to " + str(UNAIRED), logger.DEBUG)
+                self.status = UNAIRED
+            # if there's no airdate then set it to skipped (and respect ignored)
+            elif self.airdate == datetime.date.fromordinal(1):
+                if self.status == IGNORED:
+                    logger.log(u"Episode has no air date, but it's already marked as ignored", logger.DEBUG)
+                else:
+                    logger.log(u"Episode has no air date, automatically marking it skipped", logger.DEBUG)
+                    self.status = SKIPPED
+            # if we don't have the file and the airdate is in the past
+            else:
+                if self.status == UNAIRED:
+                    self.status = WANTED
+
+                # if we somehow are still UNKNOWN then just skip it
+                elif self.status == UNKNOWN:
+                    self.status = SKIPPED
+
+                else:
+                    logger.log(u"Not touching status because we have no ep file, the airdate is in the past, and the status is "+str(self.status), logger.DEBUG)
+
+        # if we have a media file then it's downloaded
+        elif sickbeard.helpers.isMediaFile(self.location):
+            # leave propers alone, you have to either post-process them or manually change them back
+            if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED]:
+                logger.log(u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG)
+                self.status = Quality.statusFromName(self.location)
+
+        # shouldn't get here probably
+        else:
+            logger.log(u"6 Status changes from " + str(self.status) + " to " + str(UNKNOWN), logger.DEBUG)
+            self.status = UNKNOWN
+
+
+        # hasnfo, hastbn, status?
+
+
+    def loadFromNFO(self, location):
+
+        if not os.path.isdir(self.show._location):
+            logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try loading the episode NFO")
+            return
+
+        logger.log(str(self.show.tvdbid) + ": Loading episode details from the NFO file associated with " + location, logger.DEBUG)
+
+        self.location = location
+
+        if self.location != "":
+
+            if self.status == UNKNOWN:
+                if sickbeard.helpers.isMediaFile(self.location):
+                    logger.log(u"7 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG)
+                    self.status = Quality.statusFromName(self.location)
+
+            nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo")
+            logger.log(str(self.show.tvdbid) + ": Using NFO name " + nfoFile, logger.DEBUG)
+
+            if ek.ek(os.path.isfile, nfoFile):
+                try:
+                    showXML = etree.ElementTree(file = nfoFile)
+                except (SyntaxError, ValueError), e:
+                    logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e), logger.ERROR) #TODO: figure out what's wrong and fix it
+                    try:
+                        ek.ek(os.rename, nfoFile, nfoFile + ".old")
+                    except Exception, e:
+                        logger.log(u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e), logger.ERROR)
+                    raise exceptions.NoNFOException("Error in NFO format")
+
+                for epDetails in showXML.getiterator('episodedetails'):
+                    if epDetails.findtext('season') == None or int(epDetails.findtext('season')) != self.season or \
+                       epDetails.findtext('episode') == None or int(epDetails.findtext('episode')) != self.episode:
+                        logger.log(str(self.show.tvdbid) + ": NFO has an <episodedetails> block for a different episode - wanted " + str(self.season) + "x" + str(self.episode) + " but got " + str(epDetails.findtext('season')) + "x" + str(epDetails.findtext('episode')), logger.DEBUG)
+                        continue
+
+                    if epDetails.findtext('title') == None or epDetails.findtext('aired') == None:
+                        raise exceptions.NoNFOException("Error in NFO format (missing episode title or airdate)")
+
+                    self.name = epDetails.findtext('title')
+                    self.episode = int(epDetails.findtext('episode'))
+                    self.season = int(epDetails.findtext('season'))
+
+                    self.description = epDetails.findtext('plot')
+                    if self.description == None:
+                        self.description = ""
+
+                    if epDetails.findtext('aired'):
+                        rawAirdate = [int(x) for x in epDetails.findtext('aired').split("-")]
+                        self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
+                    else:
+                        self.airdate = datetime.date.fromordinal(1)
+
+                    self.hasnfo = True
+            else:
+                self.hasnfo = False
+
+            if ek.ek(os.path.isfile, sickbeard.helpers.replaceExtension(nfoFile, "tbn")):
+                self.hastbn = True
+            else:
+                self.hastbn = False
+
+    def __str__ (self):
+
+        toReturn = ""
+        toReturn += str(self.show.name) + " - " + str(self.season) + "x" + str(self.episode) + " - " + str(self.name) + "\n"
+        toReturn += "location: " + str(self.location) + "\n"
+        toReturn += "description: " + str(self.description) + "\n"
+        toReturn += "subtitles: " + str(",".join(self.subtitles)) + "\n"
+        toReturn += "subtitles_searchcount: " + str(self.subtitles_searchcount) + "\n"
+        toReturn += "subtitles_lastsearch: " + str(self.subtitles_lastsearch) + "\n"
+        toReturn += "airdate: " + str(self.airdate.toordinal()) + " (" + str(self.airdate) + ")\n"
+        toReturn += "hasnfo: " + str(self.hasnfo) + "\n"
+        toReturn += "hastbn: " + str(self.hastbn) + "\n"
+        toReturn += "status: " + str(self.status) + "\n"
+        toReturn += "languages: " + str(self.audio_langs) + "\n"
+        return toReturn
+
+    def createMetaFiles(self, force=False):
+
+        if not ek.ek(os.path.isdir, self.show._location):
+            logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try to create metadata")
+            return
+
+        self.createNFO(force)
+        self.createThumbnail(force)
+
+        if self.checkForMetaFiles():
+            self.saveToDB()
+
+    def createNFO(self, force=False):
+
+        result = False
+
+        for cur_provider in sickbeard.metadata_provider_dict.values():
+            result = cur_provider.create_episode_metadata(self) or result
+
+        return result
+
+    def createThumbnail(self, force=False):
+
+        result = False
+
+        for cur_provider in sickbeard.metadata_provider_dict.values():
+            result = cur_provider.create_episode_thumb(self) or result
+
+        return result
+
+    def deleteEpisode(self):
+
+        logger.log(u"Deleting "+self.show.name+" "+str(self.season)+"x"+str(self.episode)+" from the DB", logger.DEBUG)
+
+        # remove myself from the show dictionary
+        if self.show.getEpisode(self.season, self.episode, noCreate=True) == self:
+            logger.log(u"Removing myself from my show's list", logger.DEBUG)
+            del self.show.episodes[self.season][self.episode]
+
+        # delete myself from the DB
+        logger.log(u"Deleting myself from the database", logger.DEBUG)
+        myDB = db.DBConnection()
+        sql = "DELETE FROM tv_episodes WHERE showid="+str(self.show.tvdbid)+" AND season="+str(self.season)+" AND episode="+str(self.episode)
+        myDB.action(sql)
+
+        raise exceptions.EpisodeDeletedException()
+
+    def saveToDB(self, forceSave=False):
+        """
+        Saves this episode to the database if any of its data has been changed since the last save.
+        
+        forceSave: If True it will save to the database even if no data has been changed since the
+                    last save (aka if the record is not dirty).
+        """
+        
+        if not self.dirty and not forceSave:
+            logger.log(str(self.show.tvdbid) + ": Not saving episode to db - record is not dirty", logger.DEBUG)
+            return
+
+        logger.log(str(self.show.tvdbid) + ": Saving episode details to database", logger.DEBUG)
+
+        logger.log(u"STATUS IS " + str(self.status), logger.DEBUG)
+
+        myDB = db.DBConnection()
+        
+        newValueDict = {"tvdbid": self.tvdbid,
+                        "name": self.name,
+                        "description": self.description,
+                        "subtitles": ",".join([sub for sub in self.subtitles]),
+                        "subtitles_searchcount": self.subtitles_searchcount,
+                        "subtitles_lastsearch": self.subtitles_lastsearch,
+                        "airdate": self.airdate.toordinal(),
+                        "hasnfo": self.hasnfo,
+                        "hastbn": self.hastbn,
+                        "status": self.status,
+                        "location": self.location,
+                        "audio_langs": self.audio_langs,
+                        "file_size": self.file_size,
+                        "release_name": self.release_name}
+        controlValueDict = {"showid": self.show.tvdbid,
+                            "season": self.season,
+                            "episode": self.episode}
+
+        # use a custom update/insert method to get the data into the DB
+        myDB.upsert("tv_episodes", newValueDict, controlValueDict)
+
+    def fullPath (self):
+        if self.location == None or self.location == "":
+            return None
+        else:
+            return ek.ek(os.path.join, self.show.location, self.location)
+
+    def prettyName(self):
+        """
+        Returns the name of this episode in a "pretty" human-readable format. Used for logging
+        and notifications and such.
+        
+        Returns: A string representing the episode's name and season/ep numbers 
+        """
+
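+        # e.g. 'Show Name - 3x04 - Episode Name'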
+        return self._format_pattern('%SN - %Sx%0E - %EN')
+
+    def _ep_name(self):
+        """
+        Returns the name of the episode to use during renaming. Combines the names of related episodes.
+        Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
+            "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"
+        """
+        
+        multiNameRegex = "(.*) \(\d\)"
+
+        self.relatedEps = sorted(self.relatedEps, key=lambda x: x.episode)
+
+        if len(self.relatedEps) == 0:
+            goodName = self.name
+
+        else:
+            goodName = ''
+
+            singleName = True
+            curGoodName = None
+
+            for curName in [self.name] + [x.name for x in self.relatedEps]:
+                match = re.match(multiNameRegex, curName)
+                if not match:
+                    singleName = False
+                    break
+
+                if curGoodName == None:
+                    curGoodName = match.group(1)
+                elif curGoodName != match.group(1):
+                    singleName = False
+                    break
+
+            if singleName:
+                goodName = curGoodName
+            else:
+                goodName = self.name
+                for relEp in self.relatedEps:
+                    goodName += " & " + relEp.name
+
+        return goodName
+
+    def _replace_map(self):
+        """
+        Generates a replacement map for this episode which maps all possible custom naming patterns to the correct
+        value for this episode.
+        
+        Returns: A dict with patterns as the keys and their replacement values as the values.
+        """
+        
+        ep_name = self._ep_name()
+        
+        def dot(name):
+            return helpers.sanitizeSceneName(name)
+        
+        def us(name):
+            return re.sub('[ -]','_', name)
+
+        def release_name(name):
+            if name and name.lower().endswith('.nzb'):
+                name = name.rpartition('.')[0]
+            return name
+
+        def release_group(name):
+            if not name:
+                return ''
+
+            np = NameParser(name)
+
+            try:
+                parse_result = np.parse(name)
+            except InvalidNameException, e:
+                logger.log(u"Unable to get parse release_group: "+ex(e), logger.DEBUG)
+                return ''
+
+            if not parse_result.release_group:
+                return ''
+            return parse_result.release_group
+
+        epStatus, epQual = Quality.splitCompositeStatus(self.status) #@UnusedVariable
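+        # e.g. for show 'Show Name', episode 3x04 the map contains entries like
+        # {'%SN': 'Show Name', '%0S': '03', '%E': '4', '%0E': '04', ...}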
+        
+        return {
+                   '%SN': self.show.name,
+                   '%S.N': dot(self.show.name),
+                   '%S_N': us(self.show.name),
+                   '%EN': ep_name,
+                   '%E.N': dot(ep_name),
+                   '%E_N': us(ep_name),
+                   '%QN': Quality.qualityStrings[epQual],
+                   '%Q.N': dot(Quality.qualityStrings[epQual]),
+                   '%Q_N': us(Quality.qualityStrings[epQual]),
+                   '%S': str(self.season),
+                   '%0S': '%02d' % self.season,
+                   '%E': str(self.episode),
+                   '%0E': '%02d' % self.episode,
+                   '%RN': release_name(self.release_name),
+                   '%RG': release_group(self.release_name),
+                   '%AD': str(self.airdate).replace('-', ' '),
+                   '%A.D': str(self.airdate).replace('-', '.'),
+                   '%A_D': us(str(self.airdate)),
+                   '%A-D': str(self.airdate),
+                   '%Y': str(self.airdate.year),
+                   '%M': str(self.airdate.month),
+                   '%D': str(self.airdate.day),
+                   '%0M': '%02d' % self.airdate.month,
+                   '%0D': '%02d' % self.airdate.day,
+                   }
+
+    def _format_string(self, pattern, replace_map):
+        """
+        Replaces all template strings with the correct value
+        """
+
+        result_name = pattern
+
+        # do the replacements
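+        # reverse-sorting the keys means longer tokens such as '%SN' are replaced before
+        # their shorter prefixes such as '%S'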
+        for cur_replacement in sorted(replace_map.keys(), reverse=True):
+            result_name = result_name.replace(cur_replacement, helpers.sanitizeFileName(replace_map[cur_replacement]))
+            result_name = result_name.replace(cur_replacement.lower(), helpers.sanitizeFileName(replace_map[cur_replacement].lower()))
+
+        return result_name
+
+    def _format_pattern(self, pattern=None, multi=None):
+        """
+        Manipulates an episode naming pattern and then fills the template in
+        """
+        
+        if pattern == None:
+            pattern = sickbeard.NAMING_PATTERN
+        
+        if multi == None:
+            multi = sickbeard.NAMING_MULTI_EP
+        
+        replace_map = self._replace_map()
+
+        result_name = pattern
+        
+        # if there's no release group then replace it with a reasonable facsimile
+        if not replace_map['%RN']:
+            if self.show.air_by_date:
+                result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-SiCKBEARD')
+                result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-sickbeard')
+            else:
+                result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-SiCKBEARD')
+                result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-sickbeard')
+
+            result_name = result_name.replace('%RG', 'SiCKBEARD')
+            result_name = result_name.replace('%rg', 'sickbeard')
+            logger.log(u"Episode has no release name, replacing it with a generic one: "+result_name, logger.DEBUG)
+        
+        # split off ep name part only
+        name_groups = re.split(r'[\\/]', result_name)
+        
+        # figure out the double-ep numbering style for each group, if applicable
+        for cur_name_group in name_groups:
+        
+            season_format = sep = ep_sep = ep_format = None
+        
+            season_ep_regex = '''
+                                (?P<pre_sep>[ _.-]*)
+                                ((?:s(?:eason|eries)?\s*)?%0?S(?![._]?N))
+                                (.*?)
+                                (%0?E(?![._]?N))
+                                (?P<post_sep>[ _.-]*)
+                              '''
+            ep_only_regex = '(E?%0?E(?![._]?N))'
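+            # season_ep_regex matches a season+episode token pair such as 'S%0SE%0E',
+            # ep_only_regex matches a lone episode token such as '%0E' or 'E%0E'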
+        
+            # try the normal way
+            season_ep_match = re.search(season_ep_regex, cur_name_group, re.I|re.X)
+            ep_only_match = re.search(ep_only_regex, cur_name_group, re.I|re.X)
+            
+            # if we have a season and episode then collect the necessary data
+            if season_ep_match:
+                season_format = season_ep_match.group(2)
+                ep_sep = season_ep_match.group(3)
+                ep_format = season_ep_match.group(4)
+                sep = season_ep_match.group('pre_sep')
+                if not sep:
+                    sep = season_ep_match.group('post_sep')
+                if not sep:
+                    sep = ' '
+
+                # force 2-3-4 format if they chose to extend
+                if multi in (NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED):
+                    ep_sep = '-'
+                
+                regex_used = season_ep_regex
+
+            # if there's no season then there's not much choice so we'll just force them to use 03-04-05 style
+            elif ep_only_match:
+                season_format = ''
+                ep_sep = '-'
+                ep_format = ep_only_match.group(1)
+                sep = ''
+                regex_used = ep_only_regex
+
+            else:
+                continue
+
+            # we need at least this much info to continue
+            if not ep_sep or not ep_format:
+                continue
+            
+            # start with the ep string, eg. E03
+            ep_string = self._format_string(ep_format.upper(), replace_map)
+            for other_ep in self.relatedEps:
+                
+                # for limited extend we only append the last ep
+                if multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED) and other_ep != self.relatedEps[-1]:
+                    continue
+                
+                elif multi == NAMING_DUPLICATE:
+                    # add " - S01"
+                    ep_string += sep + season_format
+                
+                elif multi == NAMING_SEPARATED_REPEAT:
+                    ep_string += sep
+
+                # add "E04"
+                ep_string += ep_sep
+
+                if multi == NAMING_LIMITED_EXTEND_E_PREFIXED:
+                    ep_string += 'E'
+
+                ep_string += other_ep._format_string(ep_format.upper(), other_ep._replace_map())
+
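+            # ep_string now holds the combined episode numbering, e.g. '02-03' for a
+            # two-part episode using the extend style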
+            if season_ep_match:
+                regex_replacement = r'\g<pre_sep>\g<2>\g<3>' + ep_string + r'\g<post_sep>'
+            elif ep_only_match:
+                regex_replacement = ep_string
+
+            # fill out the template for this piece and then insert this piece into the actual pattern
+            cur_name_group_result = re.sub('(?i)(?x)'+regex_used, regex_replacement, cur_name_group)
+            #cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
+            #logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG)
+            result_name = result_name.replace(cur_name_group, cur_name_group_result)
+
+        result_name = self._format_string(result_name, replace_map)
+
+        logger.log(u"formatting pattern: "+pattern+" -> "+result_name, logger.DEBUG)
+        
+        
+        return result_name
+
+    def proper_path(self):
+        """    
+        Figures out the path where this episode SHOULD live according to the renaming rules, relative from the show dir
+        """
+        
+        result = self.formatted_filename()
+
+        # if they want us to flatten it and we're allowed to flatten it then we will
+        if self.show.flatten_folders and not sickbeard.NAMING_FORCE_FOLDERS:
+            return result
+        
+        # if not we append the folder on and use that
+        else:
+            result = ek.ek(os.path.join, self.formatted_dir(), result)
+        
+        return result
+        
+
+    def formatted_dir(self, pattern=None, multi=None):
+        """
+        Just the folder name of the episode
+        """
+
+        if pattern == None:
+            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
+            if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps:
+                pattern = sickbeard.NAMING_ABD_PATTERN
+            else:
+                pattern = sickbeard.NAMING_PATTERN
+        
+        # split off the dirs only, if they exist
+        name_groups = re.split(r'[\\/]', pattern)
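+        # e.g. 'Season %0S/%S.N.S%0SE%0E' splits into ['Season %0S', '%S.N.S%0SE%0E'];
+        # everything but the last group is the directory part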
+        
+        if len(name_groups) == 1:
+            return ''
+        else:
+            return self._format_pattern(os.sep.join(name_groups[:-1]), multi)
+
+
+    def formatted_filename(self, pattern=None, multi=None):
+        """
+        Just the filename of the episode, formatted based on the naming settings
+        """
+        
+        if pattern == None:
+            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
+            if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps:
+                pattern = sickbeard.NAMING_ABD_PATTERN
+            else:
+                pattern = sickbeard.NAMING_PATTERN
+            
+        # split off the filename only, if they exist
+        name_groups = re.split(r'[\\/]', pattern)
+        
+        return self._format_pattern(name_groups[-1], multi)
+
+    def rename(self):
+        """
+        Renames an episode file and all related files to the location and filename as specified
+        in the naming settings.
+        """
+
+        if not ek.ek(os.path.isfile, self.location):
+            logger.log(u"Can't perform rename on " + self.location + " when it doesn't exist, skipping", logger.WARNING)
+            return
+
+        proper_path = self.proper_path()
+        absolute_proper_path = ek.ek(os.path.join, self.show.location, proper_path)
+        absolute_current_path_no_ext, file_ext = os.path.splitext(self.location)
+        
+        related_subs = []
+
+        current_path = absolute_current_path_no_ext
+
+        if absolute_current_path_no_ext.startswith(self.show.location):
+            current_path = absolute_current_path_no_ext[len(self.show.location):]
+
+        logger.log(u"Renaming/moving episode from the base path " + self.location + " to " + absolute_proper_path, logger.DEBUG)
+
+        # if it's already named correctly then don't do anything
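+        # (current_path was made relative to the show dir above so it lines up with proper_path)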
+        if proper_path == current_path:
+            logger.log(str(self.tvdbid) + ": File " + self.location + " is already named correctly, skipping", logger.DEBUG)
+            return
+
+        related_files = postProcessor.PostProcessor(self.location)._list_associated_files(self.location)
+
+        if self.show.subtitles and sickbeard.SUBTITLES_DIR != '':
+            related_subs = postProcessor.PostProcessor(self.location)._list_associated_files(sickbeard.SUBTITLES_DIR, subtitles_only=True)
+            absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename())
+            
+        if self.show.subtitles and sickbeard.SUBTITLES_DIR_SUB:
+            related_subs = postProcessor.PostProcessor(self.location)._list_associated_files(ek.ek(os.path.join, os.path.dirname(self.location), "Subs"), subtitles_only=True)
+            absolute_proper_subs_path = ek.ek(os.path.join, os.path.dirname(self.location), "Subs", self.formatted_filename())
+            
+        logger.log(u"Files associated to " + self.location + ": " + str(related_files), logger.DEBUG)
+
+        # move the ep file
+        result = helpers.rename_ep_file(self.location, absolute_proper_path)
+
+        # move related files
+        for cur_related_file in related_files:
+            cur_result = helpers.rename_ep_file(cur_related_file, absolute_proper_path)
+            if cur_result == False:
+                logger.log(str(self.tvdbid) + ": Unable to rename file " + cur_related_file, logger.ERROR)
+
+        for cur_related_sub in related_subs:
+            cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path)
+            if cur_result == False:
+                logger.log(str(self.tvdbid) + ": Unable to rename file " + cur_related_sub, logger.ERROR)
+
+        # save the ep
+        with self.lock:
+            if result != False:
+                self.location = absolute_proper_path + file_ext
+                for relEp in self.relatedEps:
+                    relEp.location = absolute_proper_path + file_ext
+
+        # in case something changed with the metadata just do a quick check
+        for curEp in [self] + self.relatedEps:
+            curEp.checkForMetaFiles()
+
+        # save any changes to the database
+        with self.lock:
+            self.saveToDB()
+            for relEp in self.relatedEps:
+                relEp.saveToDB()
diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py
index 06c5c25e5..f36a56a29 100644
--- a/sickbeard/versionChecker.py
+++ b/sickbeard/versionChecker.py
@@ -1,529 +1,516 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
-
-import sickbeard
-from sickbeard import version, ui
-from sickbeard import logger
-from sickbeard import scene_exceptions
-from sickbeard.exceptions import ex
-
-import os, platform, shutil
-import subprocess, re
-import urllib, urllib2
-import zipfile, tarfile
-
-from urllib2 import URLError
-import gh_api as github
-
-class CheckVersion():
-    """
-    Version check class meant to run as a thread object with the SB scheduler.
-    """
-
-    def __init__(self):
-        self.install_type = self.find_install_type()
-
-        if self.install_type == 'win':
-            self.updater = WindowsUpdateManager()
-        elif self.install_type == 'git':
-            self.updater = GitUpdateManager()
-        elif self.install_type == 'source':
-            self.updater = SourceUpdateManager()
-        else:
-            self.updater = None
-
-    def run(self):
-        self.check_for_new_version()
-        
-        # refresh scene exceptions too
-        scene_exceptions.retrieve_exceptions()
-
-    def find_install_type(self):
-        """
-        Determines how this copy of SB was installed.
-        
-        returns: type of installation. Possible values are:
-            'win': any compiled windows build
-            'git': running from source using git
-            'source': running from source without git
-        """
-
-        # check if we're a windows build
-        if version.SICKBEARD_VERSION.startswith('build '):
-            install_type = 'win'
-        elif os.path.isdir(os.path.join(sickbeard.PROG_DIR, '.git')):
-            install_type = 'git'
-        else:
-            install_type = 'source'
-
-        return install_type
-
-    def check_for_new_version(self, force=False):
-        """
-        Checks the internet for a newer version.
-        
-        returns: bool, True for new version or False for no new version.
-        
-        force: if true the VERSION_NOTIFY setting will be ignored and a check will be forced
-        """
-
-        if not sickbeard.VERSION_NOTIFY and not force:
-            logger.log(u"Version checking is disabled, not checking for the newest version")
-            return False
-
-        logger.log(u"Checking if "+self.install_type+" needs an update")
-        if not self.updater.need_update():
-            logger.log(u"No update needed")
-            if force:
-                ui.notifications.message('No update needed')
-            return False
-
-        self.updater.set_newest_text()
-        return True
-
-    def update(self):
-        if self.updater.need_update():
-            return self.updater.update()
-
-class UpdateManager():
-    def get_update_url(self):
-        return sickbeard.WEB_ROOT+"/home/update/?pid="+str(sickbeard.PID)
-
-class WindowsUpdateManager(UpdateManager):
-
-    def __init__(self):
-        self._cur_version = None
-        self._cur_commit_hash = None
-        self._newest_version = None
-
-        self.gc_url = 'http://code.google.com/p/sickbeard/downloads/list'
-        self.version_url = 'https://raw.github.com/sarakha63/Sick-Beard/windows_binaries/updates.txt'
-
-    def _find_installed_version(self):
-        return int(sickbeard.version.SICKBEARD_VERSION[6:])
-
-    def _find_newest_version(self, whole_link=False):
-        """
-        Checks git for the newest Windows binary build. Returns either the
-        build number or the entire build URL depending on whole_link's value.
-
-        whole_link: If True, returns the entire URL to the release. If False, it returns
-                    only the build number. default: False
-        """
-
-        regex = ".*SickBeard\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"
-
-        svnFile = urllib.urlopen(self.version_url)
-
-        for curLine in svnFile.readlines():
-            logger.log(u"checking line "+curLine, logger.DEBUG)
-            match = re.match(regex, curLine)
-            if match:
-                logger.log(u"found a match", logger.DEBUG)
-                if whole_link:
-                    return curLine.strip()
-                else:
-                    return int(match.group(1))
-
-        return None
-
-    def need_update(self):
-        self._cur_version = self._find_installed_version()
-        self._newest_version = self._find_newest_version()
-
-        logger.log(u"newest version: "+repr(self._newest_version), logger.DEBUG)
-
-        if self._newest_version and self._newest_version > self._cur_version:
-            return True
-
-    def set_newest_text(self):
-        new_str = 'There is a <a href="'+self.gc_url+'" onclick="window.open(this.href); return false;">newer version available</a> (build '+str(self._newest_version)+')'
-        new_str += "&mdash; <a href=\""+self.get_update_url()+"\">Update Now</a>"
-        sickbeard.NEWEST_VERSION_STRING = new_str
-
-    def update(self):
-
-        new_link = self._find_newest_version(True)
-
-        logger.log(u"new_link: " + repr(new_link), logger.DEBUG)
-
-        if not new_link:
-            logger.log(u"Unable to find a new version link on google code, not updating")
-            return False
-
-        # download the zip
-        try:
-            logger.log(u"Downloading update file from "+str(new_link))
-            (filename, headers) = urllib.urlretrieve(new_link) #@UnusedVariable
-
-            # prepare the update dir
-            sb_update_dir = os.path.join(sickbeard.PROG_DIR, 'sb-update')
-            logger.log(u"Clearing out update folder "+sb_update_dir+" before unzipping")
-            if os.path.isdir(sb_update_dir):
-                shutil.rmtree(sb_update_dir)
-
-            # unzip it to sb-update
-            logger.log(u"Unzipping from "+str(filename)+" to "+sb_update_dir)
-            update_zip = zipfile.ZipFile(filename, 'r')
-            update_zip.extractall(sb_update_dir)
-            update_zip.close()
-            
-            # find update dir name
-            update_dir_contents = os.listdir(sb_update_dir)
-            if len(update_dir_contents) != 1:
-                logger.log("Invalid update data, update failed. Maybe try deleting your sb-update folder?", logger.ERROR)
-                return False
-
-            content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
-            old_update_path = os.path.join(content_dir, 'updater.exe')
-            new_update_path = os.path.join(sickbeard.PROG_DIR, 'updater.exe')
-            logger.log(u"Copying new update.exe file from "+old_update_path+" to "+new_update_path)
-            shutil.move(old_update_path, new_update_path)
-
-            # delete the zip
-            logger.log(u"Deleting zip file from "+str(filename))
-            os.remove(filename)
-
-        except Exception, e:
-            logger.log(u"Error while trying to update: "+ex(e), logger.ERROR)
-            return False
-
-        return True
-
-class GitUpdateManager(UpdateManager):
-
-    def __init__(self):
-        self._cur_commit_hash = None
-        self._newest_commit_hash = None
-        self._num_commits_behind = 0
-
-        self.git_url = 'http://code.google.com/p/sickbeard/downloads/list'
-
-        self.branch = self._find_git_branch()
-
-    def _git_error(self):
-        error_message = 'Unable to find your git executable - either delete your .git folder and run from source OR <a href="http://code.google.com/p/sickbeard/wiki/AdvancedSettings" onclick="window.open(this.href); return false;">set git_path in your config.ini</a> to enable updates.'
-        sickbeard.NEWEST_VERSION_STRING = error_message
-        
-        return None
-
-    def _run_git(self, args):
-        
-        if sickbeard.GIT_PATH:
-            git_locations = ['"'+sickbeard.GIT_PATH+'"']
-        else:
-            git_locations = ['git']
-        
-        # osx people who start SB from launchd have a broken path, so try a hail-mary attempt for them
-        if platform.system().lower() == 'darwin':
-            git_locations.append('/usr/local/git/bin/git')
-
-        output = err = None
-
-        for cur_git in git_locations:
-
-            cmd = cur_git+' '+args
-        
-            try:
-                logger.log(u"Executing "+cmd+" with your shell in "+sickbeard.PROG_DIR, logger.DEBUG)
-                p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=sickbeard.PROG_DIR)
-                output, err = p.communicate()
-                logger.log(u"git output: "+output, logger.DEBUG)
-            except OSError:
-                logger.log(u"Command "+cmd+" didn't work, couldn't find git.")
-                continue
-            
-            if p.returncode != 0 or 'not found' in output or "not recognized as an internal or external command" in output:
-                logger.log(u"Unable to find git with command "+cmd, logger.DEBUG)
-                output = None
-            elif 'fatal:' in output or err:
-                logger.log(u"Git returned bad info, are you sure this is a git installation?", logger.ERROR)
-                output = None
-            elif output:
-                break
-
-        return (output, err)
-
-    
-    def _find_installed_version(self):
-        """
-        Attempts to find the currently installed version of Sick Beard.
-
-        Uses git show to get commit version.
-
-        Returns: True for success or False for failure
-        """
-
-        output, err = self._run_git('rev-parse HEAD') #@UnusedVariable
-
-        if not output:
-            return self._git_error()
-
-        logger.log(u"Git output: "+str(output), logger.DEBUG)
-        cur_commit_hash = output.strip()
-
-        if not re.match('^[a-z0-9]+$', cur_commit_hash):
-            logger.log(u"Output doesn't look like a hash, not using it", logger.ERROR)
-            return self._git_error()
-        
-        self._cur_commit_hash = cur_commit_hash
-            
-        return True
-
-    def _find_git_branch(self):
-
-        branch_info = self._run_git('symbolic-ref -q HEAD')
-
-        if not branch_info or not branch_info[0]:
-            return 'master'
-
-        branch = branch_info[0].strip().replace('refs/heads/', '', 1)
-
-        return branch or 'master'
-
-
-    def _check_github_for_update(self):
-        """
-        Uses pygithub to ask github if there is a newer version that the provided
-        commit hash. If there is a newer version it sets Sick Beard's version text.
-
-        commit_hash: hash that we're checking against
-        """
-
-        self._num_commits_behind = 0
-        self._newest_commit_hash = None
-
-        gh = github.GitHub()
-
-        # find newest commit
-        for curCommit in gh.commits('sarakha63', 'Sick-Beard', self.branch):
-            if not self._newest_commit_hash:
-                self._newest_commit_hash = curCommit['sha']
-                if not self._cur_commit_hash:
-                    break
-
-            if curCommit['sha'] == self._cur_commit_hash:
-                break
-
-            self._num_commits_behind += 1
-
-        logger.log(u"newest: "+str(self._newest_commit_hash)+" and current: "+str(self._cur_commit_hash)+" and num_commits: "+str(self._num_commits_behind), logger.DEBUG)
-
-    def set_newest_text(self):
-
-        # if we're up to date then don't set this
-        if self._num_commits_behind == 100:
-            message = "or else you're ahead of master"
-
-        elif self._num_commits_behind > 0:
-            message = "you're %d commit" % self._num_commits_behind
-            if self._num_commits_behind > 1: message += 's'
-            message += ' behind'
-
-        else:
-            return
-
-        if self._newest_commit_hash:
-            url = 'http://github.com/sarakha63/Sick-Beard/compare/'+self._cur_commit_hash+'...'+self._newest_commit_hash
-        else:
-            url = 'http://github.com/sarakha63/Sick-Beard/commits/'
-
-        new_str = 'There is a <a href="'+url+'" onclick="window.open(this.href); return false;">newer version available</a> ('+message+')'
-        new_str += "&mdash; <a href=\""+self.get_update_url()+"\">Update Now</a>"
-
-        sickbeard.NEWEST_VERSION_STRING = new_str
-
-    def need_update(self):
-        self._find_installed_version()
-        try:
-            self._check_github_for_update()
-        except Exception, e:
-            logger.log(u"Unable to contact github, can't check for update: "+repr(e), logger.ERROR)
-            return False
-
-        logger.log(u"After checking, cur_commit = "+str(self._cur_commit_hash)+", newest_commit = "+str(self._newest_commit_hash)+", num_commits_behind = "+str(self._num_commits_behind), logger.DEBUG)
-
-        if self._num_commits_behind > 0:
-            return True
-
-        return False
-
-    def update(self):
-        """
-        Calls git pull origin <branch> in order to update Sick Beard. Returns a bool depending
-        on the call's success.
-        """
-        self._run_git('config remote.origin.url git://github.com/sarakha63/Sick-Beard.git')
-        self._run_git('stash')
-        output, err = self._run_git('pull git://github.com/sarakha63/Sick-Beard.git '+self.branch) #@UnusedVariable
-        logger.log(u"Writing commit History into the file", logger.DEBUG)
-        if sickbeard.GIT_PATH:
-            git_locations = ['"'+sickbeard.GIT_PATH+'"']
-        else:
-            git_locations = ['git']
-        for cur_git in git_locations:
-            cmd = cur_git +' log --pretty="%ar %h - %s" --no-merges -200'
-        
-            try:
-                logger.log(u"Executing "+cmd+" with your shell in "+sickbeard.PROG_DIR, logger.DEBUG)
-                p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=sickbeard.PROG_DIR)
-                output1, err1 = p.communicate()
-                fp = open (os.path.join(sickbeard.DATA_DIR, "hist.log"), 'wb')
-                fp.write (output1[0][0])
-                fp.close ()                        
-                os.chmod(os.path.join(sickbeard.DATA_DIR, "hist.log"), 0777)
-            except OSError:
-                logger.log(u"Command "+cmd+" didn't work, couldn't find git.")
-        
-        
-        if not output:
-            return self._git_error()
-
-        pull_regex = '(\d+) .+,.+(\d+).+\(\+\),.+(\d+) .+\(\-\)'
-
-        (files, insertions, deletions) = (None, None, None)
-
-        for line in output.split('\n'):
-
-            if 'Already up-to-date.' in line:
-                logger.log(u"No update available, not updating")
-                logger.log(u"Output: "+str(output))
-                return False
-            elif line.endswith('Aborting.'):
-                logger.log(u"Unable to update from git: "+line, logger.ERROR)
-                logger.log(u"Output: "+str(output))
-                return False
-
-            match = re.search(pull_regex, line)
-            if match:
-                (files, insertions, deletions) = match.groups()
-                break
-
-        if None in (files, insertions, deletions):
-            logger.log(u"Didn't find indication of success in output, assuming git pull succeeded", logger.DEBUG)
-            logger.log(u"Output: "+str(output))
-            return True
-        
-        return True
-
-
-
-class SourceUpdateManager(GitUpdateManager):
-
-    def _find_installed_version(self):
-
-        version_file = os.path.join(sickbeard.PROG_DIR, 'version.txt')
-
-        if not os.path.isfile(version_file):
-            self._cur_commit_hash = None
-            return
-
-        fp = open(version_file, 'r')
-        self._cur_commit_hash = fp.read().strip(' \n\r')
-        fp.close()
-
-        if not self._cur_commit_hash:
-            self._cur_commit_hash = None
-
-    def need_update(self):
-
-        parent_result = GitUpdateManager.need_update(self)
-
-        if not self._cur_commit_hash:
-            return True
-        else:
-            return parent_result
-
-
-    def set_newest_text(self):
-        if not self._cur_commit_hash:
-            logger.log(u"Unknown current version, don't know if we should update or not", logger.DEBUG)
-
-            new_str = "Unknown version: If you've never used the Sick Beard upgrade system then I don't know what version you have."
-            new_str += "&mdash; <a href=\""+self.get_update_url()+"\">Update Now</a>"
-
-            sickbeard.NEWEST_VERSION_STRING = new_str
-
-        else:
-            GitUpdateManager.set_newest_text(self)
-
-    def update(self):
-        """
-        Downloads the latest source tarball from github and installs it over the existing version.
-        """
-
-        tar_download_url = 'https://github.com/sarakha63/Sick-Beard/tarball/'+version.SICKBEARD_VERSION
-        sb_update_dir = os.path.join(sickbeard.PROG_DIR, 'sb-update')
-        version_path = os.path.join(sickbeard.PROG_DIR, 'version.txt')
-
-        # retrieve file
-        try:
-            logger.log(u"Downloading update from "+tar_download_url)
-            data = urllib2.urlopen(tar_download_url)
-        except (IOError, URLError):
-            logger.log(u"Unable to retrieve new version from "+tar_download_url+", can't update", logger.ERROR)
-            return False
-
-        download_name = data.geturl().split('/')[-1].split('?')[0]
-
-        tar_download_path = os.path.join(sickbeard.PROG_DIR, download_name)
-
-        # save to disk
-        f = open(tar_download_path, 'wb')
-        f.write(data.read())
-        f.close()
-
-        # extract to temp folder
-        logger.log(u"Extracting file "+tar_download_path)
-        tar = tarfile.open(tar_download_path)
-        tar.extractall(sb_update_dir)
-        tar.close()
-
-        # delete .tar.gz
-        logger.log(u"Deleting file "+tar_download_path)
-        os.remove(tar_download_path)
-
-        # find update dir name
-        update_dir_contents = [x for x in os.listdir(sb_update_dir) if os.path.isdir(os.path.join(sb_update_dir, x))]
-        if len(update_dir_contents) != 1:
-            logger.log(u"Invalid update data, update failed: "+str(update_dir_contents), logger.ERROR)
-            return False
-        content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
-
-        # walk temp folder and move files to main folder
-        for dirname, dirnames, filenames in os.walk(content_dir): #@UnusedVariable
-            dirname = dirname[len(content_dir)+1:]
-            for curfile in filenames:
-                old_path = os.path.join(content_dir, dirname, curfile)
-                new_path = os.path.join(sickbeard.PROG_DIR, dirname, curfile)
-
-                if os.path.isfile(new_path):
-                    os.remove(new_path)
-                os.renames(old_path, new_path)
-
-        # update version.txt with commit hash
-        try:
-            ver_file = open(version_path, 'w')
-            ver_file.write(self._newest_commit_hash)
-            ver_file.close()
-        except IOError, e:
-            logger.log(u"Unable to write version file, update not complete: "+ex(e), logger.ERROR)
-            return False
-
-        return True
-
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import sickbeard
+from sickbeard import version, ui
+from sickbeard import logger
+from sickbeard import scene_exceptions
+from sickbeard.exceptions import ex
+
+import os, platform, shutil
+import subprocess, re
+import urllib, urllib2
+import zipfile, tarfile
+
+from urllib2 import URLError
+import gh_api as github
+
+class CheckVersion():
+    """
+    Version check class meant to run as a thread object with the SB scheduler.
+    """
+
+    def __init__(self):
+        self.install_type = self.find_install_type()
+
+        if self.install_type == 'win':
+            self.updater = WindowsUpdateManager()
+        elif self.install_type == 'git':
+            self.updater = GitUpdateManager()
+        elif self.install_type == 'source':
+            self.updater = SourceUpdateManager()
+        else:
+            self.updater = None
+
+    def run(self):
+        self.check_for_new_version()
+        
+        # refresh scene exceptions too
+        scene_exceptions.retrieve_exceptions()
+
+    def find_install_type(self):
+        """
+        Determines how this copy of SB was installed.
+        
+        returns: type of installation. Possible values are:
+            'win': any compiled windows build
+            'git': running from source using git
+            'source': running from source without git
+        """
+
+        # check if we're a windows build
+        if version.SICKBEARD_VERSION.startswith('build '):
+            install_type = 'win'
+        elif os.path.isdir(os.path.join(sickbeard.PROG_DIR, '.git')):
+            install_type = 'git'
+        else:
+            install_type = 'source'
+
+        return install_type
+
+    def check_for_new_version(self, force=False):
+        """
+        Checks the internet for a newer version.
+        
+        returns: bool, True for new version or False for no new version.
+        
+        force: if true the VERSION_NOTIFY setting will be ignored and a check will be forced
+        """
+
+        if not sickbeard.VERSION_NOTIFY and not force:
+            logger.log(u"Version checking is disabled, not checking for the newest version")
+            return False
+
+        logger.log(u"Checking if "+self.install_type+" needs an update")
+        if not self.updater.need_update():
+            logger.log(u"No update needed")
+            if force:
+                ui.notifications.message('No update needed')
+            return False
+
+        self.updater.set_newest_text()
+        return True
+
+    def update(self):
+        if self.updater.need_update():
+            return self.updater.update()
+
+class UpdateManager():
+    def get_update_url(self):
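+        # e.g. with WEB_ROOT "" and PID 1234 this returns "/home/update/?pid=1234"
+        # (illustrative values)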
+        return sickbeard.WEB_ROOT+"/home/update/?pid="+str(sickbeard.PID)
+
+class WindowsUpdateManager(UpdateManager):
+
+    def __init__(self):
+        self._cur_version = None
+        self._cur_commit_hash = None
+        self._newest_version = None
+
+        self.gc_url = 'http://code.google.com/p/sickbeard/downloads/list'
+        self.version_url = 'https://raw.github.com/sarakha63/Sick-Beard/windows_binaries/updates.txt'
+
+    def _find_installed_version(self):
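+        # for Windows builds SICKBEARD_VERSION looks like "build NNN" (see
+        # find_install_type), so slicing off the leading "build " leaves the number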
+        return int(sickbeard.version.SICKBEARD_VERSION[6:])
+
+    def _find_newest_version(self, whole_link=False):
+        """
+        Checks the remote updates list for the newest Windows binary build. Returns either the
+        build number or the entire build URL depending on whole_link's value.
+
+        whole_link: If True, returns the entire URL to the release. If False, it returns
+                    only the build number. default: False
+        """
+
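+        # the updates file is assumed to contain download links whose filenames look
+        # like "SickBeard-win32-alpha-build496.zip" (build number here is only an
+        # example); the regex below extracts the build number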
+        regex = ".*SickBeard\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"
+
+        svnFile = urllib.urlopen(self.version_url)
+
+        for curLine in svnFile.readlines():
+            logger.log(u"checking line "+curLine, logger.DEBUG)
+            match = re.match(regex, curLine)
+            if match:
+                logger.log(u"found a match", logger.DEBUG)
+                if whole_link:
+                    return curLine.strip()
+                else:
+                    return int(match.group(1))
+
+        return None
+
+    def need_update(self):
+        self._cur_version = self._find_installed_version()
+        self._newest_version = self._find_newest_version()
+
+        logger.log(u"newest version: "+repr(self._newest_version), logger.DEBUG)
+
+        if self._newest_version and self._newest_version > self._cur_version:
+            return True
+
+    def set_newest_text(self):
+        new_str = 'There is a <a href="'+self.gc_url+'" onclick="window.open(this.href); return false;">newer version available</a> (build '+str(self._newest_version)+')'
+        new_str += "&mdash; <a href=\""+self.get_update_url()+"\">Update Now</a>"
+        sickbeard.NEWEST_VERSION_STRING = new_str
+
+    def update(self):
+
+        new_link = self._find_newest_version(True)
+
+        logger.log(u"new_link: " + repr(new_link), logger.DEBUG)
+
+        if not new_link:
+            logger.log(u"Unable to find a new version link on google code, not updating")
+            return False
+
+        # download the zip
+        try:
+            logger.log(u"Downloading update file from "+str(new_link))
+            (filename, headers) = urllib.urlretrieve(new_link) #@UnusedVariable
+
+            # prepare the update dir
+            sb_update_dir = os.path.join(sickbeard.PROG_DIR, 'sb-update')
+            logger.log(u"Clearing out update folder "+sb_update_dir+" before unzipping")
+            if os.path.isdir(sb_update_dir):
+                shutil.rmtree(sb_update_dir)
+
+            # unzip it to sb-update
+            logger.log(u"Unzipping from "+str(filename)+" to "+sb_update_dir)
+            update_zip = zipfile.ZipFile(filename, 'r')
+            update_zip.extractall(sb_update_dir)
+            update_zip.close()
+            
+            # find update dir name
+            update_dir_contents = os.listdir(sb_update_dir)
+            if len(update_dir_contents) != 1:
+                logger.log("Invalid update data, update failed. Maybe try deleting your sb-update folder?", logger.ERROR)
+                return False
+
+            content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
+            old_update_path = os.path.join(content_dir, 'updater.exe')
+            new_update_path = os.path.join(sickbeard.PROG_DIR, 'updater.exe')
+            logger.log(u"Copying new update.exe file from "+old_update_path+" to "+new_update_path)
+            shutil.move(old_update_path, new_update_path)
+
+            # delete the zip
+            logger.log(u"Deleting zip file from "+str(filename))
+            os.remove(filename)
+
+        except Exception, e:
+            logger.log(u"Error while trying to update: "+ex(e), logger.ERROR)
+            return False
+
+        return True
+
+class GitUpdateManager(UpdateManager):
+
+    def __init__(self):
+        self._cur_commit_hash = None
+        self._newest_commit_hash = None
+        self._num_commits_behind = 0
+
+        self.git_url = 'http://code.google.com/p/sickbeard/downloads/list'
+
+        self.branch = self._find_git_branch()
+
+    def _git_error(self):
+        error_message = 'Unable to find your git executable - either delete your .git folder and run from source OR <a href="http://code.google.com/p/sickbeard/wiki/AdvancedSettings" onclick="window.open(this.href); return false;">set git_path in your config.ini</a> to enable updates.'
+        sickbeard.NEWEST_VERSION_STRING = error_message
+        
+        return None
+
+    def _run_git(self, args):
+        
+        if sickbeard.GIT_PATH:
+            git_locations = ['"'+sickbeard.GIT_PATH+'"']
+        else:
+            git_locations = ['git']
+        
+        # osx people who start SB from launchd have a broken path, so try a hail-mary attempt for them
+        if platform.system().lower() == 'darwin':
+            git_locations.append('/usr/local/git/bin/git')
+
+        output = err = None
+
+        for cur_git in git_locations:
+
+            cmd = cur_git+' '+args
+        
+            try:
+                logger.log(u"Executing "+cmd+" with your shell in "+sickbeard.PROG_DIR, logger.DEBUG)
+                p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=sickbeard.PROG_DIR)
+                output, err = p.communicate()
+                logger.log(u"git output: "+output, logger.DEBUG)
+            except OSError:
+                logger.log(u"Command "+cmd+" didn't work, couldn't find git.")
+                continue
+            
+            if p.returncode != 0 or 'not found' in output or "not recognized as an internal or external command" in output:
+                logger.log(u"Unable to find git with command "+cmd, logger.DEBUG)
+                output = None
+            elif 'fatal:' in output or err:
+                logger.log(u"Git returned bad info, are you sure this is a git installation?", logger.ERROR)
+                output = None
+            elif output:
+                break
+
+        return (output, err)
+
+    
+    def _find_installed_version(self):
+        """
+        Attempts to find the currently installed version of Sick Beard.
+
+        Uses 'git rev-parse HEAD' to get the installed commit hash.
+
+        Returns: True on success; on failure the error text is set and None is returned
+        """
+
+        output, err = self._run_git('rev-parse HEAD') #@UnusedVariable
+
+        if not output:
+            return self._git_error()
+
+        logger.log(u"Git output: "+str(output), logger.DEBUG)
+        cur_commit_hash = output.strip()
+
+        if not re.match('^[a-z0-9]+$', cur_commit_hash):
+            logger.log(u"Output doesn't look like a hash, not using it", logger.ERROR)
+            return self._git_error()
+        
+        self._cur_commit_hash = cur_commit_hash
+            
+        return True
+
+    def _find_git_branch(self):
+
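+        # 'git symbolic-ref -q HEAD' prints the full ref of the current branch,
+        # e.g. "refs/heads/master", which gets reduced to just the branch name below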
+        branch_info = self._run_git('symbolic-ref -q HEAD')
+
+        if not branch_info or not branch_info[0]:
+            return 'master'
+
+        branch = branch_info[0].strip().replace('refs/heads/', '', 1)
+
+        return branch or 'master'
+
+
+    def _check_github_for_update(self):
+        """
+        Uses pygithub to ask github if there is a newer version that the provided
+        commit hash. If there is a newer version it sets Sick Beard's version text.
+
+        commit_hash: hash that we're checking against
+        """
+
+        self._num_commits_behind = 0
+        self._newest_commit_hash = None
+
+        gh = github.GitHub()
+
+        # find newest commit
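+        # the commit list is assumed newest-first, so the first sha seen is taken as
+        # the newest; keep counting commits until we hit the one we're running on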
+        for curCommit in gh.commits('sarakha63', 'Sick-Beard', self.branch):
+            if not self._newest_commit_hash:
+                self._newest_commit_hash = curCommit['sha']
+                if not self._cur_commit_hash:
+                    break
+
+            if curCommit['sha'] == self._cur_commit_hash:
+                break
+
+            self._num_commits_behind += 1
+
+        logger.log(u"newest: "+str(self._newest_commit_hash)+" and current: "+str(self._cur_commit_hash)+" and num_commits: "+str(self._num_commits_behind), logger.DEBUG)
+
+    def set_newest_text(self):
+
+        # a value of 100 is treated as "our commit wasn't found in the commits github
+        # returned", which can mean we're actually ahead of master; if we're up to
+        # date (zero commits behind) nothing is set at all
+        if self._num_commits_behind == 100:
+            message = "or else you're ahead of master"
+
+        elif self._num_commits_behind > 0:
+            message = "you're %d commit" % self._num_commits_behind
+            if self._num_commits_behind > 1: message += 's'
+            message += ' behind'
+
+        else:
+            return
+
+        if self._newest_commit_hash:
+            url = 'http://github.com/sarakha63/Sick-Beard/compare/'+self._cur_commit_hash+'...'+self._newest_commit_hash
+        else:
+            url = 'http://github.com/sarakha63/Sick-Beard/commits/'
+
+        new_str = 'There is a <a href="'+url+'" onclick="window.open(this.href); return false;">newer version available</a> ('+message+')'
+        new_str += "&mdash; <a href=\""+self.get_update_url()+"\">Update Now</a>"
+
+        sickbeard.NEWEST_VERSION_STRING = new_str
+
+    def need_update(self):
+        self._find_installed_version()
+        try:
+            self._check_github_for_update()
+        except Exception, e:
+            logger.log(u"Unable to contact github, can't check for update: "+repr(e), logger.ERROR)
+            return False
+
+        logger.log(u"After checking, cur_commit = "+str(self._cur_commit_hash)+", newest_commit = "+str(self._newest_commit_hash)+", num_commits_behind = "+str(self._num_commits_behind), logger.DEBUG)
+
+        if self._num_commits_behind > 0:
+            return True
+
+        return False
+
+    def update(self):
+        """
+        Calls git pull against the sarakha63/Sick-Beard repository on the current
+        branch in order to update Sick Beard. Returns a bool depending on the
+        call's success.
+        """
+        self._run_git('config remote.origin.url git://github.com/sarakha63/Sick-Beard.git')
+        self._run_git('stash')
+        output, err = self._run_git('pull git://github.com/sarakha63/Sick-Beard.git '+self.branch) #@UnusedVariable
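+
+        # dump the last 200 commit summaries to DATA_DIR/hist.log; with the pretty
+        # format used below each line looks roughly like
+        # "3 days ago abc1234 - commit message" (example line is illustrative)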
+        logger.log(u"Writing commit History into the file", logger.DEBUG)
+        output1,err1=self._run_git(' log --pretty="%ar %h - %s" --no-merges -200')
+        fp = open (os.path.join(sickbeard.DATA_DIR, "hist.log"), 'wb')
+        fp.write (output1[0][0])
+        fp.close ()                        
+        os.chmod(os.path.join(sickbeard.DATA_DIR, "hist.log"), 0777)        
+        
+        if not output:
+            return self._git_error()
+
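+        # matches the summary line of a successful pull, e.g.
+        # " 3 files changed, 10 insertions(+), 2 deletions(-)" (numbers are examples)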
+        pull_regex = '(\d+) .+,.+(\d+).+\(\+\),.+(\d+) .+\(\-\)'
+
+        (files, insertions, deletions) = (None, None, None)
+
+        for line in output.split('\n'):
+
+            if 'Already up-to-date.' in line:
+                logger.log(u"No update available, not updating")
+                logger.log(u"Output: "+str(output))
+                return False
+            elif line.endswith('Aborting.'):
+                logger.log(u"Unable to update from git: "+line, logger.ERROR)
+                logger.log(u"Output: "+str(output))
+                return False
+
+            match = re.search(pull_regex, line)
+            if match:
+                (files, insertions, deletions) = match.groups()
+                break
+
+        if None in (files, insertions, deletions):
+            logger.log(u"Didn't find indication of success in output, assuming git pull succeeded", logger.DEBUG)
+            logger.log(u"Output: "+str(output))
+
+        return True
+
+
+
+class SourceUpdateManager(GitUpdateManager):
+
+    def _find_installed_version(self):
+
+        version_file = os.path.join(sickbeard.PROG_DIR, 'version.txt')
+
+        if not os.path.isfile(version_file):
+            self._cur_commit_hash = None
+            return
+
+        fp = open(version_file, 'r')
+        self._cur_commit_hash = fp.read().strip(' \n\r')
+        fp.close()
+
+        if not self._cur_commit_hash:
+            self._cur_commit_hash = None
+
+    def need_update(self):
+
+        parent_result = GitUpdateManager.need_update(self)
+
+        if not self._cur_commit_hash:
+            return True
+        else:
+            return parent_result
+
+
+    def set_newest_text(self):
+        if not self._cur_commit_hash:
+            logger.log(u"Unknown current version, don't know if we should update or not", logger.DEBUG)
+
+            new_str = "Unknown version: If you've never used the Sick Beard upgrade system then I don't know what version you have."
+            new_str += "&mdash; <a href=\""+self.get_update_url()+"\">Update Now</a>"
+
+            sickbeard.NEWEST_VERSION_STRING = new_str
+
+        else:
+            GitUpdateManager.set_newest_text(self)
+
+    def update(self):
+        """
+        Downloads the latest source tarball from github and installs it over the existing version.
+        """
+
+        tar_download_url = 'https://github.com/sarakha63/Sick-Beard/tarball/'+version.SICKBEARD_VERSION
+        sb_update_dir = os.path.join(sickbeard.PROG_DIR, 'sb-update')
+        version_path = os.path.join(sickbeard.PROG_DIR, 'version.txt')
+
+        # retrieve file
+        try:
+            logger.log(u"Downloading update from "+tar_download_url)
+            data = urllib2.urlopen(tar_download_url)
+        except (IOError, URLError):
+            logger.log(u"Unable to retrieve new version from "+tar_download_url+", can't update", logger.ERROR)
+            return False
+
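+        # the tarball URL redirects to the actual archive; use the last path component
+        # of the final URL as the filename, minus any query string (github tarball
+        # names are assumed to look like "sarakha63-Sick-Beard-<sha>.tar.gz")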
+        download_name = data.geturl().split('/')[-1].split('?')[0]
+
+        tar_download_path = os.path.join(sickbeard.PROG_DIR, download_name)
+
+        # save to disk
+        f = open(tar_download_path, 'wb')
+        f.write(data.read())
+        f.close()
+
+        # extract to temp folder
+        logger.log(u"Extracting file "+tar_download_path)
+        tar = tarfile.open(tar_download_path)
+        tar.extractall(sb_update_dir)
+        tar.close()
+
+        # delete .tar.gz
+        logger.log(u"Deleting file "+tar_download_path)
+        os.remove(tar_download_path)
+
+        # find update dir name
+        update_dir_contents = [x for x in os.listdir(sb_update_dir) if os.path.isdir(os.path.join(sb_update_dir, x))]
+        if len(update_dir_contents) != 1:
+            logger.log(u"Invalid update data, update failed: "+str(update_dir_contents), logger.ERROR)
+            return False
+        content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
+
+        # walk temp folder and move files to main folder
+        for dirname, dirnames, filenames in os.walk(content_dir): #@UnusedVariable
+            dirname = dirname[len(content_dir)+1:]
+            for curfile in filenames:
+                old_path = os.path.join(content_dir, dirname, curfile)
+                new_path = os.path.join(sickbeard.PROG_DIR, dirname, curfile)
+
+                if os.path.isfile(new_path):
+                    os.remove(new_path)
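+                # os.renames creates any missing directories for the destination and
+                # prunes now-empty directories left behind at the source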
+                os.renames(old_path, new_path)
+
+        # update version.txt with commit hash
+        try:
+            ver_file = open(version_path, 'w')
+            ver_file.write(self._newest_commit_hash)
+            ver_file.close()
+        except IOError, e:
+            logger.log(u"Unable to write version file, update not complete: "+ex(e), logger.ERROR)
+            return False
+
+        return True
+
-- 
GitLab